BaseTools: Clean up source files

1. Do not use tab characters
2. No trailing white space at the end of any line
3. All files must end with CRLF

Contributed-under: TianoCore Contribution Agreement 1.1
Signed-off-by: Liming Gao <liming.gao@intel.com>
Cc: Yonghong Zhu <yonghong.zhu@intel.com>
Reviewed-by: Yonghong Zhu <yonghong.zhu@intel.com>
This commit is contained in:
Liming Gao
2018-07-05 17:40:04 +08:00
parent 39456d00f3
commit f7496d7173
289 changed files with 10647 additions and 10647 deletions

View File

@@ -241,7 +241,7 @@ class WorkspaceAutoGen(AutoGen):
super(WorkspaceAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
self._Init = True
## Initialize WorkspaceAutoGen
#
# @param WorkspaceDir Root directory of workspace
@@ -310,7 +310,7 @@ class WorkspaceAutoGen(AutoGen):
ExtraData="Build target [%s] is not supported by the platform. [Valid target: %s]"
% (self.BuildTarget, " ".join(self.Platform.BuildTargets)))
# parse FDF file to get PCDs in it, if any
if not self.FdfFile:
self.FdfFile = self.Platform.FlashDefinition
@@ -763,7 +763,7 @@ class WorkspaceAutoGen(AutoGen):
## _CheckDuplicateInFV() method
#
# Check whether there is duplicate modules/files exist in FV section.
# Check whether there is duplicate modules/files exist in FV section.
# The check base on the file GUID;
#
def _CheckDuplicateInFV(self, Fdf):
@@ -794,7 +794,7 @@ class WorkspaceAutoGen(AutoGen):
Module.Guid.upper()),
ExtraData=self.FdfFile)
#
# Some INF files not have entity in DSC file.
# Some INF files not have entity in DSC file.
#
if not InfFoundFlag:
if FfsFile.InfFileName.find('$') == -1:
@@ -804,7 +804,7 @@ class WorkspaceAutoGen(AutoGen):
PathClassObj = PathClass(FfsFile.InfFileName, self.WorkspaceDir)
#
# Here we just need to get FILE_GUID from INF file, use 'COMMON' as ARCH attribute. and use
# Here we just need to get FILE_GUID from INF file, use 'COMMON' as ARCH attribute. and use
# BuildObject from one of AutoGenObjectList is enough.
#
InfObj = self.AutoGenObjectList[0].BuildDatabase.WorkspaceDb.BuildObject[PathClassObj, TAB_ARCH_COMMON, self.BuildTarget, self.ToolChain]
@@ -823,7 +823,7 @@ class WorkspaceAutoGen(AutoGen):
if FfsFile.NameGuid is not None:
#
# If the NameGuid reference a PCD name.
# If the NameGuid reference a PCD name.
# The style must match: PCD(xxxx.yyy)
#
if gPCDAsGuidPattern.match(FfsFile.NameGuid):
@@ -900,7 +900,7 @@ class WorkspaceAutoGen(AutoGen):
for Pcd in Pa.Platform.Pcds:
PcdType = Pa.Platform.Pcds[Pcd].Type
# If no PCD type, this PCD comes from FDF
# If no PCD type, this PCD comes from FDF
if not PcdType:
continue
@@ -992,14 +992,14 @@ class WorkspaceAutoGen(AutoGen):
## Check the PCDs token value conflict in each DEC file.
#
# Will cause build break and raise error message while two PCDs conflict.
#
#
# @return None
#
def _CheckAllPcdsTokenValueConflict(self):
for Pa in self.AutoGenObjectList:
for Package in Pa.PackageList:
PcdList = Package.Pcds.values()
PcdList.sort(lambda x, y: cmp(int(x.TokenValue, 0), int(y.TokenValue, 0)))
PcdList.sort(lambda x, y: cmp(int(x.TokenValue, 0), int(y.TokenValue, 0)))
Count = 0
while (Count < len(PcdList) - 1) :
Item = PcdList[Count]
@@ -1124,20 +1124,20 @@ class PlatformAutoGen(AutoGen):
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)
self._Init = True
#
# Used to store all PCDs for both PEI and DXE phase, in order to generate
# Used to store all PCDs for both PEI and DXE phase, in order to generate
# correct PCD database
#
#
_DynaPcdList_ = []
_NonDynaPcdList_ = []
_PlatformPcds = {}
#
# The priority list while override build option
# The priority list while override build option
#
PrioList = {"0x11111" : 16, # TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
"0x01111" : 15, # ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
"0x10111" : 14, # TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
"0x00111" : 13, # ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
"0x00111" : 13, # ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
"0x11011" : 12, # TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
"0x01011" : 11, # ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
"0x10011" : 10, # TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
@@ -1289,9 +1289,9 @@ class PlatformAutoGen(AutoGen):
#
def CollectFixedAtBuildPcds(self):
for LibAuto in self.LibraryAutoGenList:
FixedAtBuildPcds = {}
ShareFixedAtBuildPcdsSameValue = {}
for Module in LibAuto._ReferenceModules:
FixedAtBuildPcds = {}
ShareFixedAtBuildPcdsSameValue = {}
for Module in LibAuto._ReferenceModules:
for Pcd in Module.FixedAtBuildPcds + LibAuto.FixedAtBuildPcds:
key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
if key not in FixedAtBuildPcds:
@@ -1299,7 +1299,7 @@ class PlatformAutoGen(AutoGen):
FixedAtBuildPcds[key] = Pcd.DefaultValue
else:
if FixedAtBuildPcds[key] != Pcd.DefaultValue:
ShareFixedAtBuildPcdsSameValue[key] = False
ShareFixedAtBuildPcdsSameValue[key] = False
for Pcd in LibAuto.FixedAtBuildPcds:
key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in self.NonDynamicPcdDict:
@@ -1308,7 +1308,7 @@ class PlatformAutoGen(AutoGen):
DscPcd = self.NonDynamicPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)]
if DscPcd.Type != TAB_PCDS_FIXED_AT_BUILD:
continue
if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:
if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:
LibAuto.ConstPcd[key] = FixedAtBuildPcds[key]
def CollectVariables(self, DynamicPcdSet):
@@ -1405,7 +1405,7 @@ class PlatformAutoGen(AutoGen):
for F in self.Platform.Modules.keys():
M = ModuleAutoGen(self.Workspace, F, self.BuildTarget, self.ToolChain, self.Arch, self.MetaFile)
#GuidValue.update(M.Guids)
self.Platform.Modules[F].M = M
for PcdFromModule in M.ModulePcdList + M.LibraryPcdList:
@@ -1417,27 +1417,27 @@ class PlatformAutoGen(AutoGen):
if M.IsBinaryModule == True:
PcdFromModule.IsFromBinaryInf = True
# Check the PCD from DSC or not
# Check the PCD from DSC or not
PcdFromModule.IsFromDsc = (PcdFromModule.TokenCName, PcdFromModule.TokenSpaceGuidCName) in self.Platform.Pcds
if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET or PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
if F.Path not in FdfModuleList:
# If one of the Source built modules listed in the DSC is not listed
# in FDF modules, and the INF lists a PCD can only use the PcdsDynamic
# access method (it is only listed in the DEC file that declares the
# If one of the Source built modules listed in the DSC is not listed
# in FDF modules, and the INF lists a PCD can only use the PcdsDynamic
# access method (it is only listed in the DEC file that declares the
# PCD as PcdsDynamic), then build tool will report warning message
# notify the PI that they are attempting to build a module that must
# be included in a flash image in order to be functional. These Dynamic
# PCD will not be added into the Database unless it is used by other
# notify the PI that they are attempting to build a module that must
# be included in a flash image in order to be functional. These Dynamic
# PCD will not be added into the Database unless it is used by other
# modules that are included in the FDF file.
if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET and \
PcdFromModule.IsFromBinaryInf == False:
# Print warning message to let the developer make a determine.
continue
# If one of the Source built modules listed in the DSC is not listed in
# FDF modules, and the INF lists a PCD can only use the PcdsDynamicEx
# access method (it is only listed in the DEC file that declares the
# PCD as PcdsDynamicEx), then DO NOT break the build; DO NOT add the
# If one of the Source built modules listed in the DSC is not listed in
# FDF modules, and the INF lists a PCD can only use the PcdsDynamicEx
# access method (it is only listed in the DEC file that declares the
# PCD as PcdsDynamicEx), then DO NOT break the build; DO NOT add the
# PCD to the Platform's PCD Database.
if PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
continue
@@ -1465,14 +1465,14 @@ class PlatformAutoGen(AutoGen):
PcdFromModule.Pending = False
self._NonDynaPcdList_.append (PcdFromModule)
DscModuleSet = {os.path.normpath(ModuleInf.Path) for ModuleInf in self.Platform.Modules}
# add the PCD from modules that listed in FDF but not in DSC to Database
# add the PCD from modules that listed in FDF but not in DSC to Database
for InfName in FdfModuleList:
if InfName not in DscModuleSet:
InfClass = PathClass(InfName)
M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
# If a module INF in FDF but not in current arch's DSC module list, it must be module (either binary or source)
# for different Arch. PCDs in source module for different Arch is already added before, so skip the source module here.
# For binary module, if in current arch, we need to list the PCDs into database.
# If a module INF in FDF but not in current arch's DSC module list, it must be module (either binary or source)
# for different Arch. PCDs in source module for different Arch is already added before, so skip the source module here.
# For binary module, if in current arch, we need to list the PCDs into database.
if not M.IsSupportedArch:
continue
# Override the module PCD setting by platform setting
@@ -1497,20 +1497,20 @@ class PlatformAutoGen(AutoGen):
self._NonDynaPcdList_.append(PcdFromModule)
if PcdFromModule in self._DynaPcdList_ and PcdFromModule.Phase == 'PEI' and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
# Overwrite the phase of any the same PCD existing, if Phase is PEI.
# It is to solve the case that a dynamic PCD used by a PEM module/PEI
# It is to solve the case that a dynamic PCD used by a PEM module/PEI
# module & DXE module at a same time.
# Overwrite the type of the PCDs in source INF by the type of AsBuild
# INF file as DynamicEx.
# INF file as DynamicEx.
Index = self._DynaPcdList_.index(PcdFromModule)
self._DynaPcdList_[Index].Phase = PcdFromModule.Phase
self._DynaPcdList_[Index].Type = PcdFromModule.Type
for PcdFromModule in self._NonDynaPcdList_:
# If a PCD is not listed in the DSC file, but binary INF files used by
# this platform all (that use this PCD) list the PCD in a [PatchPcds]
# section, AND all source INF files used by this platform the build
# that use the PCD list the PCD in either a [Pcds] or [PatchPcds]
# If a PCD is not listed in the DSC file, but binary INF files used by
# this platform all (that use this PCD) list the PCD in a [PatchPcds]
# section, AND all source INF files used by this platform the build
# that use the PCD list the PCD in either a [Pcds] or [PatchPcds]
# section, then the tools must NOT add the PCD to the Platform's PCD
# Database; the build must assign the access method for this PCD as
# Database; the build must assign the access method for this PCD as
# PcdsPatchableInModule.
if PcdFromModule not in self._DynaPcdList_:
continue
@@ -1533,7 +1533,7 @@ class PlatformAutoGen(AutoGen):
self._DynamicPcdList = self._DynaPcdList_
#
# Sort dynamic PCD list to:
# 1) If PCD's datum type is VOID* and value is unicode string which starts with L, the PCD item should
# 1) If PCD's datum type is VOID* and value is unicode string which starts with L, the PCD item should
# try to be put header of dynamicd List
# 2) If PCD is HII type, the PCD item should be put after unicode type PCD
#
@@ -1554,7 +1554,7 @@ class PlatformAutoGen(AutoGen):
if self._PlatformPcds[item].DatumType and self._PlatformPcds[item].DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
self._PlatformPcds[item].DatumType = TAB_VOID
if (self.Workspace.ArchList[-1] == self.Arch):
if (self.Workspace.ArchList[-1] == self.Arch):
for Pcd in self._DynamicPcdList:
# just pick the a value to determine whether is unicode string type
Sku = Pcd.SkuInfoList.values()[0]
@@ -1637,7 +1637,7 @@ class PlatformAutoGen(AutoGen):
#
# Fix the PCDs define in VPD PCD section that never referenced by module.
# An example is PCD for signature usage.
#
#
for DscPcd in PlatformPcds:
DscPcdEntry = self._PlatformPcds[DscPcd]
if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
@@ -1659,8 +1659,8 @@ class PlatformAutoGen(AutoGen):
defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))
SkuObjList[0], SkuObjList[defaultindex] = SkuObjList[defaultindex], SkuObjList[0]
for (SkuName, Sku) in SkuObjList:
Sku.VpdOffset = Sku.VpdOffset.strip()
Sku.VpdOffset = Sku.VpdOffset.strip()
# Need to iterate DEC pcd information to get the value & datumtype
for eachDec in self.PackageList:
for DecPcd in eachDec.Pcds:
@@ -1671,8 +1671,8 @@ class PlatformAutoGen(AutoGen):
EdkLogger.warn("build", "Unreferenced vpd pcd used!",
File=self.MetaFile, \
ExtraData = "PCD: %s.%s used in the DSC file %s is unreferenced." \
%(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))
%(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))
DscPcdEntry.DatumType = DecPcdEntry.DatumType
DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
DscPcdEntry.TokenValue = DecPcdEntry.TokenValue
@@ -1680,7 +1680,7 @@ class PlatformAutoGen(AutoGen):
# Only fix the value while no value provided in DSC file.
if not Sku.DefaultValue:
DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]].DefaultValue = DecPcdEntry.DefaultValue
if DscPcdEntry not in self._DynamicPcdList:
self._DynamicPcdList.append(DscPcdEntry)
Sku.VpdOffset = Sku.VpdOffset.strip()
@@ -1711,7 +1711,7 @@ class PlatformAutoGen(AutoGen):
VpdFile.Add(DscPcdEntry, SkuName, Sku.VpdOffset)
SkuValueMap[PcdValue].append(Sku)
if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
NeedProcessVpdMapFile = True
NeedProcessVpdMapFile = True
if DscPcdEntry.DatumType == TAB_VOID and PcdValue.startswith("L"):
UnicodePcdArray.add(DscPcdEntry)
elif len(Sku.VariableName) > 0:
@@ -1723,7 +1723,7 @@ class PlatformAutoGen(AutoGen):
VpdSkuMap[DscPcd] = SkuValueMap
if (self.Platform.FlashDefinition is None or self.Platform.FlashDefinition == '') and \
VpdFile.GetCount() != 0:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
"Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
if VpdFile.GetCount() != 0:
@@ -2071,9 +2071,9 @@ class PlatformAutoGen(AutoGen):
self._PcdTokenNumber = OrderedDict()
TokenNumber = 1
#
# Make the Dynamic and DynamicEx PCD use within different TokenNumber area.
# Make the Dynamic and DynamicEx PCD use within different TokenNumber area.
# Such as:
#
#
# Dynamic PCD:
# TokenNumber 0 ~ 10
# DynamicEx PCD:
@@ -2388,7 +2388,7 @@ class PlatformAutoGen(AutoGen):
# @param Options Options to be expanded
#
# @retval options Options expanded
#
#
def _ExpandBuildOption(self, Options, ModuleStyle=None):
BuildOptions = {}
FamilyMatch = False
@@ -2414,9 +2414,9 @@ class PlatformAutoGen(AutoGen):
if OverrideList.get(Key[1]) is not None:
OverrideList.pop(Key[1])
OverrideList[Key[1]] = Options[Key]
#
# Use the highest priority value.
# Use the highest priority value.
#
if (len(OverrideList) >= 2):
KeyList = OverrideList.keys()
@@ -2427,7 +2427,7 @@ class PlatformAutoGen(AutoGen):
NextKey = KeyList[Index1 + Index + 1]
#
# Compare two Key, if one is included by another, choose the higher priority one
#
#
Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")
if (Target1 == Target2 or Target1 == "*" or Target2 == "*") and\
(ToolChain1 == ToolChain2 or ToolChain1 == "*" or ToolChain2 == "*") and\
@@ -2441,7 +2441,7 @@ class PlatformAutoGen(AutoGen):
else:
if Options.get((self.BuildRuleFamily, NowKey)) is not None:
Options.pop((self.BuildRuleFamily, NowKey))
for Key in Options:
if ModuleStyle is not None and len (Key) > 2:
# Check Module style is EDK or EDKII.
@@ -2639,7 +2639,7 @@ class ModuleAutoGen(AutoGen):
% (MetaFile, Arch))
return None
return obj
## Initialize ModuleAutoGen
#
# @param Workspace EdkIIWorkspaceBuild object
@@ -2737,13 +2737,13 @@ class ModuleAutoGen(AutoGen):
self.AutoGenDepSet = set()
## The Modules referenced to this Library
# Only Library has this attribute
self._ReferenceModules = []
self._ReferenceModules = []
## Store the FixedAtBuild Pcds
#
#
self._FixedAtBuildPcds = []
self.ConstPcd = {}
@@ -2759,8 +2759,8 @@ class ModuleAutoGen(AutoGen):
continue
if Pcd not in self._FixedAtBuildPcds:
self._FixedAtBuildPcds.append(Pcd)
return self._FixedAtBuildPcds
return self._FixedAtBuildPcds
def _GetUniqueBaseName(self):
BaseName = self.Name
@@ -2959,7 +2959,7 @@ class ModuleAutoGen(AutoGen):
continue
PackageList.append(Package)
return PackageList
## Get the depex string
#
# @return : a string contain all depex expresion.
@@ -2988,7 +2988,7 @@ class ModuleAutoGen(AutoGen):
(Arch.upper() == self.Arch.upper() and \
ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
DepexList.append({(Arch, ModuleType): DepexExpr})
#the type of build module is USER_DEFINED.
if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED:
for Depex in DepexList:
@@ -2999,7 +2999,7 @@ class ModuleAutoGen(AutoGen):
if not DepexStr:
return '[Depex.%s]\n' % self.Arch
return DepexStr
#the type of build module not is USER_DEFINED.
Count = 0
for Depex in DepexList:
@@ -3019,7 +3019,7 @@ class ModuleAutoGen(AutoGen):
if not DepexStr:
return '[Depex.%s]\n' % self.Arch
return '[Depex.%s]\n# ' % self.Arch + DepexStr
## Merge dependency expression
#
# @retval list The token list of the dependency expression after parsed
@@ -3155,14 +3155,14 @@ class ModuleAutoGen(AutoGen):
#
self._BuildOptionIncPathList = []
return self._BuildOptionIncPathList
BuildOptionIncPathList = []
for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
try:
FlagOption = self.BuildOption[Tool]['FLAGS']
except KeyError:
FlagOption = ''
if self.PlatformInfo.ToolChainFamily != 'RVCT':
IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
else:
@@ -3175,7 +3175,7 @@ class ModuleAutoGen(AutoGen):
IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)
#
# EDK II modules must not reference header files outside of the packages they depend on or
# EDK II modules must not reference header files outside of the packages they depend on or
# within the module's directory tree. Report error if violation.
#
if self.AutoGenVersion >= 0x00010005:
@@ -3187,13 +3187,13 @@ class ModuleAutoGen(AutoGen):
ExtraData=ErrMsg,
File=str(self.MetaFile))
BuildOptionIncPathList += IncPathList
self._BuildOptionIncPathList = BuildOptionIncPathList
return self._BuildOptionIncPathList
## Return a list of files which can be built from source
#
# What kind of files can be built is determined by build rules in
@@ -3247,7 +3247,7 @@ class ModuleAutoGen(AutoGen):
Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
for Ext in Order_Dict[F][1:]:
RemoveList.append(F + Ext)
for item in RemoveList:
FileList.remove(item)
@@ -3690,12 +3690,12 @@ class ModuleAutoGen(AutoGen):
for SourceFile in self.Module.Sources:
if SourceFile.Type.upper() == ".VFR" :
#
# search the .map file to find the offset of vfr binary in the PE32+/TE file.
# search the .map file to find the offset of vfr binary in the PE32+/TE file.
#
VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
elif SourceFile.Type.upper() == ".UNI" :
#
# search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
# search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
#
VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")
@@ -3727,7 +3727,7 @@ class ModuleAutoGen(AutoGen):
#
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
fStringIO.write(''.join(UniGuid))
fStringIO.write(''.join(UniGuid))
UniValue = pack ('Q', int (Item[1], 16))
fStringIO.write (UniValue)
else:
@@ -3738,13 +3738,13 @@ class ModuleAutoGen(AutoGen):
#
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
fStringIO.write(''.join(VfrGuid))
fStringIO.write(''.join(VfrGuid))
VfrValue = pack ('Q', int (Item[1], 16))
fStringIO.write (VfrValue)
#
# write data into file.
#
try :
try :
fInputfile.write (fStringIO.getvalue())
except:
EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
@@ -3764,15 +3764,15 @@ class ModuleAutoGen(AutoGen):
if self.IsAsBuiltInfCreated:
return
# Skip the following code for EDK I inf
if self.AutoGenVersion < 0x00010005:
return
# Skip the following code for libraries
if self.IsLibrary:
return
# Skip the following code for modules with no source files
if not self.SourceFileList:
return
@@ -3780,7 +3780,7 @@ class ModuleAutoGen(AutoGen):
# Skip the following code for modules without any binary files
if self.BinaryFileList:
return
### TODO: How to handles mixed source and binary modules
# Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
@@ -4054,7 +4054,7 @@ class ModuleAutoGen(AutoGen):
UsageIndex = Index
break
if UsageIndex != -1:
PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
else:
PcdCommentList.append('## UNDEFINED ' + HiiInfo)
PcdComments = '\n '.join(PcdCommentList)
@@ -4069,7 +4069,7 @@ class ModuleAutoGen(AutoGen):
# Generated LibraryClasses section in comments.
for Library in self.LibraryAutoGenList:
AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))
# Generated UserExtensions TianoCore section.
# All tianocore user extensions are copied.
UserExtStr = ''
@@ -4083,12 +4083,12 @@ class ModuleAutoGen(AutoGen):
# Generated depex expression section in comments.
DepexExpresion = self._GetDepexExpresionString()
AsBuiltInfDict['depexsection_item'] = DepexExpresion if DepexExpresion else ''
AsBuiltInf = TemplateString()
AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))
SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)
self.IsAsBuiltInfCreated = True
if GlobalData.gBinCacheDest:
self.CopyModuleToCache()
@@ -4408,7 +4408,7 @@ class ModuleAutoGen(AutoGen):
BuildOption = property(_GetModuleBuildOption)
BuildOptionIncPathList = property(_GetBuildOptionIncPathList)
BuildCommand = property(_GetBuildCommand)
FixedAtBuildPcds = property(_GetFixedAtBuildPcds)
UniqueBaseName = property(_GetUniqueBaseName)

View File

@@ -360,7 +360,7 @@ class BuildRule:
# Clean up the line and replace path separator with native one
Line = self.RuleContent[Index].strip().replace(self._PATH_SEP, os.path.sep)
self.RuleContent[Index] = Line
# find the build_rule_version
if Line and Line[0] == "#" and Line.find(TAB_BUILD_RULE_VERSION) != -1:
if Line.find("=") != -1 and Line.find("=") < (len(Line) - 1) and (Line[(Line.find("=") + 1):]).split():

View File

@@ -784,7 +784,7 @@ gModuleTypeHeaderFile = {
SUP_MODULE_USER_DEFINED : [gBasicHeaderFile]
}
## Autogen internal worker macro to define DynamicEx PCD name includes both the TokenSpaceGuidName
## Autogen internal worker macro to define DynamicEx PCD name includes both the TokenSpaceGuidName
# the TokenName and Guid comparison to avoid define name collisions.
#
# @param Info The ModuleAutoGen object
@@ -804,7 +804,7 @@ def DynExPcdTokenNumberMapping(Info, AutoGenH):
return
AutoGenH.Append('\n#define COMPAREGUID(Guid1, Guid2) (BOOLEAN)(*(CONST UINT64*)Guid1 == *(CONST UINT64*)Guid2 && *((CONST UINT64*)Guid1 + 1) == *((CONST UINT64*)Guid2 + 1))\n')
# AutoGen for each PCD listed in a [PcdEx] section of a Module/Lib INF file.
# Auto generate a macro for each TokenName that takes a Guid pointer as a parameter.
# Auto generate a macro for each TokenName that takes a Guid pointer as a parameter.
# Use the Guid pointer to see if it matches any of the token space GUIDs.
TokenCNameList = set()
for TokenCName in ExTokenCNameList:
@@ -822,15 +822,15 @@ def DynExPcdTokenNumberMapping(Info, AutoGenH):
Index = Index + 1
if Index == 1:
AutoGenH.Append('\n#define __PCD_%s_ADDR_CMP(GuidPtr) (' % (RealTokenCName))
AutoGenH.Append('\\\n (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
AutoGenH.Append('\\\n (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
% (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
else:
AutoGenH.Append('\\\n (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
AutoGenH.Append('\\\n (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
% (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
if Index == Count:
AutoGenH.Append('0 \\\n )\n')
TokenCNameList.add(TokenCName)
TokenCNameList = set()
for TokenCName in ExTokenCNameList:
if TokenCName in TokenCNameList:
@@ -848,14 +848,14 @@ def DynExPcdTokenNumberMapping(Info, AutoGenH):
if Index == 1:
AutoGenH.Append('\n#define __PCD_%s_VAL_CMP(GuidPtr) (' % (RealTokenCName))
AutoGenH.Append('\\\n (GuidPtr == NULL) ? 0:')
AutoGenH.Append('\\\n COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
AutoGenH.Append('\\\n COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
% (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
else:
AutoGenH.Append('\\\n COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
AutoGenH.Append('\\\n COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
% (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
if Index == Count:
AutoGenH.Append('0 \\\n )\n')
# Autogen internal worker macro to compare GUIDs. Guid1 is a pointer to a GUID.
# Autogen internal worker macro to compare GUIDs. Guid1 is a pointer to a GUID.
# Guid2 is a C name for a GUID. Compare pointers first because optimizing compiler
# can do this at build time on CONST GUID pointers and optimize away call to COMPAREGUID().
# COMPAREGUID() will only be used if the Guid passed in is local to the module.
@@ -890,22 +890,22 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
if Pcd.PcdValueFromComm:
Pcd.DefaultValue = Pcd.PcdValueFromComm
if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
TokenNumber = int(Pcd.TokenValue, 0)
# Add TokenSpaceGuidValue value to PcdTokenName to discriminate the DynamicEx PCDs with
# Add TokenSpaceGuidValue value to PcdTokenName to discriminate the DynamicEx PCDs with
# different Guids but same TokenCName
PcdExTokenName = '_PCD_TOKEN_' + Pcd.TokenSpaceGuidCName + '_' + TokenCName
AutoGenH.Append('\n#define %s %dU\n' % (PcdExTokenName, TokenNumber))
else:
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber:
# If one of the Source built modules listed in the DSC is not listed in FDF modules,
# and the INF lists a PCD can only use the PcdsDynamic access method (it is only
# listed in the DEC file that declares the PCD as PcdsDynamic), then build tool will
# report warning message notify the PI that they are attempting to build a module
# that must be included in a flash image in order to be functional. These Dynamic PCD
# will not be added into the Database unless it is used by other modules that are
# included in the FDF file.
# If one of the Source built modules listed in the DSC is not listed in FDF modules,
# and the INF lists a PCD can only use the PcdsDynamic access method (it is only
# listed in the DEC file that declares the PCD as PcdsDynamic), then build tool will
# report warning message notify the PI that they are attempting to build a module
# that must be included in a flash image in order to be functional. These Dynamic PCD
# will not be added into the Database unless it is used by other modules that are
# included in the FDF file.
# In this case, just assign an invalid token number to make it pass build.
if Pcd.Type in PCD_DYNAMIC_TYPE_SET:
TokenNumber = 0
@@ -929,7 +929,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
SetModeName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_' + TokenCName
SetModeStatusName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_S_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_S_' + TokenCName
GetModeSizeName = '_PCD_GET_MODE_SIZE' + '_' + TokenCName
if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
if Info.IsLibrary:
PcdList = Info.LibraryPcdList
@@ -1044,7 +1044,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
"Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
ExtraData="[%s]" % str(Info))
if not Value.endswith('U'):
Value += 'U'
Value += 'U'
elif Pcd.DatumType == TAB_UINT8:
if ValueNumber < 0:
EdkLogger.error("build", AUTOGEN_ERROR,
@@ -1102,7 +1102,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
PcdValueName = '_PCD_PATCHABLE_VALUE_' + TokenCName
else:
PcdValueName = '_PCD_VALUE_' + TokenCName
if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
#
# For unicode, UINT16 array will be generated, so the alignment of unicode is guaranteed.
@@ -1115,7 +1115,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s UINT8 %s%s = %s;\n' % (Const, PcdVariableName, Array, Value))
AutoGenH.Append('extern %s UINT8 %s%s;\n' %(Const, PcdVariableName, Array))
AutoGenH.Append('#define %s %s%s\n' %(GetModeName, Type, PcdVariableName))
PcdDataSize = Pcd.GetPcdSize()
if Pcd.Type == TAB_PCDS_FIXED_AT_BUILD:
AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
@@ -1132,10 +1132,10 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
AutoGenC.Append('volatile %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
AutoGenH.Append('extern volatile %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array))
AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName))
PcdDataSize = Pcd.GetPcdSize()
AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PcdDataSize))
AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, PatchPcdSizeVariableName))
AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED UINTN %s = %s;\n' % (PatchPcdSizeVariableName, PcdDataSize))
@@ -1143,7 +1143,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
PcdDataSize = Pcd.GetPcdSize()
AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, FixPcdSizeTokenName))
AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value))
AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
AutoGenH.Append('extern %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array))
@@ -1190,13 +1190,13 @@ def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
TokenNumber = int(Pcd.TokenValue, 0)
else:
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber:
# If one of the Source built modules listed in the DSC is not listed in FDF modules,
# and the INF lists a PCD can only use the PcdsDynamic access method (it is only
# listed in the DEC file that declares the PCD as PcdsDynamic), then build tool will
# report warning message notify the PI that they are attempting to build a module
# that must be included in a flash image in order to be functional. These Dynamic PCD
# will not be added into the Database unless it is used by other modules that are
# included in the FDF file.
# If one of the Source built modules listed in the DSC is not listed in FDF modules,
# and the INF lists a PCD can only use the PcdsDynamic access method (it is only
# listed in the DEC file that declares the PCD as PcdsDynamic), then build tool will
# report warning message notify the PI that they are attempting to build a module
# that must be included in a flash image in order to be functional. These Dynamic PCD
# will not be added into the Database unless it is used by other modules that are
# included in the FDF file.
# In this case, just assign an invalid token number to make it pass build.
if Pcd.Type in PCD_DYNAMIC_TYPE_SET:
TokenNumber = 0
@@ -1230,7 +1230,7 @@ def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
if PcdItemType in PCD_DYNAMIC_EX_TYPE_SET:
PcdExTokenName = '_PCD_TOKEN_' + TokenSpaceGuidCName + '_' + TokenCName
AutoGenH.Append('\n#define %s %dU\n' % (PcdExTokenName, TokenNumber))
if Info.IsLibrary:
PcdList = Info.LibraryPcdList
else:
@@ -1312,7 +1312,7 @@ def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, PatchPcdSizeVariableName))
AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
if PcdItemType == TAB_PCDS_FIXED_AT_BUILD or PcdItemType == TAB_PCDS_FEATURE_FLAG:
key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[Pcd.Type] + '_' + TokenCName
@@ -1323,7 +1323,7 @@ def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
AutoGenH.Append('extern const %s _gPcd_FixedAtBuild_%s%s;\n' %(DatumType, TokenCName, Array))
AutoGenH.Append('#define %s %s_gPcd_FixedAtBuild_%s\n' %(GetModeName, Type, TokenCName))
AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName)
ConstFixedPcd = False
if PcdItemType == TAB_PCDS_FIXED_AT_BUILD and (key in Info.ConstPcd or (Info.IsLibrary and not Info._ReferenceModules)):
ConstFixedPcd = True
@@ -1656,7 +1656,7 @@ def CreatePcdCode(Info, AutoGenC, AutoGenH):
for Pcd in Info.ModulePcdList:
if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenSpaceGuidCName not in TokenSpaceList:
TokenSpaceList.append(Pcd.TokenSpaceGuidCName)
SkuMgr = Info.Workspace.Platform.SkuIdMgr
AutoGenH.Append("\n// Definition of SkuId Array\n")
AutoGenH.Append("extern UINT64 _gPcd_SkuId_Array[];\n")
@@ -1666,7 +1666,7 @@ def CreatePcdCode(Info, AutoGenC, AutoGenH):
if Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_BASE]:
GuidType = TAB_GUID
else:
GuidType = "EFI_GUID"
GuidType = "EFI_GUID"
for Item in TokenSpaceList:
AutoGenH.Append('extern %s %s;\n' % (GuidType, Item))
@@ -2016,7 +2016,7 @@ def CreateHeaderCode(Info, AutoGenC, AutoGenH):
if Info.ModuleType in gModuleTypeHeaderFile:
AutoGenH.Append("#include <%s>\n" % gModuleTypeHeaderFile[Info.ModuleType][0])
#
# if either PcdLib in [LibraryClasses] sections or there exist Pcd section, add PcdLib.h
# if either PcdLib in [LibraryClasses] sections or there exist Pcd section, add PcdLib.h
# As if modules only uses FixedPcd, then PcdLib is not needed in [LibraryClasses] section.
#
if 'PcdLib' in Info.Module.LibraryClasses or Info.Module.Pcds:

View File

@@ -182,10 +182,10 @@ typedef struct {
//UINT32 UninitDataBaseSize;// Total size for PCD those default value with 0.
//TABLE_OFFSET LocalTokenNumberTableOffset;
//TABLE_OFFSET ExMapTableOffset;
//TABLE_OFFSET GuidTableOffset;
//TABLE_OFFSET GuidTableOffset;
//TABLE_OFFSET StringTableOffset;
//TABLE_OFFSET SizeTableOffset;
//TABLE_OFFSET SkuIdTableOffset;
//TABLE_OFFSET SkuIdTableOffset;
//TABLE_OFFSET PcdNameTableOffset;
//UINT16 LocalTokenCount; // LOCAL_TOKEN_NUMBER for all
//UINT16 ExTokenCount; // EX_TOKEN_NUMBER for DynamicEx
@@ -237,11 +237,11 @@ ${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = {
## DbItemList
#
# The class holds the Pcd database items. ItemSize if not zero should match the item datum type in the C structure.
# The class holds the Pcd database items. ItemSize if not zero should match the item datum type in the C structure.
# When the structure is changed, remember to check the ItemSize and the related PackStr in PackData()
# RawDataList is the RawData that may need some kind of calculation or transformation,
# RawDataList is the RawData that may need some kind of calculation or transformation,
# the DataList corresponds to the data that need to be written to database. If DataList is not present, then RawDataList
# will be written to the database.
# will be written to the database.
#
class DbItemList:
def __init__(self, ItemSize, DataList=None, RawDataList=None):
@@ -309,7 +309,7 @@ class DbItemList:
## DbExMapTblItemList
#
# The class holds the ExMap table
# The class holds the ExMap table
#
class DbExMapTblItemList (DbItemList):
def __init__(self, ItemSize, DataList=None, RawDataList=None):
@@ -319,15 +319,15 @@ class DbExMapTblItemList (DbItemList):
Buffer = ''
PackStr = "=LHH"
for Datas in self.RawDataList:
Buffer += pack(PackStr,
Buffer += pack(PackStr,
GetIntegerValue(Datas[0]),
GetIntegerValue(Datas[1]),
GetIntegerValue(Datas[2]))
GetIntegerValue(Datas[2]))
return Buffer
## DbComItemList
#
# The DbComItemList is a special kind of DbItemList in case that the size of the List can not be computed by the
# The DbComItemList is a special kind of DbItemList in case that the size of the List can not be computed by the
# ItemSize multiply the ItemCount.
#
class DbComItemList (DbItemList):
@@ -345,7 +345,7 @@ class DbComItemList (DbItemList):
else:
assert(Index < len(self.RawDataList))
for ItemIndex in xrange(Index):
Offset += len(self.RawDataList[ItemIndex]) * self.ItemSize
Offset += len(self.RawDataList[ItemIndex]) * self.ItemSize
return Offset
@@ -373,12 +373,12 @@ class DbComItemList (DbItemList):
Buffer += pack(PackStr, GetIntegerValue(SingleData))
else:
Buffer += pack(PackStr, GetIntegerValue(Data))
return Buffer
## DbVariableTableItemList
#
# The class holds the Variable header value table
# The class holds the Variable header value table
#
class DbVariableTableItemList (DbComItemList):
def __init__(self, ItemSize, DataList=None, RawDataList=None):
@@ -389,7 +389,7 @@ class DbVariableTableItemList (DbComItemList):
Buffer = ''
for DataList in self.RawDataList:
for Data in DataList:
Buffer += pack(PackStr,
Buffer += pack(PackStr,
GetIntegerValue(Data[0]),
GetIntegerValue(Data[1]),
GetIntegerValue(Data[2]),
@@ -402,7 +402,7 @@ class DbVariableTableItemList (DbComItemList):
class DbStringHeadTableItemList(DbItemList):
def __init__(self,ItemSize,DataList=None,RawDataList=None):
DbItemList.__init__(self, ItemSize, DataList, RawDataList)
def GetInterOffset(self, Index):
Offset = 0
if self.ItemSize == 0:
@@ -435,11 +435,11 @@ class DbStringHeadTableItemList(DbItemList):
self.ListSize += len(Datas) * self.ItemSize
else:
self.ListSize += self.ItemSize
return self.ListSize
return self.ListSize
## DbSkuHeadTableItemList
#
# The class holds the Sku header value table
# The class holds the Sku header value table
#
class DbSkuHeadTableItemList (DbItemList):
def __init__(self, ItemSize, DataList=None, RawDataList=None):
@@ -449,14 +449,14 @@ class DbSkuHeadTableItemList (DbItemList):
PackStr = "=LL"
Buffer = ''
for Data in self.RawDataList:
Buffer += pack(PackStr,
Buffer += pack(PackStr,
GetIntegerValue(Data[0]),
GetIntegerValue(Data[1]))
return Buffer
## DbSizeTableItemList
#
# The class holds the size table
# The class holds the size table
#
class DbSizeTableItemList (DbItemList):
def __init__(self, ItemSize, DataList=None, RawDataList=None):
@@ -471,16 +471,16 @@ class DbSizeTableItemList (DbItemList):
PackStr = "=H"
Buffer = ''
for Data in self.RawDataList:
Buffer += pack(PackStr,
Buffer += pack(PackStr,
GetIntegerValue(Data[0]))
for subData in Data[1]:
Buffer += pack(PackStr,
Buffer += pack(PackStr,
GetIntegerValue(subData))
return Buffer
## DbStringItemList
#
# The class holds the string table
# The class holds the string table
#
class DbStringItemList (DbComItemList):
def __init__(self, ItemSize, DataList=None, RawDataList=None, LenList=None):
@@ -490,7 +490,7 @@ class DbStringItemList (DbComItemList):
RawDataList = []
if LenList is None:
LenList = []
assert(len(RawDataList) == len(LenList))
DataList = []
# adjust DataList according to the LenList
@@ -549,7 +549,7 @@ def GetMatchedIndex(Key1, List1, Key2, List2):
return Index
else:
StartPos = Index + 1
return -1
@@ -557,7 +557,7 @@ def GetMatchedIndex(Key1, List1, Key2, List2):
# to List like [0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00]
#
# @param StringArray A string array like {0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00}
#
#
# @retval A list object of integer items
#
def StringArrayToList(StringArray):
@@ -569,7 +569,7 @@ def StringArrayToList(StringArray):
## Convert TokenType String like "PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII" to TokenType value
#
# @param TokenType A TokenType string like "PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII"
#
#
# @retval A integer representation of the TokenType
#
def GetTokenTypeValue(TokenType):
@@ -596,7 +596,7 @@ def GetTokenTypeValue(TokenType):
## construct the external Pcd database using data from Dict
#
# @param Dict A dictionary contains Pcd related tables
#
#
# @retval Buffer A byte stream of the Pcd database
#
def BuildExDataBase(Dict):
@@ -625,26 +625,26 @@ def BuildExDataBase(Dict):
NumberOfSkuEnabledPcd = GetIntegerValue(Dict['SKU_HEAD_SIZE'])
Dict['STRING_TABLE_DB_VALUE'] = [StringArrayToList(x) for x in Dict['STRING_TABLE_VALUE']]
StringTableValue = Dict['STRING_TABLE_DB_VALUE']
# when calcute the offset, should use StringTableLen instead of StringTableValue, as string maxium len may be different with actual len
StringTableLen = Dict['STRING_TABLE_LENGTH']
DbStringTableLen = DbStringItemList(0, RawDataList = StringTableValue, LenList = StringTableLen)
PcdTokenTable = Dict['PCD_TOKENSPACE']
PcdTokenLen = Dict['PCD_TOKENSPACE_LENGTH']
PcdTokenTableValue = [StringArrayToList(x) for x in Dict['PCD_TOKENSPACE']]
DbPcdTokenTable = DbStringItemList(0, RawDataList = PcdTokenTableValue, LenList = PcdTokenLen)
PcdCNameTable = Dict['PCD_CNAME']
PcdCNameLen = Dict['PCD_CNAME_LENGTH']
PcdCNameTableValue = [StringArrayToList(x) for x in Dict['PCD_CNAME']]
DbPcdCNameTable = DbStringItemList(0, RawDataList = PcdCNameTableValue, LenList = PcdCNameLen)
PcdNameOffsetTable = Dict['PCD_NAME_OFFSET']
DbPcdNameOffsetTable = DbItemList(4, RawDataList = PcdNameOffsetTable)
SizeTableValue = zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH'])
DbSizeTableValue = DbSizeTableItemList(2, RawDataList = SizeTableValue)
InitValueUint16 = Dict['INIT_DB_VALUE_UINT16']
@@ -663,7 +663,7 @@ def BuildExDataBase(Dict):
DbSkuidValue = DbItemList(8, RawDataList = SkuidValue)
# Unit Db Items
UnInitValueUint64 = Dict['UNINIT_GUID_DECL_UINT64']
DbUnInitValueUint64 = DbItemList(8, RawDataList = UnInitValueUint64)
@@ -676,12 +676,12 @@ def BuildExDataBase(Dict):
UnInitValueBoolean = Dict['UNINIT_GUID_DECL_BOOLEAN']
DbUnInitValueBoolean = DbItemList(1, RawDataList = UnInitValueBoolean)
PcdTokenNumberMap = Dict['PCD_ORDER_TOKEN_NUMBER_MAP']
DbNameTotle = ["SkuidValue", "InitValueUint64", "VardefValueUint64", "InitValueUint32", "VardefValueUint32", "VpdHeadValue", "ExMapTable",
"LocalTokenNumberTable", "GuidTable", "StringHeadValue", "PcdNameOffsetTable", "VariableTable", "StringTableLen", "PcdTokenTable", "PcdCNameTable",
"SizeTableValue", "InitValueUint16", "VardefValueUint16", "InitValueUint8", "VardefValueUint8", "InitValueBoolean",
"VardefValueBoolean", "UnInitValueUint64", "UnInitValueUint32", "UnInitValueUint16", "UnInitValueUint8", "UnInitValueBoolean"]
DbTotal = [SkuidValue, InitValueUint64, VardefValueUint64, InitValueUint32, VardefValueUint32, VpdHeadValue, ExMapTable,
LocalTokenNumberTable, GuidTable, StringHeadValue, PcdNameOffsetTable, VariableTable, StringTableLen, PcdTokenTable, PcdCNameTable,
SizeTableValue, InitValueUint16, VardefValueUint16, InitValueUint8, VardefValueUint8, InitValueBoolean,
@@ -690,21 +690,21 @@ def BuildExDataBase(Dict):
DbLocalTokenNumberTable, DbGuidTable, DbStringHeadValue, DbPcdNameOffsetTable, DbVariableTable, DbStringTableLen, DbPcdTokenTable, DbPcdCNameTable,
DbSizeTableValue, DbInitValueUint16, DbVardefValueUint16, DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,
DbVardefValueBoolean, DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean]
# VardefValueBoolean is the last table in the init table items
InitTableNum = DbNameTotle.index("VardefValueBoolean") + 1
# The FixedHeader length of the PCD_DATABASE_INIT, from Signature to Pad
FixedHeaderLen = 80
# Get offset of SkuId table in the database
# Get offset of SkuId table in the database
SkuIdTableOffset = FixedHeaderLen
for DbIndex in xrange(len(DbTotal)):
if DbTotal[DbIndex] is SkuidValue:
break
SkuIdTableOffset += DbItemTotal[DbIndex].GetListSize()
# Get offset of SkuValue table in the database
# Get offset of SkuValue table in the database
# Fix up the LocalTokenNumberTable, SkuHeader table
for (LocalTokenNumberTableIndex, (Offset, Table)) in enumerate(LocalTokenNumberTable):
@@ -725,11 +725,11 @@ def BuildExDataBase(Dict):
TokenTypeValue = GetTokenTypeValue(TokenTypeValue)
LocalTokenNumberTable[LocalTokenNumberTableIndex] = DbOffset|int(TokenTypeValue)
# if PCD_TYPE_SKU_ENABLED, then we need to fix up the SkuTable
# resolve variable table offset
# resolve variable table offset
for VariableEntries in VariableTable:
skuindex = 0
for VariableEntryPerSku in VariableEntries:
@@ -747,7 +747,7 @@ def BuildExDataBase(Dict):
else:
assert(False)
if isinstance(VariableRefTable[0], list):
DbOffset += skuindex * 4
DbOffset += skuindex * 4
skuindex += 1
if DbIndex >= InitTableNum:
assert(False)
@@ -775,17 +775,17 @@ def BuildExDataBase(Dict):
DbTotalLength += DbItemTotal[DbIndex].GetListSize()
if not Dict['PCD_INFO_FLAG']:
DbPcdNameOffset = 0
DbPcdNameOffset = 0
LocalTokenCount = GetIntegerValue(Dict['LOCAL_TOKEN_NUMBER'])
ExTokenCount = GetIntegerValue(Dict['EX_TOKEN_NUMBER'])
GuidTableCount = GetIntegerValue(Dict['GUID_TABLE_SIZE'])
SystemSkuId = GetIntegerValue(Dict['SYSTEM_SKU_ID_VALUE'])
Pad = 0xDA
UninitDataBaseSize = 0
for Item in (DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean):
UninitDataBaseSize += Item.GetListSize()
if (DbTotalLength - UninitDataBaseSize) % 8:
DbTotalLength += (8 - (DbTotalLength - UninitDataBaseSize) % 8)
# Construct the database buffer
@@ -812,7 +812,7 @@ def BuildExDataBase(Dict):
Buffer += b
b = pack('=L', ExMapTableOffset)
Buffer += b
b = pack('=L', GuidTableOffset)
@@ -836,7 +836,7 @@ def BuildExDataBase(Dict):
Buffer += b
b = pack('=H', GuidTableCount)
Buffer += b
b = pack('=B', Pad)
Buffer += b
@@ -845,18 +845,18 @@ def BuildExDataBase(Dict):
Buffer += b
Buffer += b
Buffer += b
Index = 0
for Item in DbItemTotal:
Index +=1
b = Item.PackData()
Buffer += b
Buffer += b
if Index == InitTableNum:
if len(Buffer) % 8:
for num in range(8 - len(Buffer) % 8):
b = pack('=B', Pad)
Buffer += b
break
break
return Buffer
## Create code for PCD database
@@ -1010,7 +1010,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
'SYSTEM_SKU_ID' : ' SKU_ID SystemSkuId;',
'SYSTEM_SKU_ID_VALUE' : '0U'
}
SkuObj = Platform.Platform.SkuIdMgr
Dict['SYSTEM_SKU_ID_VALUE'] = 0 if SkuObj.SkuUsageType == SkuObj.SINGLE else Platform.Platform.SkuIds[SkuObj.SystemSkuId][0]
@@ -1028,7 +1028,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict[Init+'_NUMSKUS_DECL_' + DatumType] = []
Dict[Init+'_VALUE_' + DatumType] = []
Dict[Init+'_DB_VALUE_'+DatumType] = []
for Type in ['STRING_HEAD', 'VPD_HEAD', 'VARIABLE_HEAD']:
Dict[Type + '_CNAME_DECL'] = []
Dict[Type + '_GUID_DECL'] = []
@@ -1038,7 +1038,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['STRING_DB_VALUE'] = []
Dict['VPD_DB_VALUE'] = []
Dict['VARIABLE_DB_VALUE'] = []
Dict['STRING_TABLE_INDEX'] = []
Dict['STRING_TABLE_LENGTH'] = []
Dict['STRING_TABLE_CNAME'] = []
@@ -1061,19 +1061,19 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'] = []
Dict['VARIABLE_DB_VALUE'] = []
Dict['PCD_TOKENSPACE'] = []
Dict['PCD_CNAME'] = []
Dict['PCD_CNAME'] = []
Dict['PCD_TOKENSPACE_LENGTH'] = []
Dict['PCD_CNAME_LENGTH'] = []
Dict['PCD_TOKENSPACE_OFFSET'] = []
Dict['PCD_CNAME_OFFSET'] = []
Dict['PCD_TOKENSPACE_MAP'] = []
Dict['PCD_NAME_OFFSET'] = []
Dict['PCD_ORDER_TOKEN_NUMBER_MAP'] = {}
PCD_STRING_INDEX_MAP = {}
StringTableIndex = 0
StringTableSize = 0
NumberOfLocalTokens = 0
@@ -1141,8 +1141,8 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
if len(Pcd.SkuInfoList) > 1:
NumberOfSkuEnabledPcd += 1
SkuIdIndex = 1
SkuIdIndex = 1
VariableHeadList = []
for SkuName in Pcd.SkuInfoList:
Sku = Pcd.SkuInfoList[SkuName]
@@ -1150,9 +1150,9 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
if SkuId is None or SkuId == '':
continue
SkuIdIndex += 1
if len(Sku.VariableName) > 0:
VariableGuidStructure = Sku.VariableGuidValue
VariableGuid = GuidStructureStringToGuidValueName(VariableGuidStructure)
@@ -1203,7 +1203,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
for Index in range(Dict['STRING_TABLE_VALUE'].index(VariableNameStructure)):
VariableHeadStringIndex += Dict['STRING_TABLE_LENGTH'][Index]
VariableHeadList.append(VariableHeadStringIndex)
VariableHeadStringIndex = VariableHeadList[SkuIdIndex - 2]
# store VariableGuid to GuidTable and get the VariableHeadGuidIndex
@@ -1214,11 +1214,11 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
VariableHeadValueList.append('%dU, offsetof(%s_PCD_DATABASE, Init.%s_%s), %dU, %sU' %
(VariableHeadStringIndex, Phase, CName, TokenSpaceGuid,
(VariableHeadStringIndex, Phase, CName, TokenSpaceGuid,
VariableHeadGuidIndex, Sku.VariableOffset))
else:
VariableHeadValueList.append('%dU, offsetof(%s_PCD_DATABASE, Init.%s_%s_VariableDefault_%s), %dU, %sU' %
(VariableHeadStringIndex, Phase, CName, TokenSpaceGuid, SkuIdIndex,
(VariableHeadStringIndex, Phase, CName, TokenSpaceGuid, SkuIdIndex,
VariableHeadGuidIndex, Sku.VariableOffset))
Dict['VARDEF_CNAME_'+Pcd.DatumType].append(CName)
Dict['VARDEF_GUID_'+Pcd.DatumType].append(TokenSpaceGuid)
@@ -1231,7 +1231,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
# warning under linux building environment.
#
Dict['VARDEF_DB_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue)
if Pcd.DatumType == TAB_UINT64:
Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "ULL")
elif Pcd.DatumType in (TAB_UINT32, TAB_UINT16, TAB_UINT8):
@@ -1264,13 +1264,13 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Pcd.InitString = 'INIT'
VpdHeadOffsetList.append(str(Sku.VpdOffset) + 'U')
VpdDbOffsetList.append(Sku.VpdOffset)
# Also add the VOID* string of VPD PCD to SizeTable
# Also add the VOID* string of VPD PCD to SizeTable
if Pcd.DatumType == TAB_VOID:
NumberOfSizeItems += 1
# For VPD type of PCD, its current size is equal to its MAX size.
VoidStarTypeCurrSize = [str(Pcd.MaxDatumSize) + 'U']
VoidStarTypeCurrSize = [str(Pcd.MaxDatumSize) + 'U']
continue
if Pcd.DatumType == TAB_VOID:
Pcd.TokenTypeList.append('PCD_TYPE_STRING')
Pcd.InitString = 'INIT'
@@ -1297,7 +1297,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
Size = len(Sku.DefaultValue.split(","))
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
StringHeadOffsetList.append(str(StringTableSize) + 'U')
StringDbOffsetList.append(StringTableSize)
if Pcd.MaxDatumSize != '':
@@ -1336,10 +1336,10 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
ValueList.append(Sku.DefaultValue + "U")
elif Pcd.DatumType == "BOOLEAN":
if Sku.DefaultValue in ["1", "0"]:
ValueList.append(Sku.DefaultValue + "U")
ValueList.append(Sku.DefaultValue + "U")
else:
ValueList.append(Sku.DefaultValue)
DbValueList.append(Sku.DefaultValue)
Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
@@ -1348,8 +1348,8 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
Dict['SIZE_TABLE_CURRENT_LENGTH'].append(VoidStarTypeCurrSize)
if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
Dict['VARIABLE_HEAD_CNAME_DECL'].append(CName)
@@ -1382,7 +1382,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
else:
Dict[Pcd.InitString+'_VALUE_'+Pcd.DatumType].append(', '.join(ValueList))
Dict[Pcd.InitString+'_DB_VALUE_'+Pcd.DatumType].append(DbValueList)
if Phase == 'PEI':
NumberOfLocalTokens = NumberOfPeiLocalTokens
if Phase == 'DXE':
@@ -1394,7 +1394,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['TOKEN_TYPE'] = ['' for x in range(NumberOfLocalTokens)]
Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'] = ['' for x in range(NumberOfLocalTokens)]
Dict['PCD_CNAME'] = ['' for x in range(NumberOfLocalTokens)]
Dict['PCD_TOKENSPACE_MAP'] = ['' for x in range(NumberOfLocalTokens)]
Dict['PCD_TOKENSPACE_MAP'] = ['' for x in range(NumberOfLocalTokens)]
Dict['PCD_CNAME_LENGTH'] = [0 for x in range(NumberOfLocalTokens)]
SkuEnablePcdIndex = 0
for Pcd in ReorderedDynPcdList:
@@ -1419,7 +1419,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
EdkLogger.debug(EdkLogger.DEBUG_1, "PCD = %s.%s" % (CName, TokenSpaceGuidCName))
EdkLogger.debug(EdkLogger.DEBUG_1, "phase = %s" % Phase)
EdkLogger.debug(EdkLogger.DEBUG_1, "GeneratedTokenNumber = %s" % str(GeneratedTokenNumber))
#
# following four Dict items hold the information for LocalTokenNumberTable
#
@@ -1430,7 +1430,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['TOKEN_CNAME'][GeneratedTokenNumber] = CName
Dict['TOKEN_GUID'][GeneratedTokenNumber] = TokenSpaceGuid
Dict['TOKEN_TYPE'][GeneratedTokenNumber] = ' | '.join(Pcd.TokenTypeList)
if Platform.Platform.PcdInfoFlag:
TokenSpaceGuidCNameArray = StringToArray('"' + TokenSpaceGuidCName + '"' )
if TokenSpaceGuidCNameArray not in Dict['PCD_TOKENSPACE']:
@@ -1439,10 +1439,10 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['PCD_TOKENSPACE_MAP'][GeneratedTokenNumber] = Dict['PCD_TOKENSPACE'].index(TokenSpaceGuidCNameArray)
CNameBinArray = StringToArray('"' + CName + '"' )
Dict['PCD_CNAME'][GeneratedTokenNumber] = CNameBinArray
Dict['PCD_CNAME_LENGTH'][GeneratedTokenNumber] = len(CNameBinArray.split(","))
Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
# search the Offset and Table, used by LocalTokenNumberTableOffset
@@ -1468,7 +1468,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
if Pcd.InitString == 'UNINIT':
Table = Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType]
else:
Table = Dict[Pcd.InitString+'_DB_VALUE_'+Pcd.DatumType]
Table = Dict[Pcd.InitString+'_DB_VALUE_'+Pcd.DatumType]
Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'][GeneratedTokenNumber] = (Offset, Table)
#
@@ -1478,10 +1478,10 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
Dict['VARDEF_HEADER'][GeneratedTokenNumber] = '_Variable_Header'
else:
Dict['VARDEF_HEADER'][GeneratedTokenNumber] = ''
if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
if Phase == 'DXE':
GeneratedTokenNumber += NumberOfPeiLocalTokens
#
@@ -1493,7 +1493,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
# Therefore, 1 is added to GeneratedTokenNumber to generate a PCD Token Number before being inserted
# to the EXMAPPING_TABLE.
#
Dict['EXMAPPING_TABLE_EXTOKEN'].append(str(Pcd.TokenValue) + 'U')
Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append(str(GeneratedTokenNumber + 1) + 'U')
@@ -1504,12 +1504,12 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
TokenSpaceIndex = StringTableSize
for i in range(Dict['PCD_TOKENSPACE_MAP'][index]):
TokenSpaceIndex += Dict['PCD_TOKENSPACE_LENGTH'][i]
Dict['PCD_TOKENSPACE_OFFSET'].append(TokenSpaceIndex)
Dict['PCD_TOKENSPACE_OFFSET'].append(TokenSpaceIndex)
for index in range(len(Dict['PCD_TOKENSPACE'])):
StringTableSize += Dict['PCD_TOKENSPACE_LENGTH'][index]
StringTableIndex += 1
for index in range(len(Dict['PCD_CNAME'])):
Dict['PCD_CNAME_OFFSET'].append(StringTableSize)
Dict['PCD_CNAME_OFFSET'].append(StringTableSize)
Dict['PCD_NAME_OFFSET'].append(Dict['PCD_TOKENSPACE_OFFSET'][index])
Dict['PCD_NAME_OFFSET'].append(StringTableSize)
StringTableSize += Dict['PCD_CNAME_LENGTH'][index]
@@ -1552,15 +1552,15 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
if NumberOfSizeItems != 0:
Dict['SIZE_TABLE_SIZE'] = str(NumberOfSizeItems * 2) + 'U'
if NumberOfSkuEnabledPcd != 0:
if NumberOfSkuEnabledPcd != 0:
Dict['SKU_HEAD_SIZE'] = str(NumberOfSkuEnabledPcd) + 'U'
for AvailableSkuNumber in SkuObj.SkuIdNumberSet:
if AvailableSkuNumber not in Dict['SKUID_VALUE']:
Dict['SKUID_VALUE'].append(AvailableSkuNumber)
Dict['SKUID_VALUE'][0] = len(Dict['SKUID_VALUE']) - 1
AutoGenH.Append(gPcdDatabaseAutoGenH.Replace(Dict))
if NumberOfLocalTokens == 0:
AutoGenC.Append(gEmptyPcdDatabaseAutoGenC.Replace(Dict))
@@ -1573,11 +1573,11 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
SizeCurLenTempList = []
SizeMaxLenTempList = []
ReOrderFlag = True
if len(Dict['SIZE_TABLE_CNAME']) == 1:
if not (Dict['SIZE_TABLE_CNAME'][0] and Dict['SIZE_TABLE_GUID'][0]):
ReOrderFlag = False
if ReOrderFlag:
for Count in range(len(Dict['TOKEN_CNAME'])):
for Count1 in range(len(Dict['SIZE_TABLE_CNAME'])):
@@ -1587,15 +1587,15 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
SizeGuidTempList.append(Dict['SIZE_TABLE_GUID'][Count1])
SizeCurLenTempList.append(Dict['SIZE_TABLE_CURRENT_LENGTH'][Count1])
SizeMaxLenTempList.append(Dict['SIZE_TABLE_MAXIMUM_LENGTH'][Count1])
for Count in range(len(Dict['SIZE_TABLE_CNAME'])):
Dict['SIZE_TABLE_CNAME'][Count] = SizeCNameTempList[Count]
Dict['SIZE_TABLE_GUID'][Count] = SizeGuidTempList[Count]
Dict['SIZE_TABLE_CURRENT_LENGTH'][Count] = SizeCurLenTempList[Count]
Dict['SIZE_TABLE_MAXIMUM_LENGTH'][Count] = SizeMaxLenTempList[Count]
AutoGenC.Append(gPcdDatabaseAutoGenC.Replace(Dict))
# print Phase
Buffer = BuildExDataBase(Dict)

View File

@@ -17,14 +17,14 @@
import Common.EdkLogger as EdkLogger
from Common.BuildToolError import *
from Common.DataType import *
class InfSectionParser():
def __init__(self, FilePath):
self._FilePath = FilePath
self._FileSectionDataList = []
self._ParserInf()
def _ParserInf(self):
FileLinesList = []
UserExtFind = False
@@ -32,12 +32,12 @@ class InfSectionParser():
FileLastLine = False
SectionLine = ''
SectionData = []
try:
FileLinesList = open(self._FilePath, "r", 0).readlines()
except BaseException:
EdkLogger.error("build", AUTOGEN_ERROR, 'File %s is opened failed.' % self._FilePath)
for Index in range(0, len(FileLinesList)):
line = str(FileLinesList[Index]).strip()
if Index + 1 == len(FileLinesList):
@@ -52,7 +52,7 @@ class InfSectionParser():
SectionLine = line
UserExtFind = True
FindEnd = False
if (NextLine != '' and NextLine[0] == TAB_SECTION_START and \
NextLine[-1] == TAB_SECTION_END) or FileLastLine:
UserExtFind = False
@@ -60,7 +60,7 @@ class InfSectionParser():
self._FileSectionDataList.append({SectionLine: SectionData[:]})
del SectionData[:]
SectionLine = ''
# Get user extension TianoCore data
#
# @return: a list include some dictionary that key is section and value is a list contain all data.

View File

@@ -1,5 +1,5 @@
## @file
# This file is used to parse a strings file and create or add to a string database
# This file is used to parse a strings file and create or add to a string database
# file.
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
@@ -144,7 +144,7 @@ def CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
Str = WriteLine(Str, Line)
UnusedStr = ''
#Group the referred/Unused STRING token together.
#Group the referred/Unused STRING token together.
for Index in range(2, len(UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]])):
StringItem = UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]][Index]
Name = StringItem.StringName
@@ -265,16 +265,16 @@ def GetFilteredLanguage(UniLanguageList, LanguageFilterList):
PrimaryTag = Language[0:Language.find('-')].lower()
else:
PrimaryTag = Language
if len(PrimaryTag) == 3:
PrimaryTag = LangConvTable.get(PrimaryTag)
for UniLanguage in UniLanguageList:
if UniLanguage.find('-') != -1:
UniLanguagePrimaryTag = UniLanguage[0:UniLanguage.find('-')].lower()
else:
UniLanguagePrimaryTag = UniLanguage
if len(UniLanguagePrimaryTag) == 3:
UniLanguagePrimaryTag = LangConvTable.get(UniLanguagePrimaryTag)
@@ -307,7 +307,7 @@ def GetFilteredLanguage(UniLanguageList, LanguageFilterList):
# @param UniObjectClass A UniObjectClass instance
# @param IsCompatibleMode Compatible mode
# @param UniBinBuffer UniBinBuffer to contain UniBinary data.
# @param FilterInfo Platform language filter information
# @param FilterInfo Platform language filter information
#
# @retval Str: A string of .c file content
#
@@ -325,14 +325,14 @@ def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer,
else:
# EDK module is using ISO639-2 format filter, convert to the RFC4646 format
LanguageFilterList = [LangConvTable.get(F.lower()) for F in FilterInfo[1]]
UniLanguageList = []
for IndexI in range(len(UniObjectClass.LanguageDef)):
UniLanguageList += [UniObjectClass.LanguageDef[IndexI][0]]
UniLanguageListFiltered = GetFilteredLanguage(UniLanguageList, LanguageFilterList)
#
# Create lines for each language's strings
#
@@ -340,7 +340,7 @@ def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer,
Language = UniObjectClass.LanguageDef[IndexI][0]
if Language not in UniLanguageListFiltered:
continue
StringBuffer = BytesIO()
StrStringValue = ''
ArrayLength = 0
@@ -403,7 +403,7 @@ def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer,
# Add an EFI_HII_SIBT_END at last
#
Str = WriteLine(Str, ' ' + EFI_HII_SIBT_END + ",")
#
# Create binary UNI string
#
@@ -458,7 +458,7 @@ def CreateCFileEnd():
# @param BaseName: The basename of strings
# @param UniObjectClass A UniObjectClass instance
# @param IsCompatibleMode Compatible Mode
# @param FilterInfo Platform language filter information
# @param FilterInfo Platform language filter information
#
# @retval CFile: A string of complete .c file
#
@@ -544,7 +544,7 @@ def SearchString(UniObjectClass, FileList, IsCompatibleMode):
# This function is used for UEFI2.1 spec
#
#
def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, SkipList, BaseName, IsCompatibleMode = False, ShellMode = False, UniGenCFlag = True, UniGenBinBuffer = None, FilterInfo = [True, []]):
def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, SkipList, BaseName, IsCompatibleMode = False, ShellMode = False, UniGenCFlag = True, UniGenBinBuffer = None, FilterInfo = [True, []]):
if len(UniFilList) > 0:
if ShellMode:
#

View File

@@ -284,7 +284,7 @@ class UniFileClassObject(object):
if not IsLangInDef:
#
# The found STRING tokens will be added into new language string list
# so that the unique STRING identifier is reserved for all languages in the package list.
# so that the unique STRING identifier is reserved for all languages in the package list.
#
FirstLangName = self.LanguageDef[0][0]
if LangName != FirstLangName:
@@ -411,10 +411,10 @@ class UniFileClassObject(object):
#
# Ignore empty line
#
if len(Line) == 0:
continue
if len(Line) == 0:
continue
Line = Line.replace(u'/langdef', u'#langdef')
Line = Line.replace(u'/string', u'#string')
Line = Line.replace(u'/language', u'#language')
@@ -429,8 +429,8 @@ class UniFileClassObject(object):
Line = Line.replace(u'\\r', CR)
Line = Line.replace(u'\\t', u' ')
Line = Line.replace(u'\t', u' ')
Line = Line.replace(u'\\"', u'"')
Line = Line.replace(u"\\'", u"'")
Line = Line.replace(u'\\"', u'"')
Line = Line.replace(u"\\'", u"'")
Line = Line.replace(BACK_SLASH_PLACEHOLDER, u'\\')
StartPos = Line.find(u'\\x')
@@ -570,7 +570,7 @@ class UniFileClassObject(object):
else:
EdkLogger.error('Unicode File Parser', FORMAT_NOT_SUPPORTED, "The language '%s' for %s is not defined in Unicode file %s." \
% (Language, Name, self.File))
if Language not in self.OrderedStringList:
self.OrderedStringList[Language] = []
self.OrderedStringDict[Language] = {}
@@ -592,7 +592,7 @@ class UniFileClassObject(object):
for LangName in self.LanguageDef:
#
# New STRING token will be added into all language string lists.
# so that the unique STRING identifier is reserved for all languages in the package list.
# so that the unique STRING identifier is reserved for all languages in the package list.
#
if LangName[0] != Language:
if UseOtherLangDef != '':

View File

@@ -24,7 +24,7 @@ from Common.DataType import *
class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
def __init__(self):
self.var_check_info = []
def push_back(self, var_check_tab):
for tab in self.var_check_info:
if tab.equal(var_check_tab):
@@ -32,9 +32,9 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
break
else:
self.var_check_info.append(var_check_tab)
def dump(self, dest, Phase):
if not os.path.isabs(dest):
return
if not os.path.exists(dest):
@@ -161,7 +161,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
b = pack("=B", var_check_tab.pad)
Buffer += b
realLength += 1
DbFile = BytesIO()
if Phase == 'DXE' and os.path.exists(BinFilePath):
BinFile = open(BinFilePath, "rb")
@@ -175,7 +175,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
Buffer = BinBuffer + Buffer
DbFile.write(Buffer)
SaveFileOnChange(BinFilePath, DbFile.getvalue(), True)
class VAR_CHECK_PCD_VARIABLE_TAB(object):
pad = 0xDA
@@ -193,26 +193,26 @@ class VAR_CHECK_PCD_VARIABLE_TAB(object):
def UpdateSize(self):
self.HeaderLength = 32 + len(self.Name.split(","))
self.Length = 32 + len(self.Name.split(",")) + self.GetValidTabLen()
def GetValidTabLen(self):
validtablen = 0
for item in self.validtab:
validtablen += item.Length
return validtablen
validtablen += item.Length
return validtablen
def SetAttributes(self, attributes):
self.Attributes = attributes
def push_back(self, valid_obj):
if valid_obj is not None:
self.validtab.append(valid_obj)
def equal(self, varchecktab):
if self.Guid == varchecktab.Guid and self.Name == varchecktab.Name:
return True
else:
return False
def merge(self, varchecktab):
for validobj in varchecktab.validtab:
if validobj in self.validtab:
@@ -235,10 +235,10 @@ class VAR_CHECK_PCD_VALID_OBJ(object):
except:
self.StorageWidth = 0
self.ValidData = False
def __eq__(self, validObj):
def __eq__(self, validObj):
return validObj and self.VarOffset == validObj.VarOffset
class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
def __init__(self, VarOffset, validlist, PcdDataType):
super(VAR_CHECK_PCD_VALID_LIST, self).__init__(VarOffset, validlist, PcdDataType)
@@ -246,7 +246,7 @@ class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
valid_num_list = []
for item in self.rawdata:
valid_num_list.extend(item.split(','))
for valid_num in valid_num_list:
valid_num = valid_num.strip()
@@ -255,10 +255,10 @@ class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
else:
self.data.add(int(valid_num))
self.Length = 5 + len(self.data) * self.StorageWidth
class VAR_CHECK_PCD_VALID_RANGE(VAR_CHECK_PCD_VALID_OBJ):
def __init__(self, VarOffset, validrange, PcdDataType):
super(VAR_CHECK_PCD_VALID_RANGE, self).__init__(VarOffset, validrange, PcdDataType)
@@ -275,7 +275,7 @@ class VAR_CHECK_PCD_VALID_RANGE(VAR_CHECK_PCD_VALID_OBJ):
for obj in rangelist.pop():
self.data.add((obj.start, obj.end))
self.Length = 5 + len(self.data) * 2 * self.StorageWidth
def GetValidationObject(PcdClass, VarOffset):
if PcdClass.validateranges:

View File

@@ -1,12 +1,12 @@
## @file
# Intel Binary Product Data Generation Tool (Intel BPDG).
# This tool provide a simple process for the creation of a binary file containing read-only
# configuration data for EDK II platforms that contain Dynamic and DynamicEx PCDs described
# in VPD sections. It also provide an option for specifying an alternate name for a mapping
# file of PCD layout for use during the build when the platform integrator selects to use
# This tool provide a simple process for the creation of a binary file containing read-only
# configuration data for EDK II platforms that contain Dynamic and DynamicEx PCDs described
# in VPD sections. It also provide an option for specifying an alternate name for a mapping
# file of PCD layout for use during the build when the platform integrator selects to use
# automatic offset calculation.
#
# Copyright (c) 2010 - 2016, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -47,26 +47,26 @@ VERSION = (st.LBL_BPDG_VERSION + " Build " + gBUILD_VERSION)
#
def main():
global Options, Args
# Initialize log system
EdkLogger.Initialize()
EdkLogger.Initialize()
Options, Args = MyOptionParser()
ReturnCode = 0
if Options.opt_verbose:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
elif Options.opt_quiet:
EdkLogger.SetLevel(EdkLogger.QUIET)
elif Options.debug_level is not None:
EdkLogger.SetLevel(Options.debug_level + 1)
EdkLogger.SetLevel(Options.debug_level + 1)
else:
EdkLogger.SetLevel(EdkLogger.INFO)
if Options.bin_filename is None:
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -o option to specify the file name for the VPD binary file")
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -o option to specify the file name for the VPD binary file")
if Options.filename is None:
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -m option to specify the file name for the mapping file")
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -m option to specify the file name for the mapping file")
Force = False
if Options.opt_force is not None:
@@ -76,8 +76,8 @@ def main():
StartBpdg(Args[0], Options.filename, Options.bin_filename, Force)
else :
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please specify the file which contain the VPD pcd info.",
None)
None)
return ReturnCode
@@ -87,8 +87,8 @@ def main():
#
# @retval options A optparse.Values object containing the parsed options
# @retval args Target of BPDG command
#
def MyOptionParser():
#
def MyOptionParser():
#
# Process command line firstly.
#
@@ -106,10 +106,10 @@ def MyOptionParser():
parser.add_option('-o', '--vpd-filename', action='store', dest='bin_filename',
help=st.MSG_OPTION_VPD_FILENAME)
parser.add_option('-m', '--map-filename', action='store', dest='filename',
help=st.MSG_OPTION_MAP_FILENAME)
help=st.MSG_OPTION_MAP_FILENAME)
parser.add_option('-f', '--force', action='store_true', dest='opt_force',
help=st.MSG_OPTION_FORCE)
help=st.MSG_OPTION_FORCE)
(options, args) = parser.parse_args()
if len(args) == 0:
EdkLogger.info("Please specify the filename.txt file which contain the VPD pcd info!")
@@ -118,7 +118,7 @@ def MyOptionParser():
return options, args
## Start BPDG and call the main functions
## Start BPDG and call the main functions
#
# This method mainly focus on call GenVPD class member functions to complete
# BPDG's target. It will process VpdFile override, and provide the interface file
@@ -137,19 +137,19 @@ def StartBpdg(InputFileName, MapFileName, VpdFileName, Force):
choice = sys.stdin.readline()
if choice.strip().lower() not in ['y', 'yes', '']:
return
GenVPD = GenVpd.GenVPD (InputFileName, MapFileName, VpdFileName)
EdkLogger.info('%-24s = %s' % ("VPD input data file: ", InputFileName))
EdkLogger.info('%-24s = %s' % ("VPD input data file: ", InputFileName))
EdkLogger.info('%-24s = %s' % ("VPD output map file: ", MapFileName))
EdkLogger.info('%-24s = %s' % ("VPD output binary file: ", VpdFileName))
EdkLogger.info('%-24s = %s' % ("VPD output binary file: ", VpdFileName))
GenVPD.ParserInputFile()
GenVPD.FormatFileLine()
GenVPD.FixVpdOffset()
GenVPD.GenerateVpdFile(MapFileName, VpdFileName)
EdkLogger.info("- Vpd pcd fixed done! -")
EdkLogger.info("- Vpd pcd fixed done! -")
if __name__ == '__main__':
r = main()
@@ -157,4 +157,4 @@ if __name__ == '__main__':
if r < 0 or r > 127: r = 1
sys.exit(r)

View File

@@ -31,10 +31,10 @@ _FORMAT_CHAR = {1: 'B',
## The VPD PCD data structure for store and process each VPD PCD entry.
#
# This class contain method to format and pack pcd's value.
# This class contain method to format and pack pcd's value.
#
class PcdEntry:
def __init__(self, PcdCName, SkuId,PcdOffset, PcdSize, PcdValue, Lineno=None, FileName=None, PcdUnpackValue=None,
def __init__(self, PcdCName, SkuId,PcdOffset, PcdSize, PcdValue, Lineno=None, FileName=None, PcdUnpackValue=None,
PcdBinOffset=None, PcdBinSize=None, Alignment=None):
self.PcdCName = PcdCName.strip()
self.SkuId = SkuId.strip()
@@ -47,7 +47,7 @@ class PcdEntry:
self.PcdBinOffset = PcdBinOffset
self.PcdBinSize = PcdBinSize
self.Alignment = Alignment
if self.PcdValue == '' :
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid PCD format(Name: %s File: %s line: %s) , no Value specified!" % (self.PcdCName, self.FileName, self.Lineno))
@@ -63,13 +63,13 @@ class PcdEntry:
self._GenOffsetValue ()
## Analyze the string value to judge the PCD's datum type equal to Boolean or not.
#
#
# @param ValueString PCD's value
# @param Size PCD's size
#
#
# @retval True PCD's datum type is Boolean
# @retval False PCD's datum type is not Boolean.
#
# @retval False PCD's datum type is not Boolean.
#
def _IsBoolean(self, ValueString, Size):
if (Size == "1"):
if ValueString.upper() in ["TRUE", "FALSE"]:
@@ -80,10 +80,10 @@ class PcdEntry:
return False
## Convert the PCD's value from string to integer.
#
#
# This function will try to convert the Offset value form string to integer
# for both hexadecimal and decimal.
#
#
def _GenOffsetValue(self):
if self.PcdOffset != "*" :
try:
@@ -96,10 +96,10 @@ class PcdEntry:
"Invalid offset value %s for PCD %s (File: %s Line: %s)" % (self.PcdOffset, self.PcdCName, self.FileName, self.Lineno))
## Pack Boolean type VPD PCD's value form string to binary type.
#
#
# @param ValueString The boolean type string for pack.
#
#
#
#
def _PackBooleanValue(self, ValueString):
if ValueString.upper() == "TRUE" or ValueString in ["1", "0x1", "0x01"]:
try:
@@ -115,10 +115,10 @@ class PcdEntry:
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
## Pack Integer type VPD PCD's value form string to binary type.
#
#
# @param ValueString The Integer type string for pack.
#
#
#
#
def _PackIntValue(self, IntValue, Size):
if Size not in _FORMAT_CHAR:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
@@ -170,7 +170,7 @@ class PcdEntry:
# 3: {bytearray}, only support byte-array.
#
# @param ValueString The Integer type string for pack.
#
#
def _PackPtrValue(self, ValueString, Size):
if ValueString.startswith('L"') or ValueString.startswith("L'"):
self._PackUnicode(ValueString, Size)
@@ -183,9 +183,9 @@ class PcdEntry:
"Invalid VOID* type PCD %s value %s (File: %s Line: %s)" % (self.PcdCName, ValueString, self.FileName, self.Lineno))
## Pack an Ascii PCD value.
#
#
# An Ascii string for a PCD should be in format as ""/''.
#
#
def _PackString(self, ValueString, Size):
if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
@@ -198,7 +198,7 @@ class PcdEntry:
QuotedFlag = False
ValueString = ValueString[1:-1]
# No null-terminator in 'string'
# No null-terminator in 'string'
if (QuotedFlag and len(ValueString) + 1 > Size) or (not QuotedFlag and len(ValueString) > Size):
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
@@ -209,9 +209,9 @@ class PcdEntry:
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
## Pack a byte-array PCD value.
#
#
# A byte-array for a PCD should be in format as {0x01, 0x02, ...}.
#
#
def _PackByteArray(self, ValueString, Size):
if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
@@ -261,7 +261,7 @@ class PcdEntry:
self.PcdValue = ReturnArray.tolist()
## Pack a unicode PCD value into byte array.
#
#
# A unicode string for a PCD should be in format as L""/L''.
#
def _PackUnicode(self, UnicodeString, Size):
@@ -271,7 +271,7 @@ class PcdEntry:
QuotedFlag = True
if UnicodeString.startswith("L'"):
QuotedFlag = False
QuotedFlag = False
UnicodeString = UnicodeString[2:-1]
# No null-terminator in L'string'
@@ -304,7 +304,7 @@ class PcdEntry:
# 2. Format the input file data to remove unused lines;
# 3. Fixed offset if needed;
# 4. Generate output file, including guided.map and guided.bin file;
#
#
class GenVPD :
## Constructor of DscBuildData
#
@@ -334,9 +334,9 @@ class GenVPD :
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % InputFileName, None)
##
# Parser the input file which is generated by the build tool. Convert the value of each pcd's
# Parser the input file which is generated by the build tool. Convert the value of each pcd's
# from string to it's real format. Also remove the useless line in the input file.
#
#
def ParserInputFile (self):
count = 0
for line in self.FileLinesList:
@@ -390,7 +390,7 @@ class GenVPD :
#
# After remove the useless line, if there are no data remain in the file line list,
# Report warning messages to user's.
#
#
if len(self.FileLinesList) == 0 :
EdkLogger.warn('BPDG', BuildToolError.RESOURCE_NOT_AVAILABLE,
"There are no VPD type pcds defined in DSC file, Please check it.")
@@ -480,14 +480,14 @@ class GenVPD :
continue
##
# This function used to create a clean list only contain useful information and reorganized to make it
# This function used to create a clean list only contain useful information and reorganized to make it
# easy to be sorted
#
def FormatFileLine (self) :
for eachPcd in self.FileLinesList :
if eachPcd.PcdOffset != '*' :
# Use pcd's Offset value as key, and pcd's Value as value
# Use pcd's Offset value as key, and pcd's Value as value
self.PcdFixedOffsetSizeList.append(eachPcd)
else :
# Use pcd's CName as key, and pcd's Size as value
@@ -497,11 +497,11 @@ class GenVPD :
##
# This function is use to fix the offset value which the not specified in the map file.
# Usually it use the star (meaning any offset) character in the offset field
#
#
def FixVpdOffset (self):
# At first, the offset should start at 0
# Sort fixed offset list in order to find out where has free spaces for the pcd's offset
# value is "*" to insert into.
# value is "*" to insert into.
self.PcdFixedOffsetSizeList.sort(lambda x, y: cmp(x.PcdBinOffset, y.PcdBinOffset))
@@ -530,57 +530,57 @@ class GenVPD :
Pcd.PcdBinOffset = NowOffset
Pcd.PcdOffset = str(hex(Pcd.PcdBinOffset))
NowOffset += Pcd.PcdOccupySize
self.PcdFixedOffsetSizeList = self.PcdUnknownOffsetList
return
# Check the offset of VPD type pcd's offset start from 0.
# Check the offset of VPD type pcd's offset start from 0.
if self.PcdFixedOffsetSizeList[0].PcdBinOffset != 0 :
EdkLogger.warn("BPDG", "The offset of VPD type pcd should start with 0, please check it.",
None)
# Judge whether the offset in fixed pcd offset list is overlapped or not.
lenOfList = len(self.PcdFixedOffsetSizeList)
count = 0
count = 0
while (count < lenOfList - 1) :
PcdNow = self.PcdFixedOffsetSizeList[count]
PcdNext = self.PcdFixedOffsetSizeList[count+1]
# Two pcd's offset is same
# Two pcd's offset is same
if PcdNow.PcdBinOffset == PcdNext.PcdBinOffset :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offset of %s at line: %s is same with %s at line: %s in file %s" % \
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
# Overlapped
# Overlapped
if PcdNow.PcdBinOffset + PcdNow.PcdOccupySize > PcdNext.PcdBinOffset :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offset of %s at line: %s is overlapped with %s at line: %s in file %s" % \
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
# Has free space, raise a warning message
# Has free space, raise a warning message
if PcdNow.PcdBinOffset + PcdNow.PcdOccupySize < PcdNext.PcdBinOffset :
EdkLogger.warn("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offsets have free space of between %s at line: %s and %s at line: %s in file %s" % \
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
count += 1
LastOffset = self.PcdFixedOffsetSizeList[0].PcdBinOffset
FixOffsetSizeListCount = 0
lenOfList = len(self.PcdFixedOffsetSizeList)
lenOfUnfixedList = len(self.PcdUnknownOffsetList)
##
# Insert the un-fixed offset pcd's list into fixed offset pcd's list if has free space between those pcds.
#
# Insert the un-fixed offset pcd's list into fixed offset pcd's list if has free space between those pcds.
#
while (FixOffsetSizeListCount < lenOfList) :
eachFixedPcd = self.PcdFixedOffsetSizeList[FixOffsetSizeListCount]
eachFixedPcd = self.PcdFixedOffsetSizeList[FixOffsetSizeListCount]
NowOffset = eachFixedPcd.PcdBinOffset
# Has free space
# Has free space
if LastOffset < NowOffset :
if lenOfUnfixedList != 0 :
countOfUnfixedList = 0
@@ -598,42 +598,42 @@ class GenVPD :
eachUnfixedPcd.PcdBinOffset = LastOffset
# Insert this pcd into fixed offset pcd list.
self.PcdFixedOffsetSizeList.insert(FixOffsetSizeListCount, eachUnfixedPcd)
# Delete the item's offset that has been fixed and added into fixed offset list
self.PcdUnknownOffsetList.pop(countOfUnfixedList)
# After item added, should enlarge the length of fixed pcd offset list
lenOfList += 1
lenOfList += 1
FixOffsetSizeListCount += 1
# Decrease the un-fixed pcd offset list's length
lenOfUnfixedList -= 1
# Modify the last offset value
LastOffset += needFixPcdSize
# Modify the last offset value
LastOffset += needFixPcdSize
else :
# It can not insert into those two pcds, need to check still has other space can store it.
LastOffset = NowOffset + self.PcdFixedOffsetSizeList[FixOffsetSizeListCount].PcdOccupySize
FixOffsetSizeListCount += 1
break
# Set the FixOffsetSizeListCount = lenOfList for quit the loop
else :
FixOffsetSizeListCount = lenOfList
# No free space, smoothly connect with previous pcd.
FixOffsetSizeListCount = lenOfList
# No free space, smoothly connect with previous pcd.
elif LastOffset == NowOffset :
LastOffset = NowOffset + eachFixedPcd.PcdOccupySize
FixOffsetSizeListCount += 1
# Usually it will not enter into this thunk, if so, means it overlapped.
# Usually it will not enter into this thunk, if so, means it overlapped.
else :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_NOT_AVAILABLE,
"The offset value definition has overlapped at pcd: %s, it's offset is: %s, in file: %s line: %s" % \
(eachFixedPcd.PcdCName, eachFixedPcd.PcdOffset, eachFixedPcd.InputFileName, eachFixedPcd.Lineno),
None)
FixOffsetSizeListCount += 1
# Continue to process the un-fixed offset pcd's list, add this time, just append them behind the fixed pcd's offset list.
# Continue to process the un-fixed offset pcd's list, add this time, just append them behind the fixed pcd's offset list.
lenOfUnfixedList = len(self.PcdUnknownOffsetList)
lenOfList = len(self.PcdFixedOffsetSizeList)
while (lenOfUnfixedList > 0) :
@@ -641,23 +641,23 @@ class GenVPD :
# The last pcd instance
LastPcd = self.PcdFixedOffsetSizeList[lenOfList-1]
NeedFixPcd = self.PcdUnknownOffsetList[0]
NeedFixPcd.PcdBinOffset = LastPcd.PcdBinOffset + LastPcd.PcdOccupySize
if NeedFixPcd.PcdBinOffset % NeedFixPcd.Alignment != 0:
NeedFixPcd.PcdBinOffset = (NeedFixPcd.PcdBinOffset / NeedFixPcd.Alignment + 1) * NeedFixPcd.Alignment
NeedFixPcd.PcdOffset = str(hex(NeedFixPcd.PcdBinOffset))
# Insert this pcd into fixed offset pcd list's tail.
self.PcdFixedOffsetSizeList.insert(lenOfList, NeedFixPcd)
# Delete the item's offset that has been fixed and added into fixed offset list
self.PcdUnknownOffsetList.pop(0)
lenOfList += 1
lenOfUnfixedList -= 1
lenOfUnfixedList -= 1
##
# Write the final data into output files.
#
#
def GenerateVpdFile (self, MapFileName, BinFileName):
#Open an VPD file to process
@@ -705,4 +705,4 @@ class GenVPD :
fStringIO.close ()
fVpdFile.close ()
fMapFile.close ()

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to define strings used in the BPDG tool
#
# Copyright (c) 2010 - 2016, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -31,8 +31,8 @@ MAP_FILE_COMMENT_TEMPLATE = \
# THIS IS AUTO-GENERATED FILE BY BPDG TOOLS AND PLEASE DO NOT MAKE MODIFICATION.
#
# This file lists all VPD informations for a platform fixed/adjusted by BPDG tool.
#
# Copyright (c) 2010 -2016, Intel Corporation. All rights reserved.<BR>
#
# Copyright (c) 2010 -2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -50,18 +50,18 @@ LBL_BPDG_VERSION = (u"1.0")
LBL_BPDG_USAGE = \
(
"""BPDG options -o Filename.bin -m Filename.map Filename.txt
Copyright (c) 2010 - 2016, Intel Corporation All Rights Reserved.
Copyright (c) 2010 - 2018, Intel Corporation All Rights Reserved.
Intel(r) Binary Product Data Generation Tool (Intel(r) BPDG)
Required Flags:
-o BIN_FILENAME, --vpd-filename=BIN_FILENAME
Specify the file name for the VPD binary file
-m FILENAME, --map-filename=FILENAME
Generate file name for consumption during the build that contains
the mapping of Pcd name, offset, datum size and value derived
Generate file name for consumption during the build that contains
the mapping of Pcd name, offset, datum size and value derived
from the input file and any automatic calculations.
"""
"""
)
MSG_OPTION_HELP = ("Show this help message and exit.")

View File

@@ -2,15 +2,15 @@
#
# This file is for build version number auto generation
#
# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
gBUILD_VERSION = ""
gBUILD_VERSION = "Developer Build based on Revision: Unknown"

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create a database used by ECC tool
#
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -33,7 +33,7 @@ from Table.TableDsc import TableDsc
# This class defined the build databse
# During the phase of initialization, the database will create all tables and
# insert all records of table DataModel
#
#
# @param object: Inherited from object class
# @param DbPath: A string for the path of the ECC database
#
@@ -54,7 +54,7 @@ class Database(object):
self.TblInf = TableInf(self.Cur)
self.TblDec = TableDec(self.Cur)
self.TblDsc = TableDsc(self.Cur)
## Initialize build database
#
# 1. Delete all old existing tables
@@ -69,7 +69,7 @@ class Database(object):
# self.TblDataModel.Drop()
# self.TblDsc.Drop()
# self.TblFile.Drop()
#
# Create new tables
#
@@ -78,7 +78,7 @@ class Database(object):
self.TblInf.Create()
self.TblDec.Create()
self.TblDsc.Create()
#
# Initialize table DataModel
#
@@ -91,10 +91,10 @@ class Database(object):
#
def QueryTable(self, Table):
Table.Query()
## Close entire database
#
# Commit all first
# Commit all first
# Close the connection and cursor
#
def Close(self):
@@ -110,11 +110,11 @@ class Database(object):
if __name__ == '__main__':
EdkLogger.Initialize()
EdkLogger.SetLevel(EdkLogger.DEBUG_0)
Db = Database(DATABASE_PATH)
Db.InitDatabase()
Db.QueryTable(Db.TblDataModel)
Db.QueryTable(Db.TblDataModel)
Db.QueryTable(Db.TblFile)
Db.QueryTable(Db.TblDsc)
Db.Close()

View File

@@ -63,11 +63,11 @@ gDependencyDatabase = {} # arch : {file path : [dependent files list]}
_TempInfs = []
def GetVariableOffset(mapfilepath, efifilepath, varnames):
""" Parse map file to get variable offset in current EFI file
""" Parse map file to get variable offset in current EFI file
@param mapfilepath Map file absolution path
@param efifilepath: EFI binary file full path
@param varnames iteratable container whose elements are variable names to be searched
@return List whos elements are tuple with variable name and raw offset
"""
lines = []
@@ -77,7 +77,7 @@ def GetVariableOffset(mapfilepath, efifilepath, varnames):
f.close()
except:
return None
if len(lines) == 0: return None
firstline = lines[0].strip()
if (firstline.startswith("Archive member included ") and
@@ -177,7 +177,7 @@ def _parseGeneral(lines, efifilepath, varnames):
continue
if line.startswith("entry point at"):
status = 3
continue
continue
if status == 1 and len(line) != 0:
m = secReGeneral.match(line)
assert m is not None, "Fail to parse the section in map file , line is %s" % line
@@ -257,7 +257,7 @@ def ProcessDuplicatedInf(Path, BaseName, Workspace):
#
# A temporary INF is copied to database path which must have write permission
# The temporary will be removed at the end of build
# In case of name conflict, the file name is
# In case of name conflict, the file name is
# FILE_GUIDBaseName (0D1B936F-68F3-4589-AFCC-FB8B7AEBC836module.inf)
#
TempFullPath = os.path.join(DbDir,
@@ -268,7 +268,7 @@ def ProcessDuplicatedInf(Path, BaseName, Workspace):
#
# To build same module more than once, the module path with FILE_GUID overridden has
# the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
# in DSC which is used as relative path by C files and other files in INF.
# in DSC which is used as relative path by C files and other files in INF.
# A trick was used: all module paths are PathClass instances, after the initialization
# of PathClass, the PathClass.Path is overridden by the temporary INF path.
#
@@ -287,7 +287,7 @@ def ProcessDuplicatedInf(Path, BaseName, Workspace):
# If file exists, compare contents
#
if os.path.exists(TempFullPath):
with open(str(Path), 'rb') as f1, open(TempFullPath, 'rb') as f2:
with open(str(Path), 'rb') as f1, open(TempFullPath, 'rb') as f2:
if f1.read() == f2.read():
return RtPath
_TempInfs.append(TempFullPath)
@@ -1545,29 +1545,29 @@ def AnalyzeDscPcd(Setting, PcdType, DataType=''):
# Used to avoid split issue while the value string contain "|" character
#
# @param[in] Setting: A String contain value/datum type/token number information;
#
# @retval ValueList: A List contain value, datum type and toke number.
#
# @retval ValueList: A List contain value, datum type and toke number.
#
def AnalyzePcdData(Setting):
ValueList = ['', '', '']
ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
PtrValue = ValueRe.findall(Setting)
ValueUpdateFlag = False
if len(PtrValue) >= 1:
Setting = re.sub(ValueRe, '', Setting)
ValueUpdateFlag = True
TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
if ValueUpdateFlag:
ValueList[0] = PtrValue[0]
return ValueList
return ValueList
## check format of PCD value against its the datum type
#
# For PCD value setting
@@ -1770,7 +1770,7 @@ class PathClass(object):
OtherKey = Other.Path
else:
OtherKey = str(Other)
SelfKey = self.Path
if SelfKey == OtherKey:
return 0
@@ -1908,7 +1908,7 @@ class PeImageClass():
def _ByteListToStr(self, ByteList):
String = ''
for index in range(len(ByteList)):
if ByteList[index] == 0:
if ByteList[index] == 0:
break
String += chr(ByteList[index])
return String
@@ -1945,11 +1945,11 @@ class DefaultStore():
if sid == minid:
return name
class SkuClass():
DEFAULT = 0
SINGLE = 1
MULTIPLE =2
def __init__(self,SkuIdentifier='', SkuIds=None):
if SkuIds is None:
SkuIds = {}
@@ -1961,7 +1961,7 @@ class SkuClass():
EdkLogger.error("build", PARAMETER_INVALID,
ExtraData = "SKU-ID [%s] value %s exceeds the max value of UINT64"
% (SkuName, SkuId))
self.AvailableSkuIds = sdict()
self.SkuIdSet = []
self.SkuIdNumberSet = []
@@ -1975,10 +1975,10 @@ class SkuClass():
self.SkuIdSet = SkuIds.keys()
self.SkuIdNumberSet = [num[0].strip() + 'U' for num in SkuIds.values()]
else:
r = SkuIdentifier.split('|')
r = SkuIdentifier.split('|')
self.SkuIdSet=[(r[k].strip()).upper() for k in range(len(r))]
k = None
try:
try:
self.SkuIdNumberSet = [SkuIds[k][0].strip() + 'U' for k in self.SkuIdSet]
except Exception:
EdkLogger.error("build", PARAMETER_INVALID,
@@ -2027,7 +2027,7 @@ class SkuClass():
skuorderset = []
for skuname in self.SkuIdSet:
skuorderset.append(self.GetSkuChain(skuname))
skuorder = []
for index in range(max(len(item) for item in skuorderset)):
for subset in skuorderset:
@@ -2039,8 +2039,8 @@ class SkuClass():
return skuorder
def __SkuUsageType(self):
def __SkuUsageType(self):
if self.__SkuIdentifier.upper() == "ALL":
return SkuClass.MULTIPLE
@@ -2073,7 +2073,7 @@ class SkuClass():
return ArrayStr
def __GetAvailableSkuIds(self):
return self.AvailableSkuIds
def __GetSystemSkuID(self):
if self.__SkuUsageType() == SkuClass.SINGLE:
if len(self.SkuIdSet) == 1:

View File

@@ -4,7 +4,7 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2015 - 2016, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -20,16 +20,16 @@ from Common.DataType import TAB_WORKSPACE
## MultipleWorkspace
#
# This class manage multiple workspace behavior
#
#
# @param class:
#
# @var WORKSPACE: defined the current WORKSPACE
# @var PACKAGES_PATH: defined the other WORKSAPCE, if current WORKSPACE is invalid, search valid WORKSPACE from PACKAGES_PATH
#
#
class MultipleWorkspace(object):
WORKSPACE = ''
PACKAGES_PATH = None
## convertPackagePath()
#
# Convert path to match workspace.
@@ -59,7 +59,7 @@ class MultipleWorkspace(object):
cls.PACKAGES_PATH = [cls.convertPackagePath (Ws, os.path.normpath(Path.strip())) for Path in PackagesPath.split(os.pathsep)]
else:
cls.PACKAGES_PATH = []
## join()
#
# rewrite os.path.join function
@@ -79,7 +79,7 @@ class MultipleWorkspace(object):
return Path
Path = os.path.join(Ws, *p)
return Path
## relpath()
#
# rewrite os.path.relpath function
@@ -98,7 +98,7 @@ class MultipleWorkspace(object):
if Path.lower().startswith(Ws.lower()):
Path = os.path.relpath(Path, Ws)
return Path
## getWs()
#
# get valid workspace for the path
@@ -117,7 +117,7 @@ class MultipleWorkspace(object):
if os.path.exists(absPath):
return Pkg
return Ws
## handleWsMacro()
#
# handle the $(WORKSPACE) tag, if current workspace is invalid path relative the tool, replace it.
@@ -143,7 +143,7 @@ class MultipleWorkspace(object):
PathList[i] = str[0:MacroStartPos] + Path
PathStr = ' '.join(PathList)
return PathStr
## getPkgPath()
#
# get all package pathes.
@@ -153,4 +153,4 @@ class MultipleWorkspace(object):
@classmethod
def getPkgPath(cls):
return cls.PACKAGES_PATH

View File

@@ -43,7 +43,7 @@ ERR_IN_OPERAND = 'Macro after IN operator can only be: $(FAMILY), $(ARCH), $(TOO
class RangeObject(object):
def __init__(self, start, end, empty = False):
if int(start) < int(end):
self.start = int(start)
self.end = int(end)
@@ -55,24 +55,24 @@ class RangeObject(object):
class RangeContainer(object):
def __init__(self):
self.rangelist = []
def push(self, RangeObject):
self.rangelist.append(RangeObject)
self.rangelist = sorted(self.rangelist, key = lambda rangeobj : rangeobj.start)
self.merge()
def pop(self):
for item in self.rangelist:
yield item
def __clean__(self):
def __clean__(self):
newrangelist = []
for rangeobj in self.rangelist:
if rangeobj.empty == True:
continue
else:
newrangelist.append(rangeobj)
self.rangelist = newrangelist
self.rangelist = newrangelist
def merge(self):
self.__clean__()
for i in range(0, len(self.rangelist) - 1):
@@ -80,23 +80,23 @@ class RangeContainer(object):
continue
else:
self.rangelist[i + 1].start = self.rangelist[i].start
self.rangelist[i + 1].end = self.rangelist[i + 1].end > self.rangelist[i].end and self.rangelist[i + 1].end or self.rangelist[i].end
self.rangelist[i + 1].end = self.rangelist[i + 1].end > self.rangelist[i].end and self.rangelist[i + 1].end or self.rangelist[i].end
self.rangelist[i].empty = True
self.__clean__()
def dump(self):
print("----------------------")
rangelist = ""
for object in self.rangelist:
rangelist = rangelist + "[%d , %d]" % (object.start, object.end)
print(rangelist)
class XOROperatorObject(object):
def __init__(self):
class XOROperatorObject(object):
def __init__(self):
pass
def Calculate(self, Operand, DataType, SymbolTable):
def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "XOR ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -108,9 +108,9 @@ class XOROperatorObject(object):
return rangeId
class LEOperatorObject(object):
def __init__(self):
def __init__(self):
pass
def Calculate(self, Operand, DataType, SymbolTable):
def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "LE ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -120,22 +120,22 @@ class LEOperatorObject(object):
SymbolTable[rangeId1] = rangeContainer
return rangeId1
class LTOperatorObject(object):
def __init__(self):
def __init__(self):
pass
def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "LT ..."
raise BadExpression(ERR_SNYTAX % Expr)
Expr = "LT ..."
raise BadExpression(ERR_SNYTAX % Expr)
rangeId1 = str(uuid.uuid1())
rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(0, int(Operand) - 1))
SymbolTable[rangeId1] = rangeContainer
return rangeId1
return rangeId1
class GEOperatorObject(object):
def __init__(self):
def __init__(self):
pass
def Calculate(self, Operand, DataType, SymbolTable):
def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "GE ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -143,12 +143,12 @@ class GEOperatorObject(object):
rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(int(Operand), MAX_VAL_TYPE[DataType]))
SymbolTable[rangeId1] = rangeContainer
return rangeId1
return rangeId1
class GTOperatorObject(object):
def __init__(self):
def __init__(self):
pass
def Calculate(self, Operand, DataType, SymbolTable):
def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "GT ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -156,12 +156,12 @@ class GTOperatorObject(object):
rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(int(Operand) + 1, MAX_VAL_TYPE[DataType]))
SymbolTable[rangeId1] = rangeContainer
return rangeId1
return rangeId1
class EQOperatorObject(object):
def __init__(self):
def __init__(self):
pass
def Calculate(self, Operand, DataType, SymbolTable):
def Calculate(self, Operand, DataType, SymbolTable):
if isinstance(Operand, type('')) and not Operand.isalnum():
Expr = "EQ ..."
raise BadExpression(ERR_SNYTAX % Expr)
@@ -169,8 +169,8 @@ class EQOperatorObject(object):
rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(int(Operand), int(Operand)))
SymbolTable[rangeId1] = rangeContainer
return rangeId1
return rangeId1
def GetOperatorObject(Operator):
if Operator == '>':
return GTOperatorObject()
@@ -214,8 +214,8 @@ class RangeExpression(BaseExpression):
NumberDict[HexNumber] = Number
for HexNum in NumberDict:
expr = expr.replace(HexNum, NumberDict[HexNum])
rangedict = {}
rangedict = {}
for validrange in self.RangePattern.findall(expr):
start, end = validrange.split(" - ")
start = start.strip()
@@ -225,19 +225,19 @@ class RangeExpression(BaseExpression):
rangeContainer.push(RangeObject(start, end))
self.operanddict[str(rangeid)] = rangeContainer
rangedict[validrange] = str(rangeid)
for validrange in rangedict:
expr = expr.replace(validrange, rangedict[validrange])
self._Expr = expr
self._Expr = expr
return expr
def EvalRange(self, Operator, Oprand):
operatorobj = GetOperatorObject(Operator)
return operatorobj.Calculate(Oprand, self.PcdDataType, self.operanddict)
def Rangeintersection(self, Oprand1, Oprand2):
rangeContainer1 = self.operanddict[Oprand1]
rangeContainer2 = self.operanddict[Oprand2]
@@ -266,35 +266,35 @@ class RangeExpression(BaseExpression):
elif end1 >= end2:
rangeid = str(uuid.uuid1())
rangeContainer.push(RangeObject(start2, end2))
self.operanddict[rangeid] = rangeContainer
# rangeContainer.dump()
return rangeid
def Rangecollections(self, Oprand1, Oprand2):
rangeContainer1 = self.operanddict[Oprand1]
rangeContainer2 = self.operanddict[Oprand2]
rangeContainer = RangeContainer()
for rangeobj in rangeContainer2.pop():
rangeContainer.push(rangeobj)
for rangeobj in rangeContainer1.pop():
rangeContainer.push(rangeobj)
rangeid = str(uuid.uuid1())
self.operanddict[rangeid] = rangeContainer
# rangeContainer.dump()
return rangeid
def NegtiveRange(self, Oprand1):
rangeContainer1 = self.operanddict[Oprand1]
rangeids = []
for rangeobj in rangeContainer1.pop():
rangeContainer = RangeContainer()
rangeid = str(uuid.uuid1())
@@ -321,13 +321,13 @@ class RangeExpression(BaseExpression):
re = self.Rangeintersection(rangeids[0], rangeids[1])
for i in range(2, len(rangeids)):
re = self.Rangeintersection(re, rangeids[i])
rangeid2 = str(uuid.uuid1())
self.operanddict[rangeid2] = self.operanddict[re]
return rangeid2
def Eval(self, Operator, Oprand1, Oprand2 = None):
if Operator in ["!", "NOT", "not"]:
if not gGuidPattern.match(Oprand1.strip()):
raise BadExpression(ERR_STRING_EXPR % Operator)
@@ -338,7 +338,7 @@ class RangeExpression(BaseExpression):
elif Operator == 'and' :
if not gGuidPatternEnd.match(Oprand1.strip()) or not gGuidPatternEnd.match(Oprand2.strip()):
raise BadExpression(ERR_STRING_EXPR % Operator)
return self.Rangeintersection(Oprand1, Oprand2)
return self.Rangeintersection(Oprand1, Oprand2)
elif Operator == 'or':
if not gGuidPatternEnd.match(Oprand1.strip()) or not gGuidPatternEnd.match(Oprand2.strip()):
raise BadExpression(ERR_STRING_EXPR % Operator)
@@ -369,11 +369,11 @@ class RangeExpression(BaseExpression):
self._Len = len(self._Expr)
self._Token = ''
self._WarnExcept = None
# Literal token without any conversion
self._LiteralToken = ''
# store the operand object
self.operanddict = {}
# The Pcd max value depends on PcdDataType
@@ -393,9 +393,9 @@ class RangeExpression(BaseExpression):
self._Depth = Depth
self._Expr = self._Expr.strip()
self.preProcessRangeExpr(self._Expr)
# check if the expression does not need to evaluate
if RealValue and Depth == 0:
self._Token = self._Expr
@@ -407,12 +407,12 @@ class RangeExpression(BaseExpression):
Val = self._OrExpr()
RealVal = Val
RangeIdList = RealVal.split("or")
RangeList = []
for rangeid in RangeIdList:
RangeList.append(self.operanddict[rangeid.strip()])
return RangeList
# Template function to parse binary operators which have same precedence

View File

@@ -839,7 +839,7 @@ def StringToArray(String):
return "{%s,0x00}" % ",".join(C.strip() for C in String[1:-1].split(','))
else:
return "{%s}" % ",".join(C.strip() for C in String[1:-1].split(','))
else:
if len(String.split()) % 2:
return '{%s,0}' % ','.join(String.split())

View File

@@ -1,9 +1,9 @@
# # @file
#
#
# This file is used to handle the variable attributes and property information
#
#
# Copyright (c) 2015, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -12,7 +12,7 @@
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
class VariableAttributes(object):
EFI_VARIABLE_NON_VOLATILE = 0x00000001
EFI_VARIABLE_BOOTSERVICE_ACCESS = 0x00000002
@@ -24,22 +24,22 @@ class VariableAttributes(object):
"RT":EFI_VARIABLE_RUNTIME_ACCESS,
"RO":VAR_CHECK_VARIABLE_PROPERTY_READ_ONLY
}
def __init__(self):
pass
@staticmethod
def GetVarAttributes(var_attr_str):
VarAttr = 0x00000000
VarProp = 0x00000000
attr_list = var_attr_str.split(",")
for attr in attr_list:
attr = attr.strip()
if attr == 'RO':
VarProp = VariableAttributes.VAR_CHECK_VARIABLE_PROPERTY_READ_ONLY
else:
VarAttr = VarAttr | VariableAttributes.VarAttributesMap.get(attr, 0x00000000)
VarAttr = VarAttr | VariableAttributes.VarAttributesMap.get(attr, 0x00000000)
return VarAttr, VarProp
@staticmethod
def ValidateVarAttributes(var_attr_str):

View File

@@ -1,9 +1,9 @@
## @file
#
#
# This package manage the VPD PCD information file which will be generated
# by build tool's autogen.
# The VPD PCD information file will be input for third-party BPDG tool which
# is pointed by *_*_*_VPD_TOOL_GUID in conf/tools_def.txt
# is pointed by *_*_*_VPD_TOOL_GUID in conf/tools_def.txt
#
#
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
@@ -33,8 +33,8 @@ FILE_COMMENT_TEMPLATE = \
# THIS IS AUTO-GENERATED FILE BY BUILD TOOLS AND PLEASE DO NOT MAKE MODIFICATION.
#
# This file lists all VPD informations for a platform collected by build.exe.
#
# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
#
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -70,17 +70,17 @@ FILE_COMMENT_TEMPLATE = \
#
class VpdInfoFile:
_rVpdPcdLine = None
_rVpdPcdLine = None
## Constructor
def __init__(self):
## Dictionary for VPD in following format
#
# Key : PcdClassObject instance.
# Key : PcdClassObject instance.
# @see BuildClassObject.PcdClassObject
# Value : offset in different SKU such as [sku1_offset, sku2_offset]
self._VpdArray = {}
self._VpdInfo = {}
## Add a VPD PCD collected from platform's autogen when building.
#
# @param vpds The list of VPD PCD collected for a platform.
@@ -91,40 +91,40 @@ class VpdInfoFile:
def Add(self, Vpd, skuname, Offset):
if (Vpd is None):
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
if not (Offset >= 0 or Offset == "*"):
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)
if Vpd.DatumType == TAB_VOID:
if Vpd.MaxDatumSize <= 0:
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:
elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:
if not Vpd.MaxDatumSize:
Vpd.MaxDatumSize = MAX_SIZE_TYPE[Vpd.DatumType]
else:
if Vpd.MaxDatumSize <= 0:
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
if Vpd not in self._VpdArray:
#
# If there is no Vpd instance in dict, that imply this offset for a given SKU is a new one
# If there is no Vpd instance in dict, that imply this offset for a given SKU is a new one
#
self._VpdArray[Vpd] = {}
self._VpdArray[Vpd].update({skuname:Offset})
## Generate VPD PCD information into a text file
#
#
# If parameter FilePath is invalid, then assert.
# If
# If
# @param FilePath The given file path which would hold VPD information
def Write(self, FilePath):
if not (FilePath is not None or len(FilePath) != 0):
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid parameter FilePath: %s." % FilePath)
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid parameter FilePath: %s." % FilePath)
Content = FILE_COMMENT_TEMPLATE
Pcds = sorted(self._VpdArray.keys())
@@ -155,15 +155,15 @@ class VpdInfoFile:
try:
fd = open(FilePath, "r")
except:
EdkLogger.error("VpdInfoFile",
BuildToolError.FILE_OPEN_FAILURE,
EdkLogger.error("VpdInfoFile",
BuildToolError.FILE_OPEN_FAILURE,
"Fail to open file %s for written." % FilePath)
Lines = fd.readlines()
for Line in Lines:
Line = Line.strip()
if len(Line) == 0 or Line.startswith("#"):
continue
#
# the line must follow output format defined in BPDG spec.
#
@@ -173,9 +173,9 @@ class VpdInfoFile:
TokenSpaceName, PcdTokenName = PcdName.split(".")
except:
EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Fail to parse VPD information file %s" % FilePath)
Found = False
if (TokenSpaceName, PcdTokenName) not in self._VpdInfo:
self._VpdInfo[(TokenSpaceName, PcdTokenName)] = []
self._VpdInfo[(TokenSpaceName, PcdTokenName)].append((SkuId, Offset, Value))
@@ -188,62 +188,62 @@ class VpdInfoFile:
if VpdObject.TokenSpaceGuidCName == TokenSpaceName and VpdObjectTokenCName == PcdTokenName.strip() and sku == SkuId:
if self._VpdArray[VpdObject][sku] == "*":
if Offset == "*":
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "The offset of %s has not been fixed up by third-party BPDG tool." % PcdName)
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "The offset of %s has not been fixed up by third-party BPDG tool." % PcdName)
self._VpdArray[VpdObject][sku] = Offset
Found = True
if not Found:
EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Can not find PCD defined in VPD guid file.")
## Get count of VPD PCD collected from platform's autogen when building.
#
# @return The integer count value
# @return The integer count value
def GetCount(self):
Count = 0
for OffsetList in self._VpdArray.values():
Count += len(OffsetList)
return Count
## Get an offset value for a given VPD PCD
#
# Because BPDG only support one Sku, so only return offset for SKU default.
# Because BPDG only support one Sku, so only return offset for SKU default.
#
# @param vpd A given VPD PCD
# @param vpd A given VPD PCD
def GetOffset(self, vpd):
if vpd not in self._VpdArray:
return None
if len(self._VpdArray[vpd]) == 0:
return None
return self._VpdArray[vpd]
def GetVpdInfo(self, arg):
(PcdTokenName, TokenSpaceName) = arg
return self._VpdInfo.get((TokenSpaceName, PcdTokenName))
## Call external BPDG tool to process VPD file
#
#
# @param ToolPath The string path name for BPDG tool
# @param VpdFileName The string path name for VPD information guid.txt
#
#
def CallExtenalBPDGTool(ToolPath, VpdFileName):
assert ToolPath is not None, "Invalid parameter ToolPath"
assert VpdFileName is not None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"
OutputDir = os.path.dirname(VpdFileName)
FileName = os.path.basename(VpdFileName)
BaseName, ext = os.path.splitext(FileName)
OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)
OutputBinFileName = os.path.join(OutputDir, "%s.bin" % BaseName)
try:
PopenObject = subprocess.Popen(' '.join([ToolPath,
'-o', OutputBinFileName,
'-o', OutputBinFileName,
'-m', OutputMapFileName,
'-q',
'-f',
VpdFileName]),
stdout=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr= subprocess.PIPE,
shell=True)
except Exception as X:
@@ -252,11 +252,11 @@ def CallExtenalBPDGTool(ToolPath, VpdFileName):
print(out)
while PopenObject.returncode is None :
PopenObject.wait()
if PopenObject.returncode != 0:
if PopenObject.returncode != 0:
EdkLogger.debug(EdkLogger.DEBUG_1, "Fail to call BPDG tool", str(error))
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, "Fail to execute BPDG tool with exit code: %d, the error message is: \n %s" % \
(PopenObject.returncode, str(error)))
return PopenObject.returncode

View File

@@ -14,7 +14,7 @@
## SkuInfoClass
#
# This class defined SkuInfo item used in Module/Platform/Package files
#
#
# @param object: Inherited from object class
# @param SkuIdName: Input value for SkuIdName, default is ''
# @param SkuId: Input value for SkuId, default is ''
@@ -35,11 +35,11 @@
# @var DefaultValue: To store value for DefaultValue
#
class SkuInfoClass(object):
def __init__(self, SkuIdName = '', SkuId = '', VariableName = '', VariableGuid = '', VariableOffset = '',
def __init__(self, SkuIdName = '', SkuId = '', VariableName = '', VariableGuid = '', VariableOffset = '',
HiiDefaultValue = '', VpdOffset = '', DefaultValue = '', VariableGuidValue = '', VariableAttribute = '', DefaultStore = None):
self.SkuIdName = SkuIdName
self.SkuId = SkuId
#
# Used by Hii
#
@@ -52,17 +52,17 @@ class SkuInfoClass(object):
self.HiiDefaultValue = HiiDefaultValue
self.VariableAttribute = VariableAttribute
self.DefaultStoreDict = DefaultStore
#
# Used by Vpd
#
self.VpdOffset = VpdOffset
#
# Used by Default
#
self.DefaultValue = DefaultValue
## Convert the class to a string
#
# Convert each member of the class to string

View File

@@ -1,7 +1,7 @@
## @file
# classes represent data in FDF
#
# Copyright (c) 2007 - 2013, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -83,7 +83,7 @@ class RegionClassObject:
## FFS data in FDF
#
#
#
class FfsClassObject:
## The constructor
#
@@ -98,7 +98,7 @@ class FfsClassObject:
## FILE statement data in FDF
#
#
#
class FileStatementClassObject (FfsClassObject) :
## The constructor
#
@@ -149,7 +149,7 @@ class AprioriSectionClassObject:
## section data in FDF
#
#
#
class SectionClassObject:
## The constructor
#
@@ -157,10 +157,10 @@ class SectionClassObject:
#
def __init__(self):
self.Alignment = None
## Depex expression section in FDF
#
#
#
class DepexSectionClassObject (SectionClassObject):
## The constructor
#
@@ -186,7 +186,7 @@ class CompressSectionClassObject (SectionClassObject) :
## Data section data in FDF
#
#
#
class DataSectionClassObject (SectionClassObject):
## The constructor
#
@@ -220,7 +220,7 @@ class EfiSectionClassObject (SectionClassObject):
## FV image section data in FDF
#
#
#
class FvImageSectionClassObject (SectionClassObject):
## The constructor
#
@@ -237,7 +237,7 @@ class FvImageSectionClassObject (SectionClassObject):
## GUIDed section data in FDF
#
#
#
class GuidSectionClassObject (SectionClassObject) :
## The constructor
#
@@ -270,7 +270,7 @@ class UiSectionClassObject (SectionClassObject):
## Version section data in FDF
#
#
#
class VerSectionClassObject (SectionClassObject):
## The constructor
#
@@ -305,7 +305,7 @@ class RuleClassObject :
## Complex rule data in FDF
#
#
#
class RuleComplexFileClassObject(RuleClassObject) :
## The constructor
#
@@ -343,7 +343,7 @@ class RuleFileExtensionClassObject(RuleClassObject):
## Capsule data in FDF
#
#
#
class CapsuleClassObject :
## The constructor
#
@@ -380,7 +380,7 @@ class VtfClassObject :
## VTF component data in FDF
#
#
#
class ComponentStatementClassObject :
## The constructor
#
@@ -396,7 +396,7 @@ class ComponentStatementClassObject :
self.CompSym = None
self.CompSize = None
self.FilePos = None
## OptionROM data in FDF
#
#
@@ -408,4 +408,4 @@ class OptionRomClassObject:
def __init__(self):
self.DriverName = None
self.FfsList = []

View File

@@ -2,7 +2,7 @@
from antlr3 import *
from antlr3.compat import set, frozenset
## @file
# The file defines the Lexer for C source files.
#
@@ -10,7 +10,7 @@ from antlr3.compat import set, frozenset
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2010, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
@@ -4341,7 +4341,7 @@ class CLexer(Lexer):
u"\12\uffff"
)
DFA25_transition = [
DFA.unpack(u"\1\2\1\uffff\12\1"),
DFA.unpack(u"\1\3\1\uffff\12\1\12\uffff\1\5\1\4\1\5\35\uffff\1\5"
@@ -4479,7 +4479,7 @@ class CLexer(Lexer):
u"\u0192\uffff"
)
DFA35_transition = [
DFA.unpack(u"\6\73\2\70\1\73\2\70\22\73\1\70\1\50\1\65\1\72\1\63"
u"\1\45\1\46\1\64\1\34\1\35\1\40\1\42\1\3\1\43\1\41\1\44\1\66\11"
@@ -4943,5 +4943,5 @@ class CLexer(Lexer):
# class definition for DFA #35
DFA35 = DFA

File diff suppressed because it is too large Load Diff

View File

@@ -563,17 +563,17 @@ class Check(object):
op = open(FullName).readlines()
FileLinesList = op
LineNo = 0
CurrentSection = MODEL_UNKNOWN
CurrentSection = MODEL_UNKNOWN
HeaderSectionLines = []
HeaderCommentStart = False
HeaderCommentStart = False
HeaderCommentEnd = False
for Line in FileLinesList:
LineNo = LineNo + 1
Line = Line.strip()
if (LineNo < len(FileLinesList) - 1):
NextLine = FileLinesList[LineNo].strip()
#
# blank line
#
@@ -600,8 +600,8 @@ class Check(object):
#
HeaderSectionLines.append((Line, LineNo))
HeaderCommentStart = True
continue
continue
#
# Collect Header content.
#
@@ -635,7 +635,7 @@ class Check(object):
if EccGlobalData.gConfig.HeaderCheckFileCommentEnd == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
def DoxygenCheckFunctionHeader(self):
@@ -827,7 +827,7 @@ class Check(object):
for FilePath in FilePathList:
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_NAME_DUPLICATE, Record[1]):
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_NAME_DUPLICATE, OtherMsg="The Library Class [%s] is duplicated in '%s' line %s and line %s." % (Record[1], FilePath, Record[3], Record[4]), BelongsToTable='Dsc', BelongsToItem=Record[0])
# Check the header file in Include\Library directory whether be defined in the package DEC file.
def MetaDataFileCheckLibraryDefinedInDec(self):
if EccGlobalData.gConfig.MetaDataFileCheckLibraryDefinedInDec == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
@@ -842,9 +842,9 @@ class Check(object):
if not LibraryDec:
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_NOT_DEFINED, LibraryInInf):
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_NOT_DEFINED, \
OtherMsg="The Library Class [%s] in %s line is not defined in the associated package file." % (LibraryInInf, Line),
OtherMsg="The Library Class [%s] in %s line is not defined in the associated package file." % (LibraryInInf, Line),
BelongsToTable='Inf', BelongsToItem=ID)
# Check whether an Inf file is specified in the FDF file, but not in the Dsc file, then the Inf file must be for a Binary module only
def MetaDataFileCheckBinaryInfInFdf(self):
if EccGlobalData.gConfig.MetaDataFileCheckBinaryInfInFdf == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
@@ -1244,7 +1244,7 @@ class Check(object):
group by A.ID
""" % (Table.Table, Table.Table, Model, Model)
RecordSet = Table.Exec(SqlCommand)
for Record in RecordSet:
for Record in RecordSet:
if not EccGlobalData.gException.IsException(ErrorID, Record[2]):
EccGlobalData.gDb.TblReport.Insert(ErrorID, OtherMsg="The %s value [%s] is used more than one time" % (Name.upper(), Record[2]), BelongsToTable=Table.Table, BelongsToItem=Record[0])

View File

@@ -1,7 +1,7 @@
## @file
# fragments of source file
#
# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -161,5 +161,5 @@ class FunctionCalling:
self.FuncName = Name
self.ParamList = Param
self.StartPos = Begin
self.EndPos = End
self.EndPos = End

View File

@@ -47,7 +47,7 @@ from ParserWarning import Warning
T_CHAR_BACKSLASH, T_CHAR_DOUBLE_QUOTE, T_CHAR_SINGLE_QUOTE, T_CHAR_STAR, T_CHAR_HASH) = \
(' ', '\0', '\r', '\t', '\n', '/', '\\', '\"', '\'', '*', '#')
SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')
SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')
(T_COMMENT_TWO_SLASH, T_COMMENT_SLASH_STAR) = (0, 1)
@@ -59,7 +59,7 @@ SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')
#
# GetNext*** procedures mean these procedures will get next token first, then make judgement.
# Get*** procedures mean these procedures will make judgement on current token only.
#
#
class CodeFragmentCollector:
## The constructor
#
@@ -89,7 +89,7 @@ class CodeFragmentCollector:
SizeOfLastLine = NumberOfLines
if NumberOfLines > 0:
SizeOfLastLine = len(self.Profile.FileLinesList[-1])
if self.CurrentLineNumber == NumberOfLines and self.CurrentOffsetWithinLine >= SizeOfLastLine - 1:
return True
elif self.CurrentLineNumber > NumberOfLines:
@@ -111,7 +111,7 @@ class CodeFragmentCollector:
return True
else:
return False
## Rewind() method
#
# Reset file data buffer to the initial state
@@ -121,7 +121,7 @@ class CodeFragmentCollector:
def Rewind(self):
self.CurrentLineNumber = 1
self.CurrentOffsetWithinLine = 0
## __UndoOneChar() method
#
# Go back one char in the file buffer
@@ -129,9 +129,9 @@ class CodeFragmentCollector:
# @param self The object pointer
# @retval True Successfully go back one char
# @retval False Not able to go back one char as file beginning reached
#
#
def __UndoOneChar(self):
if self.CurrentLineNumber == 1 and self.CurrentOffsetWithinLine == 0:
return False
elif self.CurrentOffsetWithinLine == 0:
@@ -140,13 +140,13 @@ class CodeFragmentCollector:
else:
self.CurrentOffsetWithinLine -= 1
return True
## __GetOneChar() method
#
# Move forward one char in the file buffer
#
# @param self The object pointer
#
#
def __GetOneChar(self):
if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
self.CurrentLineNumber += 1
@@ -160,13 +160,13 @@ class CodeFragmentCollector:
#
# @param self The object pointer
# @retval Char Current char
#
#
def __CurrentChar(self):
CurrentChar = self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine]
# if CurrentChar > 255:
# raise Warning("Non-Ascii char found At Line %d, offset %d" % (self.CurrentLineNumber, self.CurrentOffsetWithinLine), self.FileName, self.CurrentLineNumber)
return CurrentChar
## __NextChar() method
#
# Get the one char pass the char pointed to by the file buffer pointer
@@ -179,7 +179,7 @@ class CodeFragmentCollector:
return self.Profile.FileLinesList[self.CurrentLineNumber][0]
else:
return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine + 1]
## __SetCurrentCharValue() method
#
# Modify the value of current char
@@ -189,7 +189,7 @@ class CodeFragmentCollector:
#
def __SetCurrentCharValue(self, Value):
self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine] = Value
## __SetCharValue() method
#
# Modify the value of current char
@@ -199,7 +199,7 @@ class CodeFragmentCollector:
#
def __SetCharValue(self, Line, Offset, Value):
self.Profile.FileLinesList[Line - 1][Offset] = Value
## __CurrentLine() method
#
# Get the list that contains current line contents
@@ -209,7 +209,7 @@ class CodeFragmentCollector:
#
def __CurrentLine(self):
return self.Profile.FileLinesList[self.CurrentLineNumber - 1]
## __InsertComma() method
#
# Insert ',' to replace PP
@@ -218,24 +218,24 @@ class CodeFragmentCollector:
# @retval List current line contents
#
def __InsertComma(self, Line):
if self.Profile.FileLinesList[Line - 1][0] != T_CHAR_HASH:
BeforeHashPart = str(self.Profile.FileLinesList[Line - 1]).split(T_CHAR_HASH)[0]
if BeforeHashPart.rstrip().endswith(T_CHAR_COMMA) or BeforeHashPart.rstrip().endswith(';'):
return
if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(','):
return
if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(';'):
return
if str(self.Profile.FileLinesList[Line]).lstrip().startswith(',') or str(self.Profile.FileLinesList[Line]).lstrip().startswith(';'):
return
self.Profile.FileLinesList[Line - 1].insert(self.CurrentOffsetWithinLine, ',')
## PreprocessFile() method
#
# Preprocess file contents, replace comments with spaces.
@@ -244,7 +244,7 @@ class CodeFragmentCollector:
# !include statement should be expanded at the same FileLinesList[CurrentLineNumber - 1]
#
# @param self The object pointer
#
#
def PreprocessFile(self):
self.Rewind()
@@ -256,14 +256,14 @@ class CodeFragmentCollector:
PPDirectiveObj = None
# HashComment in quoted string " " is ignored.
InString = False
InCharLiteral = False
InCharLiteral = False
self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesListFromFile]
while not self.__EndOfFile():
if not InComment and self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE:
InString = not InString
if not InComment and self.__CurrentChar() == T_CHAR_SINGLE_QUOTE:
InCharLiteral = not InCharLiteral
# meet new line, then no longer in a comment for // and '#'
@@ -274,9 +274,9 @@ class CodeFragmentCollector:
PPExtend = True
else:
PPExtend = False
EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
if InComment and DoubleSlashComment:
InComment = False
DoubleSlashComment = False
@@ -291,17 +291,17 @@ class CodeFragmentCollector:
PPDirectiveObj.EndPos = EndLinePos
FileProfile.PPDirectiveList.append(PPDirectiveObj)
PPDirectiveObj = None
if InString or InCharLiteral:
CurrentLine = "".join(self.__CurrentLine())
if CurrentLine.rstrip(T_CHAR_LF).rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
SlashIndex = CurrentLine.rindex(T_CHAR_BACKSLASH)
self.__SetCharValue(self.CurrentLineNumber, SlashIndex, T_CHAR_SPACE)
if InComment and not DoubleSlashComment and not HashComment:
CommentObj.Content += T_CHAR_LF
self.CurrentLineNumber += 1
self.CurrentOffsetWithinLine = 0
self.CurrentOffsetWithinLine = 0
# check for */ comment end
elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
CommentObj.Content += self.__CurrentChar()
@@ -315,7 +315,7 @@ class CodeFragmentCollector:
self.__GetOneChar()
InComment = False
# set comments to spaces
elif InComment:
elif InComment:
if HashComment:
# // follows hash PP directive
if self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
@@ -341,7 +341,7 @@ class CodeFragmentCollector:
# check for '#' comment
elif self.__CurrentChar() == T_CHAR_HASH and not InString and not InCharLiteral:
InComment = True
HashComment = True
HashComment = True
PPDirectiveObj = PP_Directive('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None)
# check for /* comment start
elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
@@ -355,9 +355,9 @@ class CodeFragmentCollector:
InComment = True
else:
self.__GetOneChar()
EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
if InComment and DoubleSlashComment:
CommentObj.EndPos = EndLinePos
FileProfile.CommentList.append(CommentObj)
@@ -378,14 +378,14 @@ class CodeFragmentCollector:
PPDirectiveObj = None
# HashComment in quoted string " " is ignored.
InString = False
InCharLiteral = False
InCharLiteral = False
self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesListFromFile]
while not self.__EndOfFile():
if not InComment and self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE:
InString = not InString
if not InComment and self.__CurrentChar() == T_CHAR_SINGLE_QUOTE:
InCharLiteral = not InCharLiteral
# meet new line, then no longer in a comment for // and '#'
@@ -396,9 +396,9 @@ class CodeFragmentCollector:
PPExtend = True
else:
PPExtend = False
EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
if InComment and DoubleSlashComment:
InComment = False
DoubleSlashComment = False
@@ -413,17 +413,17 @@ class CodeFragmentCollector:
PPDirectiveObj.EndPos = EndLinePos
FileProfile.PPDirectiveList.append(PPDirectiveObj)
PPDirectiveObj = None
if InString or InCharLiteral:
CurrentLine = "".join(self.__CurrentLine())
if CurrentLine.rstrip(T_CHAR_LF).rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
SlashIndex = CurrentLine.rindex(T_CHAR_BACKSLASH)
self.__SetCharValue(self.CurrentLineNumber, SlashIndex, T_CHAR_SPACE)
if InComment and not DoubleSlashComment and not HashComment:
CommentObj.Content += T_CHAR_LF
self.CurrentLineNumber += 1
self.CurrentOffsetWithinLine = 0
self.CurrentOffsetWithinLine = 0
# check for */ comment end
elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
CommentObj.Content += self.__CurrentChar()
@@ -437,7 +437,7 @@ class CodeFragmentCollector:
self.__GetOneChar()
InComment = False
# set comments to spaces
elif InComment:
elif InComment:
if HashComment:
# // follows hash PP directive
if self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
@@ -463,7 +463,7 @@ class CodeFragmentCollector:
# check for '#' comment
elif self.__CurrentChar() == T_CHAR_HASH and not InString and not InCharLiteral:
InComment = True
HashComment = True
HashComment = True
PPDirectiveObj = PP_Directive('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None)
# check for /* comment start
elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
@@ -479,7 +479,7 @@ class CodeFragmentCollector:
self.__GetOneChar()
EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
if InComment and DoubleSlashComment:
CommentObj.EndPos = EndLinePos
FileProfile.CommentList.append(CommentObj)
@@ -507,7 +507,7 @@ class CodeFragmentCollector:
tStream = antlr3.CommonTokenStream(lexer)
parser = CParser(tStream)
parser.translation_unit()
def ParseFileWithClearedPPDirective(self):
self.PreprocessFileWithClear()
# restore from ListOfList to ListOfString
@@ -520,7 +520,7 @@ class CodeFragmentCollector:
tStream = antlr3.CommonTokenStream(lexer)
parser = CParser(tStream)
parser.translation_unit()
def CleanFileProfileBuffer(self):
FileProfile.CommentList = []
FileProfile.PPDirectiveList = []
@@ -531,61 +531,61 @@ class CodeFragmentCollector:
FileProfile.StructUnionDefinitionList = []
FileProfile.TypedefDefinitionList = []
FileProfile.FunctionCallingList = []
def PrintFragments(self):
print('################# ' + self.FileName + '#####################')
print('/****************************************/')
print('/*************** COMMENTS ***************/')
print('/****************************************/')
for comment in FileProfile.CommentList:
print(str(comment.StartPos) + comment.Content)
print('/****************************************/')
print('/********* PREPROCESS DIRECTIVES ********/')
print('/****************************************/')
for pp in FileProfile.PPDirectiveList:
print(str(pp.StartPos) + pp.Content)
print('/****************************************/')
print('/********* VARIABLE DECLARATIONS ********/')
print('/****************************************/')
for var in FileProfile.VariableDeclarationList:
print(str(var.StartPos) + var.Modifier + ' '+ var.Declarator)
print('/****************************************/')
print('/********* FUNCTION DEFINITIONS *********/')
print('/****************************************/')
for func in FileProfile.FunctionDefinitionList:
print(str(func.StartPos) + func.Modifier + ' '+ func.Declarator + ' ' + str(func.NamePos))
print('/****************************************/')
print('/************ ENUMERATIONS **************/')
print('/****************************************/')
for enum in FileProfile.EnumerationDefinitionList:
print(str(enum.StartPos) + enum.Content)
print('/****************************************/')
print('/*********** STRUCTS/UNIONS *************/')
print('/****************************************/')
for su in FileProfile.StructUnionDefinitionList:
print(str(su.StartPos) + su.Content)
print('/****************************************/')
print('/********* PREDICATE EXPRESSIONS ********/')
print('/****************************************/')
for predexp in FileProfile.PredicateExpressionList:
print(str(predexp.StartPos) + predexp.Content)
print('/****************************************/')
print('/************** TYPEDEFS ****************/')
print('/****************************************/')
for typedef in FileProfile.TypedefDefinitionList:
print(str(typedef.StartPos) + typedef.ToType)
if __name__ == "__main__":
collector = CodeFragmentCollector(sys.argv[1])
collector.PreprocessFile()
print("For Test.")

View File

@@ -215,7 +215,7 @@ class Configuration(object):
self.HeaderCheckCFileCommentReferenceFormat = 1
# Check whether C File header Comment have the License immediately after the ""Copyright"" line
self.HeaderCheckCFileCommentLicenseFormat = 1
## C Function Layout Checking
self.CFunctionLayoutCheckAll = 0
@@ -352,7 +352,7 @@ class Configuration(object):
self.MetaDataFileCheckModuleFilePpiFormat = 1
# Check Pcd Format in INF files
self.MetaDataFileCheckModuleFilePcdFormat = 1
# Check UNI file
self.UniCheckAll = 0
# Check INF or DEC file whether defined the localized information in the associated UNI file.
@@ -374,16 +374,16 @@ class Configuration(object):
# The directory listed here will not be parsed, split with ','
self.SkipDirList = []
# The file listed here will not be parsed, split with ','
self.SkipFileList = []
# A list for binary file ext name
self.BinaryExtList = []
# A list for only scanned folders
self.ScanOnlyDirList = []
# A list for Copyright format
self.Copyright = []

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to be the main entrance of ECC tool
#
# Copyright (c) 2009 - 2016, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -51,7 +51,7 @@ class Ecc(object):
# Version and Copyright
self.VersionNumber = ("1.0" + " Build " + gBUILD_VERSION)
self.Version = "%prog Version " + self.VersionNumber
self.Copyright = "Copyright (c) 2009 - 2016, Intel Corporation All rights reserved."
self.Copyright = "Copyright (c) 2009 - 2018, Intel Corporation All rights reserved."
self.InitDefaultConfigIni()
self.OutputFile = 'output.txt'
@@ -66,17 +66,17 @@ class Ecc(object):
# Parse the options and args
self.ParseOption()
EdkLogger.info(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n")
#
# Check EFI_SOURCE (Edk build convention). EDK_SOURCE will always point to ECP
#
WorkspaceDir = os.path.normcase(os.path.normpath(os.environ["WORKSPACE"]))
os.environ["WORKSPACE"] = WorkspaceDir
# set multiple workspace
PackagesPath = os.getenv("PACKAGES_PATH")
mws.setWs(WorkspaceDir, PackagesPath)
if "ECP_SOURCE" not in os.environ:
os.environ["ECP_SOURCE"] = mws.join(WorkspaceDir, GlobalData.gEdkCompatibilityPkg)
if "EFI_SOURCE" not in os.environ:
@@ -90,11 +90,11 @@ class Ecc(object):
EfiSourceDir = os.path.normcase(os.path.normpath(os.environ["EFI_SOURCE"]))
EdkSourceDir = os.path.normcase(os.path.normpath(os.environ["EDK_SOURCE"]))
EcpSourceDir = os.path.normcase(os.path.normpath(os.environ["ECP_SOURCE"]))
os.environ["EFI_SOURCE"] = EfiSourceDir
os.environ["EDK_SOURCE"] = EdkSourceDir
os.environ["ECP_SOURCE"] = EcpSourceDir
GlobalData.gWorkspace = WorkspaceDir
GlobalData.gEfiSource = EfiSourceDir
GlobalData.gEdkSource = EdkSourceDir
@@ -104,7 +104,7 @@ class Ecc(object):
GlobalData.gGlobalDefines["EFI_SOURCE"] = EfiSourceDir
GlobalData.gGlobalDefines["EDK_SOURCE"] = EdkSourceDir
GlobalData.gGlobalDefines["ECP_SOURCE"] = EcpSourceDir
EdkLogger.info("Loading ECC configuration ... done")
# Generate checkpoints list
EccGlobalData.gConfig = Configuration(self.ConfigFile)
@@ -120,11 +120,11 @@ class Ecc(object):
# Get files real name in workspace dir
#
GlobalData.gAllFiles = DirCache(GlobalData.gWorkspace)
# Build ECC database
# self.BuildDatabase()
self.DetectOnlyScanDirs()
# Start to check
self.Check()
@@ -160,8 +160,8 @@ class Ecc(object):
EdkLogger.error("ECC", BuildToolError.OPTION_VALUE_INVALID, ExtraData="Use -f option need to fill specific folders in config.ini file")
else:
self.BuildDatabase()
## BuildDatabase
#
# Build the database for target
@@ -172,7 +172,7 @@ class Ecc(object):
EccGlobalData.gDb.TblReport.Create()
# Build database
if self.IsInit:
if self.IsInit:
if self.ScanMetaData:
EdkLogger.quiet("Building database for Meta Data File ...")
self.BuildMetaDataFileDatabase(SpeciDirs)
@@ -198,7 +198,7 @@ class Ecc(object):
if SpecificDirs is None:
ScanFolders.append(EccGlobalData.gTarget)
else:
for specificDir in SpecificDirs:
for specificDir in SpecificDirs:
ScanFolders.append(os.path.join(EccGlobalData.gTarget, specificDir))
EdkLogger.quiet("Building database for meta data files ...")
Op = open(EccGlobalData.gConfig.MetaDataFileCheckPathOfGenerateFileList, 'w+')
@@ -219,7 +219,7 @@ class Ecc(object):
# symlinks to directories are treated as directories
Dirs.remove(Dir)
Dirs.append(Dirname)
for File in Files:
if len(File) > 4 and File[-4:].upper() == ".DEC":
Filename = os.path.normpath(os.path.join(Root, File))

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to save global datas used by ECC tool
#
# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -24,4 +24,4 @@ gIdentifierTableList = []
gCFileList = []
gHFileList = []
gUFileList = []
gException = None
gException = None

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to parse exception items found by ECC tool
#
# Copyright (c) 2009 - 2017, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -24,12 +24,12 @@ class ExceptionXml(object):
self.KeyWord = ''
self.ErrorID = ''
self.FilePath = ''
def FromXml(self, Item, Key):
self.KeyWord = XmlElement(Item, '%s/KeyWord' % Key)
self.ErrorID = XmlElement(Item, '%s/ErrorID' % Key)
self.FilePath = os.path.normpath(XmlElement(Item, '%s/FilePath' % Key))
def __str__(self):
return 'ErrorID = %s KeyWord = %s FilePath = %s' %(self.ErrorID, self.KeyWord, self.FilePath)
@@ -37,22 +37,22 @@ class ExceptionXml(object):
class ExceptionListXml(object):
def __init__(self):
self.List = []
def FromXmlFile(self, FilePath):
XmlContent = XmlParseFile(FilePath)
for Item in XmlList(XmlContent, '/ExceptionList/Exception'):
Exp = ExceptionXml()
Exp.FromXml(Item, 'Exception')
self.List.append(Exp)
def ToList(self):
RtnList = []
for Item in self.List:
#RtnList.append((Item.ErrorID, Item.KeyWord, Item.FilePath))
RtnList.append((Item.ErrorID, Item.KeyWord))
return RtnList
def __str__(self):
RtnStr = ''
if self.List:
@@ -71,7 +71,7 @@ class ExceptionCheck(object):
if FilePath and os.path.isfile(FilePath):
self.ExceptionListXml.FromXmlFile(FilePath)
self.ExceptionList = self.ExceptionListXml.ToList()
def IsException(self, ErrorID, KeyWord, FileID=-1):
if (str(ErrorID), KeyWord.replace('\r\n', '\n')) in self.ExceptionList:
return True

View File

@@ -1,7 +1,7 @@
## @file
# fragments of source file
#
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -36,7 +36,7 @@ FunctionCallingList = []
# May raise Exception when opening file.
#
class FileProfile :
## The constructor
#
# @param self The object pointer
@@ -54,5 +54,5 @@ class FileProfile :
except IOError:
raise Warning("Error when opening file %s" % FileName)

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to define common parser functions for meta-data
#
# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -87,16 +87,16 @@ def GetTableList(FileModelList, Table, Db):
# @param FileName: FileName of the comment
#
def ParseHeaderCommentSection(CommentList, FileName = None):
Abstract = ''
Description = ''
Copyright = ''
License = ''
EndOfLine = "\n"
STR_HEADER_COMMENT_START = "@file"
#
# used to indicate the state of processing header comment section of dec,
# used to indicate the state of processing header comment section of dec,
# inf files
#
HEADER_COMMENT_NOT_STARTED = -1
@@ -117,11 +117,11 @@ def ParseHeaderCommentSection(CommentList, FileName = None):
if _IsCopyrightLine(Line):
Last = Index
break
for Item in CommentList:
Line = Item[0]
LineNo = Item[1]
if not Line.startswith('#') and Line:
SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName
ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
@@ -131,14 +131,14 @@ def ParseHeaderCommentSection(CommentList, FileName = None):
Comment = CleanString2(Line)[1]
Comment = Comment.strip()
#
# if there are blank lines between License or Description, keep them as they would be
# if there are blank lines between License or Description, keep them as they would be
# indication of different block; or in the position that Abstract should be, also keep it
# as it indicates that no abstract
#
if not Comment and HeaderCommentStage not in [HEADER_COMMENT_LICENSE, \
HEADER_COMMENT_DESCRIPTION, HEADER_COMMENT_ABSTRACT]:
continue
if HeaderCommentStage == HEADER_COMMENT_NOT_STARTED:
if Comment.startswith(STR_HEADER_COMMENT_START):
HeaderCommentStage = HEADER_COMMENT_ABSTRACT
@@ -152,39 +152,39 @@ def ParseHeaderCommentSection(CommentList, FileName = None):
if not Comment:
Abstract = ''
HeaderCommentStage = HEADER_COMMENT_DESCRIPTION
elif _IsCopyrightLine(Comment):
elif _IsCopyrightLine(Comment):
Copyright += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
else:
else:
Abstract += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_DESCRIPTION
elif HeaderCommentStage == HEADER_COMMENT_DESCRIPTION:
#
# in case there is no description
#
if _IsCopyrightLine(Comment):
#
if _IsCopyrightLine(Comment):
Copyright += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
else:
Description += Comment + EndOfLine
Description += Comment + EndOfLine
elif HeaderCommentStage == HEADER_COMMENT_COPYRIGHT:
if _IsCopyrightLine(Comment):
if _IsCopyrightLine(Comment):
Copyright += Comment + EndOfLine
else:
#
# Contents after copyright line are license, those non-copyright lines in between
# copyright line will be discarded
# copyright line will be discarded
#
if LineNo > Last:
if License:
License += EndOfLine
License += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_LICENSE
HeaderCommentStage = HEADER_COMMENT_LICENSE
else:
if not Comment and not License:
continue
License += Comment + EndOfLine
if not Copyright.strip():
SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName
ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
@@ -198,19 +198,19 @@ def ParseHeaderCommentSection(CommentList, FileName = None):
for Result in ResultSet:
Msg = 'Header comment section must have license information'
EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
if not Abstract.strip() or Abstract.find('Component description file') > -1:
SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName
ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
for Result in ResultSet:
Msg = 'Header comment section must have Abstract information.'
EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
return Abstract.strip(), Description.strip(), Copyright.strip(), License.strip()
## _IsCopyrightLine
# check whether current line is copyright line, the criteria is whether there is case insensitive keyword "Copyright"
# followed by zero or more white space characters followed by a "(" character
# check whether current line is copyright line, the criteria is whether there is case insensitive keyword "Copyright"
# followed by zero or more white space characters followed by a "(" character
#
# @param LineContent: the line need to be checked
# @return: True if current line is copyright line, False else
@@ -218,11 +218,11 @@ def ParseHeaderCommentSection(CommentList, FileName = None):
def _IsCopyrightLine (LineContent):
LineContent = LineContent.upper()
Result = False
ReIsCopyrightRe = re.compile(r"""(^|\s)COPYRIGHT *\(""", re.DOTALL)
if ReIsCopyrightRe.search(LineContent):
Result = True
return Result
@@ -232,7 +232,7 @@ def _IsCopyrightLine (LineContent):
# Remove spaces
#
# @param Line: The string to be cleaned
# @param CommentCharacter: Comment char, used to ignore comment content,
# @param CommentCharacter: Comment char, used to ignore comment content,
# default is DataType.TAB_COMMENT_SPLIT
#
def CleanString2(Line, CommentCharacter='#', AllowCppStyleComment=False):

View File

@@ -92,7 +92,7 @@ def ParseMacro(Parser):
elif (Name in self._FileLocalMacros) and (self._FileLocalMacros[Name] != Value):
EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL defined a macro with the same name and different value as one defined by 'DEFINE'",
ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
self._ValueList = [Type, Name, Value]
return MacroParser
@@ -334,7 +334,7 @@ class MetaFileParser(object):
self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
Name, Value = self._ValueList[1], self._ValueList[2]
# Sometimes, we need to make differences between EDK and EDK2 modules
# Sometimes, we need to make differences between EDK and EDK2 modules
if Name == 'INF_VERSION':
try:
self._Version = int(Value, 0)
@@ -354,7 +354,7 @@ class MetaFileParser(object):
UniFile = os.path.join(os.path.dirname(self.MetaFile), Value)
if os.path.exists(UniFile):
self._UniObj = UniParser(UniFile, IsExtraUni=False, IsModuleUni=False)
if isinstance(self, InfParser) and self._Version < 0x00010005:
# EDK module allows using defines as macros
self._FileLocalMacros[Name] = Value
@@ -390,7 +390,7 @@ class MetaFileParser(object):
return Macros
## Get section Macros that are applicable to current line, which may come from other sections
## Get section Macros that are applicable to current line, which may come from other sections
## that share the same name while scope is wider
def _GetApplicableSectionMacro(self):
Macros = {}
@@ -473,7 +473,7 @@ class InfParser(MetaFileParser):
self.FileID = FileID
else:
self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_INF)
# parse the file line by line
IsFindBlockComment = False
@@ -591,7 +591,7 @@ class InfParser(MetaFileParser):
)
Usage = ''
if IsFindBlockComment:
EdkLogger.error("Parser", FORMAT_INVALID, "Open block comments (starting with /*) are expected to end with */",
EdkLogger.error("Parser", FORMAT_INVALID, "Open block comments (starting with /*) are expected to end with */",
File=self.MetaFile)
self._Done()
@@ -818,7 +818,7 @@ class DscParser(MetaFileParser):
# the owner item
#
self._IdMapping = {-1:-1}
self.TblFile = EccGlobalData.gDb.TblFile
self.FileID = -1
@@ -838,8 +838,8 @@ class DscParser(MetaFileParser):
self.FileID = FileID
else:
self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DSC)
for Index in range(0, len(Content)):
Line = CleanString(Content[Index])
# skip empty line
@@ -850,7 +850,7 @@ class DscParser(MetaFileParser):
self._LineIndex = Index
if self._InSubsection and self._Owner[-1] == -1:
self._Owner.append(self._LastItem)
# section header
if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
self._SectionType = MODEL_META_DATA_SECTION_HEADER
@@ -960,7 +960,7 @@ class DscParser(MetaFileParser):
elif self._From > 0:
EdkLogger.error('Parser', FORMAT_INVALID,
"No '!include' allowed in included file",
ExtraData=self._CurrentLine, File=self.MetaFile,
ExtraData=self._CurrentLine, File=self.MetaFile,
Line=self._LineIndex+1)
#
@@ -1154,7 +1154,7 @@ class DscParser(MetaFileParser):
MODEL_META_DATA_USER_EXTENSION : self._Skip,
MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR : self._Skip,
}
self._RawTable = self._Table
self._Table = MetaFileStorage(self._RawTable.Cur, self.MetaFile, MODEL_FILE_DSC, True)
self._DirectiveStack = []
@@ -1184,7 +1184,7 @@ class DscParser(MetaFileParser):
try:
Processer[self._ItemType]()
except EvaluationException as Excpt:
#
#
# Only catch expression evaluation error here. We need to report
# the precise number of line on which the error occurred
#
@@ -1194,11 +1194,11 @@ class DscParser(MetaFileParser):
# Line=self._LineIndex+1)
except MacroException as Excpt:
EdkLogger.error('Parser', FORMAT_INVALID, str(Excpt),
File=self._FileWithError, ExtraData=' '.join(self._ValueList),
File=self._FileWithError, ExtraData=' '.join(self._ValueList),
Line=self._LineIndex+1)
if self._ValueList is None:
continue
continue
NewOwner = self._IdMapping.get(Owner, -1)
self._Enabled = int((not self._DirectiveEvalStack) or (False not in self._DirectiveEvalStack))
@@ -1221,7 +1221,7 @@ class DscParser(MetaFileParser):
self._IdMapping[Id] = self._LastItem
RecordList = self._Table.GetAll()
self._RawTable.Drop()
self._Table.Drop()
for Record in RecordList:
@@ -1255,7 +1255,7 @@ class DscParser(MetaFileParser):
# Don't use PCD with different values.
if Name in self._Symbols and self._Symbols[Name] != Value:
self._Symbols.pop(Name)
continue
continue
self._Symbols[Name] = Value
Records = self._RawTable.Query(MODEL_PCD_FIXED_AT_BUILD, BelongsToItem=-1.0)
@@ -1263,12 +1263,12 @@ class DscParser(MetaFileParser):
Value, DatumType, MaxDatumSize = AnalyzePcdData(Value)
# Only use PCD whose value is straitforward (no macro and PCD)
if self.SymbolPattern.findall(Value):
continue
continue
Name = TokenSpaceGuid+'.'+PcdName
# Don't use PCD with different values.
if Name in self._Symbols and self._Symbols[Name] != Value:
self._Symbols.pop(Name)
continue
continue
self._Symbols[Name] = Value
def __ProcessDefine(self):
@@ -1288,13 +1288,13 @@ class DscParser(MetaFileParser):
SectionLocalMacros[Name] = Value
elif self._ItemType == MODEL_META_DATA_GLOBAL_DEFINE:
GlobalData.gEdkGlobal[Name] = Value
#
# Keyword in [Defines] section can be used as Macros
#
if (self._ItemType == MODEL_META_DATA_HEADER) and (self._SectionType == MODEL_META_DATA_HEADER):
self._FileLocalMacros[Name] = Value
self._ValueList = [Type, Name, Value]
def __ProcessDirective(self):
@@ -1309,12 +1309,12 @@ class DscParser(MetaFileParser):
EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc), self._ValueList[1])
Result = False
except WrnExpression as Excpt:
#
#
# Catch expression evaluation warning here. We need to report
# the precise number of line and return the evaluation result
#
EdkLogger.warn('Parser', "Suspicious expression: %s" % str(Excpt),
File=self._FileWithError, ExtraData=' '.join(self._ValueList),
File=self._FileWithError, ExtraData=' '.join(self._ValueList),
Line=self._LineIndex+1)
Result = Excpt.result
except BadExpression as Exc:
@@ -1365,14 +1365,14 @@ class DscParser(MetaFileParser):
#
elif "ECP_SOURCE" in GlobalData.gCommandLineDefines.keys():
__IncludeMacros['ECP_SOURCE'] = GlobalData.gCommandLineDefines['ECP_SOURCE']
__IncludeMacros['EFI_SOURCE'] = GlobalData.gGlobalDefines['EFI_SOURCE']
__IncludeMacros['EDK_SOURCE'] = GlobalData.gGlobalDefines['EDK_SOURCE']
#
# Allow using MACROs comes from [Defines] section to keep compatible.
# Allow using MACROs comes from [Defines] section to keep compatible.
#
__IncludeMacros.update(self._Macros)
IncludedFile = NormPath(ReplaceMacro(self._ValueList[1], __IncludeMacros, RaiseError=True))
#
# First search the include file under the same directory as DSC file
@@ -1386,14 +1386,14 @@ class DscParser(MetaFileParser):
IncludedFile1 = PathClass(IncludedFile, GlobalData.gWorkspace)
ErrorCode, ErrorInfo2 = IncludedFile1.Validate()
if ErrorCode != 0:
EdkLogger.error('parser', ErrorCode, File=self._FileWithError,
EdkLogger.error('parser', ErrorCode, File=self._FileWithError,
Line=self._LineIndex+1, ExtraData=ErrorInfo1 + "\n"+ ErrorInfo2)
self._FileWithError = IncludedFile1
IncludedFileTable = MetaFileStorage(self._Table.Cur, IncludedFile1, MODEL_FILE_DSC, True)
Owner = self._Content[self._ContentIndex-1][0]
Parser = DscParser(IncludedFile1, self._FileType, IncludedFileTable,
Parser = DscParser(IncludedFile1, self._FileType, IncludedFileTable,
Owner=Owner, From=Owner)
# set the parser status with current status
@@ -1417,7 +1417,7 @@ class DscParser(MetaFileParser):
self._Content.pop(self._ContentIndex-1)
self._ValueList = None
self._ContentIndex -= 1
def __ProcessSkuId(self):
self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=True)
for Value in self._ValueList]
@@ -1434,22 +1434,22 @@ class DscParser(MetaFileParser):
# PCD value can be an expression
#
if len(ValueList) > 1 and ValueList[1] == TAB_VOID:
PcdValue = ValueList[0]
PcdValue = ValueList[0]
try:
ValueList[0] = ValueExpression(PcdValue, self._Macros)(True)
except WrnExpression as Value:
ValueList[0] = Value.result
ValueList[0] = Value.result
else:
PcdValue = ValueList[-1]
try:
ValueList[-1] = ValueExpression(PcdValue, self._Macros)(True)
except WrnExpression as Value:
ValueList[-1] = Value.result
if ValueList[-1] == 'True':
ValueList[-1] = '1'
if ValueList[-1] == 'False':
ValueList[-1] = '0'
ValueList[-1] = '0'
self._ValueList[2] = '|'.join(ValueList)
@@ -1548,7 +1548,7 @@ class DecParser(MetaFileParser):
self.FileID = FileID
else:
self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DEC)
for Index in range(0, len(Content)):
Line, Comment = CleanString2(Content[Index])
self._CurrentLine = Line
@@ -1750,19 +1750,19 @@ class DecParser(MetaFileParser):
" (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
File=self.MetaFile, Line=self._LineIndex+1)
ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
PtrValue = ValueRe.findall(TokenList[1])
# Has VOID* type string, may contain "|" character in the string.
# Has VOID* type string, may contain "|" character in the string.
if len(PtrValue) != 0:
ptrValueList = re.sub(ValueRe, '', TokenList[1])
ValueList = GetSplitValueList(ptrValueList)
ValueList[0] = PtrValue[0]
else:
ValueList = GetSplitValueList(TokenList[1])
# check if there's enough datum information given
if len(ValueList) != 3:
EdkLogger.error('Parser', FORMAT_INVALID, "Invalid PCD Datum information given",
@@ -1792,7 +1792,7 @@ class DecParser(MetaFileParser):
if not IsValid:
EdkLogger.error('Parser', FORMAT_INVALID, Cause, ExtraData=self._CurrentLine,
File=self.MetaFile, Line=self._LineIndex+1)
if EccGlobalData.gConfig.UniCheckPCDInfo == '1' or EccGlobalData.gConfig.UniCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
# check Description, Prompt information
PatternDesc = re.compile('##\s*([\x21-\x7E\s]*)', re.S)
@@ -1903,7 +1903,7 @@ class DecParser(MetaFileParser):
## Fdf
#
# This class defined the structure used in Fdf object
#
#
# @param Filename: Input value for Ffilename of Fdf file, default is None
# @param WorkspaceDir: Input value for current workspace directory, default is None
#
@@ -1911,7 +1911,7 @@ class Fdf(object):
def __init__(self, Filename = None, IsToDatabase = False, WorkspaceDir = None, Database = None):
self.WorkspaceDir = WorkspaceDir
self.IsToDatabase = IsToDatabase
self.Cur = Database.Cur
self.TblFile = Database.TblFile
self.TblFdf = Database.TblFdf
@@ -1938,15 +1938,15 @@ class Fdf(object):
self.FileList[Filename] = FileID
return self.FileList[Filename]
## Load Fdf file
#
# Load the file if it exists
#
# @param Filename: Input value for filename of Fdf file
#
def LoadFdfFile(self, Filename):
def LoadFdfFile(self, Filename):
FileList = []
#
# Parse Fdf file
@@ -1991,7 +1991,7 @@ class UniParser(object):
self.FileIn = None
self.Missing = []
self.__read()
def __read(self):
try:
self.FileIn = CodecOpenLongFilePath(self.FilePath, Mode='rb', Encoding='utf_8').read()
@@ -2001,7 +2001,7 @@ class UniParser(object):
self.FileIn = CodecOpenLongFilePath(self.FilePath, Mode='rb', Encoding='utf_16_le').read()
except IOError:
self.FileIn = ""
def Start(self):
if self.IsModuleUni:
if self.IsExtraUni:
@@ -2021,7 +2021,7 @@ class UniParser(object):
self.PrintLog('STR_PACKAGE_ABSTRACT', PackageAbstract)
PackageDescription = self.CheckKeyValid('STR_PACKAGE_DESCRIPTION')
self.PrintLog('STR_PACKAGE_DESCRIPTION', PackageDescription)
def CheckKeyValid(self, Key, Contents=None):
if not Contents:
Contents = self.FileIn
@@ -2029,7 +2029,7 @@ class UniParser(object):
if KeyPattern.search(Contents):
return True
return False
def CheckPcdInfo(self, PcdCName):
PromptKey = 'STR_%s_PROMPT' % PcdCName.replace('.', '_')
PcdPrompt = self.CheckKeyValid(PromptKey)
@@ -2037,7 +2037,7 @@ class UniParser(object):
HelpKey = 'STR_%s_HELP' % PcdCName.replace('.', '_')
PcdHelp = self.CheckKeyValid(HelpKey)
self.PrintLog(HelpKey, PcdHelp)
def PrintLog(self, Key, Value):
if not Value and Key not in self.Missing:
Msg = '%s is missing in the %s file.' % (Key, self.FileName)

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase a meta file table
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -25,7 +25,7 @@ from CommonDataClass.DataClass import MODEL_FILE_DSC, MODEL_FILE_DEC, MODEL_FILE
MODEL_FILE_OTHERS
class MetaFileTable(Table):
## Constructor
## Constructor
def __init__(self, Cursor, MetaFile, FileType, TableName, Temporary = False):
self.MetaFile = MetaFile
self.TblFile = EccGlobalData.gDb.TblFile
@@ -88,30 +88,30 @@ class ModuleTable(MetaFileTable):
BelongsToItem=-1, BelongsToFile = -1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0, Usage=''):
(Value1, Value2, Value3, Usage, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Usage, Scope1, Scope2))
return Table.Insert(
self,
Model,
Value1,
Value2,
Value3,
Usage,
Scope1,
self,
Model,
Value1,
Value2,
Value3,
Usage,
Scope1,
Scope2,
BelongsToItem,
BelongsToFile,
StartLine,
StartColumn,
EndLine,
EndColumn,
BelongsToFile,
StartLine,
StartColumn,
EndLine,
EndColumn,
Enabled
)
## Query table
#
# @param Model: The Model of Record
# @param Arch: The Arch attribute of Record
# @param Platform The Platform attribute of Record
# @param Model: The Model of Record
# @param Arch: The Arch attribute of Record
# @param Platform The Platform attribute of Record
#
# @retval: A recordSet of all found records
# @retval: A recordSet of all found records
#
def Query(self, Model, Arch=None, Platform=None):
ConditionString = "Model=%s AND Enabled>=0" % Model
@@ -171,28 +171,28 @@ class PackageTable(MetaFileTable):
BelongsToItem=-1, BelongsToFile = -1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
(Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
return Table.Insert(
self,
Model,
Value1,
Value2,
Value3,
Scope1,
self,
Model,
Value1,
Value2,
Value3,
Scope1,
Scope2,
BelongsToItem,
BelongsToFile,
StartLine,
StartColumn,
EndLine,
EndColumn,
BelongsToFile,
StartLine,
StartColumn,
EndLine,
EndColumn,
Enabled
)
## Query table
#
# @param Model: The Model of Record
# @param Arch: The Arch attribute of Record
# @param Model: The Model of Record
# @param Arch: The Arch attribute of Record
#
# @retval: A recordSet of all found records
# @retval: A recordSet of all found records
#
def Query(self, Model, Arch=None):
ConditionString = "Model=%s AND Enabled>=0" % Model
@@ -252,32 +252,32 @@ class PlatformTable(MetaFileTable):
FromItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=1):
(Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
return Table.Insert(
self,
Model,
Value1,
Value2,
Value3,
Scope1,
self,
Model,
Value1,
Value2,
Value3,
Scope1,
Scope2,
BelongsToItem,
BelongsToItem,
BelongsToFile,
FromItem,
StartLine,
StartColumn,
EndLine,
EndColumn,
StartLine,
StartColumn,
EndLine,
EndColumn,
Enabled
)
## Query table
#
# @param Model: The Model of Record
# @param Model: The Model of Record
# @param Scope1: Arch of a Dsc item
# @param Scope2: Module type of a Dsc item
# @param BelongsToItem: The item belongs to which another item
# @param FromItem: The item belongs to which dsc file
#
# @retval: A recordSet of all found records
# @retval: A recordSet of all found records
#
def Query(self, Model, Scope1=None, Scope2=None, BelongsToItem=None, FromItem=None):
ConditionString = "Model=%s AND Enabled>0" % Model

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to be the warning class of ECC tool
#
# Copyright (c) 2009 - 2010, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -27,4 +27,4 @@ class Warning (Exception):
self.message = Str
self.FileName = File
self.LineNumber = Line
self.ToolName = 'ECC PP'
self.ToolName = 'ECC PP'

View File

@@ -2,7 +2,7 @@
# This is an XML API that uses a syntax similar to XPath, but it is written in
# standard python so that no extra python packages are required to use it.
#
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -33,7 +33,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
Element = Doc.createElement(Name)
if String != '' and String is not None:
Element.appendChild(Doc.createTextNode(String))
for Item in NodeList:
if isinstance(Item, type([])):
Key = Item[0]
@@ -49,7 +49,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
Value = Item[1]
if Key != '' and Key is not None and Value != '' and Value is not None:
Element.setAttribute(Key, Value)
return Element
## Get a list of XML nodes using XPath style syntax.

View File

@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
Xml
'''
'''

View File

@@ -2349,13 +2349,13 @@ def CheckFileHeaderDoxygenComments(FullFileName):
if (len(CommentStrListTemp) <= 1):
# For Mac
CommentStrListTemp = CommentStr.split('\r')
# Skip the content before the file header
# Skip the content before the file header
for CommentLine in CommentStrListTemp:
if CommentLine.strip().startswith('/** @file'):
FileStartFlag = True
if FileStartFlag == True:
CommentStrList.append(CommentLine)
ID = Result[1]
Index = 0
if CommentStrList and CommentStrList[0].strip().startswith('/** @file'):
@@ -2378,7 +2378,7 @@ def CheckFileHeaderDoxygenComments(FullFileName):
if EccGlobalData.gConfig.HeaderCheckCFileCommentStartSpacesNum == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
if CommentLine.startswith('/** @file') == False and CommentLine.startswith('**/') == False and CommentLine.strip() and CommentLine.startswith(' ') == False:
PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment content should start with two spaces at each line', FileTable, ID)
CommentLine = CommentLine.strip()
if CommentLine.startswith('Copyright'):
NoCopyrightFlag = False
@@ -2403,9 +2403,9 @@ def CheckFileHeaderDoxygenComments(FullFileName):
# Check whether C File header Comment's each reference at list should begin with a bullet character.
if EccGlobalData.gConfig.HeaderCheckCFileCommentReferenceFormat == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
if RefListFlag == True:
if RefLine.strip() and RefLine.strip().startswith('**/') == False and RefLine.startswith(' -') == False:
PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'Each reference on a separate line should begin with a bullet character ""-"" ', FileTable, ID)
if RefLine.strip() and RefLine.strip().startswith('**/') == False and RefLine.startswith(' -') == False:
PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'Each reference on a separate line should begin with a bullet character ""-"" ', FileTable, ID)
if NoHeaderCommentStartFlag:
PrintErrorMsg(ERROR_DOXYGEN_CHECK_FILE_HEADER, 'File header comment should begin with ""/** @file""', FileTable, ID)
return

View File

@@ -2,7 +2,7 @@
from antlr3 import *
from antlr3.compat import set, frozenset
## @file
# The file defines the Lexer for C source files.
#
@@ -10,7 +10,7 @@ from antlr3.compat import set, frozenset
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2010, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
@@ -4341,7 +4341,7 @@ class CLexer(Lexer):
u"\12\uffff"
)
DFA25_transition = [
DFA.unpack(u"\1\2\1\uffff\12\1"),
DFA.unpack(u"\1\3\1\uffff\12\1\12\uffff\1\5\1\4\1\5\35\uffff\1\5"
@@ -4479,7 +4479,7 @@ class CLexer(Lexer):
u"\u0192\uffff"
)
DFA35_transition = [
DFA.unpack(u"\6\73\2\70\1\73\2\70\22\73\1\70\1\50\1\65\1\72\1\63"
u"\1\45\1\46\1\64\1\34\1\35\1\40\1\42\1\3\1\43\1\41\1\44\1\66\11"
@@ -4943,5 +4943,5 @@ class CLexer(Lexer):
# class definition for DFA #35
DFA35 = DFA

File diff suppressed because it is too large Load Diff

View File

@@ -1491,7 +1491,7 @@ class MultipleFv(FirmwareVolume):
Fv.frombuffer(Buf, 0, len(Buf))
self.BasicInfo.append([Fv.Name, Fv.FileSystemGuid, Fv.Size])
self.FfsDict.append(Fv.FfsDict)
self.FfsDict.append(Fv.FfsDict)
## Class Eot
#
@@ -1510,7 +1510,7 @@ class Eot(object):
# Version and Copyright
self.VersionNumber = ("0.02" + " " + gBUILD_VERSION)
self.Version = "%prog Version " + self.VersionNumber
self.Copyright = "Copyright (c) 2008 - 2010, Intel Corporation All rights reserved."
self.Copyright = "Copyright (c) 2008 - 2018, Intel Corporation All rights reserved."
self.Report = Report
self.IsInit = IsInit
@@ -1522,7 +1522,7 @@ class Eot(object):
self.FvFileList = FvFileList
self.MapFileList = MapFileList
self.Dispatch = Dispatch
# Check workspace environment
if "EFI_SOURCE" not in os.environ:
if "EDK_SOURCE" not in os.environ:
@@ -1562,13 +1562,13 @@ class Eot(object):
if not os.path.isfile(MapFile):
EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "Can not find file %s " % MapFile)
EotGlobalData.gMAP_FILE.append(MapFile)
# Generate source file list
self.GenerateSourceFileList(self.SourceFileList, self.IncludeDirList)
# Generate guid list of dec file list
self.ParseDecFile(self.DecFileList)
# Generate guid list from GUID list file
self.ParseGuidList(self.GuidList)
@@ -1628,7 +1628,7 @@ class Eot(object):
if len(list) == 2:
EotGlobalData.gGuidDict[list[0].strip()] = GuidStructureStringToGuidString(list[1].strip())
## ParseGuidList() method
#
# Parse Guid list and get all GUID names with GUID values as {GuidName : GuidValue}
@@ -1643,7 +1643,7 @@ class Eot(object):
for Line in open(Path):
(GuidName, GuidValue) = Line.split()
EotGlobalData.gGuidDict[GuidName] = GuidValue
## ConvertLogFile() method
#
# Parse a real running log file to get real dispatch order
@@ -1999,7 +1999,7 @@ class Eot(object):
if Options.FvFileList:
self.FvFileList = Options.FvFileList
if Options.MapFileList:
self.MapFileList = Options.FvMapFileList
@@ -2011,7 +2011,7 @@ class Eot(object):
if Options.DecFileList:
self.DecFileList = Options.DecFileList
if Options.GuidList:
self.GuidList = Options.GuidList

View File

@@ -1,7 +1,7 @@
## @file
# Warning information of Eot
#
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -23,4 +23,4 @@ class Warning (Exception):
self.message = Str
self.FileName = File
self.LineNumber = Line
self.ToolName = 'EOT'
self.ToolName = 'EOT'

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create report for Eot tool
#
# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -276,13 +276,13 @@ class Report(object):
</tr>
<tr id='Ffs%s' style='display:none;'>
<td colspan="4"><table width="100%%" border="1">""" % (self.FfsIndex, self.FfsIndex, self.FfsIndex, FfsPath, FfsName, FfsGuid, FfsOffset, FfsType, self.FfsIndex)
if self.DispatchList:
if FfsObj.Type in [0x04, 0x06]:
self.DispatchList.write("%s %s %s %s\n" % (FfsGuid, "P", FfsName, FfsPath))
if FfsObj.Type in [0x05, 0x07, 0x08, 0x0A]:
self.DispatchList.write("%s %s %s %s\n" % (FfsGuid, "D", FfsName, FfsPath))
self.WriteLn(Content)
EotGlobalData.gOP_DISPATCH_ORDER.write('%s\n' %FfsName)

View File

@@ -1,7 +1,7 @@
## @file
# name value pair
#
# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -25,4 +25,4 @@ class Attribute:
# @param self The object pointer
def __init__(self):
self.Name = None
self.Value = None
self.Value = None

View File

@@ -205,7 +205,7 @@ class Capsule (CapsuleClassObject) :
return GenFds.ImageBinDict[self.UiCapsuleName.upper() + 'cap']
GenFdsGlobalVariable.InfLogger( "\nGenerate %s Capsule" %self.UiCapsuleName)
if ('CAPSULE_GUID' in self.TokensDict and
if ('CAPSULE_GUID' in self.TokensDict and
uuid.UUID(self.TokensDict['CAPSULE_GUID']) == uuid.UUID('6DCBD5ED-E82D-4C44-BDA1-7194199AD92A')):
return self.GenFmpCapsule()

View File

@@ -32,13 +32,13 @@ class CapsuleData:
# @param self The object pointer
def __init__(self):
pass
## generate capsule data
#
# @param self The object pointer
def GenCapsuleSubItem(self):
pass
## FFS class for capsule data
#
#
@@ -119,7 +119,7 @@ class CapsuleFd (CapsuleData):
else:
FdFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName)
return FdFile
## AnyFile class for capsule data
#
#
@@ -139,7 +139,7 @@ class CapsuleAnyFile (CapsuleData):
#
def GenCapsuleSubItem(self):
return self.FileName
## Afile class for capsule data
#
#
@@ -208,11 +208,11 @@ class CapsulePayload(CapsuleData):
Guid = self.ImageTypeId.split('-')
Buffer = pack('=ILHHBBBBBBBBBBBBIIQ',
int(self.Version, 16),
int(Guid[0], 16),
int(Guid[1], 16),
int(Guid[2], 16),
int(Guid[3][-4:-2], 16),
int(Guid[3][-2:], 16),
int(Guid[0], 16),
int(Guid[1], 16),
int(Guid[2], 16),
int(Guid[3][-4:-2], 16),
int(Guid[3][-2:], 16),
int(Guid[4][-12:-10], 16),
int(Guid[4][-10:-8], 16),
int(Guid[4][-8:-6], 16),

View File

@@ -55,7 +55,7 @@ class EfiSection (EfiSectionClassObject):
# @retval tuple (Generated file name list, section alignment)
#
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}, IsMakefile = False) :
if self.FileName is not None and self.FileName.startswith('PCD('):
self.FileName = GenFdsGlobalVariable.GetPcdValue(self.FileName)
"""Prepare the parameter of GenSection"""
@@ -155,7 +155,7 @@ class EfiSection (EfiSectionClassObject):
BuildNumTuple = tuple()
BuildNumString = ' ' + ' '.join(BuildNumTuple)
#if VerString == '' and
#if VerString == '' and
if BuildNumString == '':
if self.Optional == True :
GenFdsGlobalVariable.VerboseLogger( "Optional Section don't exist!")
@@ -240,7 +240,7 @@ class EfiSection (EfiSectionClassObject):
Num = '%s.%d' %(SecNum, Index)
OutputFile = os.path.join( OutputPath, ModuleName + SUP_MODULE_SEC + Num + Ffs.SectionSuffix.get(SectionType))
File = GenFdsGlobalVariable.MacroExtend(File, Dict)
#Get PE Section alignment when align is set to AUTO
if self.Alignment == 'Auto' and (SectionType == BINARY_FILE_TYPE_PE32 or SectionType == BINARY_FILE_TYPE_TE):
ImageObj = PeImageClass (File)
@@ -284,7 +284,7 @@ class EfiSection (EfiSectionClassObject):
IsMakefile = IsMakefile
)
File = StrippedFile
"""For TE Section call GenFw to generate TE image"""
if SectionType == BINARY_FILE_TYPE_TE:

View File

@@ -103,7 +103,7 @@ class FD(FDClassObject):
pass
GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')
RegionObj.AddToBuffer (TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFds.ImageBinDict, self.vtfRawDict, self.DefineVarDict)
FdBuffer = BytesIO('')
PreviousRegionStart = -1
PreviousRegionSize = 1

View File

@@ -173,7 +173,7 @@ class IncludeFileProfile :
self.InsertAdjust = 0
self.IncludeFileList = []
self.Level = 1 # first level include file
def GetTotalLines(self):
TotalLines = self.InsertAdjust + len(self.FileLinesList)
@@ -191,7 +191,7 @@ class IncludeFileProfile :
def GetLineInFile(self, Line):
if not self.IsLineInFile (Line):
return (self.FileName, -1)
InsertedLines = self.InsertStartLineNumber
for Profile in self.IncludeFileList:
@@ -233,7 +233,7 @@ class FileProfile :
# ECC will use this Dict and List information
self.PcdFileLineDict = {}
self.InfFileLineList = []
self.FdDict = {}
self.FdNameNotSet = False
self.FvDict = {}
@@ -339,11 +339,11 @@ class FdfParser:
#
# @param self The object pointer
# @param DestLine Optional new destination line number.
# @param DestOffset Optional new destination offset.
# @param DestOffset Optional new destination offset.
#
def Rewind(self, DestLine = 1, DestOffset = 0):
self.CurrentLineNumber = DestLine
self.CurrentOffsetWithinLine = DestOffset
def Rewind(self, DestLine = 1, DestOffset = 0):
self.CurrentLineNumber = DestLine
self.CurrentOffsetWithinLine = DestOffset
## __UndoOneChar() method
#
@@ -459,7 +459,7 @@ class FdfParser:
if MacroName.startswith('!'):
NotFlag = True
MacroName = MacroName[1:].strip()
if not MacroName.startswith('$(') or not MacroName.endswith(')'):
raise Warning("Macro name expected(Please use '$(%(Token)s)' if '%(Token)s' is a macro.)" % {"Token" : MacroName},
self.FileName, self.CurrentLineNumber)
@@ -599,7 +599,7 @@ class FdfParser:
# @param self The object pointer
#
def PreprocessIncludeFile(self):
# nested include support
# nested include support
Processed = False
MacroDict = {}
while self.__GetNextToken():
@@ -664,7 +664,7 @@ class FdfParser:
IncludedFile1 = PathClass(IncludedFile, GlobalData.gWorkspace)
ErrorCode = IncludedFile1.Validate()[0]
if ErrorCode != 0:
raise Warning("The include file does not exist under below directories: \n%s\n%s\n%s\n"%(os.path.dirname(self.FileName), PlatformDir, GlobalData.gWorkspace),
raise Warning("The include file does not exist under below directories: \n%s\n%s\n%s\n"%(os.path.dirname(self.FileName), PlatformDir, GlobalData.gWorkspace),
self.FileName, self.CurrentLineNumber)
if not IsValidInclude (IncludedFile1.Path, self.CurrentLineNumber):
@@ -707,18 +707,18 @@ class FdfParser:
Processed = False
# Preprocess done.
self.Rewind()
@staticmethod
def __GetIfListCurrentItemStat(IfList):
if len(IfList) == 0:
return True
for Item in IfList:
if Item[1] == False:
return False
return True
## PreprocessConditionalStatement() method
#
# Preprocess conditional statement.
@@ -778,7 +778,7 @@ class FdfParser:
Macro = self.__Token
if not self.__IsToken( "="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
Value = self.__GetExpression()
self.__SetMacroValue(Macro, Value)
self.__WipeOffArea.append(((DefineLine, DefineOffset), (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
@@ -808,7 +808,7 @@ class FdfParser:
CondLabel = self.__Token
Expression = self.__GetExpression()
if CondLabel == '!if':
ConditionSatisfied = self.__EvaluateConditional(Expression, IfList[-1][0][0] + 1, 'eval')
else:
@@ -819,7 +819,7 @@ class FdfParser:
BranchDetermined = ConditionSatisfied
IfList[-1] = [IfList[-1][0], ConditionSatisfied, BranchDetermined]
if ConditionSatisfied:
self.__WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
self.__WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
elif self.__Token in ('!elseif', '!else'):
ElseStartPos = (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len(self.__Token))
if len(IfList) <= 0:
@@ -891,7 +891,7 @@ class FdfParser:
ScopeMacro = self.__MacroDict[TAB_COMMON, TAB_COMMON, TAB_COMMON]
if ScopeMacro:
MacroDict.update(ScopeMacro)
# Section macro
ScopeMacro = self.__MacroDict[
self.__CurSection[0],
@@ -923,12 +923,12 @@ class FdfParser:
else:
return ValueExpression(Expression, MacroPcdDict)()
except WrnExpression as Excpt:
#
#
# Catch expression evaluation warning here. We need to report
# the precise number of line and return the evaluation result
#
EdkLogger.warn('Parser', "Suspicious expression: %s" % str(Excpt),
File=self.FileName, ExtraData=self.__CurrentLine(),
File=self.FileName, ExtraData=self.__CurrentLine(),
Line=Line)
return Excpt.result
except Exception as Excpt:
@@ -947,7 +947,7 @@ class FdfParser:
raise Warning(str(Excpt), self.FileName, Line)
else:
if Expression.startswith('$(') and Expression[-1] == ')':
Expression = Expression[2:-1]
Expression = Expression[2:-1]
return Expression in MacroPcdDict
## __IsToken() method
@@ -1432,9 +1432,9 @@ class FdfParser:
self.__UndoToken()
self.__GetSetStatement(None)
continue
Macro = self.__Token
if not self.__IsToken("="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
if not self.__GetNextToken() or self.__Token.startswith('['):
@@ -1492,7 +1492,7 @@ class FdfParser:
else:
raise Warning("expected FdName in [FD.] section", self.FileName, self.CurrentLineNumber)
self.CurrentFdName = FdName.upper()
if self.CurrentFdName in self.Profile.FdDict:
raise Warning("Unexpected the same FD name", self.FileName, self.CurrentLineNumber)
@@ -1578,12 +1578,12 @@ class FdfParser:
if self.__IsKeyword( "BaseAddress"):
if not self.__IsToken( "="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
if not self.__GetNextHexNumber():
raise Warning("expected Hex base address", self.FileName, self.CurrentLineNumber)
Obj.BaseAddress = self.__Token
if self.__IsToken( "|"):
pcdPair = self.__GetNextPcdName()
Obj.BaseAddressPcd = pcdPair
@@ -1595,7 +1595,7 @@ class FdfParser:
if self.__IsKeyword( "Size"):
if not self.__IsToken( "="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
if not self.__GetNextHexNumber():
raise Warning("expected Hex size", self.FileName, self.CurrentLineNumber)
@@ -1612,13 +1612,13 @@ class FdfParser:
if self.__IsKeyword( "ErasePolarity"):
if not self.__IsToken( "="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
if not self.__GetNextToken():
raise Warning("expected Erase Polarity", self.FileName, self.CurrentLineNumber)
if self.__Token != "1" and self.__Token != "0":
raise Warning("expected 1 or 0 Erase Polarity", self.FileName, self.CurrentLineNumber)
Obj.ErasePolarity = self.__Token
return True
@@ -1666,7 +1666,7 @@ class FdfParser:
IsBlock = False
while self.__GetBlockStatement(Obj):
IsBlock = True
Item = Obj.BlockSizeList[-1]
if Item[0] is None or Item[1] is None:
raise Warning("expected block statement", self.FileName, self.CurrentLineNumber)
@@ -1835,7 +1835,7 @@ class FdfParser:
# @retval False Not able to find
#
def __GetRegionLayout(self, Fd):
Offset = self.__CalcRegionExpr()
Offset = self.__CalcRegionExpr()
if Offset is None:
return False
@@ -2151,9 +2151,9 @@ class FdfParser:
while True:
self.__GetSetStatements(FvObj)
if not (self.__GetBlockStatement(FvObj) or self.__GetFvBaseAddress(FvObj) or
self.__GetFvForceRebase(FvObj) or self.__GetFvAlignment(FvObj) or
self.__GetFvAttributes(FvObj) or self.__GetFvNameGuid(FvObj) or
if not (self.__GetBlockStatement(FvObj) or self.__GetFvBaseAddress(FvObj) or
self.__GetFvForceRebase(FvObj) or self.__GetFvAlignment(FvObj) or
self.__GetFvAttributes(FvObj) or self.__GetFvNameGuid(FvObj) or
self.__GetFvExtEntryStatement(FvObj) or self.__GetFvNameString(FvObj)):
break
@@ -2198,7 +2198,7 @@ class FdfParser:
raise Warning("Unknown alignment value '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
Obj.FvAlignment = self.__Token
return True
## __GetFvBaseAddress() method
#
# Get BaseAddress for FV
@@ -2222,8 +2222,8 @@ class FdfParser:
if not BaseAddrValuePattern.match(self.__Token.upper()):
raise Warning("Unknown FV base address value '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
Obj.FvBaseAddress = self.__Token
return True
return True
## __GetFvForceRebase() method
#
# Get FvForceRebase for FV
@@ -2246,14 +2246,14 @@ class FdfParser:
if self.__Token.upper() not in ["TRUE", "FALSE", "0", "0X0", "0X00", "1", "0X1", "0X01"]:
raise Warning("Unknown FvForceRebase value '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
if self.__Token.upper() in ["TRUE", "1", "0X1", "0X01"]:
Obj.FvForceRebase = True
elif self.__Token.upper() in ["FALSE", "0", "0X0", "0X00"]:
Obj.FvForceRebase = False
else:
Obj.FvForceRebase = None
return True
@@ -2288,7 +2288,7 @@ class FdfParser:
FvObj.FvAttributeDict[name] = self.__Token
return IsWordToken
## __GetFvNameGuid() method
#
# Get FV GUID for FV
@@ -2334,7 +2334,7 @@ class FdfParser:
if not self.__IsKeyword ("TYPE"):
raise Warning("expected 'TYPE'", self.FileName, self.CurrentLineNumber)
if not self.__IsToken( "="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
@@ -2355,7 +2355,7 @@ class FdfParser:
if not self.__IsToken( "="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
if not self.__IsToken( "{"):
raise Warning("expected '{'", self.FileName, self.CurrentLineNumber)
@@ -2386,13 +2386,13 @@ class FdfParser:
FvObj.FvExtEntryData.append(DataString)
if self.__Token == 'FILE':
if not self.__IsToken( "="):
raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
if not self.__GetNextToken():
raise Warning("expected FV Extension Entry file path At Line ", self.FileName, self.CurrentLineNumber)
FvObj.FvExtEntryData.append(self.__Token)
if not self.__IsToken( "}"):
@@ -2555,7 +2555,7 @@ class FdfParser:
raise Warning("expected ARCH name", self.FileName, self.CurrentLineNumber)
FfsInfObj.UseArch = self.__Token
if self.__GetNextToken():
p = re.compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\))')
if p.match(self.__Token) and p.match(self.__Token).span()[1] == len(self.__Token):
@@ -2596,7 +2596,7 @@ class FdfParser:
self.__UndoToken()
self.__UndoToken()
return False
FfsFileObj = FfsFileStatement.FileStatement()
FfsFileObj.FvFileType = self.__Token
@@ -2613,9 +2613,9 @@ class FdfParser:
if not self.__IsToken( ")"):
raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'
FfsFileObj.NameGuid = self.__Token
self.__GetFilePart( FfsFileObj, MacroDict.copy())
if ForCapsule:
@@ -2891,7 +2891,7 @@ class FdfParser:
else:
VerSectionObj.FileName = self.__Token
Obj.SectionList.append(VerSectionObj)
elif self.__IsKeyword( BINARY_FILE_TYPE_UI):
if AlignValue == 'Auto':
raise Warning("Auto alignment can only be used in PE32 or TE section ", self.FileName, self.CurrentLineNumber)
@@ -3345,7 +3345,7 @@ class FdfParser:
Value = self.__Token.strip()
else:
Value = self.__Token.strip()
Obj.TokensDict[Name] = Value
Obj.TokensDict[Name] = Value
if not self.__GetNextToken():
return False
self.__UndoToken()
@@ -3487,7 +3487,7 @@ class FdfParser:
if not self.__GetNextToken():
raise Warning("expected File name", self.FileName, self.CurrentLineNumber)
AnyFileName = self.__Token
self.__VerifyFile(AnyFileName)
@@ -3520,7 +3520,7 @@ class FdfParser:
else:
CapsuleObj.CapsuleDataList.append(CapsuleAnyFile)
return True
## __GetAfileStatement() method
#
# Get Afile for capsule
@@ -3540,14 +3540,14 @@ class FdfParser:
if not self.__GetNextToken():
raise Warning("expected Afile name", self.FileName, self.CurrentLineNumber)
AfileName = self.__Token
AfileBaseName = os.path.basename(AfileName)
if os.path.splitext(AfileBaseName)[1] not in [".bin", ".BIN", ".Bin", ".dat", ".DAT", ".Dat", ".data", ".DATA", ".Data"]:
raise Warning('invalid binary file type, should be one of "bin",BINARY_FILE_TYPE_BIN,"Bin","dat","DAT","Dat","data","DATA","Data"', \
self.FileName, self.CurrentLineNumber)
if not os.path.isabs(AfileName):
AfileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(AfileName)
self.__VerifyFile(AfileName)
@@ -3701,7 +3701,7 @@ class FdfParser:
if not self.__IsToken( ")"):
raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'
NameGuid = self.__Token
KeepReloc = None
@@ -3963,11 +3963,11 @@ class FdfParser:
elif self.__GetNextToken():
if self.__Token not in ("}", "COMPAT16", BINARY_FILE_TYPE_PE32, BINARY_FILE_TYPE_PIC, BINARY_FILE_TYPE_TE, "FV_IMAGE", "RAW", BINARY_FILE_TYPE_DXE_DEPEX,\
BINARY_FILE_TYPE_UI, "VERSION", BINARY_FILE_TYPE_PEI_DEPEX, BINARY_FILE_TYPE_GUID, BINARY_FILE_TYPE_SMM_DEPEX):
if self.__Token.startswith('PCD'):
self.__UndoToken()
self.__GetNextWord()
if self.__Token == 'PCD':
if not self.__IsToken( "("):
raise Warning("expected '('", self.FileName, self.CurrentLineNumber)
@@ -3975,9 +3975,9 @@ class FdfParser:
if not self.__IsToken( ")"):
raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'
EfiSectionObj.FileName = self.__Token
EfiSectionObj.FileName = self.__Token
else:
self.__UndoToken()
else:
@@ -4364,7 +4364,7 @@ class FdfParser:
self.SectionParser(S)
self.__UndoToken()
return False
self.__UndoToken()
if not self.__IsToken("[OptionRom.", True):
raise Warning("Unknown Keyword '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
@@ -4383,7 +4383,7 @@ class FdfParser:
isFile = self.__GetOptRomFileStatement(OptRomObj)
if not isInf and not isFile:
break
return True
## __GetOptRomInfStatement() method
@@ -4424,9 +4424,9 @@ class FdfParser:
else:
self.Profile.InfDict['ArchTBD'].append(ffsInf.InfFileName)
self.__GetOptRomOverrides (ffsInf)
Obj.FfsList.append(ffsInf)
return True
@@ -4488,7 +4488,7 @@ class FdfParser:
EdkLogger.error("FdfParser", FORMAT_INVALID, File=self.FileName, Line=self.CurrentLineNumber)
Obj.OverrideAttribs = Overrides
## __GetOptRomFileStatement() method
#
# Get FILE statements
@@ -4520,7 +4520,7 @@ class FdfParser:
if FfsFileObj.FileType == 'EFI':
self.__GetOptRomOverrides(FfsFileObj)
Obj.FfsList.append(FfsFileObj)
return True
@@ -4562,7 +4562,7 @@ class FdfParser:
if hasattr(CapsuleDataObj, 'FvName') and CapsuleDataObj.FvName is not None and CapsuleDataObj.FvName.upper() not in RefFvList:
RefFvList.append (CapsuleDataObj.FvName.upper())
elif hasattr(CapsuleDataObj, 'FdName') and CapsuleDataObj.FdName is not None and CapsuleDataObj.FdName.upper() not in RefFdList:
RefFdList.append (CapsuleDataObj.FdName.upper())
RefFdList.append (CapsuleDataObj.FdName.upper())
elif CapsuleDataObj.Ffs is not None:
if isinstance(CapsuleDataObj.Ffs, FfsFileStatement.FileStatement):
if CapsuleDataObj.Ffs.FvName is not None and CapsuleDataObj.Ffs.FvName.upper() not in RefFvList:
@@ -4657,7 +4657,7 @@ class FdfParser:
RefFvStack = []
RefFvStack.append(FvName)
FdAnalyzedList = []
Index = 0
while RefFvStack != [] and Index < MaxLength:
Index = Index + 1
@@ -4710,7 +4710,7 @@ class FdfParser:
RefCapStack.append(CapName)
FdAnalyzedList = []
FvAnalyzedList = []
Index = 0
while RefCapStack != [] and Index < MaxLength:
Index = Index + 1

View File

@@ -39,7 +39,7 @@ class Ffs(FDClassObject):
SUP_MODULE_MM_STANDALONE : 'EFI_FV_FILETYPE_MM_STANDALONE',
SUP_MODULE_MM_CORE_STANDALONE : 'EFI_FV_FILETYPE_MM_CORE_STANDALONE'
}
# mapping between section type in FDF and file suffix
SectionSuffix = {
BINARY_FILE_TYPE_PE32 : '.pe32',
@@ -51,14 +51,14 @@ class Ffs(FDClassObject):
'COMPAT16' : '.com16',
'RAW' : '.raw',
'FREEFORM_SUBTYPE_GUID': '.guid',
'SUBTYPE_GUID' : '.guid',
'SUBTYPE_GUID' : '.guid',
'FV_IMAGE' : 'fv.sec',
'COMPRESS' : '.com',
'GUIDED' : '.guided',
BINARY_FILE_TYPE_PEI_DEPEX : '.dpx',
BINARY_FILE_TYPE_SMM_DEPEX : '.dpx'
}
## The constructor
#
# @param self The object pointer

View File

@@ -58,7 +58,7 @@ class FileStatement (FileStatementClassObject) :
# @retval string Generated FFS file name
#
def GenFfs(self, Dict = {}, FvChildAddr=[], FvParentAddr=None, IsMakefile=False, FvName=None):
if self.NameGuid is not None and self.NameGuid.startswith('PCD('):
PcdValue = GenFdsGlobalVariable.GetPcdValue(self.NameGuid)
if len(PcdValue) == 0:
@@ -71,7 +71,7 @@ class FileStatement (FileStatementClassObject) :
EdkLogger.error("GenFds", GENFDS_ERROR, 'GUID value for %s in wrong format.' \
% (self.NameGuid))
self.NameGuid = RegistryGuidStr
Str = self.NameGuid
if FvName:
Str += FvName

View File

@@ -225,7 +225,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
EdkLogger.warn("GenFds", GENFDS_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % (self.InfFileName))
if self.ModuleType == SUP_MODULE_SMM_CORE and int(self.PiSpecVersion, 16) < 0x0001000A:
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.InfFileName)
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.InfFileName)
if self.ModuleType == SUP_MODULE_MM_CORE_STANDALONE and int(self.PiSpecVersion, 16) < 0x00010032:
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "MM_CORE_STANDALONE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x00010032", File=self.InfFileName)
@@ -374,13 +374,13 @@ class FfsInfStatement(FfsInfStatementClassObject):
def PatchEfiFile(self, EfiFile, FileType):
#
# If the module does not have any patches, then return path to input file
#
#
if not self.PatchPcds:
return EfiFile
#
# Only patch file if FileType is PE32 or ModuleType is USER_DEFINED
#
#
if FileType != BINARY_FILE_TYPE_PE32 and self.ModuleType != SUP_MODULE_USER_DEFINED:
return EfiFile
@@ -398,7 +398,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
# If a different file from the same module has already been patched, then generate an error
#
#
if self.PatchedBinFile:
EdkLogger.error("GenFds", GENFDS_ERROR,
'Only one binary file can be patched:\n'
@@ -408,12 +408,12 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
# Copy unpatched file contents to output file location to perform patching
#
#
CopyLongFilePath(EfiFile, Output)
#
# Apply patches to patched output file
#
#
for Pcd, Value in self.PatchPcds:
RetVal, RetStr = PatchBinaryFile(Output, int(Pcd.Offset, 0), Pcd.DatumType, Value, Pcd.MaxDatumSize)
if RetVal:
@@ -421,12 +421,12 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
# Save the path of the patched output file
#
#
self.PatchedBinFile = Output
#
# Return path to patched output file
#
#
return Output
## GenFfs() method
@@ -448,14 +448,14 @@ class FfsInfStatement(FfsInfStatementClassObject):
Arch = self.GetCurrentArch()
SrcFile = mws.join( GenFdsGlobalVariable.WorkSpaceDir, self.InfFileName);
DestFile = os.path.join( self.OutputPath, self.ModuleGuid + '.ffs')
SrcFileDir = "."
SrcPath = os.path.dirname(SrcFile)
SrcFileName = os.path.basename(SrcFile)
SrcFileBase, SrcFileExt = os.path.splitext(SrcFileName)
SrcFileBase, SrcFileExt = os.path.splitext(SrcFileName)
DestPath = os.path.dirname(DestFile)
DestFileName = os.path.basename(DestFile)
DestFileBase, DestFileExt = os.path.splitext(DestFileName)
DestFileBase, DestFileExt = os.path.splitext(DestFileName)
self.MacroDict = {
# source file
"${src}" : SrcFile,
@@ -473,7 +473,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
}
#
# Allow binary type module not specify override rule in FDF file.
#
#
if len(self.BinFileList) > 0:
if self.Rule is None or self.Rule == "":
self.Rule = "BINARY"
@@ -534,7 +534,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
'$(NAMED_GUID)' : self.ModuleGuid
}
String = GenFdsGlobalVariable.MacroExtend(String, MacroDict)
String = GenFdsGlobalVariable.MacroExtend(String, self.MacroDict)
String = GenFdsGlobalVariable.MacroExtend(String, self.MacroDict)
return String
## __GetRule__() method
@@ -960,14 +960,14 @@ class FfsInfStatement(FfsInfStatementClassObject):
Sect.FvAddr = FvChildAddr
if FvParentAddr is not None and isinstance(Sect, GuidSection):
Sect.FvParentAddr = FvParentAddr
if Rule.KeyStringList != []:
SectList, Align = Sect.GenSection(self.OutputPath, self.ModuleGuid, SecIndex, Rule.KeyStringList, self, IsMakefile = IsMakefile)
else :
SectList, Align = Sect.GenSection(self.OutputPath, self.ModuleGuid, SecIndex, self.KeyStringList, self, IsMakefile = IsMakefile)
if not HasGeneratedFlag:
UniVfrOffsetFileSection = ""
UniVfrOffsetFileSection = ""
ModuleFileName = mws.join(GenFdsGlobalVariable.WorkSpaceDir, self.InfFileName)
InfData = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClass(ModuleFileName), self.CurrentArch]
#
@@ -978,16 +978,16 @@ class FfsInfStatement(FfsInfStatementClassObject):
for SourceFile in InfData.Sources:
if SourceFile.Type.upper() == ".VFR" :
#
# search the .map file to find the offset of vfr binary in the PE32+/TE file.
# search the .map file to find the offset of vfr binary in the PE32+/TE file.
#
VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
if SourceFile.Type.upper() == ".UNI" :
#
# search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
# search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
#
VfrUniBaseName["UniOffsetName"] = (self.BaseName + "Strings")
if len(VfrUniBaseName) > 0:
if IsMakefile:
if InfData.BuildType != 'UEFI_HII':
@@ -1023,7 +1023,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
if UniVfrOffsetFileSection:
SectList.append(UniVfrOffsetFileSection)
HasGeneratedFlag = True
for SecName in SectList :
SectFiles.append(SecName)
SectAlignments.append(Align)
@@ -1071,12 +1071,12 @@ class FfsInfStatement(FfsInfStatementClassObject):
# @param self The object pointer
# @param VfrUniBaseName A name list contain the UNI/INF object name.
# @retval RetValue A list contain offset of UNI/INF object.
#
#
def __GetBuildOutputMapFileVfrUniInfo(self, VfrUniBaseName):
MapFileName = os.path.join(self.EfiOutputPath, self.BaseName + ".map")
EfiFileName = os.path.join(self.EfiOutputPath, self.BaseName + ".efi")
return GetVariableOffset(MapFileName, EfiFileName, VfrUniBaseName.values())
## __GenUniVfrOffsetFile() method
#
# Generate the offset file for the module which contain VFR or UNI file.
@@ -1089,7 +1089,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
# Use a instance of StringIO to cache data
fStringIO = BytesIO('')
for Item in VfrUniOffsetList:
if (Item[0].find("Strings") != -1):
#
@@ -1099,7 +1099,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
fStringIO.write(''.join(UniGuid))
fStringIO.write(''.join(UniGuid))
UniValue = pack ('Q', int (Item[1], 16))
fStringIO.write (UniValue)
else:
@@ -1110,11 +1110,11 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
fStringIO.write(''.join(VfrGuid))
type (Item[1])
fStringIO.write(''.join(VfrGuid))
type (Item[1])
VfrValue = pack ('Q', int (Item[1], 16))
fStringIO.write (VfrValue)
#
# write data into file.
#
@@ -1122,7 +1122,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
SaveFileOnChange(UniVfrOffsetFileName, fStringIO.getvalue())
except:
EdkLogger.error("GenFds", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %UniVfrOffsetFileName, None)
fStringIO.close ()

View File

@@ -53,7 +53,7 @@ class FV (FvClassObject):
self.FvForceRebase = None
self.FvRegionInFD = None
self.UsedSizeEnable = False
## AddToBuffer()
#
# Generate Fv and add it to the Buffer
@@ -72,7 +72,7 @@ class FV (FvClassObject):
if BaseAddress is None and self.UiFvName.upper() + 'fv' in GenFds.ImageBinDict:
return GenFds.ImageBinDict[self.UiFvName.upper() + 'fv']
#
# Check whether FV in Capsule is in FD flash region.
# If yes, return error. Doesn't support FV in Capsule image is also in FD flash region.
@@ -92,7 +92,7 @@ class FV (FvClassObject):
GenFdsGlobalVariable.InfLogger( "\nGenerating %s FV" %self.UiFvName)
GenFdsGlobalVariable.LargeFileInFvFlags.append(False)
FFSGuid = None
if self.FvBaseAddress is not None:
BaseAddress = self.FvBaseAddress
if not Flag:
@@ -289,7 +289,7 @@ class FV (FvClassObject):
if not self._GetBlockSize():
#set default block size is 1
self.FvInfFile.writelines("EFI_BLOCK_SIZE = 0x1" + TAB_LINE_BREAK)
for BlockSize in self.BlockSizeList :
if BlockSize[0] is not None:
self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \
@@ -331,7 +331,7 @@ class FV (FvClassObject):
self.FvAlignment.strip() + \
" = TRUE" + \
TAB_LINE_BREAK)
#
# Generate FV extension header file
#
@@ -387,7 +387,7 @@ class FV (FvClassObject):
TotalSize += (Size + 4)
FvExtFile.seek(0)
Buffer += pack('HH', (Size + 4), int(self.FvExtEntryTypeValue[Index], 16))
Buffer += FvExtFile.read()
Buffer += FvExtFile.read()
FvExtFile.close()
if self.FvExtEntryType[Index] == 'DATA':
ByteList = self.FvExtEntryData[Index].split(',')
@@ -418,7 +418,7 @@ class FV (FvClassObject):
FvExtHeaderFileName + \
TAB_LINE_BREAK)
#
# Add [Files]
#

View File

@@ -47,7 +47,7 @@ from struct import unpack
## Version and Copyright
versionNumber = "1.0" + ' ' + gBUILD_VERSION
__version__ = "%prog Version " + versionNumber
__copyright__ = "Copyright (c) 2007 - 2017, Intel Corporation All rights reserved."
__copyright__ = "Copyright (c) 2007 - 2018, Intel Corporation All rights reserved."
## Tool entrance method
#
@@ -72,10 +72,10 @@ def main():
if Options.verbose is not None:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
GenFdsGlobalVariable.VerboseMode = True
if Options.FixedAddress is not None:
GenFdsGlobalVariable.FixedLoadAddress = True
if Options.quiet is not None:
EdkLogger.SetLevel(EdkLogger.QUIET)
if Options.debug is not None:
@@ -100,7 +100,7 @@ def main():
if Options.GenfdsMultiThread:
GenFdsGlobalVariable.EnableGenfdsMultiThread = True
os.chdir(GenFdsGlobalVariable.WorkSpaceDir)
# set multiple workspace
PackagesPath = os.getenv("PACKAGES_PATH")
mws.setWs(GenFdsGlobalVariable.WorkSpaceDir, PackagesPath)
@@ -228,7 +228,7 @@ def main():
GlobalData.gDatabasePath = os.path.normpath(os.path.join(ConfDirectoryPath, GlobalData.gDatabasePath))
BuildWorkSpace = WorkspaceDatabase(GlobalData.gDatabasePath)
BuildWorkSpace.InitDatabase()
#
# Get files real name in workspace dir
#
@@ -244,7 +244,7 @@ def main():
TargetArchList = set(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, TAB_COMMON, Options.BuildTarget, Options.ToolChain].SupArchList) & set(ArchList)
if len(TargetArchList) == 0:
EdkLogger.error("GenFds", GENFDS_ERROR, "Target ARCH %s not in platform supported ARCH %s" % (str(ArchList), str(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, TAB_COMMON].SupArchList)))
for Arch in ArchList:
GenFdsGlobalVariable.OutputDirFromDscDict[Arch] = NormPath(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, Options.BuildTarget, Options.ToolChain].OutputDirectory)
GenFdsGlobalVariable.PlatformName = BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, Options.BuildTarget, Options.ToolChain].PlatformName
@@ -551,7 +551,7 @@ class GenFds :
Buffer = BytesIO('')
FvObj.AddToBuffer(Buffer)
Buffer.close()
if GenFds.OnlyGenerateThisFv is None and GenFds.OnlyGenerateThisFd is None and GenFds.OnlyGenerateThisCap is None:
if GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict != {}:
GenFdsGlobalVariable.VerboseLogger("\n Generate other Capsule images!")
@@ -617,7 +617,7 @@ class GenFds :
# @retval None
#
def DisplayFvSpaceInfo(FdfParser):
FvSpaceInfoList = []
MaxFvNameLength = 0
for FvName in FdfParser.Profile.FvDict:
@@ -644,10 +644,10 @@ class GenFds :
if NameValue[0].strip() == 'EFI_FV_SPACE_SIZE':
FreeFound = True
Free = NameValue[1].strip()
if TotalFound and UsedFound and FreeFound:
FvSpaceInfoList.append((FvName, Total, Used, Free))
GenFdsGlobalVariable.InfLogger('\nFV Space Information')
for FvSpaceInfo in FvSpaceInfoList:
Name = FvSpaceInfo[0]
@@ -675,18 +675,18 @@ class GenFds :
if PcdObj.TokenCName == 'PcdBsBaseAddress':
PcdValue = PcdObj.DefaultValue
break
if PcdValue == '':
return
Int64PcdValue = long(PcdValue, 0)
if Int64PcdValue == 0 or Int64PcdValue < -1:
if Int64PcdValue == 0 or Int64PcdValue < -1:
return
TopAddress = 0
if Int64PcdValue > 0:
TopAddress = Int64PcdValue
ModuleDict = BuildDb.BuildObject[DscFile, TAB_COMMON, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].Modules
for Key in ModuleDict:
ModuleObj = BuildDb.BuildObject[Key, TAB_COMMON, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]

View File

@@ -65,7 +65,7 @@ class GenFdsGlobalVariable:
FdfFileTimeStamp = 0
FixedLoadAddress = False
PlatformName = ''
BuildRuleFamily = "MSFT"
ToolChainFamily = "MSFT"
__BuildRuleDatabase = None
@@ -75,7 +75,7 @@ class GenFdsGlobalVariable:
CopyList = []
ModuleFile = ''
EnableGenfdsMultiThread = False
#
# The list whose element are flags to indicate if large FFS or SECTION files exist in FV.
# At the beginning of each generation of FV, false flag is appended to the list,
@@ -90,7 +90,7 @@ class GenFdsGlobalVariable:
LARGE_FILE_SIZE = 0x1000000
SectionHeader = struct.Struct("3B 1B")
## LoadBuildRule
#
@staticmethod
@@ -117,7 +117,7 @@ class GenFdsGlobalVariable:
and GenFdsGlobalVariable.ToolChainTag in ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY] \
and ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY][GenFdsGlobalVariable.ToolChainTag]:
GenFdsGlobalVariable.BuildRuleFamily = ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY][GenFdsGlobalVariable.ToolChainTag]
if DataType.TAB_TOD_DEFINES_FAMILY in ToolDefinition \
and GenFdsGlobalVariable.ToolChainTag in ToolDefinition[DataType.TAB_TOD_DEFINES_FAMILY] \
and ToolDefinition[DataType.TAB_TOD_DEFINES_FAMILY][GenFdsGlobalVariable.ToolChainTag]:
@@ -229,11 +229,11 @@ class GenFdsGlobalVariable:
while Index < len(SourceList):
Source = SourceList[Index]
Index = Index + 1
if File.IsBinary and File == Source and Inf.Binaries is not None and File in Inf.Binaries:
# Skip all files that are not binary libraries
if not Inf.LibraryClass:
continue
continue
RuleObject = BuildRules[DataType.TAB_DEFAULT_BINARY_FILE]
elif FileType in BuildRules:
RuleObject = BuildRules[FileType]
@@ -244,15 +244,15 @@ class GenFdsGlobalVariable:
if LastTarget:
TargetList.add(str(LastTarget))
break
FileType = RuleObject.SourceFileType
# stop at STATIC_LIBRARY for library
if Inf.LibraryClass and FileType == DataType.TAB_STATIC_LIBRARY:
if LastTarget:
TargetList.add(str(LastTarget))
break
Target = RuleObject.Apply(Source)
if not Target:
if LastTarget:
@@ -261,11 +261,11 @@ class GenFdsGlobalVariable:
elif not Target.Outputs:
# Only do build for target with outputs
TargetList.add(str(Target))
# to avoid cyclic rule
if FileType in RuleChain:
break
RuleChain.append(FileType)
SourceList.extend(Target.Outputs)
LastTarget = Target
@@ -645,19 +645,19 @@ class GenFdsGlobalVariable:
@staticmethod
def GenerateOptionRom(Output, EfiInput, BinaryInput, Compress=False, ClassCode=None,
Revision=None, DeviceId=None, VendorId=None, IsMakefile=False):
InputList = []
InputList = []
Cmd = ["EfiRom"]
if len(EfiInput) > 0:
if Compress:
Cmd.append("-ec")
else:
Cmd.append("-e")
for EfiFile in EfiInput:
Cmd.append(EfiFile)
InputList.append (EfiFile)
if len(BinaryInput) > 0:
Cmd.append("-b")
for BinFile in BinaryInput:
@@ -668,7 +668,7 @@ class GenFdsGlobalVariable:
if not GenFdsGlobalVariable.NeedsUpdate(Output, InputList) and not IsMakefile:
return
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, InputList))
if ClassCode is not None:
Cmd += ("-l", ClassCode)
if Revision is not None:
@@ -811,7 +811,7 @@ class GenFdsGlobalVariable:
EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not FixedAtBuild type." % PcdPattern)
if PcdObj.DatumType != DataType.TAB_VOID:
EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not VOID* datum type." % PcdPattern)
PcdValue = PcdObj.DefaultValue
return PcdValue
@@ -827,7 +827,7 @@ class GenFdsGlobalVariable:
EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not FixedAtBuild type." % PcdPattern)
if PcdObj.DatumType != DataType.TAB_VOID:
EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not VOID* datum type." % PcdPattern)
PcdValue = PcdObj.DefaultValue
return PcdValue

View File

@@ -76,7 +76,7 @@ class GuidSection(GuidSectionClassObject) :
FvAddrIsSet = True
else:
FvAddrIsSet = False
if self.ProcessRequired in ("TRUE", "1"):
if self.FvAddr != []:
#no use FvAddr when the image is processed.

View File

@@ -1,7 +1,7 @@
## @file
# process OptionROM generation from FILE statement
#
# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -18,7 +18,7 @@
import Common.LongFilePathOs as os
from GenFdsGlobalVariable import GenFdsGlobalVariable
##
##
#
#
class OptRomFileStatement:
@@ -40,10 +40,10 @@ class OptRomFileStatement:
# @retval string Generated FFS file name
#
def GenFfs(self, Dict = {}, IsMakefile=False):
if self.FileName is not None:
self.FileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
return self.FileName

View File

@@ -1,7 +1,7 @@
## @file
# process OptionROM generation from INF statement
#
# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -26,7 +26,7 @@ from Common.StringUtils import *
from FfsInfStatement import FfsInfStatement
from GenFdsGlobalVariable import GenFdsGlobalVariable
##
##
#
#
class OptRomInfStatement (FfsInfStatement):
@@ -45,7 +45,7 @@ class OptRomInfStatement (FfsInfStatement):
# @param self The object pointer
#
def __GetOptRomParams(self):
if self.OverrideAttribs is None:
self.OverrideAttribs = OptionRom.OverrideAttribs()
@@ -59,21 +59,21 @@ class OptRomInfStatement (FfsInfStatement):
if self.OverrideAttribs.PciVendorId is None:
self.OverrideAttribs.PciVendorId = self.OptRomDefs.get ('PCI_VENDOR_ID')
if self.OverrideAttribs.PciClassCode is None:
self.OverrideAttribs.PciClassCode = self.OptRomDefs.get ('PCI_CLASS_CODE')
if self.OverrideAttribs.PciDeviceId is None:
self.OverrideAttribs.PciDeviceId = self.OptRomDefs.get ('PCI_DEVICE_ID')
if self.OverrideAttribs.PciRevision is None:
self.OverrideAttribs.PciRevision = self.OptRomDefs.get ('PCI_REVISION')
# InfObj = GenFdsGlobalVariable.WorkSpace.BuildObject[self.PathClassObj, self.CurrentArch]
# InfObj = GenFdsGlobalVariable.WorkSpace.BuildObject[self.PathClassObj, self.CurrentArch]
# RecordList = InfObj._RawData[MODEL_META_DATA_HEADER, InfObj._Arch, InfObj._Platform]
# for Record in RecordList:
# Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
# Name = Record[0]
# Name = Record[0]
## GenFfs() method
#
# Generate FFS
@@ -147,8 +147,8 @@ class OptRomInfStatement (FfsInfStatement):
OutputFileList.append(GenSecInputFile)
else:
FileList, IsSect = Section.Section.GetFileList(self, '', Sect.FileExtension)
OutputFileList.extend(FileList)
OutputFileList.extend(FileList)
return OutputFileList

View File

@@ -1,7 +1,7 @@
## @file
# process OptionROM generation
#
# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -28,7 +28,7 @@ from Common.BuildToolError import *
T_CHAR_LF = '\n'
##
##
#
#
class OPTIONROM (OptionRomClassObject):
@@ -57,7 +57,7 @@ class OPTIONROM (OptionRomClassObject):
# Process Modules in FfsList
for FfsFile in self.FfsList :
if isinstance(FfsFile, OptRomInfStatement.OptRomInfStatement):
FilePathNameList = FfsFile.GenFfs(IsMakefile=Flag)
if len(FilePathNameList) == 0:
@@ -70,14 +70,14 @@ class OPTIONROM (OptionRomClassObject):
if not os.path.exists(TmpOutputDir) :
os.makedirs(TmpOutputDir)
TmpOutputFile = os.path.join(TmpOutputDir, FileName+'.tmp')
GenFdsGlobalVariable.GenerateOptionRom(TmpOutputFile,
FilePathNameList,
[],
FfsFile.OverrideAttribs.NeedCompress,
FfsFile.OverrideAttribs.PciClassCode,
FfsFile.OverrideAttribs.PciRevision,
FfsFile.OverrideAttribs.PciDeviceId,
GenFdsGlobalVariable.GenerateOptionRom(TmpOutputFile,
FilePathNameList,
[],
FfsFile.OverrideAttribs.NeedCompress,
FfsFile.OverrideAttribs.PciClassCode,
FfsFile.OverrideAttribs.PciRevision,
FfsFile.OverrideAttribs.PciDeviceId,
FfsFile.OverrideAttribs.PciVendorId,
IsMakefile = Flag)
BinFileList.append(TmpOutputFile)
@@ -89,14 +89,14 @@ class OPTIONROM (OptionRomClassObject):
if not os.path.exists(TmpOutputDir) :
os.makedirs(TmpOutputDir)
TmpOutputFile = os.path.join(TmpOutputDir, FileName+'.tmp')
GenFdsGlobalVariable.GenerateOptionRom(TmpOutputFile,
[FilePathName],
[],
FfsFile.OverrideAttribs.NeedCompress,
FfsFile.OverrideAttribs.PciClassCode,
FfsFile.OverrideAttribs.PciRevision,
FfsFile.OverrideAttribs.PciDeviceId,
GenFdsGlobalVariable.GenerateOptionRom(TmpOutputFile,
[FilePathName],
[],
FfsFile.OverrideAttribs.NeedCompress,
FfsFile.OverrideAttribs.PciClassCode,
FfsFile.OverrideAttribs.PciRevision,
FfsFile.OverrideAttribs.PciDeviceId,
FfsFile.OverrideAttribs.PciVendorId,
IsMakefile=Flag)
BinFileList.append(TmpOutputFile)
@@ -105,13 +105,13 @@ class OPTIONROM (OptionRomClassObject):
EfiFileList.append(FilePathName)
else:
BinFileList.append(FilePathName)
#
# Call EfiRom tool
#
OutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.DriverName)
OutputFile = OutputFile + '.rom'
GenFdsGlobalVariable.GenerateOptionRom(
OutputFile,
EfiFileList,
@@ -121,17 +121,17 @@ class OPTIONROM (OptionRomClassObject):
if not Flag:
GenFdsGlobalVariable.InfLogger( "\nGenerate %s Option ROM Successfully" %self.DriverName)
GenFdsGlobalVariable.SharpCounter = 0
return OutputFile
class OverrideAttribs:
## The constructor
#
# @param self The object pointer
#
def __init__(self):
self.PciVendorId = None
self.PciClassCode = None
self.PciDeviceId = None

View File

@@ -311,7 +311,7 @@ class Region(RegionClassObject):
if self.Offset >= End:
Start = End
continue
# region located in current blocks
# region located in current blocks
else:
# region ended within current blocks
if self.Offset + self.Size <= End:
@@ -363,5 +363,5 @@ class Region(RegionClassObject):
else:
Index += 1

View File

@@ -160,7 +160,7 @@ class Section (SectionClassObject):
SuffixMap = FfsInf.GetFinalTargetSuffixMap()
if Suffix in SuffixMap:
FileList.extend(SuffixMap[Suffix])
#Process the file lists is alphabetical for a same section type
if len (FileList) > 1:
FileList.sort()

View File

@@ -1,7 +1,7 @@
## @file
# process VTF generation
#
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -25,7 +25,7 @@ T_CHAR_LF = '\n'
#
#
class Vtf (VtfClassObject):
## The constructor
#
# @param self The object pointer
@@ -46,7 +46,7 @@ class Vtf (VtfClassObject):
OutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiName + '.Vtf')
BaseAddArg = self.GetBaseAddressArg(FdAddressDict)
OutputArg, VtfRawDict = self.GenOutputArg()
Cmd = (
'GenVtf',
) + OutputArg + (
@@ -55,9 +55,9 @@ class Vtf (VtfClassObject):
GenFdsGlobalVariable.CallExternalTool(Cmd, "GenFv -Vtf Failed!")
GenFdsGlobalVariable.SharpCounter = 0
return VtfRawDict
## GenBsfInf() method
#
# Generate inf used to generate VTF
@@ -154,7 +154,7 @@ class Vtf (VtfClassObject):
for component in self.ComponentStatementList :
if component.CompLoc.upper() != 'NONE' and not (component.CompLoc.upper() in FvList):
FvList.append(component.CompLoc.upper())
return FvList
## GetBaseAddressArg() method
@@ -173,13 +173,13 @@ class Vtf (VtfClassObject):
'-s', '0x%x' % Size,
)
return CmdStr
## GenOutputArg() method
#
# Get output arguments for GenVtf
#
# @param self The object pointer
#
#
def GenOutputArg(self):
FvVtfDict = {}
OutputFileName = ''
@@ -192,6 +192,6 @@ class Vtf (VtfClassObject):
OutputFileName = os.path.join(GenFdsGlobalVariable.FvDir, OutputFileName)
Arg += ('-o', OutputFileName)
FvVtfDict[FvObj.upper()] = OutputFileName
return Arg, FvVtfDict

View File

@@ -1,7 +1,7 @@
## @file
# Generate PCD table for 'Patchable In Module' type PCD with given .map file.
# The Patch PCD table like:
#
#
# PCD Name Offset in binary
# ======== ================
#
@@ -40,9 +40,9 @@ __copyright__ = "Copyright (c) 2008 - 2018, Intel Corporation. All rights reserv
symRe = re.compile('^([\da-fA-F]+):([\da-fA-F]+) +([\.\-:\\\\\w\?@\$<>]+) +([\da-fA-F]+)', re.UNICODE)
def parsePcdInfoFromMapFile(mapfilepath, efifilepath):
""" Parse map file to get binary patch pcd information
""" Parse map file to get binary patch pcd information
@param path Map file absolution path
@return a list which element hold (PcdName, Offset, SectionName)
"""
lines = []
@@ -52,7 +52,7 @@ def parsePcdInfoFromMapFile(mapfilepath, efifilepath):
f.close()
except:
return None
if len(lines) == 0: return None
firstline = lines[0].strip()
if (firstline.startswith("Archive member included ") and
@@ -111,7 +111,7 @@ def _parseForGCC(lines, efifilepath):
m = pcdPatternGcc.match(lines[index + 1].strip())
if m is not None:
bpcds.append((PcdName, int(m.groups(0)[0], 16), int(sections[-1][1], 16), sections[-1][0]))
# get section information from efi file
efisecs = PeImageClass(efifilepath).SectionHeaderList
if efisecs is None or len(efisecs) == 0:
@@ -129,11 +129,11 @@ def _parseForGCC(lines, efifilepath):
#assert efisec[0].strip() == pcd[3].strip() and efisec[1] + redirection == pcd[2], "There are some differences between map file and efi file"
pcds.append([pcd[0], efisec[2] + pcd[1] - efisec[1] - redirection, efisec[0]])
return pcds
def _parseGeneral(lines, efifilepath):
""" For MSFT, ICC, EBC
""" For MSFT, ICC, EBC
@param lines line array for map file
@return a list which element hold (PcdName, Offset, SectionName)
"""
status = 0 #0 - beginning of file; 1 - PE section definition; 2 - symbol table
@@ -177,7 +177,7 @@ def _parseGeneral(lines, efifilepath):
efisecs = PeImageClass(efifilepath).SectionHeaderList
if efisecs is None or len(efisecs) == 0:
return None
pcds = []
for pcd in bPcds:
index = 0
@@ -188,7 +188,7 @@ def _parseGeneral(lines, efifilepath):
elif pcd[4] == index:
pcds.append([pcd[0], efisec[2] + pcd[2], efisec[0]])
return pcds
def generatePcdTable(list, pcdpath):
try:
f = open(pcdpath, 'w')
@@ -196,12 +196,12 @@ def generatePcdTable(list, pcdpath):
pass
f.write('PCD Name Offset Section Name\r\n')
for pcditem in list:
f.write('%-30s 0x%-08X %-6s\r\n' % (pcditem[0], pcditem[1], pcditem[2]))
f.close()
#print 'Success to generate Binary Patch PCD table at %s!' % pcdpath
#print 'Success to generate Binary Patch PCD table at %s!' % pcdpath
if __name__ == '__main__':
UsageString = "%prog -m <MapFile> -e <EfiFile> -o <OutFile>"
@@ -213,7 +213,7 @@ if __name__ == '__main__':
help='Absolute path of EFI binary file.')
parser.add_option('-o', '--outputfile', action='store', dest='outfile',
help='Absolute path of output file to store the got patchable PCD table.')
(options, args) = parser.parse_args()
if options.mapfile is None or options.efifile is None:

View File

@@ -30,14 +30,14 @@ from Common.DataType import *
# Version and Copyright
__version_number__ = ("0.10" + " " + gBUILD_VERSION)
__version__ = "%prog Version " + __version_number__
__copyright__ = "Copyright (c) 2010, Intel Corporation. All rights reserved."
__copyright__ = "Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved."
## PatchBinaryFile method
#
# This method mainly patches the data into binary file.
#
#
# @param FileName File path of the binary file
# @param ValueOffset Offset value
# @param ValueOffset Offset value
# @param TypeName DataType Name
# @param Value Value String
# @param MaxSize MaxSize value
@@ -173,7 +173,7 @@ def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string array." % (ValueString)
else:
#
# Patch ascii string
# Patch ascii string
#
Index = 0
for ByteString in ValueString[1:-1]:

View File

@@ -1,11 +1,11 @@
## @file
# This tool can be used to generate new RSA 2048 bit private/public key pairs
# in a PEM file format using OpenSSL command line utilities that are installed
# This tool can be used to generate new RSA 2048 bit private/public key pairs
# in a PEM file format using OpenSSL command line utilities that are installed
# on the path specified by the system environment variable OPENSSL_PATH.
# This tool can also optionally write one or more SHA 256 hashes of 2048 bit
# public keys to a binary file, write one or more SHA 256 hashes of 2048 bit
# public keys to a file in a C structure format, and in verbose mode display
# one or more SHA 256 hashes of 2048 bit public keys in a C structure format
# This tool can also optionally write one or more SHA 256 hashes of 2048 bit
# public keys to a binary file, write one or more SHA 256 hashes of 2048 bit
# public keys to a file in a C structure format, and in verbose mode display
# one or more SHA 256 hashes of 2048 bit public keys in a C structure format
# on STDOUT.
# This tool has been tested with OpenSSL 1.0.1e 11 Feb 2013
#
@@ -26,7 +26,7 @@ from __future__ import print_function
import os
import sys
import argparse
import argparse
import subprocess
from Common.BuildVersion import gBUILD_VERSION
@@ -35,14 +35,14 @@ from Common.BuildVersion import gBUILD_VERSION
#
__prog__ = 'Rsa2048Sha256GenerateKeys'
__version__ = '%s Version %s' % (__prog__, '0.9 ' + gBUILD_VERSION)
__copyright__ = 'Copyright (c) 2013 - 2014, Intel Corporation. All rights reserved.'
__copyright__ = 'Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.'
__usage__ = '%s [options]' % (__prog__)
if __name__ == '__main__':
#
# Create command line argument parser object
#
#
parser = argparse.ArgumentParser(prog=__prog__, version=__version__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-o", "--output", dest='OutputFile', type=argparse.FileType('wb'), metavar='filename', nargs='*', help="specify the output private key filename in PEM format")
@@ -55,7 +55,7 @@ if __name__ == '__main__':
#
# Parse command line arguments
#
#
args = parser.parse_args()
#
@@ -75,18 +75,18 @@ if __name__ == '__main__':
#
try:
Process = subprocess.Popen('%s version' % (OpenSslCommand), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
except:
except:
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
sys.exit(1)
Version = Process.communicate()
if Process.returncode != 0:
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
sys.exit(Process.returncode)
print(Version[0])
args.PemFileName = []
#
# Check for output file argument
#
@@ -106,7 +106,7 @@ if __name__ == '__main__':
if Process.returncode != 0:
print('ERROR: RSA 2048 key generation failed')
sys.exit(Process.returncode)
#
# Check for input file argument
#
@@ -158,7 +158,7 @@ if __name__ == '__main__':
for Item in PublicKeyHash:
PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (ord(Item))
PublicKeyHashC = PublicKeyHashC[:-2] + '}'
#
# Write SHA 256 of 2048 bit binary public key to public key hash C structure file
#
@@ -167,7 +167,7 @@ if __name__ == '__main__':
args.PublicKeyHashCFile.close ()
except:
pass
#
# If verbose is enabled display the public key in C structure format
#

View File

@@ -21,7 +21,7 @@ from __future__ import print_function
import os
import sys
import argparse
import argparse
import subprocess
import uuid
import struct
@@ -33,7 +33,7 @@ from Common.BuildVersion import gBUILD_VERSION
#
__prog__ = 'Rsa2048Sha256Sign'
__version__ = '%s Version %s' % (__prog__, '0.9 ' + gBUILD_VERSION)
__copyright__ = 'Copyright (c) 2013 - 2016, Intel Corporation. All rights reserved.'
__copyright__ = 'Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.'
__usage__ = '%s -e|-d [options] <input_file>' % (__prog__)
#
@@ -61,7 +61,7 @@ TEST_SIGNING_PRIVATE_KEY_FILENAME = 'TestSigningPrivateKey.pem'
if __name__ == '__main__':
#
# Create command line argument parser object
#
#
parser = argparse.ArgumentParser(prog=__prog__, version=__version__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-e", action="store_true", dest='Encode', help='encode file')
@@ -76,7 +76,7 @@ if __name__ == '__main__':
#
# Parse command line arguments
#
#
args = parser.parse_args()
#
@@ -96,19 +96,19 @@ if __name__ == '__main__':
#
try:
Process = subprocess.Popen('%s version' % (OpenSslCommand), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
except:
except:
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
sys.exit(1)
Version = Process.communicate()
if Process.returncode != 0:
print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
sys.exit(Process.returncode)
print(Version[0])
#
# Read input file into a buffer and save input filename
#
#
args.InputFileName = args.InputFile.name
args.InputFileBuffer = args.InputFile.read()
args.InputFile.close()
@@ -174,17 +174,17 @@ if __name__ == '__main__':
if args.MonotonicCountStr:
format = "%dsQ" % len(args.InputFileBuffer)
FullInputFileBuffer = struct.pack(format, args.InputFileBuffer, args.MonotonicCountValue)
#
#
# Sign the input file using the specified private key and capture signature from STDOUT
#
Process = subprocess.Popen('%s dgst -sha256 -sign "%s"' % (OpenSslCommand, args.PrivateKeyFileName), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
Signature = Process.communicate(input=FullInputFileBuffer)[0]
if Process.returncode != 0:
sys.exit(Process.returncode)
#
# Write output file that contains hash GUID, Public Key, Signature, and Input data
#
#
args.OutputFile = open(args.OutputFileName, 'wb')
args.OutputFile.write(EFI_HASH_ALGORITHM_SHA256_GUID.get_bytes_le())
args.OutputFile.write(PublicKey)
@@ -198,7 +198,7 @@ if __name__ == '__main__':
#
Header = EFI_CERT_BLOCK_RSA_2048_SHA256._make(EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT.unpack_from(args.InputFileBuffer))
args.InputFileBuffer = args.InputFileBuffer[EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT.size:]
#
# Verify that the Hash Type matches the expected SHA256 type
#
@@ -222,10 +222,10 @@ if __name__ == '__main__':
# Write Signature to output file
#
open(args.OutputFileName, 'wb').write(Header.Signature)
#
# Verify signature
#
#
Process = subprocess.Popen('%s dgst -sha256 -prverify "%s" -signature %s' % (OpenSslCommand, args.PrivateKeyFileName, args.OutputFileName), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
Process.communicate(input=FullInputFileBuffer)
if Process.returncode != 0:
@@ -234,6 +234,6 @@ if __name__ == '__main__':
sys.exit(Process.returncode)
#
# Save output file contents from input file
#
# Save output file contents from input file
#
open(args.OutputFileName, 'wb').write(args.InputFileBuffer)

View File

@@ -1 +1 @@
{0x91, 0x29, 0xc4, 0xbd, 0xea, 0x6d, 0xda, 0xb3, 0xaa, 0x6f, 0x50, 0x16, 0xfc, 0xdb, 0x4b, 0x7e, 0x3c, 0xd6, 0xdc, 0xa4, 0x7a, 0x0e, 0xdd, 0xe6, 0x15, 0x8c, 0x73, 0x96, 0xa2, 0xd4, 0xa6, 0x4d}
{0x91, 0x29, 0xc4, 0xbd, 0xea, 0x6d, 0xda, 0xb3, 0xaa, 0x6f, 0x50, 0x16, 0xfc, 0xdb, 0x4b, 0x7e, 0x3c, 0xd6, 0xdc, 0xa4, 0x7a, 0x0e, 0xdd, 0xe6, 0x15, 0x8c, 0x73, 0x96, 0xa2, 0xd4, 0xa6, 0x4d}

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase a common table
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -19,7 +19,7 @@ import Common.EdkLogger as EdkLogger
## TableFile
#
# This class defined a common table
#
#
# @param object: Inherited from object class
#
# @param Cursor: Cursor of the database
@@ -30,7 +30,7 @@ class Table(object):
self.Cur = Cursor
self.Table = ''
self.ID = 0
## Create table
#
# Create a table
@@ -46,18 +46,18 @@ class Table(object):
#
def Insert(self, SqlCommand):
self.Exec(SqlCommand)
## Query table
#
# Query all records of the table
#
#
def Query(self):
EdkLogger.verbose("\nQuery tabel %s started ..." % self.Table)
SqlCommand = """select * from %s""" % self.Table
self.Cur.execute(SqlCommand)
for Rs in self.Cur:
EdkLogger.verbose(str(Rs))
TotalCount = self.GetCount()
EdkLogger.verbose("*** Total %s records in table %s ***" % (TotalCount, self.Table) )
EdkLogger.verbose("Query tabel %s DONE!" % self.Table)
@@ -70,7 +70,7 @@ class Table(object):
SqlCommand = """drop table IF EXISTS %s""" % self.Table
self.Cur.execute(SqlCommand)
EdkLogger.verbose("Drop tabel %s ... DONE!" % self.Table)
## Get count
#
# Get a count of all records of the table
@@ -82,12 +82,12 @@ class Table(object):
self.Cur.execute(SqlCommand)
for Item in self.Cur:
return Item[0]
## Generate ID
#
# Generate an ID if input ID is -1
#
# @param ID: Input ID
# @param ID: Input ID
#
# @retval ID: New generated ID
#
@@ -96,14 +96,14 @@ class Table(object):
self.ID = self.ID + 1
return self.ID
## Init the ID of the table
#
# Init the ID of the table
#
def InitID(self):
self.ID = self.GetCount()
## Exec
#
# Exec Sql Command, return result

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for data models
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -22,7 +22,7 @@ from Common.StringUtils import ConvertToSqlString
## TableDataModel
#
# This class defined a table used for data model
#
#
# @param object: Inherited from object class
#
#
@@ -30,7 +30,7 @@ class TableDataModel(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'DataModel'
## Create table
#
# Create table DataModel
@@ -62,13 +62,13 @@ class TableDataModel(Table):
(Name, Description) = ConvertToSqlString((Name, Description))
SqlCommand = """insert into %s values(%s, %s, '%s', '%s')""" % (self.Table, self.ID, CrossIndex, Name, Description)
Table.Insert(self, SqlCommand)
return self.ID
## Init table
#
# Create all default records of table DataModel
#
#
def InitTable(self):
EdkLogger.verbose("\nInitialize table DataModel started ...")
for Item in DataClass.MODEL_LIST:
@@ -77,7 +77,7 @@ class TableDataModel(Table):
Description = Item[0]
self.Insert(CrossIndex, Name, Description)
EdkLogger.verbose("Initialize table DataModel ... DONE!")
## Get CrossIndex
#
# Get a model's cross index from its name
@@ -91,5 +91,5 @@ class TableDataModel(Table):
self.Cur.execute(SqlCommand)
for Item in self.Cur:
CrossIndex = Item[0]
return CrossIndex

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for dec datas
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -22,7 +22,7 @@ from Common.StringUtils import ConvertToSqlString
## TableDec
#
# This class defined a table used for data model
#
#
# @param object: Inherited from object class
#
#
@@ -30,7 +30,7 @@ class TableDec(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Dec'
## Create table
#
# Create table Dec
@@ -90,14 +90,14 @@ class TableDec(Table):
SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
% (self.Table, self.ID, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
Table.Insert(self, SqlCommand)
return self.ID
## Query table
#
# @param Model: The Model of Record
# @param Model: The Model of Record
#
# @retval: A recordSet of all found records
# @retval: A recordSet of all found records
#
def Query(self, Model):
SqlCommand = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for dsc datas
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -22,7 +22,7 @@ from Common.StringUtils import ConvertToSqlString
## TableDsc
#
# This class defined a table used for data model
#
#
# @param object: Inherited from object class
#
#
@@ -30,7 +30,7 @@ class TableDsc(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Dsc'
## Create table
#
# Create table Dsc
@@ -90,14 +90,14 @@ class TableDsc(Table):
SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
% (self.Table, self.ID, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
Table.Insert(self, SqlCommand)
return self.ID
## Query table
#
# @param Model: The Model of Record
# @param Model: The Model of Record
#
# @retval: A recordSet of all found records
# @retval: A recordSet of all found records
#
def Query(self, Model):
SqlCommand = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for ECC reports
#
# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -24,7 +24,7 @@ import Eot.EotGlobalData as EotGlobalData
## TableReport
#
# This class defined a table used for data model
#
#
# @param object: Inherited from object class
#
#
@@ -32,7 +32,7 @@ class TableEotReport(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Report'
## Create table
#
# Create table report
@@ -68,9 +68,9 @@ class TableEotReport(Table):
% (self.Table, self.ID, ModuleID, ModuleName, ModuleGuid, SourceFileID, SourceFileFullPath, \
ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, Enabled)
Table.Insert(self, SqlCommand)
def GetMaxID(self):
SqlCommand = """select max(ID) from %s""" % self.Table
self.Cur.execute(SqlCommand)
for Item in self.Cur:
return Item[0]
return Item[0]

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for fdf datas
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -22,7 +22,7 @@ from Common.StringUtils import ConvertToSqlString
## TableFdf
#
# This class defined a table used for data model
#
#
# @param object: Inherited from object class
#
#
@@ -30,7 +30,7 @@ class TableFdf(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Fdf'
## Create table
#
# Create table Fdf
@@ -91,14 +91,14 @@ class TableFdf(Table):
SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
% (self.Table, self.ID, Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
Table.Insert(self, SqlCommand)
return self.ID
## Query table
#
# @param Model: The Model of Record
# @param Model: The Model of Record
#
# @retval: A recordSet of all found records
# @retval: A recordSet of all found records
#
def Query(self, Model):
SqlCommand = """select ID, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine from %s

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for files
#
# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -23,14 +23,14 @@ from CommonDataClass.DataClass import FileClass
## TableFile
#
# This class defined a table used for file
#
#
# @param object: Inherited from object class
#
class TableFile(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'File'
## Create table
#
# Create table File
@@ -72,15 +72,15 @@ class TableFile(Table):
SqlCommand = """insert into %s values(%s, '%s', '%s', '%s', '%s', %s, '%s')""" \
% (self.Table, self.ID, Name, ExtName, Path, FullPath, Model, TimeStamp)
Table.Insert(self, SqlCommand)
return self.ID
## InsertFile
#
# Insert one file to table
#
# @param FileFullPath: The full path of the file
# @param Model: The model of the file
#
# @param Model: The model of the file
#
# @retval FileID: The ID after record is inserted
#
def InsertFile(self, FileFullPath, Model):
@@ -89,7 +89,7 @@ class TableFile(Table):
TimeStamp = os.stat(FileFullPath)[8]
File = FileClass(-1, Name, Ext, Filepath, FileFullPath, Model, '', [], [], [])
return self.Insert(File.Name, File.ExtName, File.Path, File.FullPath, File.Model, TimeStamp)
## Get ID of a given file
#
# @param FilePath Path of file

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for functions
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -21,21 +21,21 @@ from Common.StringUtils import ConvertToSqlString
## TableFunction
#
# This class defined a table used for function
#
#
# @param Table: Inherited from Table class
#
class TableFunction(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Function'
## Create table
#
# Create table Function
#
# @param ID: ID of a Function
# @param Header: Header of a Function
# @param Modifier: Modifier of a Function
# @param Modifier: Modifier of a Function
# @param Name: Name of a Function
# @param ReturnStatement: ReturnStatement of a Funciont
# @param StartLine: StartLine of a Function
@@ -72,7 +72,7 @@ class TableFunction(Table):
#
# @param ID: ID of a Function
# @param Header: Header of a Function
# @param Modifier: Modifier of a Function
# @param Modifier: Modifier of a Function
# @param Name: Name of a Function
# @param ReturnStatement: ReturnStatement of a Funciont
# @param StartLine: StartLine of a Function

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for Identifiers
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -21,7 +21,7 @@ from Table import Table
## TableIdentifier
#
# This class defined a table used for Identifier
#
#
# @param object: Inherited from object class
#
#
@@ -29,7 +29,7 @@ class TableIdentifier(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Identifier'
## Create table
#
# Create table Identifier
@@ -87,4 +87,4 @@ class TableIdentifier(Table):
% (self.Table, self.ID, Modifier, Type, Name, Value, Model, BelongsToFile, BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn)
Table.Insert(self, SqlCommand)
return self.ID
return self.ID

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for inf datas
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -22,7 +22,7 @@ from Common.StringUtils import ConvertToSqlString
## TableInf
#
# This class defined a table used for data model
#
#
# @param object: Inherited from object class
#
#
@@ -30,7 +30,7 @@ class TableInf(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Inf'
## Create table
#
# Create table Inf
@@ -96,14 +96,14 @@ class TableInf(Table):
SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
% (self.Table, self.ID, Model, Value1, Value2, Value3, Value4, Value5, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
Table.Insert(self, SqlCommand)
return self.ID
## Query table
#
# @param Model: The Model of Record
# @param Model: The Model of Record
#
# @retval: A recordSet of all found records
# @retval: A recordSet of all found records
#
def Query(self, Model):
SqlCommand = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for pcds
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -21,7 +21,7 @@ from Common.StringUtils import ConvertToSqlString
## TablePcd
#
# This class defined a table used for pcds
#
#
# @param object: Inherited from object class
#
#
@@ -29,7 +29,7 @@ class TablePcd(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Pcd'
## Create table
#
# Create table Pcd
@@ -87,4 +87,4 @@ class TablePcd(Table):
% (self.Table, self.ID, CName, TokenSpaceGuidCName, Token, DatumType, Model, BelongsToFile, BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn)
Table.Insert(self, SqlCommand)
return self.ID
return self.ID

View File

@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for ECC reports
#
# Copyright (c) 2008 - 2015, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -25,7 +25,7 @@ from Common.LongFilePathSupport import OpenLongFilePath as open
## TableReport
#
# This class defined a table used for data model
#
#
# @param object: Inherited from object class
#
#
@@ -33,7 +33,7 @@ class TableReport(Table):
def __init__(self, Cursor):
Table.__init__(self, Cursor)
self.Table = 'Report'
## Create table
#
# Create table report
@@ -78,7 +78,7 @@ class TableReport(Table):
## Query table
#
# @retval: A recordSet of all found records
# @retval: A recordSet of all found records
#
def Query(self):
SqlCommand = """select ID, ErrorID, OtherMsg, BelongsToTable, BelongsToItem, Corrected from %s

View File

@@ -86,13 +86,13 @@ class TargetTool():
if isinstance(self.TargetTxtDictionary[Key], type([])):
print("%-30s = %s" % (Key, ''.join(elem + ' ' for elem in self.TargetTxtDictionary[Key])))
elif self.TargetTxtDictionary[Key] is None:
errMsg += " Missing %s configuration information, please use TargetTool to set value!" % Key + os.linesep
errMsg += " Missing %s configuration information, please use TargetTool to set value!" % Key + os.linesep
else:
print("%-30s = %s" % (Key, self.TargetTxtDictionary[Key]))
if errMsg != '':
print(os.linesep + 'Warning:' + os.linesep + errMsg)
def RWFile(self, CommentCharacter, KeySplitCharacter, Num):
try:
fr = open(self.FileName, 'r')
@@ -111,7 +111,7 @@ class TargetTool():
existKeys.append(Key)
else:
print("Warning: Found duplicate key item in original configuration files!")
if Num == 0:
Line = "%-30s = \n" % Key
else:
@@ -126,12 +126,12 @@ class TargetTool():
if Line is None:
Line = "%-30s = " % key
fw.write(Line)
fr.close()
fw.close()
os.remove(self.FileName)
os.rename(os.path.normpath(os.path.join(self.WorkSpace, 'Conf\\targetnew.txt')), self.FileName)
except:
last_type, last_value, last_tb = sys.exc_info()
traceback.print_exception(last_type, last_value, last_tb)
@@ -143,20 +143,20 @@ def GetConfigureKeyValue(self, Key):
if os.path.exists(dscFullPath):
Line = "%-30s = %s\n" % (Key, self.Opt.DSCFILE)
else:
EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
"DSC file %s does not exist!" % self.Opt.DSCFILE, RaiseError=False)
elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.Opt.TOOL_DEFINITION_FILE is not None:
tooldefFullPath = os.path.join(self.WorkSpace, self.Opt.TOOL_DEFINITION_FILE)
if os.path.exists(tooldefFullPath):
Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_DEFINITION_FILE)
else:
EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
"Tooldef file %s does not exist!" % self.Opt.TOOL_DEFINITION_FILE, RaiseError=False)
elif self.Opt.NUM >= 2:
Line = "%-30s = %s\n" % (Key, 'Enable')
elif self.Opt.NUM <= 1:
Line = "%-30s = %s\n" % (Key, 'Disable')
Line = "%-30s = %s\n" % (Key, 'Disable')
elif Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER and self.Opt.NUM is not None:
Line = "%-30s = %s\n" % (Key, str(self.Opt.NUM))
elif Key == TAB_TAT_DEFINES_TARGET and self.Opt.TARGET is not None:
@@ -170,13 +170,13 @@ def GetConfigureKeyValue(self, Key):
if os.path.exists(buildruleFullPath):
Line = "%-30s = %s\n" % (Key, self.Opt.BUILD_RULE_FILE)
else:
EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
"Build rule file %s does not exist!" % self.Opt.BUILD_RULE_FILE, RaiseError=False)
return Line
VersionNumber = ("0.01" + " " + gBUILD_VERSION)
__version__ = "%prog Version " + VersionNumber
__copyright__ = "Copyright (c) 2007 - 2010, Intel Corporation All rights reserved."
__copyright__ = "Copyright (c) 2007 - 2018, Intel Corporation All rights reserved."
__usage__ = "%prog [options] {args} \
\nArgs: \
\n Clean clean the all default configuration of target.txt. \
@@ -200,7 +200,7 @@ def RangeCheckCallback(option, opt_str, value, parser):
setattr(parser.values, option.dest, value)
else:
parser.error("Option %s only allows one instance in command line!" % option)
def MyOptionParser():
parser = OptionParser(version=__version__, prog="TargetTool.exe", usage=__usage__, description=__copyright__)
parser.add_option("-a", "--arch", action="append", type="choice", choices=['IA32', 'X64', 'IPF', 'EBC', 'ARM', 'AARCH64', '0'], dest="TARGET_ARCH",
@@ -226,7 +226,7 @@ if __name__ == '__main__':
if os.getenv('WORKSPACE') is None:
print("ERROR: WORKSPACE should be specified or edksetup script should be executed before run TargetTool")
sys.exit(1)
(opt, args) = MyOptionParser()
if len(args) != 1 or (args[0].lower() != 'print' and args[0].lower() != 'clean' and args[0].lower() != 'set'):
print("The number of args isn't 1 or the value of args is invalid.")

View File

@@ -31,7 +31,7 @@ from Common.LongFilePathSupport import OpenLongFilePath as open
# Version and Copyright
__version_number__ = ("0.10" + " " + gBUILD_VERSION)
__version__ = "%prog Version " + __version_number__
__copyright__ = "Copyright (c) 2007-2017, Intel Corporation. All rights reserved."
__copyright__ = "Copyright (c) 2007-2018, Intel Corporation. All rights reserved."
## Regular expression for matching Line Control directive like "#line xxx"
gLineControlDirective = re.compile('^\s*#(?:line)?\s+([0-9]+)\s+"*([^"]*)"')
@@ -261,7 +261,7 @@ def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
#
def TrimPreprocessedVfr(Source, Target):
CreateDirectory(os.path.dirname(Target))
try:
f = open (Source, 'r')
except:
@@ -338,7 +338,7 @@ def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None):
SearchPathList = [LocalSearchPath] + IncludePathList
else:
SearchPathList = IncludePathList
for IncludePath in SearchPathList:
IncludeFile = os.path.join(IncludePath, Source)
if os.path.isfile(IncludeFile):
@@ -349,7 +349,7 @@ def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None):
except:
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
# avoid A "include" B and B "include" A
IncludeFile = os.path.abspath(os.path.normpath(IncludeFile))
if IncludeFile in gIncludedAslFile:
@@ -357,7 +357,7 @@ def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None):
ExtraData= "%s -> %s" % (" -> ".join(gIncludedAslFile), IncludeFile))
return []
gIncludedAslFile.append(IncludeFile)
for Line in F:
LocalSearchPath = None
Result = gAslIncludePattern.findall(Line)
@@ -367,7 +367,7 @@ def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None):
NewFileContent.append("%s%s" % (Indent, Line))
continue
#
# We should first search the local directory if current file are using pattern #include "XXX"
# We should first search the local directory if current file are using pattern #include "XXX"
#
if Result[0][2] == '"':
LocalSearchPath = os.path.dirname(IncludeFile)
@@ -388,20 +388,20 @@ def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None):
#
# @param Source File to be trimmed
# @param Target File to store the trimmed content
# @param IncludePathFile The file to log the external include path
# @param IncludePathFile The file to log the external include path
#
def TrimAslFile(Source, Target, IncludePathFile):
CreateDirectory(os.path.dirname(Target))
SourceDir = os.path.dirname(Source)
if SourceDir == '':
SourceDir = '.'
#
# Add source directory as the first search directory
#
IncludePathList = [SourceDir]
#
# If additional include path file is specified, append them all
# to the search directory list.
@@ -672,7 +672,7 @@ def Main():
EdkLogger.SetLevel(CommandOptions.LogLevel)
except FatalError as X:
return 1
try:
if CommandOptions.FileType == "Vfr":
if CommandOptions.OutputFile is None:

View File

@@ -2,11 +2,11 @@
#
# This file is for build version number auto generation
#
# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
Build version information
'''
gBUILD_VERSION = ""
gBUILD_VERSION = "Developer Build based on Revision: Unknown"

View File

@@ -1,11 +1,11 @@
## @file
# This file is for installed package information database operations
#
# Copyright (c) 2011 - 2017, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
#
@@ -40,7 +40,7 @@ DEPEX_CHECK_PACKAGE_NOT_FOUND, DEPEX_CHECK_DP_NOT_FOUND) = (0, 1, 2, 3)
## DependencyRules
#
# This class represents the dependency rule check mechanism
#
#
# @param object: Inherited from object class
#
class DependencyRules(object):
@@ -53,7 +53,7 @@ class DependencyRules(object):
# Add package info from the DIST to be installed.
self.PkgsToBeDepend.extend(self.GenToBeInstalledPkgList(ToBeInstalledPkgList))
def GenToBeInstalledPkgList(self, ToBeInstalledPkgList):
if not ToBeInstalledPkgList:
return []
@@ -81,7 +81,7 @@ class DependencyRules(object):
return True
else:
return False
## Check whether a module depex satisfied.
#
# @param ModuleObj: A module object
@@ -101,7 +101,7 @@ class DependencyRules(object):
#
Exist = self.CheckPackageExists(Dep.GetGuid(), Dep.GetVersion())
#
# check whether satisfied by current distribution
# check whether satisfied by current distribution
#
if not Exist:
if DpObj is None:
@@ -119,7 +119,7 @@ class DependencyRules(object):
else:
Result = False
break
if not Result:
Logger.Error("CheckModuleDepex", UNKNOWN_ERROR, \
ST.ERR_DEPENDENCY_NOT_MATCH % (ModuleObj.GetName(), \
@@ -127,7 +127,7 @@ class DependencyRules(object):
Dep.GetGuid(), \
Dep.GetVersion()))
return Result
## Check whether a package exists in a package list specified by PkgsToBeDepend.
#
# @param Guid: Guid of a package
@@ -154,7 +154,7 @@ class DependencyRules(object):
Logger.Verbose(ST.MSG_CHECK_PACKAGE_FINISH)
return Found
## Check whether a package depex satisfied.
#
# @param PkgObj: A package object
@@ -171,7 +171,7 @@ class DependencyRules(object):
else:
return False
return True
## Check whether a DP exists.
#
# @param Guid: Guid of a Distribution
@@ -216,7 +216,7 @@ class DependencyRules(object):
return True, DpObj
## Check whether a DP depex satisfied by current workspace
## Check whether a DP depex satisfied by current workspace
# (excluding the original distribution's packages to be replaced) for Replace
#
# @param DpObj: A distribution object
@@ -243,17 +243,17 @@ class DependencyRules(object):
continue
else:
return False
for ModKey in DpObj.ModuleSurfaceArea.keys():
ModObj = DpObj.ModuleSurfaceArea[ModKey]
if self.CheckModuleDepexSatisfied(ModObj, DpObj):
continue
else:
return False
return True
## Check whether a DP could be removed from current workspace.
## Check whether a DP could be removed from current workspace.
#
# @param DpGuid: File's guid
# @param DpVersion: File's version
@@ -267,7 +267,7 @@ class DependencyRules(object):
#
# remove modules that included in current DP
# List of item (FilePath)
DpModuleList = self.IpiDb.GetDpModuleList(DpGuid, DpVersion)
DpModuleList = self.IpiDb.GetDpModuleList(DpGuid, DpVersion)
for Module in DpModuleList:
if Module in WsModuleList:
WsModuleList.remove(Module)
@@ -277,7 +277,7 @@ class DependencyRules(object):
#
# get packages in current Dp and find the install path
# List of item (PkgGuid, PkgVersion, InstallPath)
DpPackageList = self.IpiDb.GetPackageListFromDp(DpGuid, DpVersion)
DpPackageList = self.IpiDb.GetPackageListFromDp(DpGuid, DpVersion)
DpPackagePathList = []
WorkSP = GlobalData.gWORKSPACE
for (PkgName, PkgGuid, PkgVersion, DecFile) in self.WsPkgList:
@@ -290,18 +290,18 @@ class DependencyRules(object):
else:
InstallPath = DecPath
DecFileRelaPath = DecFile
if (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
DpPackagePathList.append(DecFileRelaPath)
DpPackageList.remove((PkgGuid, PkgVersion, InstallPath))
#
# the left items in DpPackageList are the packages that installed but not found anymore
#
for (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
Logger.Warn("UPT",
ST.WARN_INSTALLED_PACKAGE_NOT_FOUND%(PkgGuid, PkgVersion, InstallPath))
#
# check modules to see if has dependency on package of current DP
#
@@ -320,7 +320,7 @@ class DependencyRules(object):
# @param NewDpPkgList: a list of package information (Guid, Version) in new Dp
# @retval Replaceable: True if distribution could be replaced, False Else
# @retval DependModuleList: the list of modules that make distribution can not be replaced
#
#
def CheckDpDepexForReplace(self, OrigDpGuid, OrigDpVersion, NewDpPkgList):
Replaceable = True
DependModuleList = []
@@ -328,19 +328,19 @@ class DependencyRules(object):
#
# remove modules that included in current DP
# List of item (FilePath)
DpModuleList = self.IpiDb.GetDpModuleList(OrigDpGuid, OrigDpVersion)
DpModuleList = self.IpiDb.GetDpModuleList(OrigDpGuid, OrigDpVersion)
for Module in DpModuleList:
if Module in WsModuleList:
WsModuleList.remove(Module)
else:
Logger.Warn("UPT\n",
ST.ERR_MODULE_NOT_INSTALLED % Module)
OtherPkgList = NewDpPkgList
#
# get packages in current Dp and find the install path
# List of item (PkgGuid, PkgVersion, InstallPath)
DpPackageList = self.IpiDb.GetPackageListFromDp(OrigDpGuid, OrigDpVersion)
DpPackageList = self.IpiDb.GetPackageListFromDp(OrigDpGuid, OrigDpVersion)
DpPackagePathList = []
WorkSP = GlobalData.gWORKSPACE
for (PkgName, PkgGuid, PkgVersion, DecFile) in self.WsPkgList:
@@ -353,7 +353,7 @@ class DependencyRules(object):
else:
InstallPath = DecPath
DecFileRelaPath = DecFile
if (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
DpPackagePathList.append(DecFileRelaPath)
DpPackageList.remove((PkgGuid, PkgVersion, InstallPath))
@@ -366,7 +366,7 @@ class DependencyRules(object):
for (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
Logger.Warn("UPT",
ST.WARN_INSTALLED_PACKAGE_NOT_FOUND%(PkgGuid, PkgVersion, InstallPath))
#
# check modules to see if it can be satisfied by package not belong to removed DP
#
@@ -376,8 +376,8 @@ class DependencyRules(object):
DependModuleList.append(Module)
return (Replaceable, DependModuleList)
## check whether module depends on packages in DpPackagePathList, return True
## check whether module depends on packages in DpPackagePathList, return True
# if found, False else
#
# @param Path: a module path
@@ -432,7 +432,7 @@ def GetPackagePath(InfPath):
# @param DpPackagePathList: a list of Package Paths
# @param OtherPkgList: a list of Package Information (Guid, Version)
# @retval: False: module depends on package in DpPackagePathList and can not be satisfied by OtherPkgList
# True: either module doesn't depend on DpPackagePathList or module depends on DpPackagePathList
# True: either module doesn't depend on DpPackagePathList or module depends on DpPackagePathList
# but can be satisfied by OtherPkgList
#
def VerifyReplaceModuleDep(Path, DpPackagePathList, OtherPkgList):

View File

@@ -1,11 +1,11 @@
## @file
# This file is used to define a class object to describe a distribution package
#
# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -50,62 +50,62 @@ class DistributionPackageHeaderObject(IdentificationObject, \
self.Date = ''
self.Signature = 'Md5Sum'
self.XmlSpecification = ''
def GetReadOnly(self):
return self.ReadOnly
def SetReadOnly(self, ReadOnly):
self.ReadOnly = ReadOnly
def GetRePackage(self):
return self.RePackage
def SetRePackage(self, RePackage):
self.RePackage = RePackage
def GetVendor(self):
return self.Vendor
def SetDate(self, Date):
self.Date = Date
def GetDate(self):
return self.Date
def SetSignature(self, Signature):
self.Signature = Signature
def GetSignature(self):
return self.Signature
def SetXmlSpecification(self, XmlSpecification):
self.XmlSpecification = XmlSpecification
def GetXmlSpecification(self):
return self.XmlSpecification
## DistributionPackageClass
#
# @param object: DistributionPackageClass
#
#
class DistributionPackageClass(object):
def __init__(self):
self.Header = DistributionPackageHeaderObject()
#
# {(Guid, Version, Path) : PackageObj}
#
self.PackageSurfaceArea = Sdict()
self.PackageSurfaceArea = Sdict()
#
# {(Guid, Version, Name, Path) : ModuleObj}
#
self.ModuleSurfaceArea = Sdict()
self.ModuleSurfaceArea = Sdict()
self.Tools = MiscFileObject()
self.MiscellaneousFiles = MiscFileObject()
self.UserExtensions = []
self.FileList = []
## Get all included packages and modules for a distribution package
#
#
# @param WorkspaceDir: WorkspaceDir
# @param PackageList: A list of all packages
# @param ModuleList: A list of all modules
@@ -132,7 +132,7 @@ class DistributionPackageClass(object):
WsRelPath = os.path.normpath(WsRelPath)
if ModuleList and WsRelPath in ModuleList:
Logger.Error("UPT",
OPTION_VALUE_INVALID,
OPTION_VALUE_INVALID,
ST.ERR_NOT_STANDALONE_MODULE_ERROR%\
(WsRelPath, PackageFile))
Filename = os.path.normpath\
@@ -140,12 +140,12 @@ class DistributionPackageClass(object):
os.path.splitext(Filename)
#
# Call INF parser to generate Inf Object.
# Actually, this call is not directly call, but wrapped by
# Actually, this call is not directly call, but wrapped by
# Inf class in InfPomAlignment.
#
try:
ModuleObj = InfPomAlignment(Filename, WorkspaceDir, PackageObj.GetPackagePath())
#
# Add module to package
#
@@ -161,7 +161,7 @@ class DistributionPackageClass(object):
ST.WRN_EDK1_INF_FOUND%Filename)
else:
raise
self.PackageSurfaceArea\
[(PackageObj.GetGuid(), PackageObj.GetVersion(), \
PackageObj.GetCombinePath())] = PackageObj
@@ -176,16 +176,16 @@ class DistributionPackageClass(object):
try:
ModuleObj = InfPomAlignment(ModuleFileFullPath, WorkspaceDir)
ModuleKey = (ModuleObj.GetGuid(),
ModuleObj.GetVersion(),
ModuleObj.GetName(),
ModuleKey = (ModuleObj.GetGuid(),
ModuleObj.GetVersion(),
ModuleObj.GetName(),
ModuleObj.GetCombinePath())
self.ModuleSurfaceArea[ModuleKey] = ModuleObj
except FatalError as ErrCode:
if ErrCode.message == EDK1_INF_ERROR:
Logger.Error("UPT",
EDK1_INF_ERROR,
ST.WRN_EDK1_INF_FOUND%ModuleFileFullPath,
ST.WRN_EDK1_INF_FOUND%ModuleFileFullPath,
ExtraData=ST.ERR_NOT_SUPPORTED_SA_MODULE)
else:
raise
@@ -193,16 +193,16 @@ class DistributionPackageClass(object):
# Recover WorkspaceDir
WorkspaceDir = Root
## Get all files included for a distribution package, except tool/misc of
## Get all files included for a distribution package, except tool/misc of
# distribution level
#
#
# @retval DistFileList A list of filepath for NonMetaDataFile, relative to workspace
# @retval MetaDataFileList A list of filepath for MetaDataFile, relative to workspace
#
def GetDistributionFileList(self):
MetaDataFileList = []
SkipModulesUniList = []
for Guid, Version, Path in self.PackageSurfaceArea:
Package = self.PackageSurfaceArea[Guid, Version, Path]
PackagePath = Package.GetPackagePath()
@@ -221,7 +221,7 @@ class DistributionPackageClass(object):
MiscFileFullPath = os.path.normpath(os.path.join(PackagePath, FileObj.GetURI()))
if MiscFileFullPath not in self.FileList:
self.FileList.append(MiscFileFullPath)
Module = None
ModuleDict = Package.GetModuleDict()
for Guid, Version, Name, Path in ModuleDict:
@@ -262,12 +262,12 @@ class DistributionPackageClass(object):
for NonMetaDataFile in NonMetaDataFileList:
if NonMetaDataFile not in self.FileList:
self.FileList.append(NonMetaDataFile)
for SkipModuleUni in SkipModulesUniList:
if SkipModuleUni in self.FileList:
self.FileList.remove(SkipModuleUni)
return self.FileList, MetaDataFileList

View File

@@ -1,11 +1,11 @@
## @file
# This file hooks file and directory creation and removal
#
# Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,

View File

@@ -1,11 +1,11 @@
## @file
# This file is for installed package information database operations
#
# Copyright (c) 2011 - 2017, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -33,8 +33,8 @@ import platform as pf
#
# This class represents the installed package information database
# Add/Remove/Get installed distribution package information here.
#
#
#
#
# @param object: Inherited from object class
# @param DbPath: A string for the path of the database
#
@@ -71,16 +71,16 @@ class IpiDatabase(object):
SqlCommand = """
create table %s (
Dummy TEXT NOT NULL,
PRIMARY KEY (Dummy)
PRIMARY KEY (Dummy)
)""" % self.DummyTable
self.Cur.execute(SqlCommand)
self.Conn.commit()
except sqlite3.OperationalError:
Logger.Error("UPT",
UPT_ALREADY_RUNNING_ERROR,
Logger.Error("UPT",
UPT_ALREADY_RUNNING_ERROR,
ST.ERR_UPT_ALREADY_RUNNING_ERROR
)
#
# Create new table
#
@@ -89,12 +89,12 @@ class IpiDatabase(object):
DpGuid TEXT NOT NULL,DpVersion TEXT NOT NULL,
InstallTime REAL NOT NULL,
NewPkgFileName TEXT NOT NULL,
PkgFileName TEXT NOT NULL,
PkgFileName TEXT NOT NULL,
RePackage TEXT NOT NULL,
PRIMARY KEY (DpGuid, DpVersion)
PRIMARY KEY (DpGuid, DpVersion)
)""" % self.DpTable
self.Cur.execute(SqlCommand)
SqlCommand = """
create table IF NOT EXISTS %s (
FilePath TEXT NOT NULL,
@@ -104,7 +104,7 @@ class IpiDatabase(object):
PRIMARY KEY (FilePath)
)""" % self.DpFileListTable
self.Cur.execute(SqlCommand)
SqlCommand = """
create table IF NOT EXISTS %s (
PackageGuid TEXT NOT NULL,
@@ -116,7 +116,7 @@ class IpiDatabase(object):
PRIMARY KEY (PackageGuid, PackageVersion, InstallPath)
)""" % self.PkgTable
self.Cur.execute(SqlCommand)
SqlCommand = """
create table IF NOT EXISTS %s (
ModuleGuid TEXT NOT NULL,
@@ -129,7 +129,7 @@ class IpiDatabase(object):
PRIMARY KEY (ModuleGuid, ModuleVersion, ModuleName, InstallPath)
)""" % self.ModInPkgTable
self.Cur.execute(SqlCommand)
SqlCommand = """
create table IF NOT EXISTS %s (
ModuleGuid TEXT NOT NULL,
@@ -142,7 +142,7 @@ class IpiDatabase(object):
PRIMARY KEY (ModuleGuid, ModuleVersion, ModuleName, InstallPath)
)""" % self.StandaloneModTable
self.Cur.execute(SqlCommand)
SqlCommand = """
create table IF NOT EXISTS %s (
ModuleGuid TEXT NOT NULL,
@@ -153,9 +153,9 @@ class IpiDatabase(object):
DepexVersion TEXT
)""" % self.ModDepexTable
self.Cur.execute(SqlCommand)
self.Conn.commit()
Logger.Verbose(ST.MSG_INIT_IPI_FINISH)
def RollBack(self):
@@ -199,7 +199,7 @@ class IpiDatabase(object):
self._AddDpFilePathList(DpObj.Header.GetGuid(), \
DpObj.Header.GetVersion(), FilePath, \
Md5Sum)
for ModKey in DpObj.ModuleSurfaceArea.keys():
ModGuid = ModKey[0]
ModVersion = ModKey[1]
@@ -219,17 +219,17 @@ class IpiDatabase(object):
self._AddDpFilePathList(DpObj.Header.GetGuid(), \
DpObj.Header.GetVersion(), \
Path, Md5Sum)
#
# add tool/misc files
#
for (Path, Md5Sum) in DpObj.FileList:
self._AddDpFilePathList(DpObj.Header.GetGuid(), \
DpObj.Header.GetVersion(), Path, Md5Sum)
self._AddDp(DpObj.Header.GetGuid(), DpObj.Header.GetVersion(), \
NewDpPkgFileName, DpPkgFileName, RePackage)
except sqlite3.IntegrityError as DetailMsg:
Logger.Error("UPT",
UPT_DB_UPDATE_ERROR,
@@ -239,17 +239,17 @@ class IpiDatabase(object):
## Add a distribution install information
#
# @param Guid Guid of the distribution package
# @param Version Version of the distribution package
# @param Guid Guid of the distribution package
# @param Version Version of the distribution package
# @param NewDpFileName the saved filename of distribution package file
# @param DistributionFileName the filename of distribution package file
#
def _AddDp(self, Guid, Version, NewDpFileName, DistributionFileName, \
RePackage):
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
#
# Add newly installed DP information to DB.
#
@@ -264,10 +264,10 @@ class IpiDatabase(object):
DistributionFileName, str(RePackage).upper())
self.Cur.execute(SqlCommand)
## Add a file list from DP
#
# @param DpGuid: A DpGuid
# @param DpGuid: A DpGuid
# @param DpVersion: A DpVersion
# @param Path: A Path
# @param Path: A Md5Sum
@@ -284,26 +284,26 @@ class IpiDatabase(object):
(self.DpFileListTable, Path, DpGuid, DpVersion, Md5Sum)
self.Cur.execute(SqlCommand)
## Add a package install information
#
# @param Guid: A package guid
# @param Guid: A package guid
# @param Version: A package version
# @param DpGuid: A DpGuid
# @param DpGuid: A DpGuid
# @param DpVersion: A DpVersion
# @param Path: A Path
#
def _AddPackage(self, Guid, Version, DpGuid=None, DpVersion=None, Path=''):
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
if DpGuid is None or len(DpGuid.strip()) == 0:
DpGuid = 'N/A'
if DpVersion is None or len(DpVersion.strip()) == 0:
DpVersion = 'N/A'
#
# Add newly installed package information to DB.
#
@@ -312,10 +312,10 @@ class IpiDatabase(object):
"""insert into %s values('%s', '%s', %s, '%s', '%s', '%s')""" % \
(self.PkgTable, Guid, Version, CurrentTime, DpGuid, DpVersion, Path)
self.Cur.execute(SqlCommand)
## Add a module that from a package install information
#
# @param Guid: Module Guid
# @param Guid: Module Guid
# @param Version: Module version
# @param Name: Module Name
# @param PkgGuid: Package Guid
@@ -324,21 +324,21 @@ class IpiDatabase(object):
#
def _AddModuleInPackage(self, Guid, Version, Name, PkgGuid=None, \
PkgVersion=None, Path=''):
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
if PkgGuid is None or len(PkgGuid.strip()) == 0:
PkgGuid = 'N/A'
if PkgVersion is None or len(PkgVersion.strip()) == 0:
PkgVersion = 'N/A'
if os.name == 'posix':
Path = Path.replace('\\', os.sep)
else:
Path = Path.replace('/', os.sep)
#
# Add module from package information to DB.
#
@@ -348,7 +348,7 @@ class IpiDatabase(object):
(self.ModInPkgTable, Guid, Version, Name, CurrentTime, PkgGuid, PkgVersion, \
Path)
self.Cur.execute(SqlCommand)
## Add a module that is standalone install information
#
# @param Guid: a module Guid
@@ -360,16 +360,16 @@ class IpiDatabase(object):
#
def _AddStandaloneModule(self, Guid, Version, Name, DpGuid=None, \
DpVersion=None, Path=''):
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
if DpGuid is None or len(DpGuid.strip()) == 0:
DpGuid = 'N/A'
if DpVersion is None or len(DpVersion.strip()) == 0:
DpVersion = 'N/A'
#
# Add module standalone information to DB.
#
@@ -379,7 +379,7 @@ class IpiDatabase(object):
(self.StandaloneModTable, Guid, Version, Name, CurrentTime, DpGuid, \
DpVersion, Path)
self.Cur.execute(SqlCommand)
## Add a module depex
#
# @param Guid: a module Guid
@@ -390,49 +390,49 @@ class IpiDatabase(object):
#
def _AddModuleDepex(self, Guid, Version, Name, Path, DepexGuid=None, \
DepexVersion=None):
if DepexGuid is None or len(DepexGuid.strip()) == 0:
DepexGuid = 'N/A'
if DepexVersion is None or len(DepexVersion.strip()) == 0:
DepexVersion = 'N/A'
if os.name == 'posix':
Path = Path.replace('\\', os.sep)
else:
Path = Path.replace('/', os.sep)
#
# Add module depex information to DB.
#
SqlCommand = """insert into %s values('%s', '%s', '%s', '%s', '%s', '%s')"""\
% (self.ModDepexTable, Guid, Version, Name, Path, DepexGuid, DepexVersion)
self.Cur.execute(SqlCommand)
## Remove a distribution install information, if no version specified,
## Remove a distribution install information, if no version specified,
# remove all DPs with this Guid.
#
# @param DpGuid: guid of dpex
# @param DpGuid: guid of dpex
# @param DpVersion: version of dpex
#
def RemoveDpObj(self, DpGuid, DpVersion):
PkgList = self.GetPackageListFromDp(DpGuid, DpVersion)
#
# delete from ModDepex the standalone module's dependency
#
SqlCommand = \
"""delete from ModDepexInfo where ModDepexInfo.ModuleGuid in
(select ModuleGuid from StandaloneModInfo as B where B.DpGuid = '%s'
"""delete from ModDepexInfo where ModDepexInfo.ModuleGuid in
(select ModuleGuid from StandaloneModInfo as B where B.DpGuid = '%s'
and B.DpVersion = '%s')
and ModDepexInfo.ModuleVersion in
(select ModuleVersion from StandaloneModInfo as B
(select ModuleVersion from StandaloneModInfo as B
where B.DpGuid = '%s' and B.DpVersion = '%s')
and ModDepexInfo.ModuleName in
(select ModuleName from StandaloneModInfo as B
(select ModuleName from StandaloneModInfo as B
where B.DpGuid = '%s' and B.DpVersion = '%s')
and ModDepexInfo.InstallPath in
(select InstallPath from StandaloneModInfo as B
(select InstallPath from StandaloneModInfo as B
where B.DpGuid = '%s' and B.DpVersion = '%s') """ % \
(DpGuid, DpVersion, DpGuid, DpVersion, DpGuid, DpVersion, DpGuid, DpVersion)
@@ -443,24 +443,24 @@ class IpiDatabase(object):
for Pkg in PkgList:
SqlCommand = \
"""delete from ModDepexInfo where ModDepexInfo.ModuleGuid in
(select ModuleGuid from ModInPkgInfo
where ModInPkgInfo.PackageGuid ='%s' and
"""delete from ModDepexInfo where ModDepexInfo.ModuleGuid in
(select ModuleGuid from ModInPkgInfo
where ModInPkgInfo.PackageGuid ='%s' and
ModInPkgInfo.PackageVersion = '%s')
and ModDepexInfo.ModuleVersion in
(select ModuleVersion from ModInPkgInfo
where ModInPkgInfo.PackageGuid ='%s' and
(select ModuleVersion from ModInPkgInfo
where ModInPkgInfo.PackageGuid ='%s' and
ModInPkgInfo.PackageVersion = '%s')
and ModDepexInfo.ModuleName in
(select ModuleName from ModInPkgInfo
where ModInPkgInfo.PackageGuid ='%s' and
(select ModuleName from ModInPkgInfo
where ModInPkgInfo.PackageGuid ='%s' and
ModInPkgInfo.PackageVersion = '%s')
and ModDepexInfo.InstallPath in
(select InstallPath from ModInPkgInfo where
ModInPkgInfo.PackageGuid ='%s'
(select InstallPath from ModInPkgInfo where
ModInPkgInfo.PackageGuid ='%s'
and ModInPkgInfo.PackageVersion = '%s')""" \
% (Pkg[0], Pkg[1], Pkg[0], Pkg[1], Pkg[0], Pkg[1], Pkg[0], Pkg[1])
self.Cur.execute(SqlCommand)
#
# delete the standalone module
@@ -474,7 +474,7 @@ class IpiDatabase(object):
#
for Pkg in PkgList:
SqlCommand = \
"""delete from %s where %s.PackageGuid ='%s'
"""delete from %s where %s.PackageGuid ='%s'
and %s.PackageVersion = '%s'""" % \
(self.ModInPkgTable, self.ModInPkgTable, Pkg[0], \
self.ModInPkgTable, Pkg[1])
@@ -493,23 +493,23 @@ class IpiDatabase(object):
"""delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
(self.DpFileListTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
#
#
# delete DP
#
SqlCommand = \
"""delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
(self.DpTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
#self.Conn.commit()
## Get a list of distribution install information.
#
# @param Guid: distribution package guid
# @param Version: distribution package version
# @param Guid: distribution package guid
# @param Version: distribution package version
#
def GetDp(self, Guid, Version):
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
Logger.Verbose(ST.MSG_GET_DP_INSTALL_LIST)
@@ -517,7 +517,7 @@ class IpiDatabase(object):
SqlCommand = """select * from %s where DpGuid ='%s'""" % \
(self.DpTable, DpGuid)
self.Cur.execute(SqlCommand)
else:
Logger.Verbose(ST.MSG_GET_DP_INSTALL_INFO_START)
(DpGuid, DpVersion) = (Guid, Version)
@@ -533,14 +533,14 @@ class IpiDatabase(object):
InstallTime = DpInfo[2]
PkgFileName = DpInfo[3]
DpList.append((DpGuid, DpVersion, InstallTime, PkgFileName))
Logger.Verbose(ST.MSG_GET_DP_INSTALL_INFO_FINISH)
Logger.Verbose(ST.MSG_GET_DP_INSTALL_INFO_FINISH)
return DpList
## Get a list of distribution install dirs
#
# @param Guid: distribution package guid
# @param Version: distribution package version
# @param Guid: distribution package guid
# @param Version: distribution package version
#
def GetDpInstallDirList(self, Guid, Version):
SqlCommand = """select InstallPath from PkgInfo where DpGuid = '%s' and DpVersion = '%s'""" % (Guid, Version)
@@ -562,11 +562,11 @@ class IpiDatabase(object):
## Get a list of distribution install file path information.
#
# @param Guid: distribution package guid
# @param Version: distribution package version
# @param Guid: distribution package guid
# @param Version: distribution package version
#
def GetDpFileList(self, Guid, Version):
(DpGuid, DpVersion) = (Guid, Version)
SqlCommand = \
"""select * from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
@@ -578,7 +578,7 @@ class IpiDatabase(object):
Path = Result[0]
Md5Sum = Result[3]
PathList.append((os.path.join(self.Workspace, Path), Md5Sum))
return PathList
## Get files' repackage attribute if present that are installed into current workspace
@@ -588,12 +588,12 @@ class IpiDatabase(object):
def GetRePkgDict(self):
SqlCommand = """select * from %s """ % (self.DpTable)
self.Cur.execute(SqlCommand)
DpInfoList = []
for Result in self.Cur:
DpInfoList.append(Result)
FileDict = {}
FileDict = {}
for Result in DpInfoList:
DpGuid = Result[0]
DpVersion = Result[1]
@@ -606,12 +606,12 @@ class IpiDatabase(object):
for FileInfo in self.GetDpFileList(DpGuid, DpVersion):
PathInfo = FileInfo[0]
FileDict[PathInfo] = DpGuid, DpVersion, NewDpFileName, RePackage
return FileDict
## Get (Guid, Version) from distribution file name information.
#
# @param DistributionFile: Distribution File
# @param DistributionFile: Distribution File
#
def GetDpByName(self, DistributionFile):
SqlCommand = """select * from %s where NewPkgFileName = '%s'""" % \
@@ -622,34 +622,34 @@ class IpiDatabase(object):
DpGuid = Result[0]
DpVersion = Result[1]
NewDpFileName = Result[3]
return (DpGuid, DpVersion, NewDpFileName)
else:
return (None, None, None)
## Get a list of package information.
#
# @param Guid: package guid
# @param Guid: package guid
# @param Version: package version
#
def GetPackage(self, Guid, Version, DpGuid='', DpVersion=''):
if DpVersion == '' or DpGuid == '':
(PackageGuid, PackageVersion) = (Guid, Version)
SqlCommand = """select * from %s where PackageGuid ='%s'
SqlCommand = """select * from %s where PackageGuid ='%s'
and PackageVersion = '%s'""" % (self.PkgTable, PackageGuid, \
PackageVersion)
self.Cur.execute(SqlCommand)
elif Version is None or len(Version.strip()) == 0:
SqlCommand = """select * from %s where PackageGuid ='%s'""" % \
(self.PkgTable, Guid)
self.Cur.execute(SqlCommand)
else:
(PackageGuid, PackageVersion) = (Guid, Version)
SqlCommand = """select * from %s where PackageGuid ='%s' and
SqlCommand = """select * from %s where PackageGuid ='%s' and
PackageVersion = '%s'
and DpGuid = '%s' and DpVersion = '%s'""" % \
(self.PkgTable, PackageGuid, PackageVersion, \
@@ -664,10 +664,10 @@ class IpiDatabase(object):
InstallPath = PkgInfo[5]
PkgList.append((PkgGuid, PkgVersion, InstallTime, DpGuid, \
DpVersion, InstallPath))
return PkgList
## Get a list of module in package information.
#
# @param Guid: A module guid
@@ -676,15 +676,15 @@ class IpiDatabase(object):
def GetModInPackage(self, Guid, Version, Name, Path, PkgGuid='', PkgVersion=''):
(ModuleGuid, ModuleVersion, ModuleName, InstallPath) = (Guid, Version, Name, Path)
if PkgVersion == '' or PkgGuid == '':
SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath = '%s'
SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath = '%s'
and ModuleName = '%s'""" % (self.ModInPkgTable, ModuleGuid, \
ModuleVersion, InstallPath, ModuleName)
self.Cur.execute(SqlCommand)
else:
SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath = '%s'
and ModuleName = '%s' and PackageGuid ='%s'
SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath = '%s'
and ModuleName = '%s' and PackageGuid ='%s'
and PackageVersion = '%s'
""" % (self.ModInPkgTable, ModuleGuid, \
ModuleVersion, InstallPath, ModuleName, PkgGuid, PkgVersion)
@@ -698,26 +698,26 @@ class IpiDatabase(object):
InstallPath = ModInfo[5]
ModList.append((ModGuid, ModVersion, InstallTime, PkgGuid, \
PkgVersion, InstallPath))
return ModList
## Get a list of module standalone.
#
# @param Guid: A module guid
# @param Version: A module version
# @param Guid: A module guid
# @param Version: A module version
#
def GetStandaloneModule(self, Guid, Version, Name, Path, DpGuid='', DpVersion=''):
(ModuleGuid, ModuleVersion, ModuleName, InstallPath) = (Guid, Version, Name, Path)
if DpGuid == '':
SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath = '%s'
SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath = '%s'
and ModuleName = '%s'""" % (self.StandaloneModTable, ModuleGuid, \
ModuleVersion, InstallPath, ModuleName)
self.Cur.execute(SqlCommand)
else:
SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath = '%s' and ModuleName = '%s' and DpGuid ='%s' and DpVersion = '%s'
SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath = '%s' and ModuleName = '%s' and DpGuid ='%s' and DpVersion = '%s'
""" % (self.StandaloneModTable, ModuleGuid, \
ModuleVersion, ModuleName, InstallPath, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
@@ -730,18 +730,18 @@ class IpiDatabase(object):
InstallPath = ModInfo[5]
ModList.append((ModGuid, ModVersion, InstallTime, DpGuid, \
DpVersion, InstallPath))
return ModList
## Get a list of module information that comes from DP.
#
# @param DpGuid: A Distrabution Guid
# @param DpVersion: A Distrabution version
# @param DpGuid: A Distrabution Guid
# @param DpVersion: A Distrabution version
#
def GetSModInsPathListFromDp(self, DpGuid, DpVersion):
PathList = []
SqlCommand = """select InstallPath from %s where DpGuid ='%s'
SqlCommand = """select InstallPath from %s where DpGuid ='%s'
and DpVersion = '%s'
""" % (self.StandaloneModTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
@@ -749,17 +749,17 @@ class IpiDatabase(object):
for Result in self.Cur:
InstallPath = Result[0]
PathList.append(InstallPath)
return PathList
## Get a list of package information.
#
# @param DpGuid: A Distrabution Guid
# @param DpVersion: A Distrabution version
# @param DpGuid: A Distrabution Guid
# @param DpVersion: A Distrabution version
#
def GetPackageListFromDp(self, DpGuid, DpVersion):
SqlCommand = """select * from %s where DpGuid ='%s' and
SqlCommand = """select * from %s where DpGuid ='%s' and
DpVersion = '%s' """ % (self.PkgTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
@@ -769,31 +769,31 @@ class IpiDatabase(object):
PkgVersion = PkgInfo[1]
InstallPath = PkgInfo[5]
PkgList.append((PkgGuid, PkgVersion, InstallPath))
return PkgList
## Get a list of modules that depends on package information from a DP.
#
# @param DpGuid: A Distrabution Guid
# @param DpVersion: A Distrabution version
# @param DpGuid: A Distrabution Guid
# @param DpVersion: A Distrabution version
#
def GetDpDependentModuleList(self, DpGuid, DpVersion):
ModList = []
PkgList = self.GetPackageListFromDp(DpGuid, DpVersion)
if len(PkgList) > 0:
return ModList
for Pkg in PkgList:
#
# get all in-package modules that depends on current
# Pkg (Guid match, Version match or NA) but not belong to
# get all in-package modules that depends on current
# Pkg (Guid match, Version match or NA) but not belong to
# current Pkg
#
SqlCommand = """select t1.ModuleGuid, t1.ModuleVersion,
t1.InstallPath from %s as t1, %s as t2 where
t1.ModuleGuid = t2.ModuleGuid and
t1.ModuleVersion = t2.ModuleVersion and t2.DepexGuid ='%s'
SqlCommand = """select t1.ModuleGuid, t1.ModuleVersion,
t1.InstallPath from %s as t1, %s as t2 where
t1.ModuleGuid = t2.ModuleGuid and
t1.ModuleVersion = t2.ModuleVersion and t2.DepexGuid ='%s'
and (t2.DepexVersion = '%s' or t2.DepexVersion = 'N/A') and
t1.PackageGuid != '%s' and t1.PackageVersion != '%s'
""" % (self.ModInPkgTable, \
@@ -807,13 +807,13 @@ class IpiDatabase(object):
ModList.append((ModGuid, ModVersion, InstallPath))
#
# get all modules from standalone modules that depends on current
# get all modules from standalone modules that depends on current
#Pkg (Guid match, Version match or NA) but not in current dp
#
SqlCommand = \
"""select t1.ModuleGuid, t1.ModuleVersion, t1.InstallPath
from %s as t1, %s as t2 where t1.ModuleGuid = t2.ModuleGuid and
t1.ModuleVersion = t2.ModuleVersion and t2.DepexGuid ='%s'
"""select t1.ModuleGuid, t1.ModuleVersion, t1.InstallPath
from %s as t1, %s as t2 where t1.ModuleGuid = t2.ModuleGuid and
t1.ModuleVersion = t2.ModuleVersion and t2.DepexGuid ='%s'
and (t2.DepexVersion = '%s' or t2.DepexVersion = 'N/A') and
t1.DpGuid != '%s' and t1.DpVersion != '%s'
""" % \
@@ -825,75 +825,75 @@ class IpiDatabase(object):
ModVersion = ModInfo[1]
InstallPath = ModInfo[2]
ModList.append((ModGuid, ModVersion, InstallPath))
return ModList
## Get Dp's list of modules.
#
# @param DpGuid: A Distrabution Guid
# @param DpVersion: A Distrabution version
# @param DpGuid: A Distrabution Guid
# @param DpVersion: A Distrabution version
#
def GetDpModuleList(self, DpGuid, DpVersion):
def GetDpModuleList(self, DpGuid, DpVersion):
ModList = []
#
# get Dp module list from the DpFileList table
#
SqlCommand = """select FilePath
SqlCommand = """select FilePath
from %s
where DpGuid = '%s' and DpVersion = '%s' and
where DpGuid = '%s' and DpVersion = '%s' and
FilePath like '%%.inf'
""" % (self.DpFileListTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
for ModuleInfo in self.Cur:
FilePath = ModuleInfo[0]
ModList.append(os.path.join(self.Workspace, FilePath))
return ModList
return ModList
## Get a module depex
#
# @param DpGuid: A module Guid
# @param DpVersion: A module version
# @param DpGuid: A module Guid
# @param DpVersion: A module version
# @param Path:
#
def GetModuleDepex(self, Guid, Version, Path):
#
# Get module depex information to DB.
#
SqlCommand = """select * from %s where ModuleGuid ='%s' and
SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath ='%s'
""" % (self.ModDepexTable, Guid, Version, Path)
self.Cur.execute(SqlCommand)
DepexList = []
for DepInfo in self.Cur:
DepexGuid = DepInfo[3]
DepexVersion = DepInfo[4]
DepexList.append((DepexGuid, DepexVersion))
return DepexList
## Inventory the distribution installed to current workspace
#
# Inventory the distribution installed to current workspace
#
#
def InventoryDistInstalled(self):
SqlCommand = """select * from %s """ % (self.DpTable)
self.Cur.execute(SqlCommand)
DpInfoList = []
for Result in self.Cur:
DpGuid = Result[0]
DpVersion = Result[1]
DpAliasName = Result[3]
DpFileName = Result[4]
DpFileName = Result[4]
DpInfoList.append((DpGuid, DpVersion, DpFileName, DpAliasName))
return DpInfoList
return DpInfoList
## Close entire database
#
@@ -904,18 +904,18 @@ class IpiDatabase(object):
# drop the dummy table
#
SqlCommand = """
drop table IF EXISTS %s
drop table IF EXISTS %s
""" % self.DummyTable
self.Cur.execute(SqlCommand)
self.Conn.commit()
self.Cur.close()
self.Conn.close()
## Convert To Sql String
#
# 1. Replace "'" with "''" in each item of StringList
#
#
# @param StringList: A list for strings to be converted
#
def __ConvertToSqlString(self, StringList):
@@ -925,4 +925,4 @@ class IpiDatabase(object):

View File

@@ -2,11 +2,11 @@
#
# PackageFile class represents the zip file of a distribution package.
#
# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -52,25 +52,25 @@ class PackageFile:
for Filename in self._ZipFile.namelist():
self._Files[os.path.normpath(Filename)] = Filename
except BaseException as Xstr:
Logger.Error("PackagingTool", FILE_OPEN_FAILURE,
Logger.Error("PackagingTool", FILE_OPEN_FAILURE,
ExtraData="%s (%s)" % (FileName, str(Xstr)))
BadFile = self._ZipFile.testzip()
if BadFile is not None:
Logger.Error("PackagingTool", FILE_CHECKSUM_FAILURE,
Logger.Error("PackagingTool", FILE_CHECKSUM_FAILURE,
ExtraData="[%s] in %s" % (BadFile, FileName))
def GetZipFile(self):
return self._ZipFile
## Get file name
## Get file name
#
def __str__(self):
return self._FileName
## Extract the file
#
# @param To: the destination file
#
# @param To: the destination file
#
def Unpack(self, ToDest):
for FileN in self._ZipFile.namelist():
@@ -78,11 +78,11 @@ class PackageFile:
Msg = "%s -> %s" % (FileN, ToFile)
Logger.Info(Msg)
self.Extract(FileN, ToFile)
## Extract the file
#
# @param File: the extracted file
# @param ToFile: the destination file
#
# @param File: the extracted file
# @param ToFile: the destination file
#
def UnpackFile(self, File, ToFile):
File = File.replace('\\', '/')
@@ -91,13 +91,13 @@ class PackageFile:
Logger.Info(Msg)
self.Extract(File, ToFile)
return ToFile
return ''
## Extract the file
#
# @param Which: the source path
# @param ToDest: the destination path
#
# @param Which: the source path
# @param ToDest: the destination path
#
def Extract(self, Which, ToDest):
Which = os.path.normpath(Which)
@@ -107,7 +107,7 @@ class PackageFile:
try:
FileContent = self._ZipFile.read(self._Files[Which])
except BaseException as Xstr:
Logger.Error("PackagingTool", FILE_DECOMPRESS_FAILURE,
Logger.Error("PackagingTool", FILE_DECOMPRESS_FAILURE,
ExtraData="[%s] in %s (%s)" % (Which, \
self._FileName, \
str(Xstr)))
@@ -120,19 +120,19 @@ class PackageFile:
else:
ToFile = __FileHookOpen__(ToDest, 'wb')
except BaseException as Xstr:
Logger.Error("PackagingTool", FILE_OPEN_FAILURE,
Logger.Error("PackagingTool", FILE_OPEN_FAILURE,
ExtraData="%s (%s)" % (ToDest, str(Xstr)))
try:
ToFile.write(FileContent)
ToFile.close()
except BaseException as Xstr:
Logger.Error("PackagingTool", FILE_WRITE_FAILURE,
Logger.Error("PackagingTool", FILE_WRITE_FAILURE,
ExtraData="%s (%s)" % (ToDest, str(Xstr)))
## Remove the file
#
# @param Files: the removed files
#
# @param Files: the removed files
#
def Remove(self, Files):
TmpDir = os.path.join(tempfile.gettempdir(), ".packaging")
@@ -144,7 +144,7 @@ class PackageFile:
for SinF in Files:
SinF = os.path.normpath(SinF)
if SinF not in self._Files:
Logger.Error("PackagingTool", FILE_NOT_FOUND,
Logger.Error("PackagingTool", FILE_NOT_FOUND,
ExtraData="%s is not in %s!" % \
(SinF, self._FileName))
self._Files.pop(SinF)
@@ -159,12 +159,12 @@ class PackageFile:
RemoveDirectory(TmpDir, True)
## Pack the files under Top directory, the directory shown in the zipFile start from BaseDir,
# BaseDir should be the parent directory of the Top directory, for example,
# Pack(Workspace\Dir1, Workspace) will pack files under Dir1, and the path in the zipfile will
# BaseDir should be the parent directory of the Top directory, for example,
# Pack(Workspace\Dir1, Workspace) will pack files under Dir1, and the path in the zipfile will
# start from Workspace
#
# @param Top: the top directory
# @param BaseDir: the base directory
#
# @param Top: the top directory
# @param BaseDir: the base directory
#
def Pack(self, Top, BaseDir):
if not os.path.isdir(Top):
@@ -175,14 +175,14 @@ class PackageFile:
Cwd = os.getcwd()
os.chdir(BaseDir)
RelaDir = Top[Top.upper().find(BaseDir.upper()).\
join(len(BaseDir).join(1)):]
join(len(BaseDir).join(1)):]
for Root, Dirs, Files in os.walk(RelaDir):
if 'CVS' in Dirs:
Dirs.remove('CVS')
if '.svn' in Dirs:
Dirs.remove('.svn')
for Dir in Dirs:
if Dir.startswith('.'):
Dirs.remove(Dir)
@@ -200,8 +200,8 @@ class PackageFile:
os.chdir(Cwd)
## Pack the file
#
# @param Files: the files to pack
#
# @param Files: the files to pack
#
def PackFiles(self, Files):
for File in Files:
@@ -211,9 +211,9 @@ class PackageFile:
os.chdir(Cwd)
## Pack the file
#
# @param File: the files to pack
# @param ArcName: the Arc Name
#
# @param File: the files to pack
# @param ArcName: the Arc Name
#
def PackFile(self, File, ArcName=None):
try:
@@ -221,7 +221,7 @@ class PackageFile:
# avoid packing same file multiple times
#
if platform.system() != 'Windows':
File = File.replace('\\', '/')
File = File.replace('\\', '/')
ZipedFilesNameList = self._ZipFile.namelist()
for ZipedFile in ZipedFilesNameList:
if File == os.path.normpath(ZipedFile):
@@ -233,9 +233,9 @@ class PackageFile:
ExtraData="%s (%s)" % (File, str(Xstr)))
## Write data to the packed file
#
# @param Data: data to write
# @param ArcName: the Arc Name
#
# @param Data: data to write
# @param ArcName: the Arc Name
#
def PackData(self, Data, ArcName):
try:
@@ -247,7 +247,7 @@ class PackageFile:
ExtraData="%s (%s)" % (ArcName, str(Xstr)))
## Close file
#
#
#
def Close(self):
self._ZipFile.close()

View File

@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
Core init file
'''
'''

View File

@@ -4,9 +4,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -70,7 +70,7 @@ from Library.StringUtils import GetUniFileName
def GenPcd(Package, Content):
#
# generate [Pcd] section
# <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
# <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
#
ValidUsageDict = {}
for Pcd in Package.GetPcdList():
@@ -80,16 +80,16 @@ def GenPcd(Package, Content):
HelpTextList = Pcd.GetHelpTextList()
HelpStr = _GetHelpStr(HelpTextList)
CommentStr = GenGenericCommentF(HelpStr, 2)
PromptList = Pcd.GetPromptList()
PromptStr = _GetHelpStr(PromptList)
CommentStr += GenGenericCommentF(PromptStr.strip(), 1, True)
PcdErrList = Pcd.GetPcdErrorsList()
for PcdErr in PcdErrList:
CommentStr += GenPcdErrComment(PcdErr)
Statement = CommentStr
CName = Pcd.GetCName()
TokenSpaceGuidCName = Pcd.GetTokenSpaceGuidCName()
DefaultValue = Pcd.GetDefaultValue()
@@ -107,7 +107,7 @@ def GenPcd(Package, Content):
ValidUsage = 'PcdsDynamic'
elif ValidUsage == 'PcdEx':
ValidUsage = 'PcdsDynamicEx'
if ValidUsage in ValidUsageDict:
NewSectionDict = ValidUsageDict[ValidUsage]
else:
@@ -129,17 +129,17 @@ def GenPcd(Package, Content):
NewSectionDict[SortedArch] = \
NewSectionDict[SortedArch] + [Statement]
else:
NewSectionDict[SortedArch] = [Statement]
NewSectionDict[SortedArch] = [Statement]
for ValidUsage in ValidUsageDict:
Content += GenSection(ValidUsage, ValidUsageDict[ValidUsage], True, True)
return Content
def GenPcdErrorMsgSection(Package, Content):
if not Package.PcdErrorCommentDict:
return Content
#
# Generate '# [Error.<TokenSpcCName>]' section
#
@@ -148,14 +148,14 @@ def GenPcdErrorMsgSection(Package, Content):
SectionComment += TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_PCD_ERROR_SECTION_COMMENT + END_OF_LINE
SectionComment += TAB_COMMENT_SPLIT + END_OF_LINE
TokenSpcCNameList = []
#
# Get TokenSpcCName list in PcdErrorCommentDict in Package object
#
for (TokenSpcCName, ErrorNumber) in Package.PcdErrorCommentDict:
if TokenSpcCName not in TokenSpcCNameList:
TokenSpcCNameList.append(TokenSpcCName)
for TokenSpcCNameItem in TokenSpcCNameList:
SectionName = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_SECTION_START + TAB_PCD_ERROR + \
TAB_SPLIT + TokenSpcCNameItem + TAB_SECTION_END + END_OF_LINE
@@ -168,10 +168,10 @@ def GenPcdErrorMsgSection(Package, Content):
ErrorNumber + TAB_SPACE_SPLIT + TAB_VALUE_SPLIT + TAB_SPACE_SPLIT + \
PcdErrorMsg + END_OF_LINE
Content += SectionItem
Content += TAB_COMMENT_SPLIT
return Content
def GenGuidProtocolPpi(Package, Content):
#
# generate [Guids] section
@@ -203,17 +203,17 @@ def GenGuidProtocolPpi(Package, Content):
# generate tail comment
#
if Guid.GetSupModuleList():
Statement += GenDecTailComment(Guid.GetSupModuleList())
Statement += GenDecTailComment(Guid.GetSupModuleList())
ArchList = sorted(Guid.GetSupArchList())
SortedArch = ' '.join(ArchList)
if SortedArch in NewSectionDict:
NewSectionDict[SortedArch] = \
NewSectionDict[SortedArch] + [Statement]
else:
NewSectionDict[SortedArch] = [Statement]
NewSectionDict[SortedArch] = [Statement]
Content += GenSection('Guids', NewSectionDict, True, True)
#
# generate [Protocols] section
#
@@ -232,9 +232,9 @@ def GenGuidProtocolPpi(Package, Content):
#
HelpTextList = Protocol.GetHelpTextList()
HelpStr = _GetHelpStr(HelpTextList)
CommentStr = GenGenericCommentF(HelpStr, 2)
CommentStr = GenGenericCommentF(HelpStr, 2)
Statement = CommentStr
Statement = CommentStr
CName = Protocol.GetCName()
Value = GuidStringToGuidStructureString(Protocol.GetGuid())
Statement += CName.ljust(LeftOffset) + ' = ' + Value
@@ -250,9 +250,9 @@ def GenGuidProtocolPpi(Package, Content):
NewSectionDict[SortedArch] = \
NewSectionDict[SortedArch] + [Statement]
else:
NewSectionDict[SortedArch] = [Statement]
NewSectionDict[SortedArch] = [Statement]
Content += GenSection('Protocols', NewSectionDict, True, True)
Content += GenSection('Protocols', NewSectionDict, True, True)
#
# generate [Ppis] section
@@ -290,28 +290,28 @@ def GenGuidProtocolPpi(Package, Content):
NewSectionDict[SortedArch] = \
NewSectionDict[SortedArch] + [Statement]
else:
NewSectionDict[SortedArch] = [Statement]
NewSectionDict[SortedArch] = [Statement]
Content += GenSection('Ppis', NewSectionDict, True, True)
return Content
## Transfer Package Object to Dec files
#
# Transfer all contents of a standard Package Object to a Dec file
# Transfer all contents of a standard Package Object to a Dec file
#
# @param Package: A Package
# @param Package: A Package
#
def PackageToDec(Package, DistHeader = None):
#
# Init global information for the file
#
ContainerFile = Package.GetFullPath()
Content = ''
#
# Generate file header
# Generate file header
#
PackageAbstract = GetLocalValue(Package.GetAbstract())
PackageDescription = GetLocalValue(Package.GetDescription())
@@ -335,7 +335,7 @@ def PackageToDec(Package, DistHeader = None):
if not PackageLicense and DistHeader:
for (Lang, License) in DistHeader.GetLicense():
PackageLicense = License
#
# Generate header comment section of DEC file
#
@@ -345,7 +345,7 @@ def PackageToDec(Package, DistHeader = None):
PackageLicense).replace('\r\n', '\n')
#
# Generate Binary header
# Generate Binary header
#
for UserExtension in Package.GetUserExtensionList():
if UserExtension.GetUserID() == TAB_BINARY_HEADER_USERID \
@@ -357,12 +357,12 @@ def PackageToDec(Package, DistHeader = None):
for (Lang, Copyright) in UserExtension.GetBinaryCopyright():
PackageBinaryCopyright = Copyright
for (Lang, License) in UserExtension.GetBinaryLicense():
PackageBinaryLicense = License
PackageBinaryLicense = License
if PackageBinaryAbstract and PackageBinaryDescription and \
PackageBinaryCopyright and PackageBinaryLicense:
Content += GenHeaderCommentSection(PackageBinaryAbstract,
PackageBinaryDescription,
PackageBinaryCopyright,
Content += GenHeaderCommentSection(PackageBinaryAbstract,
PackageBinaryDescription,
PackageBinaryCopyright,
PackageBinaryLicense,
True)
@@ -374,23 +374,23 @@ def PackageToDec(Package, DistHeader = None):
GenPackageUNIEncodeFile(Package, FileHeader)
#
# for each section, maintain a dict, sorted arch will be its key,
# for each section, maintain a dict, sorted arch will be its key,
#statement list will be its data
# { 'Arch1 Arch2 Arch3': [statement1, statement2],
# 'Arch1' : [statement1, statement3]
# 'Arch1' : [statement1, statement3]
# }
#
#
# generate [Defines] section
# generate [Defines] section
#
LeftOffset = 31
NewSectionDict = {TAB_ARCH_COMMON : []}
SpecialItemList = []
Statement = (u'%s ' % TAB_DEC_DEFINES_DEC_SPECIFICATION).ljust(LeftOffset) + u'= %s' % '0x00010017'
SpecialItemList.append(Statement)
BaseName = Package.GetBaseName()
if BaseName.startswith('.') or BaseName.startswith('-'):
BaseName = '_' + BaseName
@@ -405,7 +405,7 @@ def PackageToDec(Package, DistHeader = None):
if Package.UNIFlag:
Statement = (u'%s ' % TAB_DEC_DEFINES_PKG_UNI_FILE).ljust(LeftOffset) + u'= %s' % Package.GetBaseName() + '.uni'
SpecialItemList.append(Statement)
SpecialItemList.append(Statement)
for SortedArch in NewSectionDict:
NewSectionDict[SortedArch] = \
@@ -417,7 +417,7 @@ def PackageToDec(Package, DistHeader = None):
#
NewSectionDict = {}
IncludeArchList = Package.GetIncludeArchList()
if IncludeArchList:
if IncludeArchList:
for Path, ArchList in IncludeArchList:
Statement = Path
ArchList.sort()
@@ -465,7 +465,7 @@ def PackageToDec(Package, DistHeader = None):
NewSectionDict[SortedArch] = \
NewSectionDict[SortedArch] + [Statement]
else:
NewSectionDict[SortedArch] = [Statement]
NewSectionDict[SortedArch] = [Statement]
Content += GenSection('LibraryClasses', NewSectionDict, True, True)
@@ -473,7 +473,7 @@ def PackageToDec(Package, DistHeader = None):
# Generate '# [Error.<TokenSpcCName>]' section
#
Content = GenPcdErrorMsgSection(Package, Content)
Content = GenPcd(Package, Content)
#
@@ -498,7 +498,7 @@ def PackageToDec(Package, DistHeader = None):
NewStatement = ""
for Line in LineList:
NewStatement += " %s\n" % Line
SectionList = []
SectionName = 'UserExtensions'
UserId = UserExtension.GetUserID()
@@ -533,7 +533,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
BinaryAbstract = []
BinaryDescription = []
#
# If more than one language code is used for any element that would be present in the PACKAGE_UNI_FILE,
# If more than one language code is used for any element that would be present in the PACKAGE_UNI_FILE,
# then the PACKAGE_UNI_FILE must be created.
#
for (Key, Value) in PackageObject.GetAbstract() + PackageObject.GetDescription():
@@ -541,7 +541,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
GenUNIFlag = True
else:
OnlyLANGUAGE_EN_X = False
for UserExtension in PackageObject.GetUserExtensionList():
if UserExtension.GetUserID() == TAB_BINARY_HEADER_USERID \
and UserExtension.GetIdentifier() == TAB_BINARY_HEADER_IDENTIFIER:
@@ -565,7 +565,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
GenUNIFlag = True
else:
OnlyLANGUAGE_EN_X = False
for PcdError in Pcd.GetPcdErrorsList():
if PcdError.GetErrorNumber().startswith('0x') or PcdError.GetErrorNumber().startswith('0X'):
for (Key, Value) in PcdError.GetErrorMessageList():
@@ -579,26 +579,26 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
return
else:
PackageObject.UNIFlag = True
if not os.path.exists(os.path.dirname(PackageObject.GetFullPath())):
os.makedirs(os.path.dirname(PackageObject.GetFullPath()))
ContainerFile = GetUniFileName(os.path.dirname(PackageObject.GetFullPath()), PackageObject.GetBaseName())
Content = UniFileHeader + '\r\n'
Content += '\r\n'
Content += FormatUniEntry('#string ' + TAB_DEC_PACKAGE_ABSTRACT, PackageObject.GetAbstract(), ContainerFile) + '\r\n'
Content += FormatUniEntry('#string ' + TAB_DEC_PACKAGE_DESCRIPTION, PackageObject.GetDescription(), ContainerFile) \
+ '\r\n'
Content += FormatUniEntry('#string ' + TAB_DEC_BINARY_ABSTRACT, BinaryAbstract, ContainerFile) + '\r\n'
Content += FormatUniEntry('#string ' + TAB_DEC_BINARY_DESCRIPTION, BinaryDescription, ContainerFile) + '\r\n'
PromptGenList = []
HelpTextGenList = []
HelpTextGenList = []
for Pcd in PackageObject.GetPcdList():
# Generate Prompt for each Pcd
PcdPromptStrName = '#string ' + 'STR_' + Pcd.GetTokenSpaceGuidCName() + '_' + Pcd.GetCName() + '_PROMPT '
@@ -607,7 +607,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
Lang = TxtObj.GetLang()
PromptStr = TxtObj.GetString()
#
# Avoid generating the same PROMPT entry more than one time.
# Avoid generating the same PROMPT entry more than one time.
#
if (PcdPromptStrName, Lang) not in PromptGenList:
TokenValueList.append((Lang, PromptStr))
@@ -615,7 +615,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
PromptString = FormatUniEntry(PcdPromptStrName, TokenValueList, ContainerFile) + '\r\n'
if PromptString not in Content:
Content += PromptString
# Generate Help String for each Pcd
PcdHelpStrName = '#string ' + 'STR_' + Pcd.GetTokenSpaceGuidCName() + '_' + Pcd.GetCName() + '_HELP '
TokenValueList = []
@@ -623,7 +623,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
Lang = TxtObj.GetLang()
HelpStr = TxtObj.GetString()
#
# Avoid generating the same HELP entry more than one time.
# Avoid generating the same HELP entry more than one time.
#
if (PcdHelpStrName, Lang) not in HelpTextGenList:
TokenValueList.append((Lang, HelpStr))
@@ -631,7 +631,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
HelpTextString = FormatUniEntry(PcdHelpStrName, TokenValueList, ContainerFile) + '\r\n'
if HelpTextString not in Content:
Content += HelpTextString
# Generate PcdError for each Pcd if ErrorNo exist.
for PcdError in Pcd.GetPcdErrorsList():
ErrorNo = PcdError.GetErrorNumber()
@@ -642,7 +642,7 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
PcdErrString = FormatUniEntry(PcdErrStrName, PcdError.GetErrorMessageList(), ContainerFile) + '\r\n'
if PcdErrString not in Content:
Content += PcdErrString
File = codecs.open(ContainerFile, 'w', Encoding)
File.write(u'\uFEFF' + Content)
File.stream.close()
@@ -652,23 +652,23 @@ def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCO
PackageObject.FileList.append((ContainerFile, Md5Sum))
return ContainerFile
## GenPcdErrComment
#
# @param PcdErrObject: PcdErrorObject
#
#
# @retval CommentStr: Generated comment lines, with prefix "#"
#
#
def GenPcdErrComment (PcdErrObject):
CommentStr = ''
CommentStr = ''
ErrorCode = PcdErrObject.GetErrorNumber()
ValidValueRange = PcdErrObject.GetValidValueRange()
if ValidValueRange:
CommentStr = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_PCD_VALIDRANGE + TAB_SPACE_SPLIT
CommentStr = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_PCD_VALIDRANGE + TAB_SPACE_SPLIT
if ErrorCode:
CommentStr += ErrorCode + TAB_SPACE_SPLIT + TAB_VALUE_SPLIT + TAB_SPACE_SPLIT
CommentStr += ValidValueRange + END_OF_LINE
ValidValue = PcdErrObject.GetValidValue()
if ValidValue:
ValidValueList = \
@@ -677,13 +677,13 @@ def GenPcdErrComment (PcdErrObject):
if ErrorCode:
CommentStr += ErrorCode + TAB_SPACE_SPLIT + TAB_VALUE_SPLIT + TAB_SPACE_SPLIT
CommentStr += TAB_COMMA_SPLIT.join(ValidValueList) + END_OF_LINE
Expression = PcdErrObject.GetExpression()
if Expression:
CommentStr = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_PCD_EXPRESSION + TAB_SPACE_SPLIT
if ErrorCode:
CommentStr += ErrorCode + TAB_SPACE_SPLIT + TAB_VALUE_SPLIT + TAB_SPACE_SPLIT
CommentStr += Expression + END_OF_LINE
return CommentStr

View File

@@ -4,9 +4,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -46,8 +46,8 @@ from Library.StringUtils import GetUniFileName
## Transfer Module Object to Inf files
#
# Transfer all contents of a standard Module Object to an Inf file
# @param ModuleObject: A Module Object
# Transfer all contents of a standard Module Object to an Inf file
# @param ModuleObject: A Module Object
#
def ModuleToInf(ModuleObject, PackageObject=None, DistHeader=None):
if not GlobalData.gWSPKG_LIST:
@@ -59,9 +59,9 @@ def ModuleToInf(ModuleObject, PackageObject=None, DistHeader=None):
Content = ''
#
# Generate file header, If any Abstract, Description, Copyright or License XML elements are missing,
# should 1) use the Abstract, Description, Copyright or License from the PackageSurfaceArea.Header elements
# that the module belongs to, or 2) if this is a stand-alone module that is not included in a PackageSurfaceArea,
# Generate file header, If any Abstract, Description, Copyright or License XML elements are missing,
# should 1) use the Abstract, Description, Copyright or License from the PackageSurfaceArea.Header elements
# that the module belongs to, or 2) if this is a stand-alone module that is not included in a PackageSurfaceArea,
# use the abstract, description, copyright or license from the DistributionPackage.Header elements.
#
ModuleAbstract = GetLocalValue(ModuleObject.GetAbstract())
@@ -107,15 +107,15 @@ def ModuleToInf(ModuleObject, PackageObject=None, DistHeader=None):
#
# Generate header comment section of INF file
#
#
Content += GenHeaderCommentSection(ModuleAbstract,
ModuleDescription,
ModuleCopyright,
ModuleLicense).replace('\r\n', '\n')
#
# Generate Binary Header
#
# Generate Binary Header
#
for UserExtension in ModuleObject.GetUserExtensionList():
if UserExtension.GetUserID() == DT.TAB_BINARY_HEADER_USERID \
and UserExtension.GetIdentifier() == DT.TAB_BINARY_HEADER_IDENTIFIER:
@@ -152,10 +152,10 @@ def ModuleToInf(ModuleObject, PackageObject=None, DistHeader=None):
else:
GlobalData.gIS_BINARY_INF = False
#
# for each section, maintain a dict, sorted arch will be its key,
# for each section, maintain a dict, sorted arch will be its key,
# statement list will be its data
# { 'Arch1 Arch2 Arch3': [statement1, statement2],
# 'Arch1' : [statement1, statement3]
# 'Arch1' : [statement1, statement3]
# }
#
# Gen section contents
@@ -197,7 +197,7 @@ def GenModuleUNIEncodeFile(ModuleObject, UniFileHeader='', Encoding=DT.TAB_ENCOD
BinaryAbstract = []
BinaryDescription = []
#
# If more than one language code is used for any element that would be present in the MODULE_UNI_FILE,
# If more than one language code is used for any element that would be present in the MODULE_UNI_FILE,
# then the MODULE_UNI_FILE must be created.
#
for (Key, Value) in ModuleObject.GetAbstract() + ModuleObject.GetDescription():
@@ -300,11 +300,11 @@ def GenDefines(ModuleObject):
BaseName = '_' + BaseName
Statement = (u'%s ' % DT.TAB_INF_DEFINES_BASE_NAME).ljust(LeftOffset) + u'= %s' % BaseName
SpecialStatementList.append(Statement)
# TAB_INF_DEFINES_FILE_GUID
Statement = (u'%s ' % DT.TAB_INF_DEFINES_FILE_GUID).ljust(LeftOffset) + u'= %s' % ModuleObject.GetGuid()
SpecialStatementList.append(Statement)
# TAB_INF_DEFINES_VERSION_STRING
Statement = (u'%s ' % DT.TAB_INF_DEFINES_VERSION_STRING).ljust(LeftOffset) + u'= %s' % ModuleObject.GetVersion()
SpecialStatementList.append(Statement)
@@ -480,7 +480,7 @@ def GenPackages(ModuleObject):
Path = ''
#
# find package path/name
#
#
for PkgInfo in GlobalData.gWSPKG_LIST:
if Guid == PkgInfo[1]:
if (not Version) or (Version == PkgInfo[2]):
@@ -553,7 +553,7 @@ def GenDepex(ModuleObject):
else:
NewSectionDict[Key] = [Statement]
Content += GenSection('Depex', NewSectionDict, False)
return Content
## GenUserExtensions
#
@@ -673,7 +673,7 @@ def GenBinaryStatement(Key, Value, SubTypeGuidValue=None):
Statement += '|' + Target
return Statement
## GenGuidSections
#
#
# @param GuidObjList: List of GuidObject
# @retVal Content: The generated section contents
#
@@ -736,7 +736,7 @@ def GenGuidSections(GuidObjList):
return Content
## GenProtocolPPiSections
#
#
# @param ObjList: List of ProtocolObject or Ppi Object
# @retVal Content: The generated section contents
#
@@ -804,7 +804,7 @@ def GenPcdSections(ModuleObject):
Content = ''
if not GlobalData.gIS_BINARY_INF:
#
# for each Pcd Itemtype, maintain a dict so the same type will be grouped
# for each Pcd Itemtype, maintain a dict so the same type will be grouped
# together
#
ItemTypeDict = {}
@@ -866,7 +866,7 @@ def GenPcdSections(ModuleObject):
if NewSectionDict:
Content += GenSection(ItemType, NewSectionDict)
#
# For AsBuild INF files
# For AsBuild INF files
#
else:
Content += GenAsBuiltPacthPcdSections(ModuleObject)
@@ -905,7 +905,7 @@ def GenAsBuiltPacthPcdSections(ModuleObject):
Statement = HelpString + TokenSpaceName + '.' + PcdCName + ' | ' + PcdValue + ' | ' + \
PcdOffset + DT.TAB_SPACE_SPLIT
#
# Use binary file's Arch to be Pcd's Arch
# Use binary file's Arch to be Pcd's Arch
#
ArchList = []
FileNameObjList = BinaryFile.GetFileNameList()
@@ -954,7 +954,7 @@ def GenAsBuiltPcdExSections(ModuleObject):
Statement = HelpString + TokenSpaceName + DT.TAB_SPLIT + PcdCName + DT.TAB_SPACE_SPLIT
#
# Use binary file's Arch to be Pcd's Arch
# Use binary file's Arch to be Pcd's Arch
#
ArchList = []
FileNameObjList = BinaryFile.GetFileNameList()
@@ -1033,7 +1033,7 @@ def GenSpecialSections(ObjectList, SectionName, UserExtensionsContent=''):
Content = Content.lstrip()
#
# add a return to differentiate it between other possible sections
#
#
if Content:
Content += '\n'
return Content
@@ -1110,7 +1110,7 @@ def GenBinaries(ModuleObject):
else:
NewSectionDict[SortedArch] = [Statement]
#
# as we already generated statement for this DictKey here set the Valuelist to be empty
# as we already generated statement for this DictKey here set the Valuelist to be empty
# to avoid generate duplicate entries as the DictKey may have multiple entries
#
BinariesDict[Key] = []

View File

@@ -2,11 +2,11 @@
#
# This file contained the miscellaneous routines for GenMetaFile usage.
#
# Copyright (c) 2011 - 2017, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -26,7 +26,7 @@ from Parser.DecParser import Dec
# @param SectionDict: string of source file path/name
# @param Arch: string of source file family field
# @param ExternList: string of source file FeatureFlag field
#
#
def AddExternToDefineSec(SectionDict, Arch, ExternList):
LeftOffset = 31
for ArchList, EntryPoint, UnloadImage, Constructor, Destructor, FFE, HelpStringList in ExternList:
@@ -93,7 +93,7 @@ def ObtainPcdName(Packages, TokenSpaceGuidValue, Token):
Path = None
#
# find package path/name
#
#
for PkgInfo in GlobalData.gWSPKG_LIST:
if Guid == PkgInfo[1]:
if (not Version) or (Version == PkgInfo[2]):
@@ -156,9 +156,9 @@ def ObtainPcdName(Packages, TokenSpaceGuidValue, Token):
return TokenSpaceGuidName, PcdCName
## _TransferDict
# transfer dict that using (Statement, SortedArch) as key,
# transfer dict that using (Statement, SortedArch) as key,
# (GenericComment, UsageComment) as value into a dict that using SortedArch as
# key and NewStatement as value
# key and NewStatement as value
#
def TransferDict(OrigDict, Type=None):
NewDict = {}
@@ -171,7 +171,7 @@ def TransferDict(OrigDict, Type=None):
for Statement, SortedArch in OrigDict:
if len(Statement) > LeftOffset:
LeftOffset = len(Statement)
for Statement, SortedArch in OrigDict:
Comment = OrigDict[Statement, SortedArch]
#

View File

@@ -2,11 +2,11 @@
#
# This file contained the logical of generate XML files.
#
# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -15,4 +15,4 @@
'''
GenXmlFile
'''
'''

View File

@@ -4,11 +4,11 @@
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -17,4 +17,4 @@
'''
GenMetaFile
'''
'''

View File

@@ -1,11 +1,11 @@
## @file
# Install distribution package.
#
# Copyright (c) 2011 - 2017, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -102,7 +102,7 @@ def InstallNewModule(WorkspaceDir, Path, PathList = None):
Logger.Info(ST.MSG_RELATIVE_PATH_ONLY%FullPath)
else:
return Path
Input = stdin.readline()
Input = Input.replace('\r', '').replace('\n', '')
if Input == '':
@@ -110,7 +110,7 @@ def InstallNewModule(WorkspaceDir, Path, PathList = None):
Input = Input.replace('\r', '').replace('\n', '')
return InstallNewModule(WorkspaceDir, Input, PathList)
## InstallNewFile
#
# @param WorkspaceDir: Workspace Direction
@@ -137,15 +137,15 @@ def UnZipDp(WorkspaceDir, DpPkgFileName, Index=1):
ContentZipFile = None
Logger.Quiet(ST.MSG_UZIP_PARSE_XML)
DistFile = PackageFile(DpPkgFileName)
DpDescFileName, ContentFileName = GetDPFile(DistFile.GetZipFile())
TempDir = os.path.normpath(os.path.join(WorkspaceDir, "Conf/.tmp%s" % str(Index)))
GlobalData.gUNPACK_DIR.append(TempDir)
DistPkgFile = DistFile.UnpackFile(DpDescFileName, os.path.normpath(os.path.join(TempDir, DpDescFileName)))
if not DistPkgFile:
Logger.Error("InstallPkg", FILE_NOT_FOUND, ST.ERR_FILE_BROKEN %DpDescFileName)
#
# Generate distpkg
#
@@ -166,10 +166,10 @@ def UnZipDp(WorkspaceDir, DpPkgFileName, Index=1):
#
# Get file size
#
#
FileSize = os.path.getsize(ContentFile)
if FileSize != 0:
if FileSize != 0:
ContentZipFile = PackageFile(ContentFile)
#
@@ -201,12 +201,12 @@ def GetPackageList(DistPkg, Dep, WorkspaceDir, Options, ContentZipFile, ModuleLi
NewPackagePath = InstallNewPackage(WorkspaceDir, GuidedPkgPath, Options.CustomPath)
else:
NewPackagePath = InstallNewPackage(WorkspaceDir, PackagePath, Options.CustomPath)
InstallPackageContent(PackagePath, NewPackagePath, Package, ContentZipFile, Dep, WorkspaceDir, ModuleList,
InstallPackageContent(PackagePath, NewPackagePath, Package, ContentZipFile, Dep, WorkspaceDir, ModuleList,
DistPkg.Header.ReadOnly)
PackageList.append(Package)
NewDict[Guid, Version, Package.GetPackagePath()] = Package
#
# Now generate meta-data files, first generate all dec for package
# dec should be generated before inf, and inf should be generated after
@@ -219,7 +219,7 @@ def GetPackageList(DistPkg, Dep, WorkspaceDir, Options, ContentZipFile, ModuleLi
Md5Sum = Md5Sigature.hexdigest()
if (FilePath, Md5Sum) not in Package.FileList:
Package.FileList.append((FilePath, Md5Sum))
return NewDict
## GetModuleList
@@ -229,18 +229,18 @@ def GetPackageList(DistPkg, Dep, WorkspaceDir, Options, ContentZipFile, ModuleLi
def GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList):
#
# ModulePathList will keep track of the standalone module path that
# we just installed. If a new module's path in that list
# (only multiple INF in one directory will be so), we will
# install them directly. If not, we will try to create a new directory
# we just installed. If a new module's path in that list
# (only multiple INF in one directory will be so), we will
# install them directly. If not, we will try to create a new directory
# for it.
#
ModulePathList = []
#
# Check module exist and install
#
Module = None
NewDict = Sdict()
NewDict = Sdict()
for Guid, Version, Name, Path in DistPkg.ModuleSurfaceArea:
ModulePath = Path
Module = DistPkg.ModuleSurfaceArea[Guid, Version, Name, Path]
@@ -259,14 +259,14 @@ def GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList):
ModulePathList.append(NewModuleFullPath)
else:
NewModulePath = ModulePath
InstallModuleContent(ModulePath, NewModulePath, '', Module, ContentZipFile, WorkspaceDir, ModuleList, None,
InstallModuleContent(ModulePath, NewModulePath, '', Module, ContentZipFile, WorkspaceDir, ModuleList, None,
DistPkg.Header.ReadOnly)
#
# Update module
#
Module.SetModulePath(Module.GetModulePath().replace(Path, NewModulePath, 1))
NewDict[Guid, Version, Name, Module.GetModulePath()] = Module
#
@@ -289,7 +289,7 @@ def GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList):
for (FilePath, Md5Sum) in Module.FileList:
if str(FilePath).endswith('.uni') and Package and (FilePath, Md5Sum) not in Package.FileList:
Package.FileList.append((FilePath, Md5Sum))
return NewDict
##
@@ -303,7 +303,7 @@ def GetDepProtocolPpiGuidPcdNames(DePackageObjList):
DependentPpiCNames = []
DependentGuidCNames = []
DependentPcdNames = []
for PackageObj in DePackageObjList:
#
# Get protocol CName list from all dependent DEC file
@@ -312,29 +312,29 @@ def GetDepProtocolPpiGuidPcdNames(DePackageObjList):
for Protocol in PackageObj.GetProtocolList():
if Protocol.GetCName() not in ProtocolCNames:
ProtocolCNames.append(Protocol.GetCName())
DependentProtocolCNames.append(ProtocolCNames)
#
# Get Ppi CName list from all dependent DEC file
#
#
PpiCNames = []
for Ppi in PackageObj.GetPpiList():
if Ppi.GetCName() not in PpiCNames:
PpiCNames.append(Ppi.GetCName())
DependentPpiCNames.append(PpiCNames)
#
# Get Guid CName list from all dependent DEC file
#
#
GuidCNames = []
for Guid in PackageObj.GetGuidList():
if Guid.GetCName() not in GuidCNames:
GuidCNames.append(Guid.GetCName())
DependentGuidCNames.append(GuidCNames)
#
# Get PcdName list from all dependent DEC file
#
@@ -343,10 +343,10 @@ def GetDepProtocolPpiGuidPcdNames(DePackageObjList):
PcdName = '.'.join([Pcd.GetTokenSpaceGuidCName(), Pcd.GetCName()])
if PcdName not in PcdNames:
PcdNames.append(PcdName)
DependentPcdNames.append(PcdNames)
return DependentProtocolCNames, DependentPpiCNames, DependentGuidCNames, DependentPcdNames
##
@@ -358,8 +358,8 @@ def CheckProtoclCNameRedefined(Module, DependentProtocolCNames):
for PackageProtocolCNames in DependentProtocolCNames:
if ProtocolInModule.GetCName() in PackageProtocolCNames:
if IsCNameDefined:
Logger.Error("\nUPT", FORMAT_INVALID,
File = Module.GetFullPath(),
Logger.Error("\nUPT", FORMAT_INVALID,
File = Module.GetFullPath(),
ExtraData = \
ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % ProtocolInModule.GetCName())
else:
@@ -374,11 +374,11 @@ def CheckPpiCNameRedefined(Module, DependentPpiCNames):
for PackagePpiCNames in DependentPpiCNames:
if PpiInModule.GetCName() in PackagePpiCNames:
if IsCNameDefined:
Logger.Error("\nUPT", FORMAT_INVALID,
File = Module.GetFullPath(),
Logger.Error("\nUPT", FORMAT_INVALID,
File = Module.GetFullPath(),
ExtraData = ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % PpiInModule.GetCName())
else:
IsCNameDefined = True
IsCNameDefined = True
##
# Check if Guid CName is redefined
@@ -389,8 +389,8 @@ def CheckGuidCNameRedefined(Module, DependentGuidCNames):
for PackageGuidCNames in DependentGuidCNames:
if GuidInModule.GetCName() in PackageGuidCNames:
if IsCNameDefined:
Logger.Error("\nUPT", FORMAT_INVALID,
File = Module.GetFullPath(),
Logger.Error("\nUPT", FORMAT_INVALID,
File = Module.GetFullPath(),
ExtraData = \
ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % GuidInModule.GetCName())
else:
@@ -414,8 +414,8 @@ def CheckPcdNameRedefined(Module, DependentPcdNames):
for PcdNames in DependentPcdNames:
if PcdName in PcdNames:
if IsPcdNameDefined:
Logger.Error("\nUPT", FORMAT_INVALID,
File = Module.GetFullPath(),
Logger.Error("\nUPT", FORMAT_INVALID,
File = Module.GetFullPath(),
ExtraData = ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % PcdName)
else:
IsPcdNameDefined = True
@@ -427,7 +427,7 @@ def CheckCNameInModuleRedefined(Module, DistPkg):
DePackageObjList = []
#
# Get all dependent package objects
#
#
for Obj in Module.GetPackageDependencyList():
Guid = Obj.GetGuid()
Version = Obj.GetVersion()
@@ -435,7 +435,7 @@ def CheckCNameInModuleRedefined(Module, DistPkg):
if Key[0] == Guid and Key[1] == Version:
if DistPkg.PackageSurfaceArea[Key] not in DePackageObjList:
DePackageObjList.append(DistPkg.PackageSurfaceArea[Key])
DependentProtocolCNames, DependentPpiCNames, DependentGuidCNames, DependentPcdNames = \
GetDepProtocolPpiGuidPcdNames(DePackageObjList)
@@ -457,7 +457,7 @@ def GenToolMisc(DistPkg, WorkspaceDir, ContentZipFile):
ToolFileNum = 0
FileNum = 0
RootDir = WorkspaceDir
#
# FileList stores both tools files and misc files
# Misc file list must be appended to FileList *AFTER* Tools file list
@@ -506,7 +506,7 @@ def Main(Options = None):
WorkspaceDir = GlobalData.gWORKSPACE
if not Options.PackageFile:
Logger.Error("InstallPkg", OPTION_MISSING, ExtraData=ST.ERR_SPECIFY_PACKAGE)
# Get all Dist Info
DistInfoList = []
DistPkgList = []
@@ -536,17 +536,17 @@ def Main(Options = None):
InstallDp(ToBeInstalledDist[0], ToBeInstalledDist[2], ToBeInstalledDist[1],
Options, Dep, WorkspaceDir, DataBase)
ReturnCode = 0
except FatalError as XExcept:
ReturnCode = XExcept.args[0]
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
except KeyboardInterrupt:
ReturnCode = ABORT_ERROR
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
except:
ReturnCode = CODE_ERROR
Logger.Error(
@@ -574,8 +574,8 @@ def Main(Options = None):
return ReturnCode
# BackupDist method
#
# This method will backup the Distribution file into the $(WORKSPACE)/conf/upt, and rename it
#
# This method will backup the Distribution file into the $(WORKSPACE)/conf/upt, and rename it
# if there is already a same-named distribution existed.
#
# @param DpPkgFileName: The distribution path
@@ -645,19 +645,19 @@ def CheckInstallDpx(Dep, DistPkg, DistPkgFileName):
#
def InstallModuleContent(FromPath, NewPath, ModulePath, Module, ContentZipFile,
WorkspaceDir, ModuleList, Package = None, ReadOnly = False):
if NewPath.startswith("\\") or NewPath.startswith("/"):
NewPath = NewPath[1:]
if not IsValidInstallPath(NewPath):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%NewPath)
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%NewPath)
NewModuleFullPath = os.path.normpath(os.path.join(WorkspaceDir, NewPath,
ConvertPath(ModulePath)))
Module.SetFullPath(os.path.normpath(os.path.join(NewModuleFullPath,
ConvertPath(Module.GetName()) + '.inf')))
Module.FileList = []
for MiscFile in Module.GetMiscFileList():
if not MiscFile:
continue
@@ -665,12 +665,12 @@ def InstallModuleContent(FromPath, NewPath, ModulePath, Module, ContentZipFile,
File = Item.GetURI()
if File.startswith("\\") or File.startswith("/"):
File = File[1:]
if not IsValidInstallPath(File):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%File)
FromFile = os.path.join(FromPath, ModulePath, File)
Executable = Item.GetExecutable()
Executable = Item.GetExecutable()
ToFile = os.path.normpath(os.path.join(NewModuleFullPath, ConvertPath(File)))
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable)
if Package and ((ToFile, Md5Sum) not in Package.FileList):
@@ -683,10 +683,10 @@ def InstallModuleContent(FromPath, NewPath, ModulePath, Module, ContentZipFile,
File = Item.GetSourceFile()
if File.startswith("\\") or File.startswith("/"):
File = File[1:]
if not IsValidInstallPath(File):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%File)
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%File)
FromFile = os.path.join(FromPath, ModulePath, File)
ToFile = os.path.normpath(os.path.join(NewModuleFullPath, ConvertPath(File)))
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
@@ -698,24 +698,24 @@ def InstallModuleContent(FromPath, NewPath, ModulePath, Module, ContentZipFile,
Module.FileList.append((ToFile, Md5Sum))
for Item in Module.GetBinaryFileList():
FileNameList = Item.GetFileNameList()
for FileName in FileNameList:
File = FileName.GetFilename()
for FileName in FileNameList:
File = FileName.GetFilename()
if File.startswith("\\") or File.startswith("/"):
File = File[1:]
if not IsValidInstallPath(File):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%File)
FromFile = os.path.join(FromPath, ModulePath, File)
ToFile = os.path.normpath(os.path.join(NewModuleFullPath, ConvertPath(File)))
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
if Package and ((ToFile, Md5Sum) not in Package.FileList):
Package.FileList.append((ToFile, Md5Sum))
elif Package:
continue
elif (ToFile, Md5Sum) not in Module.FileList:
Module.FileList.append((ToFile, Md5Sum))
InstallModuleContentZipFile(ContentZipFile, FromPath, ModulePath, WorkspaceDir, NewPath, Module, Package, ReadOnly,
ModuleList)
@@ -726,7 +726,7 @@ def InstallModuleContent(FromPath, NewPath, ModulePath, Module, ContentZipFile,
def InstallModuleContentZipFile(ContentZipFile, FromPath, ModulePath, WorkspaceDir, NewPath, Module, Package, ReadOnly,
ModuleList):
#
# Extract other files under current module path in content Zip file but not listed in the description
# Extract other files under current module path in content Zip file but not listed in the description
#
if ContentZipFile:
for FileName in ContentZipFile.GetZipFile().namelist():
@@ -735,12 +735,12 @@ def InstallModuleContentZipFile(ContentZipFile, FromPath, ModulePath, WorkspaceD
if FileUnderPath(FileName, CheckPath):
if FileName.startswith("\\") or FileName.startswith("/"):
FileName = FileName[1:]
if not IsValidInstallPath(FileName):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
FromFile = FileName
ToFile = os.path.normpath(os.path.join(WorkspaceDir,
ToFile = os.path.normpath(os.path.join(WorkspaceDir,
ConvertPath(FileName.replace(FromPath, NewPath, 1))))
CheckList = copy.copy(Module.FileList)
if Package:
@@ -755,16 +755,16 @@ def InstallModuleContentZipFile(ContentZipFile, FromPath, ModulePath, WorkspaceD
elif Package:
continue
elif (ToFile, Md5Sum) not in Module.FileList:
Module.FileList.append((ToFile, Md5Sum))
Module.FileList.append((ToFile, Md5Sum))
ModuleList.append((Module, Package))
## FileUnderPath
# Check whether FileName started with directory specified by CheckPath
# Check whether FileName started with directory specified by CheckPath
#
# @param FileName: the FileName need to be checked
# @param CheckPath: the path need to be checked against
# @return: True or False
# @return: True or False
#
def FileUnderPath(FileName, CheckPath):
FileName = FileName.replace('\\', '/')
@@ -777,13 +777,13 @@ def FileUnderPath(FileName, CheckPath):
RemainingPath = RemainingPath[1:]
if FileName == os.path.normpath(os.path.join(CheckPath, RemainingPath)):
return True
return False
## InstallFile
# Extract File from Zipfile, set file attribute, and return the Md5Sum
#
# @return: True or False
# @return: True or False
#
def InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable=False):
if os.path.exists(os.path.normpath(ToFile)):
@@ -802,7 +802,7 @@ def InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable=False):
stat.S_IWOTH | stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH)
else:
chmod(ToFile, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
Md5Sigature = md5.new(__FileHookOpen__(str(ToFile), 'rb').read())
Md5Sum = Md5Sigature.hexdigest()
@@ -823,44 +823,44 @@ def InstallPackageContent(FromPath, ToPath, Package, ContentZipFile, Dep,
if Dep:
pass
Package.FileList = []
if ToPath.startswith("\\") or ToPath.startswith("/"):
ToPath = ToPath[1:]
if not IsValidInstallPath(ToPath):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%ToPath)
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%ToPath)
if FromPath.startswith("\\") or FromPath.startswith("/"):
FromPath = FromPath[1:]
if not IsValidInstallPath(FromPath):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FromPath)
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FromPath)
PackageFullPath = os.path.normpath(os.path.join(WorkspaceDir, ToPath))
for MiscFile in Package.GetMiscFileList():
for Item in MiscFile.GetFileList():
FileName = Item.GetURI()
if FileName.startswith("\\") or FileName.startswith("/"):
FileName = FileName[1:]
if not IsValidInstallPath(FileName):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
FromFile = os.path.join(FromPath, FileName)
Executable = Item.GetExecutable()
ToFile = (os.path.join(PackageFullPath, ConvertPath(FileName)))
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable)
if (ToFile, Md5Sum) not in Package.FileList:
Package.FileList.append((ToFile, Md5Sum))
PackageIncludeArchList = []
PackageIncludeArchList = []
for Item in Package.GetPackageIncludeFileList():
FileName = Item.GetFilePath()
if FileName.startswith("\\") or FileName.startswith("/"):
FileName = FileName[1:]
if not IsValidInstallPath(FileName):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
FromFile = os.path.join(FromPath, FileName)
ToFile = os.path.normpath(os.path.join(PackageFullPath, ConvertPath(FileName)))
RetFile = ContentZipFile.UnpackFile(FromFile, ToFile)
@@ -875,21 +875,21 @@ def InstallPackageContent(FromPath, ToPath, Package, ContentZipFile, Dep,
if ReadOnly:
chmod(ToFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH)
else:
chmod(ToFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH|stat.S_IWUSR|stat.S_IWGRP|stat.S_IWOTH)
chmod(ToFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH|stat.S_IWUSR|stat.S_IWGRP|stat.S_IWOTH)
Md5Sigature = md5.new(__FileHookOpen__(str(ToFile), 'rb').read())
Md5Sum = Md5Sigature.hexdigest()
if (ToFile, Md5Sum) not in Package.FileList:
Package.FileList.append((ToFile, Md5Sum))
Package.SetIncludeArchList(PackageIncludeArchList)
for Item in Package.GetStandardIncludeFileList():
FileName = Item.GetFilePath()
if FileName.startswith("\\") or FileName.startswith("/"):
FileName = FileName[1:]
if not IsValidInstallPath(FileName):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
FromFile = os.path.join(FromPath, FileName)
ToFile = os.path.normpath(os.path.join(PackageFullPath, ConvertPath(FileName)))
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
@@ -932,7 +932,7 @@ def GetDPFile(ZipFile):
continue
else:
continue
Logger.Error("PackagingTool", FILE_TYPE_MISMATCH,
ExtraData=ST.ERR_DIST_FILE_TOOMANY)
if not DescFile or not ContentFile:
@@ -951,13 +951,13 @@ def InstallDp(DistPkg, DpPkgFileName, ContentZipFile, Options, Dep, WorkspaceDir
#
PackageList = []
ModuleList = []
DistPkg.PackageSurfaceArea = GetPackageList(DistPkg, Dep, WorkspaceDir, Options,
DistPkg.PackageSurfaceArea = GetPackageList(DistPkg, Dep, WorkspaceDir, Options,
ContentZipFile, ModuleList, PackageList)
DistPkg.ModuleSurfaceArea = GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList)
GenToolMisc(DistPkg, WorkspaceDir, ContentZipFile)
#
# copy "Distribution File" to directory $(WORKSPACE)/conf/upt
#
@@ -968,6 +968,6 @@ def InstallDp(DistPkg, DpPkgFileName, ContentZipFile, Options, Dep, WorkspaceDir
# update database
#
Logger.Quiet(ST.MSG_UPDATE_PACKAGE_DATABASE)
DataBase.AddDPObject(DistPkg, NewDpPkgFileName, DistFileName,
DataBase.AddDPObject(DistPkg, NewDpPkgFileName, DistFileName,
DistPkg.Header.RePackage)

View File

@@ -1,11 +1,11 @@
## @file
# Inventory workspace's distribution package information.
#
# Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -37,7 +37,7 @@ from Library import GlobalData
#
def InventoryDistInstalled(DataBase):
DistInstalled = DataBase.InventoryDistInstalled()
#
# find the max length for each item
#
@@ -47,9 +47,9 @@ def InventoryDistInstalled(DataBase):
DpOriginalNameStr = "DpOriginalName"
MaxGuidlen = len(DpGuidStr)
MaxVerlen = len(DpVerStr)
MaxDpAliasFileNameLen = len(DpNameStr)
MaxDpAliasFileNameLen = len(DpNameStr)
MaxDpOrigFileNamelen = len(DpOriginalNameStr)
for (DpGuid, DpVersion, DpOriginalName, DpAliasFileName) in DistInstalled:
MaxGuidlen = max(MaxGuidlen, len(DpGuid))
MaxVerlen = max(MaxVerlen, len(DpVersion))
@@ -57,22 +57,22 @@ def InventoryDistInstalled(DataBase):
MaxDpOrigFileNamelen = max(MaxDpOrigFileNamelen, len(DpOriginalName))
OutMsgFmt = "%-*s\t%-*s\t%-*s\t%-s"
OutMsg = OutMsgFmt % (MaxDpAliasFileNameLen,
DpNameStr,
MaxGuidlen,
DpGuidStr,
MaxVerlen,
DpVerStr,
OutMsg = OutMsgFmt % (MaxDpAliasFileNameLen,
DpNameStr,
MaxGuidlen,
DpGuidStr,
MaxVerlen,
DpVerStr,
DpOriginalNameStr)
Logger.Info(OutMsg)
for (DpGuid, DpVersion, DpFileName, DpAliasFileName) in DistInstalled:
OutMsg = OutMsgFmt % (MaxDpAliasFileNameLen,
DpAliasFileName,
MaxGuidlen,
DpGuid,
MaxVerlen,
DpVersion,
OutMsg = OutMsgFmt % (MaxDpAliasFileNameLen,
DpAliasFileName,
MaxGuidlen,
DpGuid,
MaxVerlen,
DpVersion,
DpFileName)
Logger.Info(OutMsg)
@@ -90,13 +90,13 @@ def Main(Options = None):
try:
DataBase = GlobalData.gDB
InventoryDistInstalled(DataBase)
ReturnCode = 0
InventoryDistInstalled(DataBase)
ReturnCode = 0
except FatalError as XExcept:
ReturnCode = XExcept.args[0]
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
except KeyboardInterrupt:
except KeyboardInterrupt:
ReturnCode = ABORT_ERROR
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
@@ -113,5 +113,5 @@ def Main(Options = None):
if ReturnCode == 0:
Logger.Quiet(ST.MSG_FINISH)
return ReturnCode
return ReturnCode

View File

@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -39,21 +39,21 @@ from Library.Misc import GetLocalValue
## GenTailCommentLines
#
# @param TailCommentLines: the tail comment lines that need to be generated
# @param LeadingSpaceNum: the number of leading space needed for non-first
# @param LeadingSpaceNum: the number of leading space needed for non-first
# line tail comment
#
#
def GenTailCommentLines (TailCommentLines, LeadingSpaceNum = 0):
TailCommentLines = TailCommentLines.rstrip(END_OF_LINE)
CommentStr = TAB_SPACE_SPLIT*2 + TAB_SPECIAL_COMMENT + TAB_SPACE_SPLIT + \
(END_OF_LINE + LeadingSpaceNum * TAB_SPACE_SPLIT + TAB_SPACE_SPLIT*2 + TAB_SPECIAL_COMMENT + \
TAB_SPACE_SPLIT).join(GetSplitValueList(TailCommentLines, END_OF_LINE))
return CommentStr
## GenGenericComment
#
# @param CommentLines: Generic comment Text, maybe Multiple Lines
#
#
def GenGenericComment (CommentLines):
if not CommentLines:
return ''
@@ -68,8 +68,8 @@ def GenGenericComment (CommentLines):
# and for line with only <EOL>, '#\n' will be generated instead of '# \n'
#
# @param CommentLines: Generic comment Text, maybe Multiple Lines
# @return CommentStr: Generated comment line
#
# @return CommentStr: Generated comment line
#
def GenGenericCommentF (CommentLines, NumOfPound=1, IsPrompt=False, IsInfLibraryClass=False):
if not CommentLines:
return ''
@@ -104,7 +104,7 @@ def GenGenericCommentF (CommentLines, NumOfPound=1, IsPrompt=False, IsInfLibrary
CommentStr += TAB_COMMENT_SPLIT * NumOfPound + TAB_SPACE_SPLIT * 16 + Line + END_OF_LINE
else:
CommentStr += TAB_COMMENT_SPLIT * NumOfPound + TAB_SPACE_SPLIT + Line + END_OF_LINE
return CommentStr
@@ -112,7 +112,7 @@ def GenGenericCommentF (CommentLines, NumOfPound=1, IsPrompt=False, IsInfLibrary
#
# Generate Header comment sections
#
# @param Abstract One line of abstract
# @param Abstract One line of abstract
# @param Description multiple lines of Description
# @param Copyright possible multiple copyright lines
# @param License possible multiple license lines
@@ -148,9 +148,9 @@ def GenHeaderCommentSection(Abstract, Description, Copyright, License, IsBinaryH
Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
(Description, '\n'))
Content += '\r\n' + CommChar + '\r\n'
#
# There is no '#\n' line to separate multiple copyright lines in code base
# There is no '#\n' line to separate multiple copyright lines in code base
#
if Copyright:
Copyright = Copyright.rstrip('\r\n')
@@ -163,12 +163,12 @@ def GenHeaderCommentSection(Abstract, Description, Copyright, License, IsBinaryH
Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
(License, '\n'))
Content += '\r\n' + CommChar + '\r\n'
if CommChar == TAB_COMMENT_EDK1_SPLIT:
Content += CommChar + TAB_SPACE_SPLIT + TAB_STAR + TAB_COMMENT_EDK1_END + '\r\n'
else:
Content += CommChar * 2 + '\r\n'
return Content
@@ -177,11 +177,11 @@ def GenHeaderCommentSection(Abstract, Description, Copyright, License, IsBinaryH
#
# @param Usage: Usage type
# @param TailCommentText: Comment text for tail comment
#
#
def GenInfPcdTailComment (Usage, TailCommentText):
if (Usage == ITEM_UNDEFINED) and (not TailCommentText):
return ''
CommentLine = TAB_SPACE_SPLIT.join([Usage, TailCommentText])
return GenTailCommentLines(CommentLine)
@@ -190,16 +190,16 @@ def GenInfPcdTailComment (Usage, TailCommentText):
#
# @param Usage: Usage type
# @param TailCommentText: Comment text for tail comment
#
#
def GenInfProtocolPPITailComment (Usage, Notify, TailCommentText):
if (not Notify) and (Usage == ITEM_UNDEFINED) and (not TailCommentText):
return ''
if Notify:
CommentLine = USAGE_ITEM_NOTIFY + " ## "
else:
CommentLine = ''
CommentLine += TAB_SPACE_SPLIT.join([Usage, TailCommentText])
return GenTailCommentLines(CommentLine)
@@ -208,39 +208,39 @@ def GenInfProtocolPPITailComment (Usage, Notify, TailCommentText):
#
# @param Usage: Usage type
# @param TailCommentText: Comment text for tail comment
#
#
def GenInfGuidTailComment (Usage, GuidTypeList, VariableName, TailCommentText):
GuidType = GuidTypeList[0]
if (Usage == ITEM_UNDEFINED) and (GuidType == ITEM_UNDEFINED) and \
(not TailCommentText):
return ''
FirstLine = Usage + " ## " + GuidType
FirstLine = Usage + " ## " + GuidType
if GuidType == TAB_INF_GUIDTYPE_VAR:
FirstLine += ":" + VariableName
CommentLine = TAB_SPACE_SPLIT.join([FirstLine, TailCommentText])
return GenTailCommentLines(CommentLine)
## GenDecGuidTailComment
#
# @param SupModuleList: Supported module type list
#
def GenDecTailComment (SupModuleList):
#
def GenDecTailComment (SupModuleList):
CommentLine = TAB_SPACE_SPLIT.join(SupModuleList)
return GenTailCommentLines(CommentLine)
## _GetHelpStr
# get HelpString from a list of HelpTextObject, the priority refer to
# get HelpString from a list of HelpTextObject, the priority refer to
# related HLD
#
# @param HelpTextObjList: List of HelpTextObject
#
#
# @return HelpStr: the help text string found, '' means no help text found
#
def _GetHelpStr(HelpTextObjList):
ValueList = []
ValueList = []
for HelpObj in HelpTextObjList:
ValueList.append((HelpObj.GetLang(), HelpObj.GetString()))
return GetLocalValue(ValueList, True)

View File

@@ -3,9 +3,9 @@
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -69,7 +69,7 @@ def ParseHeaderCommentSection(CommentList, FileName = None, IsBinaryHeader = Fal
else:
STR_HEADER_COMMENT_START = "@file"
HeaderCommentStage = HEADER_COMMENT_NOT_STARTED
#
# first find the last copyright line
#
@@ -79,24 +79,24 @@ def ParseHeaderCommentSection(CommentList, FileName = None, IsBinaryHeader = Fal
if _IsCopyrightLine(Line):
Last = Index
break
for Item in CommentList:
Line = Item[0]
LineNo = Item[1]
if not Line.startswith(TAB_COMMENT_SPLIT) and Line:
Logger.Error("\nUPT", FORMAT_INVALID, ST.ERR_INVALID_COMMENT_FORMAT, FileName, Item[1])
Comment = CleanString2(Line)[1]
Comment = Comment.strip()
#
# if there are blank lines between License or Description, keep them as they would be
# if there are blank lines between License or Description, keep them as they would be
# indication of different block; or in the position that Abstract should be, also keep it
# as it indicates that no abstract
#
if not Comment and HeaderCommentStage not in [HEADER_COMMENT_LICENSE, \
HEADER_COMMENT_DESCRIPTION, HEADER_COMMENT_ABSTRACT]:
continue
if HeaderCommentStage == HEADER_COMMENT_NOT_STARTED:
if Comment.startswith(STR_HEADER_COMMENT_START):
HeaderCommentStage = HEADER_COMMENT_ABSTRACT
@@ -114,20 +114,20 @@ def ParseHeaderCommentSection(CommentList, FileName = None, IsBinaryHeader = Fal
ValidateCopyright(Result, ST.WRN_INVALID_COPYRIGHT, FileName, LineNo, ErrMsg)
Copyright += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
else:
else:
Abstract += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_DESCRIPTION
elif HeaderCommentStage == HEADER_COMMENT_DESCRIPTION:
#
# in case there is no description
#
#
if _IsCopyrightLine(Comment):
Result, ErrMsg = _ValidateCopyright(Comment)
ValidateCopyright(Result, ST.WRN_INVALID_COPYRIGHT, FileName, LineNo, ErrMsg)
Copyright += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
else:
Description += Comment + EndOfLine
Description += Comment + EndOfLine
elif HeaderCommentStage == HEADER_COMMENT_COPYRIGHT:
if _IsCopyrightLine(Comment):
Result, ErrMsg = _ValidateCopyright(Comment)
@@ -136,23 +136,23 @@ def ParseHeaderCommentSection(CommentList, FileName = None, IsBinaryHeader = Fal
else:
#
# Contents after copyright line are license, those non-copyright lines in between
# copyright line will be discarded
# copyright line will be discarded
#
if LineNo > Last:
if License:
License += EndOfLine
License += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_LICENSE
HeaderCommentStage = HEADER_COMMENT_LICENSE
else:
if not Comment and not License:
continue
License += Comment + EndOfLine
return Abstract.strip(), Description.strip(), Copyright.strip(), License.strip()
## _IsCopyrightLine
# check whether current line is copyright line, the criteria is whether there is case insensitive keyword "Copyright"
# followed by zero or more white space characters followed by a "(" character
# check whether current line is copyright line, the criteria is whether there is case insensitive keyword "Copyright"
# followed by zero or more white space characters followed by a "(" character
#
# @param LineContent: the line need to be checked
# @return: True if current line is copyright line, False else
@@ -160,7 +160,7 @@ def ParseHeaderCommentSection(CommentList, FileName = None, IsBinaryHeader = Fal
def _IsCopyrightLine (LineContent):
LineContent = LineContent.upper()
Result = False
ReIsCopyrightRe = re.compile(r"""(^|\s)COPYRIGHT *\(""", re.DOTALL)
if ReIsCopyrightRe.search(LineContent):
Result = True
@@ -169,23 +169,23 @@ def _IsCopyrightLine (LineContent):
## ParseGenericComment
#
# @param GenericComment: Generic comment list, element of
# @param GenericComment: Generic comment list, element of
# (CommentLine, LineNum)
# @param ContainerFile: Input value for filename of Dec file
#
#
def ParseGenericComment (GenericComment, ContainerFile=None, SkipTag=None):
if ContainerFile:
pass
HelpTxt = None
HelpStr = ''
HelpTxt = None
HelpStr = ''
for Item in GenericComment:
CommentLine = Item[0]
Comment = CleanString2(CommentLine)[1]
if SkipTag is not None and Comment.startswith(SkipTag):
Comment = Comment.replace(SkipTag, '', 1)
HelpStr += Comment + '\n'
if HelpStr:
HelpTxt = TextObject()
if HelpStr.endswith('\n') and not HelpStr.endswith('\n\n') and HelpStr != '\n':
@@ -196,22 +196,22 @@ def ParseGenericComment (GenericComment, ContainerFile=None, SkipTag=None):
## ParsePcdErrorCode
#
# @param Value: original ErrorCode value
# @param Value: original ErrorCode value
# @param ContainerFile: Input value for filename of Dec file
# @param LineNum: Line Num
#
def ParsePcdErrorCode (Value = None, ContainerFile = None, LineNum = None):
try:
# @param LineNum: Line Num
#
def ParsePcdErrorCode (Value = None, ContainerFile = None, LineNum = None):
try:
if Value.strip().startswith((TAB_HEX_START, TAB_CAPHEX_START)):
Base = 16
else:
Base = 10
ErrorCode = long(Value, Base)
if ErrorCode > PCD_ERR_CODE_MAX_SIZE or ErrorCode < 0:
Logger.Error('Parser',
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
"The format %s of ErrorCode is not valid, should be UNIT32 type or long type" % Value,
File = ContainerFile,
File = ContainerFile,
Line = LineNum)
#
# To delete the tailing 'L'
@@ -220,27 +220,27 @@ def ParsePcdErrorCode (Value = None, ContainerFile = None, LineNum = None):
except ValueError as XStr:
if XStr:
pass
Logger.Error('Parser',
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
"The format %s of ErrorCode is not valid, should be UNIT32 type or long type" % Value,
File = ContainerFile,
File = ContainerFile,
Line = LineNum)
## ParseDecPcdGenericComment
#
# @param GenericComment: Generic comment list, element of (CommentLine,
# @param GenericComment: Generic comment list, element of (CommentLine,
# LineNum)
# @param ContainerFile: Input value for filename of Dec file
#
def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCName, CName, MacroReplaceDict):
HelpStr = ''
#
def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCName, CName, MacroReplaceDict):
HelpStr = ''
PromptStr = ''
PcdErr = None
PcdErrList = []
ValidValueNum = 0
ValidRangeNum = 0
ExpressionNum = 0
for (CommentLine, LineNum) in GenericComment:
Comment = CleanString2(CommentLine)[1]
#
@@ -252,13 +252,13 @@ def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCNam
if MatchedStr:
Macro = MatchedStr.strip().lstrip('$(').rstrip(')').strip()
if Macro in MacroReplaceDict:
Comment = Comment.replace(MatchedStr, MacroReplaceDict[Macro])
Comment = Comment.replace(MatchedStr, MacroReplaceDict[Macro])
if Comment.startswith(TAB_PCD_VALIDRANGE):
if ValidValueNum > 0 or ExpressionNum > 0:
Logger.Error('Parser',
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_RANGES,
File = ContainerFile,
File = ContainerFile,
Line = LineNum)
else:
PcdErr = PcdErrorObject()
@@ -280,21 +280,21 @@ def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCNam
else:
Logger.Error("Parser",
FORMAT_NOT_SUPPORTED,
Cause,
ContainerFile,
Cause,
ContainerFile,
LineNum)
elif Comment.startswith(TAB_PCD_VALIDLIST):
if ValidRangeNum > 0 or ExpressionNum > 0:
Logger.Error('Parser',
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_RANGES,
File = ContainerFile,
File = ContainerFile,
Line = LineNum)
elif ValidValueNum > 0:
Logger.Error('Parser',
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_VALIDVALUE,
File = ContainerFile,
File = ContainerFile,
Line = LineNum)
else:
PcdErr = PcdErrorObject()
@@ -317,15 +317,15 @@ def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCNam
else:
Logger.Error("Parser",
FORMAT_NOT_SUPPORTED,
Cause,
ContainerFile,
Cause,
ContainerFile,
LineNum)
elif Comment.startswith(TAB_PCD_EXPRESSION):
if ValidRangeNum > 0 or ValidValueNum > 0:
Logger.Error('Parser',
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_RANGES,
File = ContainerFile,
File = ContainerFile,
Line = LineNum)
else:
PcdErr = PcdErrorObject()
@@ -344,24 +344,24 @@ def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCNam
else:
PcdErr.SetExpression(Expression)
PcdErrList.append(PcdErr)
else:
else:
Logger.Error("Parser",
FORMAT_NOT_SUPPORTED,
Cause,
ContainerFile,
LineNum)
Cause,
ContainerFile,
LineNum)
elif Comment.startswith(TAB_PCD_PROMPT):
if PromptStr:
Logger.Error('Parser',
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_PROMPT,
File = ContainerFile,
File = ContainerFile,
Line = LineNum)
PromptStr = Comment.replace(TAB_PCD_PROMPT, "", 1).strip()
else:
if Comment:
HelpStr += Comment + '\n'
#
# remove the last EOL if the comment is of format 'FOO\n'
#
@@ -384,9 +384,9 @@ def ParseDecPcdTailComment (TailCommentList, ContainerFile):
LineNum = TailCommentList[0][1]
Comment = TailComment.lstrip(" #")
ReFindFirstWordRe = re.compile(r"""^([^ #]*)""", re.DOTALL)
#
# get first word and compare with SUP_MODULE_LIST
#
@@ -398,7 +398,7 @@ def ParseDecPcdTailComment (TailCommentList, ContainerFile):
# parse line, it must have supported module type specified
#
if Comment.find(TAB_COMMENT_SPLIT) == -1:
Comment += TAB_COMMENT_SPLIT
Comment += TAB_COMMENT_SPLIT
SupMode, HelpStr = GetSplitValueList(Comment, TAB_COMMENT_SPLIT, 1)
SupModuleList = []
for Mod in GetSplitValueList(SupMode, TAB_SPACE_SPLIT):
@@ -407,8 +407,8 @@ def ParseDecPcdTailComment (TailCommentList, ContainerFile):
elif Mod not in SUP_MODULE_LIST:
Logger.Error("UPT",
FORMAT_INVALID,
ST.WRN_INVALID_MODULE_TYPE%Mod,
ContainerFile,
ST.WRN_INVALID_MODULE_TYPE%Mod,
ContainerFile,
LineNum)
else:
SupModuleList.append(Mod)
@@ -417,15 +417,15 @@ def ParseDecPcdTailComment (TailCommentList, ContainerFile):
## _CheckListExpression
#
# @param Expression: Pcd value list expression
# @param Expression: Pcd value list expression
#
def _CheckListExpression(Expression):
ListExpr = ''
if TAB_VALUE_SPLIT in Expression:
ListExpr = Expression[Expression.find(TAB_VALUE_SPLIT)+1:]
ListExpr = Expression[Expression.find(TAB_VALUE_SPLIT)+1:]
else:
ListExpr = Expression
return IsValidListExpr(ListExpr)
## _CheckExpreesion
@@ -443,14 +443,14 @@ def _CheckExpression(Expression):
## _CheckRangeExpression
#
# @param Expression: Pcd range expression
#
#
def _CheckRangeExpression(Expression):
RangeExpr = ''
if TAB_VALUE_SPLIT in Expression:
RangeExpr = Expression[Expression.find(TAB_VALUE_SPLIT)+1:]
else:
RangeExpr = Expression
return IsValidRangeExpr(RangeExpr)
## ValidateCopyright
@@ -459,28 +459,28 @@ def _CheckRangeExpression(Expression):
#
def ValidateCopyright(Result, ErrType, FileName, LineNo, ErrMsg):
if not Result:
Logger.Warn("\nUPT", ErrType, FileName, LineNo, ErrMsg)
Logger.Warn("\nUPT", ErrType, FileName, LineNo, ErrMsg)
## _ValidateCopyright
#
# @param Line: Line that contains copyright information, # stripped
#
#
# @retval Result: True if line is conformed to Spec format, False else
# @retval ErrMsg: the detailed error description
#
#
def _ValidateCopyright(Line):
if Line:
pass
Result = True
ErrMsg = ''
return Result, ErrMsg
def GenerateTokenList (Comment):
#
# Tokenize Comment using '#' and ' ' as token seperators
#
RelplacedComment = None
RelplacedComment = None
while Comment != RelplacedComment:
RelplacedComment = Comment
Comment = Comment.replace('##', '#').replace(' ', ' ').replace(' ', '#').strip('# ')
@@ -500,25 +500,25 @@ def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable)
Usage = None
Type = None
String = None
Comment = Comment[0]
NumTokens = 2
NumTokens = 2
if ParseVariable:
#
# Remove white space around first instance of ':' from Comment if 'Variable'
#
# Remove white space around first instance of ':' from Comment if 'Variable'
# is in front of ':' and Variable is the 1st or 2nd token in Comment.
#
List = Comment.split(':', 1)
List = Comment.split(':', 1)
if len(List) > 1:
SubList = GenerateTokenList (List[0].strip())
if len(SubList) in [1, 2] and SubList[-1] == 'Variable':
if List[1].strip().find('L"') == 0:
if List[1].strip().find('L"') == 0:
Comment = List[0].strip() + ':' + List[1].strip()
#
#
# Remove first instance of L"<VariableName> from Comment and put into String
# if and only if L"<VariableName>" is the 1st token, the 2nd token. Or
# if and only if L"<VariableName>" is the 1st token, the 2nd token. Or
# L"<VariableName>" is the third token immediately following 'Variable:'.
#
End = -1
@@ -533,25 +533,25 @@ def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable)
End = String[2:].find('"')
if End >= 0:
SubList = GenerateTokenList (Comment[:Start])
if len(SubList) < 2:
if len(SubList) < 2:
Comment = Comment[:Start] + String[End + 3:]
String = String[:End + 3]
Type = 'Variable'
NumTokens = 1
NumTokens = 1
#
# Initialze HelpText to Comment.
# Initialze HelpText to Comment.
# Content will be remove from HelpText as matching tokens are found
#
#
HelpText = Comment
#
# Tokenize Comment using '#' and ' ' as token seperators
#
List = GenerateTokenList (Comment)
#
# Search first two tokens for Usage and Type and remove any matching tokens
# Search first two tokens for Usage and Type and remove any matching tokens
# from HelpText
#
for Token in List[0:NumTokens]:
@@ -563,39 +563,39 @@ def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable)
if Type is None and Token in TypeTokens:
Type = TypeTokens[Token]
HelpText = HelpText.replace(Token, '')
if Usage is not None:
if Usage is not None:
for Token in List[0:NumTokens]:
if Token in RemoveTokens:
HelpText = HelpText.replace(Token, '')
#
# If no Usage token is present and set Usage to UNDEFINED
#
#
if Usage is None:
Usage = 'UNDEFINED'
#
# If no Type token is present and set Type to UNDEFINED
#
#
if Type is None:
Type = 'UNDEFINED'
#
# If Type is not 'Variable:', then set String to None
#
#
if Type != 'Variable':
String = None
String = None
#
# Strip ' ' and '#' from the beginning of HelpText
# If HelpText is an empty string after all parsing is
# If HelpText is an empty string after all parsing is
# complete then set HelpText to None
#
#
HelpText = HelpText.lstrip('# ')
if HelpText == '':
HelpText = None
#
# Return parsing results
#
return Usage, Type, String, HelpText
#
return Usage, Type, String, HelpText

View File

@@ -1,11 +1,11 @@
## @file
# This file is used to define class for data type structure
#
# Copyright (c) 2011 - 2016, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -64,13 +64,13 @@ USAGE_ITEM_UNDEFINED = 'UNDEFINED'
USAGE_CONSUMES_LIST = [USAGE_ITEM_CONSUMES,
'CONSUMED',
'ALWAYS_CONSUMED',
'ALWAYS_CONSUMES'
'ALWAYS_CONSUMES'
]
USAGE_PRODUCES_LIST = [USAGE_ITEM_PRODUCES,
'PRODUCED',
'ALWAYS_PRODUCED',
'ALWAYS_PRODUCES'
'ALWAYS_PRODUCES'
]
USAGE_SOMETIMES_PRODUCES_LIST = [USAGE_ITEM_SOMETIMES_PRODUCES,
@@ -94,7 +94,7 @@ TAB_STR_TOKENERR = 'ERR'
#
# Dictionary of usage tokens and their synonmys
#
#
ALL_USAGE_TOKENS = {
"PRODUCES" : "PRODUCES",
"PRODUCED" : "PRODUCES",
@@ -109,20 +109,20 @@ ALL_USAGE_TOKENS = {
"SOMETIMES_CONSUMES" : "SOMETIMES_CONSUMES",
"SOMETIMES_CONSUMED" : "SOMETIMES_CONSUMES",
"SOMETIME_CONSUMES" : "SOMETIMES_CONSUMES",
"UNDEFINED" : "UNDEFINED"
"UNDEFINED" : "UNDEFINED"
}
PROTOCOL_USAGE_TOKENS = {
"TO_START" : "TO_START",
"BY_START" : "BY_START"
}
PROTOCOL_USAGE_TOKENS.update (ALL_USAGE_TOKENS)
#
# Dictionary of GUID type tokens
#
GUID_TYPE_TOKENS = {
#
GUID_TYPE_TOKENS = {
"Event" : "Event",
"File" : "File",
"FV" : "FV",
@@ -134,31 +134,31 @@ GUID_TYPE_TOKENS = {
"Hob:" : "HOB",
"SystemTable" : "SystemTable",
"TokenSpaceGuid" : "TokenSpaceGuid",
"UNDEFINED" : "UNDEFINED"
"UNDEFINED" : "UNDEFINED"
}
#
# Dictionary of Protocol Notify tokens and their synonyms
#
PROTOCOL_NOTIFY_TOKENS = {
#
PROTOCOL_NOTIFY_TOKENS = {
"NOTIFY" : "NOTIFY",
"PROTOCOL_NOTIFY" : "NOTIFY",
"UNDEFINED" : "UNDEFINED"
"UNDEFINED" : "UNDEFINED"
}
#
# Dictionary of PPI Notify tokens and their synonyms
#
PPI_NOTIFY_TOKENS = {
#
PPI_NOTIFY_TOKENS = {
"NOTIFY" : "NOTIFY",
"PPI_NOTIFY" : "NOTIFY",
"UNDEFINED" : "UNDEFINED"
"UNDEFINED" : "UNDEFINED"
}
EVENT_TOKENS = {
"EVENT_TYPE_PERIODIC_TIMER" : "EVENT_TYPE_PERIODIC_TIMER",
"EVENT_TYPE_RELATIVE_TIMER" : "EVENT_TYPE_RELATIVE_TIMER",
"UNDEFINED" : "UNDEFINED"
"UNDEFINED" : "UNDEFINED"
}
BOOTMODE_TOKENS = {
@@ -182,16 +182,16 @@ BOOTMODE_TOKENS = {
"RECOVERY_S4_RESUME" : "RECOVERY_S4_RESUME",
"RECOVERY_S5_RESUME" : "RECOVERY_S5_RESUME",
"RECOVERY_FLASH_UPDATE" : "RECOVERY_FLASH_UPDATE",
"UNDEFINED" : "UNDEFINED"
"UNDEFINED" : "UNDEFINED"
}
HOB_TOKENS = {
HOB_TOKENS = {
"PHIT" : "PHIT",
"MEMORY_ALLOCATION" : "MEMORY_ALLOCATION",
"LOAD_PEIM" : "LOAD_PEIM",
"RESOURCE_DESCRIPTOR" : "RESOURCE_DESCRIPTOR",
"FIRMWARE_VOLUME" : "FIRMWARE_VOLUME",
"UNDEFINED" : "UNDEFINED"
"UNDEFINED" : "UNDEFINED"
}
##
@@ -223,22 +223,22 @@ PCD_DIRVER_TYPE_LIST = ["PEI_PCD_DRIVER", "DXE_PCD_DRIVER"]
#
BOOT_MODE_LIST = ["FULL",
"MINIMAL",
"NO_CHANGE",
"DIAGNOSTICS",
"DEFAULT",
"NO_CHANGE",
"DIAGNOSTICS",
"DEFAULT",
"S2_RESUME",
"S3_RESUME",
"S4_RESUME",
"S5_RESUME",
"S3_RESUME",
"S4_RESUME",
"S5_RESUME",
"FLASH_UPDATE",
"RECOVERY_FULL",
"RECOVERY_MINIMAL",
"RECOVERY_FULL",
"RECOVERY_MINIMAL",
"RECOVERY_NO_CHANGE",
"RECOVERY_DIAGNOSTICS",
"RECOVERY_DIAGNOSTICS",
"RECOVERY_DEFAULT",
"RECOVERY_S2_RESUME",
"RECOVERY_S2_RESUME",
"RECOVERY_S3_RESUME",
"RECOVERY_S4_RESUME",
"RECOVERY_S4_RESUME",
"RECOVERY_S5_RESUME",
"RECOVERY_FLASH_UPDATE"]
@@ -251,9 +251,9 @@ EVENT_TYPE_LIST = ["EVENT_TYPE_PERIODIC_TIMER",
##
# Hob Type List Items
#
HOB_TYPE_LIST = ["PHIT",
HOB_TYPE_LIST = ["PHIT",
"MEMORY_ALLOCATION",
"RESOURCE_DESCRIPTOR",
"RESOURCE_DESCRIPTOR",
"FIRMWARE_VOLUME",
"LOAD_PEIM"]
@@ -290,19 +290,19 @@ BINARY_FILE_TYPE_LIST = ["PE32", "PIC", "TE", "DXE_DEPEX", "VER", "UI", "COMPAT1
BINARY_FILE_TYPE_LIST_IN_UDP = \
["GUID", "FREEFORM",
"UEFI_IMAGE", "PE32", "PIC",
"PEI_DEPEX",
"PEI_DEPEX",
"DXE_DEPEX",
"SMM_DEPEX",
"FV", "TE",
"BIN", "VER", "UI"
"BIN", "VER", "UI"
]
SUBTYPE_GUID_BINARY_FILE_TYPE = "FREEFORM"
##
# Possible values for COMPONENT_TYPE, and their descriptions, are listed in
# the table,
# "Component (module) Types." For each component, the BASE_NAME and
# COMPONENT_TYPE
# Possible values for COMPONENT_TYPE, and their descriptions, are listed in
# the table,
# "Component (module) Types." For each component, the BASE_NAME and
# COMPONENT_TYPE
# are required. The COMPONENT_TYPE definition is case sensitive.
#
COMPONENT_TYPE_LIST = [
@@ -436,7 +436,7 @@ BINARY_FILE_TYPE_FV = 'FV'
BINARY_FILE_TYPE_UI_LIST = [BINARY_FILE_TYPE_UNI_UI,
BINARY_FILE_TYPE_SEC_UI,
BINARY_FILE_TYPE_UI
]
]
BINARY_FILE_TYPE_VER_LIST = [BINARY_FILE_TYPE_UNI_VER,
BINARY_FILE_TYPE_SEC_VER,
BINARY_FILE_TYPE_VER
@@ -712,7 +712,7 @@ TAB_INF_DEFINES_FV_EXT = 'FV_EXT'
TAB_INF_DEFINES_SOURCE_FV = 'SOURCE_FV'
TAB_INF_DEFINES_PACKAGE = 'PACKAGE'
TAB_INF_DEFINES_VERSION_NUMBER = 'VERSION_NUMBER'
TAB_INF_DEFINES_VERSION = 'VERSION'
TAB_INF_DEFINES_VERSION = 'VERSION'
TAB_INF_DEFINES_VERSION_STRING = 'VERSION_STRING'
TAB_INF_DEFINES_PCD_IS_DRIVER = 'PCD_IS_DRIVER'
TAB_INF_DEFINES_TIANO_EDK1_FLASHMAP_H = 'TIANO_EDK1_FLASHMAP_H'
@@ -720,9 +720,9 @@ TAB_INF_DEFINES_ENTRY_POINT = 'ENTRY_POINT'
TAB_INF_DEFINES_UNLOAD_IMAGE = 'UNLOAD_IMAGE'
TAB_INF_DEFINES_CONSTRUCTOR = 'CONSTRUCTOR'
TAB_INF_DEFINES_DESTRUCTOR = 'DESTRUCTOR'
TAB_INF_DEFINES_PCI_VENDOR_ID = 'PCI_VENDOR_ID'
TAB_INF_DEFINES_PCI_DEVICE_ID = 'PCI_DEVICE_ID'
TAB_INF_DEFINES_PCI_CLASS_CODE = 'PCI_CLASS_CODE'
TAB_INF_DEFINES_PCI_VENDOR_ID = 'PCI_VENDOR_ID'
TAB_INF_DEFINES_PCI_DEVICE_ID = 'PCI_DEVICE_ID'
TAB_INF_DEFINES_PCI_CLASS_CODE = 'PCI_CLASS_CODE'
TAB_INF_DEFINES_PCI_REVISION = 'PCI_REVISION'
TAB_INF_DEFINES_PCI_COMPRESS = 'PCI_COMPRESS'
TAB_INF_DEFINES_DEFINE = 'DEFINE'
@@ -819,12 +819,12 @@ TAB_IF_EXIST = '!if exist'
TAB_UNKNOWN = 'UNKNOWN'
#
# Header section (virtual section for abstract, description, copyright,
# Header section (virtual section for abstract, description, copyright,
# license)
#
TAB_HEADER = 'Header'
TAB_HEADER_ABSTRACT = 'Abstract'
TAB_HEADER_DESCRIPTION = 'Description'
TAB_HEADER_DESCRIPTION = 'Description'
TAB_HEADER_COPYRIGHT = 'Copyright'
TAB_HEADER_LICENSE = 'License'
TAB_BINARY_HEADER_IDENTIFIER = 'BinaryHeader'
@@ -833,7 +833,7 @@ TAB_BINARY_HEADER_USERID = 'TianoCore'
#
# Build database path
#
DATABASE_PATH = ":memory:"
DATABASE_PATH = ":memory:"
#
# used by ECC
#
@@ -855,7 +855,7 @@ TAB_DEPENDENCY_EXPRESSION_FILE = "DEPENDENCY-EXPRESSION-FILE"
TAB_UNKNOWN_FILE = "UNKNOWN-TYPE-FILE"
TAB_DEFAULT_BINARY_FILE = "_BINARY_FILE_"
#
# used to indicate the state of processing header comment section of dec,
# used to indicate the state of processing header comment section of dec,
# inf files
#
HEADER_COMMENT_NOT_STARTED = -1
@@ -953,6 +953,6 @@ TOOL_FAMILY_LIST = ["MSFT",
TYPE_HOB_SECTION = 'HOB'
TYPE_EVENT_SECTION = 'EVENT'
TYPE_BOOTMODE_SECTION = 'BOOTMODE'
TYPE_BOOTMODE_SECTION = 'BOOTMODE'
PCD_ERR_CODE_MAX_SIZE = 4294967295

View File

@@ -1,11 +1,11 @@
## @file
# This file is used to check PCD logical expression
#
# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -47,7 +47,7 @@ def IsValidBareCString(String):
and (IntChar < 0x23 or IntChar > 0x7e):
return False
PreChar = Char
# Last char cannot be \ if PreChar is not \
if LastChar == '\\' and PreChar == LastChar:
return False
@@ -83,7 +83,7 @@ class _ExprBase:
self.Token = Token
self.Index = 0
self.Len = len(Token)
## SkipWhitespace
#
def SkipWhitespace(self):
@@ -91,14 +91,14 @@ class _ExprBase:
if Char not in ' \t':
break
self.Index += 1
## IsCurrentOp
#
# @param OpList: option list
#
# @param OpList: option list
#
def IsCurrentOp(self, OpList):
self.SkipWhitespace()
LetterOp = ["EQ", "NE", "GE", "LE", "GT", "LT", "NOT", "and", "AND",
LetterOp = ["EQ", "NE", "GE", "LE", "GT", "LT", "NOT", "and", "AND",
"or", "OR", "XOR"]
OpMap = {
'|' : '|',
@@ -107,11 +107,11 @@ class _ExprBase:
'>' : '=',
'<' : '='
}
for Operator in OpList:
if not self.Token[self.Index:].startswith(Operator):
continue
self.Index += len(Operator)
Char = self.Token[self.Index : self.Index + 1]
@@ -119,36 +119,36 @@ class _ExprBase:
or (Operator in OpMap and OpMap[Operator] == Char):
self.Index -= len(Operator)
break
return True
return False
## _LogicalExpressionParser
#
# @param _ExprBase: _ExprBase object
#
#
class _LogicalExpressionParser(_ExprBase):
#
# STRINGITEM can only be logical field according to spec
#
STRINGITEM = -1
#
# Evaluate to True or False
#
LOGICAL = 0
REALLOGICAL = 2
#
# Just arithmetic expression
#
ARITH = 1
def __init__(self, Token):
_ExprBase.__init__(self, Token)
self.Parens = 0
def _CheckToken(self, MatchList):
for Match in MatchList:
if Match and Match.start() == 0:
@@ -156,7 +156,7 @@ class _LogicalExpressionParser(_ExprBase):
self.Token[self.Index:self.Index+Match.end()]
):
return False
self.Index += Match.end()
if self.Token[self.Index - 1] == '"':
return True
@@ -164,61 +164,61 @@ class _LogicalExpressionParser(_ExprBase):
self.Token[self.Index:self.Index+1].isalnum():
self.Index -= Match.end()
return False
Token = self.Token[self.Index - Match.end():self.Index]
if Token.strip() in ["EQ", "NE", "GE", "LE", "GT", "LT",
"NOT", "and", "AND", "or", "OR", "XOR"]:
self.Index -= Match.end()
return False
return True
return False
def IsAtomicNumVal(self):
#
# Hex number
#
Match1 = re.compile(self.HEX_PATTERN).match(self.Token[self.Index:])
#
# Number
#
Match2 = re.compile(self.INT_PATTERN).match(self.Token[self.Index:])
#
# Macro
#
Match3 = re.compile(self.MACRO_PATTERN).match(self.Token[self.Index:])
#
# PcdName
#
Match4 = re.compile(self.PCD_PATTERN).match(self.Token[self.Index:])
return self._CheckToken([Match1, Match2, Match3, Match4])
def IsAtomicItem(self):
#
# Macro
#
Match1 = re.compile(self.MACRO_PATTERN).match(self.Token[self.Index:])
#
# PcdName
#
Match2 = re.compile(self.PCD_PATTERN).match(self.Token[self.Index:])
#
# Quoted string
#
Match3 = re.compile(self.QUOTED_PATTERN).\
match(self.Token[self.Index:].replace('\\\\', '//').\
replace('\\\"', '\\\''))
return self._CheckToken([Match1, Match2, Match3])
## A || B
#
def LogicalExpression(self):
@@ -233,12 +233,12 @@ class _LogicalExpressionParser(_ExprBase):
raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
Ret = self.REALLOGICAL
return Ret
def SpecNot(self):
if self.IsCurrentOp(["NOT", "!", "not"]):
return self.SpecNot()
return self.Rel()
## A < B, A > B, A <= B, A >= B
#
def Rel(self):
@@ -252,7 +252,7 @@ class _LogicalExpressionParser(_ExprBase):
raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
Ret = self.REALLOGICAL
return Ret
## A + B, A - B
#
def Expr(self):
@@ -269,7 +269,7 @@ class _LogicalExpressionParser(_ExprBase):
return Ret
## Factor
#
#
def Factor(self):
if self.IsCurrentOp(["("]):
self.Parens += 1
@@ -279,7 +279,7 @@ class _LogicalExpressionParser(_ExprBase):
(self.Token, self.Token[self.Index:]))
self.Parens -= 1
return Ret
if self.IsAtomicItem():
if self.Token[self.Index - 1] == '"':
return self.STRINGITEM
@@ -289,7 +289,7 @@ class _LogicalExpressionParser(_ExprBase):
else:
raise _ExprError(ST.ERR_EXPR_FACTOR % \
(self.Token[self.Index:], self.Token))
## IsValidLogicalExpression
#
def IsValidLogicalExpression(self):
@@ -319,7 +319,7 @@ class _ValidRangeExpressionParser(_ExprBase):
self.INT = 2
self.IsParenHappen = False
self.IsLogicalOpHappen = False
## IsValidRangeExpression
#
def IsValidRangeExpression(self):
@@ -330,12 +330,12 @@ class _ValidRangeExpressionParser(_ExprBase):
return False, ST.ERR_EXPR_RANGE % self.Token
except _ExprError as XExcept:
return False, XExcept.Error
self.SkipWhitespace()
if self.Index != self.Len:
return False, (ST.ERR_EXPR_RANGE % self.Token)
return True, ''
## RangeExpression
#
def RangeExpression(self):
@@ -346,22 +346,22 @@ class _ValidRangeExpressionParser(_ExprBase):
raise _ExprError(ST.ERR_PAREN_NOT_USED % self.Token)
self.IsParenHappen = False
Ret = self.Unary()
if self.IsCurrentOp(['XOR']):
Ret = self.Unary()
return Ret
## Unary
#
def Unary(self):
if self.IsCurrentOp(["NOT"]):
return self.Unary()
return self.ValidRange()
## ValidRange
#
#
def ValidRange(self):
Ret = -1
if self.IsCurrentOp(["("]):
@@ -375,10 +375,10 @@ class _ValidRangeExpressionParser(_ExprBase):
raise _ExprError(ST.ERR_EXPR_RIGHT_PAREN % self.Token)
self.Parens -= 1
return Ret
if self.IsLogicalOpHappen:
raise _ExprError(ST.ERR_PAREN_NOT_USED % self.Token)
if self.IsCurrentOp(["LT", "GT", "LE", "GE", "EQ", "XOR"]):
IntMatch = \
re.compile(self.INT_PATTERN).match(self.Token[self.Index:])
@@ -417,7 +417,7 @@ class _ValidListExpressionParser(_ExprBase):
def __init__(self, Token):
_ExprBase.__init__(self, Token)
self.NUM = 1
def IsValidListExpression(self):
if self.Len == 0:
return False, ST.ERR_EXPR_LIST_EMPTY
@@ -432,7 +432,7 @@ class _ValidListExpressionParser(_ExprBase):
return False, (ST.ERR_EXPR_LIST % self.Token)
return True, ''
def ListExpression(self):
Ret = -1
self.SkipWhitespace()
@@ -444,7 +444,7 @@ class _ValidListExpressionParser(_ExprBase):
raise _ExprError(ST.ERR_EXPR_LIST % self.Token)
return Ret
## _StringTestParser
#
class _StringTestParser(_ExprBase):
@@ -452,7 +452,7 @@ class _StringTestParser(_ExprBase):
_ExprBase.__init__(self, Token)
## IsValidStringTest
#
#
def IsValidStringTest(self):
if self.Len == 0:
return False, ST.ERR_EXPR_EMPTY
@@ -463,7 +463,7 @@ class _StringTestParser(_ExprBase):
return True, ''
## StringItem
#
#
def StringItem(self):
Match1 = re.compile(self.QUOTED_PATTERN)\
.match(self.Token[self.Index:].replace('\\\\', '//')\
@@ -489,7 +489,7 @@ class _StringTestParser(_ExprBase):
(self.Token, self.Token[self.Index:]))
## StringTest
#
#
def StringTest(self):
self.StringItem()
if not self.IsCurrentOp(["==", "EQ", "!=", "NE"]):
@@ -538,7 +538,7 @@ def IsValidRangeExpr(Token):
##
# Check syntax of value list expression token
#
# @param Token: value list expression token
# @param Token: value list expression token
#
def IsValidListExpr(Token):
return _ValidListExpressionParser(Token).IsValidListExpression()
@@ -562,7 +562,7 @@ def IsValidFeatureFlagExp(Token, Flag=False):
if not Valid:
Valid, Cause = IsValidLogicalExpr(Token, Flag)
if not Valid:
return False, Cause
return False, Cause
return True, ""
if __name__ == '__main__':
@@ -570,4 +570,4 @@ if __name__ == '__main__':
print(_LogicalExpressionParser('gCrownBayTokenSpaceGuid.PcdPciDevice1BridgeAddressLE0').IsValidLogicalExpression())

View File

@@ -1,11 +1,11 @@
## @file
# This file is used to define common static strings and global data used by UPT
#
# Copyright (c) 2011 - 2017, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
@@ -16,7 +16,7 @@ GlobalData
'''
#
# The workspace directory
# The workspace directory
#
gWORKSPACE = '.'
gPACKAGE_PATH = None

Some files were not shown because too many files have changed in this diff Show More