Check In tool source code based on Build tool project revision r1655.
git-svn-id: https://edk2.svn.sourceforge.net/svnroot/edk2/trunk/edk2@8964 6f19259b-4bc3-4df7-8a09-765794883524
152
BaseTools/Source/Python/Common/BuildToolError.py
Normal file
@@ -0,0 +1,152 @@
## @file
# Standardized Error Handling infrastructure.
#
# Copyright (c) 2007, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#

FILE_OPEN_FAILURE = 1
FILE_WRITE_FAILURE = 2
FILE_PARSE_FAILURE = 3
FILE_READ_FAILURE = 4
FILE_CREATE_FAILURE = 5
FILE_CHECKSUM_FAILURE = 6
FILE_COMPRESS_FAILURE = 7
FILE_DECOMPRESS_FAILURE = 8
FILE_MOVE_FAILURE = 9
FILE_DELETE_FAILURE = 10
FILE_COPY_FAILURE = 11
FILE_POSITIONING_FAILURE = 12
FILE_ALREADY_EXIST = 13
FILE_NOT_FOUND = 14
FILE_TYPE_MISMATCH = 15
FILE_CASE_MISMATCH = 16
FILE_DUPLICATED = 17
FILE_UNKNOWN_ERROR = 0x0FFF

OPTION_UNKNOWN = 0x1000
OPTION_MISSING = 0x1001
OPTION_CONFLICT = 0x1002
OPTION_VALUE_INVALID = 0x1003
OPTION_DEPRECATED = 0x1004
OPTION_NOT_SUPPORTED = 0x1005
OPTION_UNKNOWN_ERROR = 0x1FFF

PARAMETER_INVALID = 0x2000
PARAMETER_MISSING = 0x2001
PARAMETER_UNKNOWN_ERROR = 0x2FFF

FORMAT_INVALID = 0x3000
FORMAT_NOT_SUPPORTED = 0x3001
FORMAT_UNKNOWN = 0x3002
FORMAT_UNKNOWN_ERROR = 0x3FFF

RESOURCE_NOT_AVAILABLE = 0x4000
RESOURCE_ALLOCATE_FAILURE = 0x4001
RESOURCE_FULL = 0x4002
RESOURCE_OVERFLOW = 0x4003
RESOURCE_UNDERRUN = 0x4004
RESOURCE_UNKNOWN_ERROR = 0x4FFF

ATTRIBUTE_NOT_AVAILABLE = 0x5000
ATTRIBUTE_GET_FAILURE = 0x5001
ATTRIBUTE_SET_FAILURE = 0x5002
ATTRIBUTE_UPDATE_FAILURE = 0x5003
ATTRIBUTE_ACCESS_DENIED = 0x5004
ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF

IO_NOT_READY = 0x6000
IO_BUSY = 0x6001
IO_TIMEOUT = 0x6002
IO_UNKNOWN_ERROR = 0x6FFF

COMMAND_FAILURE = 0x7000

CODE_ERROR = 0xC0DE

AUTOGEN_ERROR = 0xF000
PARSER_ERROR = 0xF001
BUILD_ERROR = 0xF002
GENFDS_ERROR = 0xF003
ECC_ERROR = 0xF004
EOT_ERROR = 0xF005
DDC_ERROR = 0xF009
WARNING_AS_ERROR = 0xF006
MIGRATION_ERROR = 0xF010
ABORT_ERROR = 0xFFFE
UNKNOWN_ERROR = 0xFFFF

## Error message of each error code
gErrorMessage = {
    FILE_NOT_FOUND : "File/directory not found",
    FILE_OPEN_FAILURE : "File open failure",
    FILE_WRITE_FAILURE : "File write failure",
    FILE_PARSE_FAILURE : "File parse failure",
    FILE_READ_FAILURE : "File read failure",
    FILE_CREATE_FAILURE : "File create failure",
    FILE_CHECKSUM_FAILURE : "Invalid checksum of file",
    FILE_COMPRESS_FAILURE : "File compress failure",
    FILE_DECOMPRESS_FAILURE : "File decompress failure",
    FILE_MOVE_FAILURE : "File move failure",
    FILE_DELETE_FAILURE : "File delete failure",
    FILE_COPY_FAILURE : "File copy failure",
    FILE_POSITIONING_FAILURE : "Failed to seek to position",
    FILE_ALREADY_EXIST : "File or directory already exists",
    FILE_TYPE_MISMATCH : "Incorrect file type",
    FILE_CASE_MISMATCH : "File name case mismatch",
    FILE_DUPLICATED : "Duplicated file found",
    FILE_UNKNOWN_ERROR : "Unknown error encountered on file",

    OPTION_UNKNOWN : "Unknown option",
    OPTION_MISSING : "Missing option",
    OPTION_CONFLICT : "Conflicting options",
    OPTION_VALUE_INVALID : "Invalid value of option",
    OPTION_DEPRECATED : "Deprecated option",
    OPTION_NOT_SUPPORTED : "Unsupported option",
    OPTION_UNKNOWN_ERROR : "Unknown error when processing options",

    PARAMETER_INVALID : "Invalid parameter",
    PARAMETER_MISSING : "Missing parameter",
    PARAMETER_UNKNOWN_ERROR : "Unknown error in parameters",

    FORMAT_INVALID : "Invalid syntax/format",
    FORMAT_NOT_SUPPORTED : "Not supported syntax/format",
    FORMAT_UNKNOWN : "Unknown format",
    FORMAT_UNKNOWN_ERROR : "Unknown error in syntax/format",

    RESOURCE_NOT_AVAILABLE : "Not available",
    RESOURCE_ALLOCATE_FAILURE : "Allocate failure",
    RESOURCE_FULL : "Full",
    RESOURCE_OVERFLOW : "Overflow",
    RESOURCE_UNDERRUN : "Underrun",
    RESOURCE_UNKNOWN_ERROR : "Unknown error",

    ATTRIBUTE_NOT_AVAILABLE : "Not available",
    ATTRIBUTE_GET_FAILURE : "Failed to retrieve",
    ATTRIBUTE_SET_FAILURE : "Failed to set",
    ATTRIBUTE_UPDATE_FAILURE : "Failed to update",
    ATTRIBUTE_ACCESS_DENIED : "Access denied",
    ATTRIBUTE_UNKNOWN_ERROR : "Unknown error when accessing",

    COMMAND_FAILURE : "Failed to execute command",

    IO_NOT_READY : "Not ready",
    IO_BUSY : "Busy",
    IO_TIMEOUT : "Timeout",
    IO_UNKNOWN_ERROR : "Unknown error in IO operation",

    UNKNOWN_ERROR : "Unknown error",
}

## Exception indicating a fatal error
class FatalError(Exception):
    pass

if __name__ == "__main__":
    pass
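A minimal usage sketch (illustrative only; the OpenSourceFile helper below is hypothetical and not part of the checked-in sources) of how a tool can pair these codes with their canned messages and the FatalError exception:

# Illustrative sketch; the helper name is made up for the example.
from BuildToolError import FILE_NOT_FOUND, FatalError, gErrorMessage

def OpenSourceFile(Path):
    try:
        return open(Path, 'r')
    except IOError:
        # Look up the standardized message for the code before raising.
        raise FatalError("%s: %s" % (gErrorMessage[FILE_NOT_FOUND], Path))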
401
BaseTools/Source/Python/Common/DataType.py
Normal file
@@ -0,0 +1,401 @@
## @file
# This file is used to define common static strings used by INF/DEC/DSC files
#
# Copyright (c) 2007 ~ 2008, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.

##
# Common Definitions
#
TAB_SPLIT = '.'
TAB_COMMENT_R8_START = '/*'
TAB_COMMENT_R8_END = '*/'
TAB_COMMENT_R8_SPLIT = '//'
TAB_COMMENT_SPLIT = '#'
TAB_EQUAL_SPLIT = '='
TAB_VALUE_SPLIT = '|'
TAB_COMMA_SPLIT = ','
TAB_SPACE_SPLIT = ' '
TAB_SECTION_START = '['
TAB_SECTION_END = ']'
TAB_OPTION_START = '<'
TAB_OPTION_END = '>'
TAB_SLASH = '\\'
TAB_BACK_SLASH = '/'

TAB_EDK_SOURCE = '$(EDK_SOURCE)'
TAB_EFI_SOURCE = '$(EFI_SOURCE)'
TAB_WORKSPACE = '$(WORKSPACE)'

TAB_ARCH_NULL = ''
TAB_ARCH_COMMON = 'COMMON'
TAB_ARCH_IA32 = 'IA32'
TAB_ARCH_X64 = 'X64'
TAB_ARCH_IPF = 'IPF'
TAB_ARCH_ARM = 'ARM'
TAB_ARCH_EBC = 'EBC'

ARCH_LIST = [TAB_ARCH_IA32, TAB_ARCH_X64, TAB_ARCH_IPF, TAB_ARCH_ARM, TAB_ARCH_EBC]
ARCH_LIST_FULL = [TAB_ARCH_COMMON] + ARCH_LIST

SUP_MODULE_BASE = 'BASE'
SUP_MODULE_SEC = 'SEC'
SUP_MODULE_PEI_CORE = 'PEI_CORE'
SUP_MODULE_PEIM = 'PEIM'
SUP_MODULE_DXE_CORE = 'DXE_CORE'
SUP_MODULE_DXE_DRIVER = 'DXE_DRIVER'
SUP_MODULE_DXE_RUNTIME_DRIVER = 'DXE_RUNTIME_DRIVER'
SUP_MODULE_DXE_SAL_DRIVER = 'DXE_SAL_DRIVER'
SUP_MODULE_DXE_SMM_DRIVER = 'DXE_SMM_DRIVER'
SUP_MODULE_UEFI_DRIVER = 'UEFI_DRIVER'
SUP_MODULE_UEFI_APPLICATION = 'UEFI_APPLICATION'
SUP_MODULE_USER_DEFINED = 'USER_DEFINED'
SUP_MODULE_SMM_DRIVER = 'SMM_DRIVER'
SUP_MODULE_SMM_CORE = 'SMM_CORE'

SUP_MODULE_LIST = [SUP_MODULE_BASE, SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, \
                   SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_UEFI_DRIVER, \
                   SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_USER_DEFINED, SUP_MODULE_SMM_DRIVER, SUP_MODULE_SMM_CORE]
SUP_MODULE_LIST_STRING = TAB_VALUE_SPLIT.join(l for l in SUP_MODULE_LIST)

EDK_COMPONENT_TYPE_LIBRARY = 'LIBRARY'
EDK_COMPONENT_TYPE_SECUARITY_CORE = 'SECUARITY_CORE'
EDK_COMPONENT_TYPE_PEI_CORE = 'PEI_CORE'
EDK_COMPONENT_TYPE_COMBINED_PEIM_DRIVER = 'COMBINED_PEIM_DRIVER'
EDK_COMPONENT_TYPE_PIC_PEIM = 'PIC_PEIM'
EDK_COMPONENT_TYPE_RELOCATABLE_PEIM = 'RELOCATABLE_PEIM'
EDK_COMPONENT_TYPE_BS_DRIVER = 'BS_DRIVER'
EDK_COMPONENT_TYPE_RT_DRIVER = 'RT_DRIVER'
EDK_COMPONENT_TYPE_SAL_RT_DRIVER = 'SAL_RT_DRIVER'
EDK_COMPONENT_TYPE_APPLICATION = 'APPLICATION'

BINARY_FILE_TYPE_FW = 'FW'
BINARY_FILE_TYPE_GUID = 'GUID'
BINARY_FILE_TYPE_PREEFORM = 'PREEFORM'
BINARY_FILE_TYPE_UEFI_APP = 'UEFI_APP'
BINARY_FILE_TYPE_UNI_UI = 'UNI_UI'
BINARY_FILE_TYPE_UNI_VER = 'UNI_VER'
BINARY_FILE_TYPE_LIB = 'LIB'
BINARY_FILE_TYPE_PE32 = 'PE32'
BINARY_FILE_TYPE_PIC = 'PIC'
BINARY_FILE_TYPE_PEI_DEPEX = 'PEI_DEPEX'
BINARY_FILE_TYPE_DXE_DEPEX = 'DXE_DEPEX'
BINARY_FILE_TYPE_TE = 'TE'
BINARY_FILE_TYPE_VER = 'VER'
BINARY_FILE_TYPE_UI = 'UI'
BINARY_FILE_TYPE_BIN = 'BIN'
BINARY_FILE_TYPE_FV = 'FV'

PLATFORM_COMPONENT_TYPE_LIBRARY = 'LIBRARY'
PLATFORM_COMPONENT_TYPE_LIBRARY_CLASS = 'LIBRARY_CLASS'
PLATFORM_COMPONENT_TYPE_MODULE = 'MODULE'

TAB_LIBRARIES = 'Libraries'

TAB_SOURCES = 'Sources'
TAB_SOURCES_COMMON = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_SOURCES_IA32 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_IA32
TAB_SOURCES_X64 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_X64
TAB_SOURCES_IPF = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_IPF
TAB_SOURCES_ARM = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_ARM
TAB_SOURCES_EBC = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_EBC

TAB_BINARIES = 'Binaries'
TAB_BINARIES_COMMON = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_BINARIES_IA32 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_IA32
TAB_BINARIES_X64 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_X64
TAB_BINARIES_IPF = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_IPF
TAB_BINARIES_ARM = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_ARM
TAB_BINARIES_EBC = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_EBC

TAB_INCLUDES = 'Includes'
TAB_INCLUDES_COMMON = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_INCLUDES_IA32 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_IA32
TAB_INCLUDES_X64 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_X64
TAB_INCLUDES_IPF = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_IPF
TAB_INCLUDES_ARM = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_ARM
TAB_INCLUDES_EBC = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_EBC

TAB_GUIDS = 'Guids'
TAB_GUIDS_COMMON = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_GUIDS_IA32 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_IA32
TAB_GUIDS_X64 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_X64
TAB_GUIDS_IPF = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_IPF
TAB_GUIDS_ARM = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_ARM
TAB_GUIDS_EBC = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_EBC

TAB_PROTOCOLS = 'Protocols'
TAB_PROTOCOLS_COMMON = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PROTOCOLS_IA32 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_IA32
TAB_PROTOCOLS_X64 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_X64
TAB_PROTOCOLS_IPF = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_IPF
TAB_PROTOCOLS_ARM = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_ARM
TAB_PROTOCOLS_EBC = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_EBC

TAB_PPIS = 'Ppis'
TAB_PPIS_COMMON = TAB_PPIS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PPIS_IA32 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_IA32
TAB_PPIS_X64 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_X64
TAB_PPIS_IPF = TAB_PPIS + TAB_SPLIT + TAB_ARCH_IPF
TAB_PPIS_ARM = TAB_PPIS + TAB_SPLIT + TAB_ARCH_ARM
TAB_PPIS_EBC = TAB_PPIS + TAB_SPLIT + TAB_ARCH_EBC

TAB_LIBRARY_CLASSES = 'LibraryClasses'
TAB_LIBRARY_CLASSES_COMMON = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_LIBRARY_CLASSES_IA32 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_IA32
TAB_LIBRARY_CLASSES_X64 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_X64
TAB_LIBRARY_CLASSES_IPF = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_IPF
TAB_LIBRARY_CLASSES_ARM = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_ARM
TAB_LIBRARY_CLASSES_EBC = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_EBC

TAB_PACKAGES = 'Packages'
TAB_PACKAGES_COMMON = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PACKAGES_IA32 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_IA32
TAB_PACKAGES_X64 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_X64
TAB_PACKAGES_IPF = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_IPF
TAB_PACKAGES_ARM = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_ARM
TAB_PACKAGES_EBC = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_EBC

TAB_PCDS = 'Pcds'
TAB_PCDS_FIXED_AT_BUILD = 'FixedAtBuild'
TAB_PCDS_PATCHABLE_IN_MODULE = 'PatchableInModule'
TAB_PCDS_FEATURE_FLAG = 'FeatureFlag'
TAB_PCDS_DYNAMIC_EX = 'DynamicEx'
TAB_PCDS_DYNAMIC_EX_DEFAULT = 'DynamicExDefault'
TAB_PCDS_DYNAMIC_EX_VPD = 'DynamicExVpd'
TAB_PCDS_DYNAMIC_EX_HII = 'DynamicExHii'
TAB_PCDS_DYNAMIC = 'Dynamic'
TAB_PCDS_DYNAMIC_DEFAULT = 'DynamicDefault'
TAB_PCDS_DYNAMIC_VPD = 'DynamicVpd'
TAB_PCDS_DYNAMIC_HII = 'DynamicHii'

PCD_DYNAMIC_TYPE_LIST = [TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_DEFAULT, TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_HII]
PCD_DYNAMIC_EX_TYPE_LIST = [TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, TAB_PCDS_DYNAMIC_EX_VPD, TAB_PCDS_DYNAMIC_EX_HII]

## Dynamic-ex PCD types
gDynamicExPcd = [TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, TAB_PCDS_DYNAMIC_EX_VPD, TAB_PCDS_DYNAMIC_EX_HII]

TAB_PCDS_FIXED_AT_BUILD_NULL = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD
TAB_PCDS_FIXED_AT_BUILD_COMMON = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_FIXED_AT_BUILD_IA32 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_FIXED_AT_BUILD_X64 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_FIXED_AT_BUILD_IPF = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_IPF
TAB_PCDS_FIXED_AT_BUILD_ARM = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_FIXED_AT_BUILD_EBC = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_EBC

TAB_PCDS_PATCHABLE_IN_MODULE_NULL = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE
TAB_PCDS_PATCHABLE_IN_MODULE_COMMON = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_PATCHABLE_IN_MODULE_IA32 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_PATCHABLE_IN_MODULE_X64 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_PATCHABLE_IN_MODULE_IPF = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_IPF
TAB_PCDS_PATCHABLE_IN_MODULE_ARM = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_PATCHABLE_IN_MODULE_EBC = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_EBC

TAB_PCDS_FEATURE_FLAG_NULL = TAB_PCDS + TAB_PCDS_FEATURE_FLAG
TAB_PCDS_FEATURE_FLAG_COMMON = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_FEATURE_FLAG_IA32 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_FEATURE_FLAG_X64 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_FEATURE_FLAG_IPF = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_IPF
TAB_PCDS_FEATURE_FLAG_ARM = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_FEATURE_FLAG_EBC = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_EBC

TAB_PCDS_DYNAMIC_EX_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX
TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_DEFAULT
TAB_PCDS_DYNAMIC_EX_HII_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_HII
TAB_PCDS_DYNAMIC_EX_VPD_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_VPD
TAB_PCDS_DYNAMIC_EX_COMMON = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_DYNAMIC_EX_IA32 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_DYNAMIC_EX_X64 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_DYNAMIC_EX_IPF = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_IPF
TAB_PCDS_DYNAMIC_EX_ARM = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_DYNAMIC_EX_EBC = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_EBC

TAB_PCDS_DYNAMIC_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC
TAB_PCDS_DYNAMIC_DEFAULT_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_DEFAULT
TAB_PCDS_DYNAMIC_HII_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_HII
TAB_PCDS_DYNAMIC_VPD_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_VPD
TAB_PCDS_DYNAMIC_COMMON = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_DYNAMIC_IA32 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_DYNAMIC_X64 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_DYNAMIC_IPF = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_IPF
TAB_PCDS_DYNAMIC_ARM = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_DYNAMIC_EBC = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_EBC

TAB_PCD_DYNAMIC_TYPE_LIST = [TAB_PCDS_DYNAMIC_DEFAULT_NULL, TAB_PCDS_DYNAMIC_VPD_NULL, TAB_PCDS_DYNAMIC_HII_NULL]
TAB_PCD_DYNAMIC_EX_TYPE_LIST = [TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, TAB_PCDS_DYNAMIC_EX_VPD_NULL, TAB_PCDS_DYNAMIC_EX_HII_NULL]

TAB_DEPEX = 'Depex'
TAB_DEPEX_COMMON = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_COMMON
TAB_DEPEX_IA32 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_IA32
TAB_DEPEX_X64 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_X64
TAB_DEPEX_IPF = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_IPF
TAB_DEPEX_ARM = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_ARM
TAB_DEPEX_EBC = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_EBC

TAB_SKUIDS = 'SkuIds'

TAB_LIBRARIES = 'Libraries'
TAB_LIBRARIES_COMMON = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_LIBRARIES_IA32 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_IA32
TAB_LIBRARIES_X64 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_X64
TAB_LIBRARIES_IPF = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_IPF
TAB_LIBRARIES_ARM = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_ARM
TAB_LIBRARIES_EBC = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_EBC

TAB_COMPONENTS = 'Components'
TAB_COMPONENTS_COMMON = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_COMPONENTS_IA32 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_IA32
TAB_COMPONENTS_X64 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_X64
TAB_COMPONENTS_IPF = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_IPF
TAB_COMPONENTS_ARM = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_ARM
TAB_COMPONENTS_EBC = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_EBC

TAB_COMPONENTS_SOURCE_OVERRIDE_PATH = 'SOURCE_OVERRIDE_PATH'

TAB_BUILD_OPTIONS = 'BuildOptions'

TAB_DEFINE = 'DEFINE'
TAB_NMAKE = 'Nmake'
TAB_USER_EXTENSIONS = 'UserExtensions'
TAB_INCLUDE = '!include'

#
# Common Define
#
TAB_COMMON_DEFINES = 'Defines'

#
# Inf Definitions
#
TAB_INF_DEFINES = TAB_COMMON_DEFINES
TAB_INF_DEFINES_INF_VERSION = 'INF_VERSION'
TAB_INF_DEFINES_BASE_NAME = 'BASE_NAME'
TAB_INF_DEFINES_FILE_GUID = 'FILE_GUID'
TAB_INF_DEFINES_MODULE_TYPE = 'MODULE_TYPE'
TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION = 'EFI_SPECIFICATION_VERSION'
TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION = 'UEFI_SPECIFICATION_VERSION'
TAB_INF_DEFINES_PI_SPECIFICATION_VERSION = 'PI_SPECIFICATION_VERSION'
TAB_INF_DEFINES_EDK_RELEASE_VERSION = 'EDK_RELEASE_VERSION'
TAB_INF_DEFINES_BINARY_MODULE = 'BINARY_MODULE'
TAB_INF_DEFINES_LIBRARY_CLASS = 'LIBRARY_CLASS'
TAB_INF_DEFINES_COMPONENT_TYPE = 'COMPONENT_TYPE'
TAB_INF_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
TAB_INF_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
TAB_INF_DEFINES_BUILD_TYPE = 'BUILD_TYPE'
TAB_INF_DEFINES_FFS_EXT = 'FFS_EXT'
TAB_INF_DEFINES_FV_EXT = 'FV_EXT'
TAB_INF_DEFINES_SOURCE_FV = 'SOURCE_FV'
TAB_INF_DEFINES_VERSION_NUMBER = 'VERSION_NUMBER'
TAB_INF_DEFINES_VERSION = 'VERSION' # for R8 inf, the same as VERSION_NUMBER
TAB_INF_DEFINES_VERSION_STRING = 'VERSION_STRING'
TAB_INF_DEFINES_PCD_IS_DRIVER = 'PCD_IS_DRIVER'
TAB_INF_DEFINES_TIANO_R8_FLASHMAP_H = 'TIANO_R8_FLASHMAP_H'
TAB_INF_DEFINES_ENTRY_POINT = 'ENTRY_POINT'
TAB_INF_DEFINES_UNLOAD_IMAGE = 'UNLOAD_IMAGE'
TAB_INF_DEFINES_CONSTRUCTOR = 'CONSTRUCTOR'
TAB_INF_DEFINES_DESTRUCTOR = 'DESTRUCTOR'
TAB_INF_DEFINES_DEFINE = 'DEFINE'
TAB_INF_DEFINES_SPEC = 'SPEC'
TAB_INF_DEFINES_CUSTOM_MAKEFILE = 'CUSTOM_MAKEFILE'
TAB_INF_DEFINES_MACRO = '__MACROS__'
TAB_INF_DEFINES_SHADOW = 'SHADOW'
TAB_INF_FIXED_PCD = 'FixedPcd'
TAB_INF_FEATURE_PCD = 'FeaturePcd'
TAB_INF_PATCH_PCD = 'PatchPcd'
TAB_INF_PCD = 'Pcd'
TAB_INF_PCD_EX = 'PcdEx'

#
# Dec Definitions
#
TAB_DEC_DEFINES = TAB_COMMON_DEFINES
TAB_DEC_DEFINES_DEC_SPECIFICATION = 'DEC_SPECIFICATION'
TAB_DEC_DEFINES_PACKAGE_NAME = 'PACKAGE_NAME'
TAB_DEC_DEFINES_PACKAGE_GUID = 'PACKAGE_GUID'
TAB_DEC_DEFINES_PACKAGE_VERSION = 'PACKAGE_VERSION'

#
# Dsc Definitions
#
TAB_DSC_DEFINES = TAB_COMMON_DEFINES
TAB_DSC_DEFINES_PLATFORM_NAME = 'PLATFORM_NAME'
TAB_DSC_DEFINES_PLATFORM_GUID = 'PLATFORM_GUID'
TAB_DSC_DEFINES_PLATFORM_VERSION = 'PLATFORM_VERSION'
TAB_DSC_DEFINES_DSC_SPECIFICATION = 'DSC_SPECIFICATION'
TAB_DSC_DEFINES_OUTPUT_DIRECTORY = 'OUTPUT_DIRECTORY'
TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES = 'SUPPORTED_ARCHITECTURES'
TAB_DSC_DEFINES_BUILD_TARGETS = 'BUILD_TARGETS'
TAB_DSC_DEFINES_SKUID_IDENTIFIER = 'SKUID_IDENTIFIER'
TAB_DSC_DEFINES_FLASH_DEFINITION = 'FLASH_DEFINITION'
TAB_DSC_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
TAB_DSC_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'
TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
TAB_DSC_DEFINES_DEFINE = 'DEFINE'

#
# TargetTxt Definitions
#
TAB_TAT_DEFINES_ACTIVE_PLATFORM = 'ACTIVE_PLATFORM'
TAB_TAT_DEFINES_ACTIVE_MODULE = 'ACTIVE_MODULE'
TAB_TAT_DEFINES_TOOL_CHAIN_CONF = 'TOOL_CHAIN_CONF'
TAB_TAT_DEFINES_MULTIPLE_THREAD = 'MULTIPLE_THREAD'
TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER = 'MAX_CONCURRENT_THREAD_NUMBER'
TAB_TAT_DEFINES_TARGET = 'TARGET'
TAB_TAT_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
TAB_TAT_DEFINES_TARGET_ARCH = 'TARGET_ARCH'
TAB_TAT_DEFINES_BUILD_RULE_CONF = "BUILD_RULE_CONF"

#
# ToolDef Definitions
#
TAB_TOD_DEFINES_TARGET = 'TARGET'
TAB_TOD_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
TAB_TOD_DEFINES_TARGET_ARCH = 'TARGET_ARCH'
TAB_TOD_DEFINES_COMMAND_TYPE = 'COMMAND_TYPE'
TAB_TOD_DEFINES_FAMILY = 'FAMILY'
TAB_TOD_DEFINES_BUILDRULEFAMILY = 'BUILDRULEFAMILY'

#
# Conditional Statements
#
TAB_IF = '!if'
TAB_END_IF = '!endif'
TAB_ELSE_IF = '!elseif'
TAB_ELSE = '!else'
TAB_IF_DEF = '!ifdef'
TAB_IF_N_DEF = '!ifndef'
TAB_IF_EXIST = '!if exist'

#
# Unknown section
#
TAB_UNKNOWN = 'UNKNOWN'

#
# Build database path
#
DATABASE_PATH = ":memory:" #"BuildDatabase.db"

# used by ECC
MODIFIER_LIST = ['IN', 'OUT', 'OPTIONAL', 'UNALIGNED', 'EFI_RUNTIMESERVICE', 'EFI_BOOTSERVICE', 'EFIAPI']

# Dependency Expression
DEPEX_SUPPORTED_OPCODE = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "END", "SOR", "TRUE", "FALSE", '(', ')']

TAB_STATIC_LIBRARY = "STATIC-LIBRARY-FILE"
TAB_DYNAMIC_LIBRARY = "DYNAMIC-LIBRARY-FILE"
TAB_FRAMEWORK_IMAGE = "EFI-IMAGE-FILE"
TAB_C_CODE_FILE = "C-CODE-FILE"
TAB_C_HEADER_FILE = "C-HEADER-FILE"
TAB_UNICODE_FILE = "UNICODE-TEXT-FILE"
TAB_DEPENDENCY_EXPRESSION_FILE = "DEPENDENCY-EXPRESSION-FILE"
TAB_UNKNOWN_FILE = "UNKNOWN-TYPE-FILE"
TAB_DEFAULT_BINARY_FILE = "_BINARY_FILE_"
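Because most of the TAB_* section names above are built by string concatenation, it can help to see a few of them expanded. An illustrative check (not part of the checked-in sources; assumes DataType.py is importable from the current path):

# Illustrative sketch of what the composed constants expand to.
from DataType import TAB_SOURCES_IA32, TAB_PCDS_DYNAMIC_EX_X64, SUP_MODULE_LIST_STRING

assert TAB_SOURCES_IA32 == 'Sources.IA32'              # 'Sources' + '.' + 'IA32'
assert TAB_PCDS_DYNAMIC_EX_X64 == 'PcdsDynamicEx.X64'  # 'Pcds' + 'DynamicEx' + '.' + 'X64'
assert SUP_MODULE_LIST_STRING.startswith('BASE|SEC|PEI_CORE')  # joined with TAB_VALUE_SPLIT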
120
BaseTools/Source/Python/Common/Database.py
Normal file
@@ -0,0 +1,120 @@
## @file
# This file is used to create a database used by ECC tool
#
# Copyright (c) 2007 ~ 2008, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#

##
# Import Modules
#
import sqlite3
import os

import EdkLogger as EdkLogger
from CommonDataClass.DataClass import *
from String import *
from DataType import *

from Table.TableDataModel import TableDataModel
from Table.TableFile import TableFile
from Table.TableInf import TableInf
from Table.TableDec import TableDec
from Table.TableDsc import TableDsc

## Database
#
# This class defines the build database.
# During the phase of initialization, the database will create all tables and
# insert all records of table DataModel.
#
# @param object: Inherited from object class
# @param DbPath: A string for the path of the ECC database
#
# @var Conn: Connection of the ECC database
# @var Cur: Cursor of the connection
# @var TblDataModel: Local instance for TableDataModel
#
class Database(object):
    def __init__(self, DbPath):
        if os.path.exists(DbPath):
            os.remove(DbPath)
        self.Conn = sqlite3.connect(DbPath, isolation_level = 'DEFERRED')
        self.Conn.execute("PRAGMA page_size=8192")
        self.Conn.execute("PRAGMA synchronous=OFF")
        self.Cur = self.Conn.cursor()
        self.TblDataModel = TableDataModel(self.Cur)
        self.TblFile = TableFile(self.Cur)
        self.TblInf = TableInf(self.Cur)
        self.TblDec = TableDec(self.Cur)
        self.TblDsc = TableDsc(self.Cur)

    ## Initialize build database
    #
    # 1. Delete all old existing tables
    # 2. Create new tables
    # 3. Initialize table DataModel
    #
    def InitDatabase(self):
        EdkLogger.verbose("\nInitialize ECC database started ...")
        #
        # Drop all old existing tables
        #
        # self.TblDataModel.Drop()
        # self.TblDsc.Drop()
        # self.TblFile.Drop()

        #
        # Create new tables
        #
        self.TblDataModel.Create()
        self.TblFile.Create()
        self.TblInf.Create()
        self.TblDec.Create()
        self.TblDsc.Create()

        #
        # Initialize table DataModel
        #
        self.TblDataModel.InitTable()
        EdkLogger.verbose("Initialize ECC database ... DONE!")

    ## Query a table
    #
    # @param Table: The instance of the table to be queried
    #
    def QueryTable(self, Table):
        Table.Query()

    ## Close entire database
    #
    # Commit all first
    # Close the connection and cursor
    #
    def Close(self):
        self.Conn.commit()
        self.Cur.close()
        self.Conn.close()

##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.DEBUG_0)

    Db = Database(DATABASE_PATH)
    Db.InitDatabase()
    Db.QueryTable(Db.TblDataModel)
    Db.QueryTable(Db.TblFile)
    Db.QueryTable(Db.TblDsc)
    Db.Close()
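The __main__ block above shows the intended call order. Since Close() both commits and closes the connection, a caller that needs the database released even when table creation or a query fails could wrap the same calls in try/finally. A sketch (illustrative only; the 'Ecc.db' path is an arbitrary example, not part of the checked-in sources):

from Database import Database

Db = Database('Ecc.db')
try:
    Db.InitDatabase()
    Db.QueryTable(Db.TblFile)
finally:
    # Commit and release the connection even if an earlier call raised.
    Db.Close()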
563
BaseTools/Source/Python/Common/DecClassObject.py
Normal file
@@ -0,0 +1,563 @@
|
||||
## @file
|
||||
# This file is used to define each component of a DEC file
|
||||
#
|
||||
# Copyright (c) 2007, Intel Corporation
|
||||
# All rights reserved. This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.php
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
#
|
||||
|
||||
##
|
||||
# Import Modules
|
||||
#
|
||||
import os
|
||||
from String import *
|
||||
from DataType import *
|
||||
from Identification import *
|
||||
from Dictionary import *
|
||||
from CommonDataClass.PackageClass import *
|
||||
from CommonDataClass.CommonClass import PcdClass
|
||||
from BuildToolError import *
|
||||
from Table.TableDec import TableDec
|
||||
import Database
|
||||
from Parsing import *
|
||||
import GlobalData
|
||||
|
||||
#
|
||||
# Global variable
|
||||
#
|
||||
Section = {TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
|
||||
TAB_DEC_DEFINES.upper() : MODEL_META_DATA_HEADER,
|
||||
TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
|
||||
TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
|
||||
TAB_COMPONENTS.upper() : MODEL_META_DATA_COMPONENT,
|
||||
TAB_GUIDS.upper() : MODEL_EFI_GUID,
|
||||
TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
|
||||
TAB_PPIS.upper() : MODEL_EFI_PPI,
|
||||
TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
|
||||
TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
|
||||
TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
|
||||
TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX,
|
||||
TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC,
|
||||
TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
|
||||
}
|
||||
|
||||
|
||||
## DecObject
|
||||
#
|
||||
# This class defines the basic Dec object that other Dec classes inherit from
|
||||
#
|
||||
# @param object: Inherited from object class
|
||||
#
|
||||
class DecObject(object):
|
||||
def __init__(self):
|
||||
object.__init__()
|
||||
|
||||
## Dec
|
||||
#
|
||||
# This class defines the structure used in a Dec object
|
||||
#
|
||||
# @param DecObject: Inherited from DecObject class
|
||||
# @param Filename: Input value for Filename of Dec file, default is None
|
||||
# @param IsMergeAllArches: Input value for IsMergeAllArches
|
||||
# True is to merge all arches
|
||||
# False is not to merge all arches
|
||||
# default is False
|
||||
# @param IsToPackage: Input value for IsToPackage
|
||||
# True is to transfer to PackageObject automatically
|
||||
# False is not to transfer to PackageObject automatically
|
||||
# default is False
|
||||
# @param WorkspaceDir: Input value for current workspace directory, default is None
|
||||
#
|
||||
# @var Identification: To store value for Identification, it is a structure as Identification
|
||||
# @var Defines: To store value for Defines, it is a structure as DecDefines
|
||||
# @var UserExtensions: To store value for UserExtensions
|
||||
# @var Package: To store value for Package, it is a structure as PackageClass
|
||||
# @var WorkspaceDir: To store value for WorkspaceDir
|
||||
# @var Contents: To store value for Contents, it is a structure as DecContents
|
||||
# @var KeyList: To store value for KeyList, a list for all Keys used in Dec
|
||||
#
|
||||
class Dec(DecObject):
|
||||
def __init__(self, Filename = None, IsToDatabase = False, IsToPackage = False, WorkspaceDir = None, Database = None, SupArchList = DataType.ARCH_LIST):
|
||||
self.Identification = Identification()
|
||||
self.Package = PackageClass()
|
||||
self.UserExtensions = ''
|
||||
self.WorkspaceDir = WorkspaceDir
|
||||
self.SupArchList = SupArchList
|
||||
self.IsToDatabase = IsToDatabase
|
||||
|
||||
self.Cur = Database.Cur
|
||||
self.TblFile = Database.TblFile
|
||||
self.TblDec = Database.TblDec
|
||||
self.FileID = -1
|
||||
|
||||
self.KeyList = [
|
||||
TAB_INCLUDES, TAB_GUIDS, TAB_PROTOCOLS, TAB_PPIS, TAB_LIBRARY_CLASSES, \
|
||||
TAB_PCDS_FIXED_AT_BUILD_NULL, TAB_PCDS_PATCHABLE_IN_MODULE_NULL, TAB_PCDS_FEATURE_FLAG_NULL, \
|
||||
TAB_PCDS_DYNAMIC_NULL, TAB_PCDS_DYNAMIC_EX_NULL, TAB_DEC_DEFINES
|
||||
]
|
||||
#
|
||||
# Upper-case all keys so that parsing is case-insensitive
|
||||
#
|
||||
self.KeyList = map(lambda c: c.upper(), self.KeyList)
|
||||
|
||||
#
|
||||
# Init RecordSet
|
||||
#
|
||||
self.RecordSet = {}
|
||||
for Key in self.KeyList:
|
||||
self.RecordSet[Section[Key]] = []
|
||||
|
||||
#
|
||||
# Load Dec file if filename is not None
|
||||
#
|
||||
if Filename != None:
|
||||
self.LoadDecFile(Filename)
|
||||
|
||||
#
|
||||
# Transfer to Package Object if IsToPackage is True
|
||||
#
|
||||
if IsToPackage:
|
||||
self.DecToPackage()
|
||||
|
||||
## Load Dec file
|
||||
#
|
||||
# Load the file if it exists
|
||||
#
|
||||
# @param Filename: Input value for filename of Dec file
|
||||
#
|
||||
def LoadDecFile(self, Filename):
|
||||
#
|
||||
# Insert a record for file
|
||||
#
|
||||
Filename = NormPath(Filename)
|
||||
self.Identification.FileFullPath = Filename
|
||||
(self.Identification.FileRelativePath, self.Identification.FileName) = os.path.split(Filename)
|
||||
self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DEC)
|
||||
|
||||
#
|
||||
# Init DecTable
|
||||
#
|
||||
#self.TblDec.Table = "Dec%s" % self.FileID
|
||||
#self.TblDec.Create()
|
||||
|
||||
#
|
||||
# Init common data
|
||||
#
|
||||
IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
|
||||
[], [], TAB_UNKNOWN, [], [], []
|
||||
LineNo = 0
|
||||
|
||||
#
|
||||
# Parse file content
|
||||
#
|
||||
IsFindBlockComment = False
|
||||
ReservedLine = ''
|
||||
for Line in open(Filename, 'r'):
|
||||
LineNo = LineNo + 1
|
||||
#
|
||||
# Remove comment block
|
||||
#
|
||||
if Line.find(TAB_COMMENT_R8_START) > -1:
|
||||
ReservedLine = GetSplitValueList(Line, TAB_COMMENT_R8_START, 1)[0]
|
||||
IsFindBlockComment = True
|
||||
if Line.find(TAB_COMMENT_R8_END) > -1:
|
||||
Line = ReservedLine + GetSplitValueList(Line, TAB_COMMENT_R8_END, 1)[1]
|
||||
ReservedLine = ''
|
||||
IsFindBlockComment = False
|
||||
if IsFindBlockComment:
|
||||
continue
|
||||
|
||||
#
|
||||
# Remove comments at tail and remove spaces again
|
||||
#
|
||||
Line = CleanString(Line)
|
||||
if Line == '':
|
||||
continue
|
||||
|
||||
#
|
||||
# Find a new section tab
|
||||
# First insert previous section items
|
||||
# And then parse the content of the new section
|
||||
#
|
||||
if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
|
||||
#
|
||||
# Insert items data of previous section
|
||||
#
|
||||
Model = Section[CurrentSection.upper()]
|
||||
InsertSectionItemsIntoDatabase(self.TblDec, self.FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, self.RecordSet)
|
||||
|
||||
#
|
||||
# Parse the new section
|
||||
#
|
||||
SectionItemList = []
|
||||
ArchList = []
|
||||
ThirdList = []
|
||||
|
||||
CurrentSection = ''
|
||||
LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
|
||||
for Item in LineList:
|
||||
ItemList = GetSplitValueList(Item, TAB_SPLIT)
|
||||
if CurrentSection == '':
|
||||
CurrentSection = ItemList[0]
|
||||
else:
|
||||
if CurrentSection != ItemList[0]:
|
||||
EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
|
||||
if CurrentSection.upper() not in self.KeyList:
|
||||
RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
|
||||
ItemList.append('')
|
||||
ItemList.append('')
|
||||
if len(ItemList) > 5:
|
||||
RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
|
||||
else:
|
||||
if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
|
||||
EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
|
||||
ArchList.append(ItemList[1].upper())
|
||||
ThirdList.append(ItemList[2])
|
||||
|
||||
continue
|
||||
|
||||
#
|
||||
# Not in any defined section
|
||||
#
|
||||
if CurrentSection == TAB_UNKNOWN:
|
||||
ErrorMsg = "%s is not in any defined section" % Line
|
||||
EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
|
||||
|
||||
#
|
||||
# Add a section item
|
||||
#
|
||||
SectionItemList.append([Line, LineNo])
|
||||
# End of parse
|
||||
#End of For
|
||||
|
||||
#
|
||||
# Insert items data of last section
|
||||
#
|
||||
Model = Section[CurrentSection.upper()]
|
||||
InsertSectionItemsIntoDatabase(self.TblDec, self.FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, self.RecordSet)
|
||||
|
||||
#
|
||||
# Replace all DEFINE macros with its actual values
|
||||
#
|
||||
ParseDefineMacro2(self.TblDec, self.RecordSet, GlobalData.gGlobalDefines)
|
||||
|
||||
## Transfer to Package Object
|
||||
#
|
||||
# Transfer all contents of a Dec file to a standard Package Object
|
||||
#
|
||||
def DecToPackage(self):
|
||||
#
|
||||
# Init global information for the file
|
||||
#
|
||||
ContainerFile = self.Identification.FileFullPath
|
||||
|
||||
#
|
||||
# Generate Package Header
|
||||
#
|
||||
self.GenPackageHeader(ContainerFile)
|
||||
|
||||
#
|
||||
# Generate Includes
|
||||
#
|
||||
self.GenIncludes(ContainerFile)
|
||||
|
||||
#
|
||||
# Generate Guids
|
||||
#
|
||||
self.GenGuidProtocolPpis(DataType.TAB_GUIDS, ContainerFile)
|
||||
|
||||
#
|
||||
# Generate Protocols
|
||||
#
|
||||
self.GenGuidProtocolPpis(DataType.TAB_PROTOCOLS, ContainerFile)
|
||||
|
||||
#
|
||||
# Generate Ppis
|
||||
#
|
||||
self.GenGuidProtocolPpis(DataType.TAB_PPIS, ContainerFile)
|
||||
|
||||
#
|
||||
# Generate LibraryClasses
|
||||
#
|
||||
self.GenLibraryClasses(ContainerFile)
|
||||
|
||||
#
|
||||
# Generate Pcds
|
||||
#
|
||||
self.GenPcds(ContainerFile)
|
||||
|
||||
## Get Package Header
|
||||
#
|
||||
# Gen Package Header of Dec as <Key> = <Value>
|
||||
#
|
||||
# @param ContainerFile: The Dec file full path
|
||||
#
|
||||
def GenPackageHeader(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate PackageHeader ...")
|
||||
#
|
||||
# Update all defines item in database
|
||||
#
|
||||
RecordSet = self.RecordSet[MODEL_META_DATA_HEADER]
|
||||
for Record in RecordSet:
|
||||
ValueList = GetSplitValueList(Record[0], TAB_EQUAL_SPLIT)
|
||||
if len(ValueList) != 2:
|
||||
RaiseParserError(Record[0], 'Defines', ContainerFile, '<Key> = <Value>', Record[2])
|
||||
ID, Value1, Value2, Arch, LineNo = Record[3], ValueList[0], ValueList[1], Record[1], Record[2]
|
||||
SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
|
||||
where ID = %s""" % (self.TblDec.Table, ConvertToSqlString2(Value1), ConvertToSqlString2(Value2), ID)
|
||||
self.TblDec.Exec(SqlCommand)
|
||||
|
||||
#
|
||||
# Get detailed information
|
||||
#
|
||||
for Arch in self.SupArchList:
|
||||
PackageHeader = PackageHeaderClass()
|
||||
|
||||
PackageHeader.Name = QueryDefinesItem(self.TblDec, TAB_DEC_DEFINES_PACKAGE_NAME, Arch, self.FileID)[0]
|
||||
PackageHeader.Guid = QueryDefinesItem(self.TblDec, TAB_DEC_DEFINES_PACKAGE_GUID, Arch, self.FileID)[0]
|
||||
PackageHeader.Version = QueryDefinesItem(self.TblDec, TAB_DEC_DEFINES_PACKAGE_VERSION, Arch, self.FileID)[0]
|
||||
PackageHeader.FileName = self.Identification.FileName
|
||||
PackageHeader.FullPath = self.Identification.FileFullPath
|
||||
PackageHeader.DecSpecification = QueryDefinesItem(self.TblDec, TAB_DEC_DEFINES_DEC_SPECIFICATION, Arch, self.FileID)[0]
|
||||
|
||||
self.Package.Header[Arch] = PackageHeader
|
||||
|
||||
## GenIncludes
|
||||
#
|
||||
# Gen Includes of Dec
|
||||
#
|
||||
#
|
||||
# @param ContainerFile: The Dec file full path
|
||||
#
|
||||
def GenIncludes(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % TAB_INCLUDES)
|
||||
Includes = {}
|
||||
#
|
||||
# Get all Includes
|
||||
#
|
||||
RecordSet = self.RecordSet[MODEL_EFI_INCLUDE]
|
||||
|
||||
#
|
||||
# Go through each arch
|
||||
#
|
||||
for Arch in self.SupArchList:
|
||||
for Record in RecordSet:
|
||||
if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
|
||||
MergeArches(Includes, Record[0], Arch)
|
||||
|
||||
for Key in Includes.keys():
|
||||
Include = IncludeClass()
|
||||
Include.FilePath = NormPath(Key)
|
||||
Include.SupArchList = Includes[Key]
|
||||
self.Package.Includes.append(Include)
|
||||
|
||||
## GenGuidProtocolPpis
|
||||
#
|
||||
# Gen Guids, Protocols and Ppis of Dec
|
||||
# <CName>=<GuidValue>
|
||||
#
|
||||
# @param ContainerFile: The Dec file full path
|
||||
#
|
||||
def GenGuidProtocolPpis(self, Type, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % Type)
|
||||
Lists = {}
|
||||
#
|
||||
# Get all Items
|
||||
#
|
||||
RecordSet = self.RecordSet[Section[Type.upper()]]
|
||||
|
||||
#
|
||||
# Go through each arch
|
||||
#
|
||||
for Arch in self.SupArchList:
|
||||
for Record in RecordSet:
|
||||
if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
|
||||
(Name, Value) = GetGuidsProtocolsPpisOfDec(Record[0], Type, ContainerFile, Record[2])
|
||||
MergeArches(Lists, (Name, Value), Arch)
|
||||
if self.IsToDatabase:
|
||||
SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
|
||||
where ID = %s""" % (self.TblDec.Table, ConvertToSqlString2(Name), ConvertToSqlString2(Value), Record[3])
|
||||
self.TblDec.Exec(SqlCommand)
|
||||
|
||||
ListMember = None
|
||||
if Type == TAB_GUIDS:
|
||||
ListMember = self.Package.GuidDeclarations
|
||||
elif Type == TAB_PROTOCOLS:
|
||||
ListMember = self.Package.ProtocolDeclarations
|
||||
elif Type == TAB_PPIS:
|
||||
ListMember = self.Package.PpiDeclarations
|
||||
|
||||
for Key in Lists.keys():
|
||||
ListClass = GuidProtocolPpiCommonClass()
|
||||
ListClass.CName = Key[0]
|
||||
ListClass.Guid = Key[1]
|
||||
ListClass.SupArchList = Lists[Key]
|
||||
ListMember.append(ListClass)
|
||||
|
||||
|
||||
## GenLibraryClasses
|
||||
#
|
||||
# Gen LibraryClasses of Dec
|
||||
# <CName>=<GuidValue>
|
||||
#
|
||||
# @param ContainerFile: The Dec file full path
|
||||
#
|
||||
def GenLibraryClasses(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
|
||||
LibraryClasses = {}
|
||||
#
|
||||
# Get all Guids
|
||||
#
|
||||
RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_CLASS]
|
||||
|
||||
#
|
||||
# Go through each arch
|
||||
#
|
||||
for Arch in self.SupArchList:
|
||||
for Record in RecordSet:
|
||||
if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
|
||||
List = GetSplitValueList(Record[0], DataType.TAB_VALUE_SPLIT)
|
||||
if len(List) != 2:
|
||||
RaiseParserError(Record[0], 'LibraryClasses', ContainerFile, '<LibraryClassName>|<LibraryClassInstanceFilename>', Record[2])
|
||||
else:
|
||||
CheckFileExist(self.Identification.FileRelativePath, List[1], ContainerFile, 'LibraryClasses', Record[0])
|
||||
MergeArches(LibraryClasses, (List[0], List[1]), Arch)
|
||||
if self.IsToDatabase:
|
||||
SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s'
|
||||
where ID = %s""" % (self.TblDec.Table, ConvertToSqlString2(List[0]), ConvertToSqlString2(List[1]), SUP_MODULE_LIST_STRING, Record[3])
|
||||
self.TblDec.Exec(SqlCommand)
|
||||
|
||||
|
||||
for Key in LibraryClasses.keys():
|
||||
LibraryClass = LibraryClassClass()
|
||||
LibraryClass.LibraryClass = Key[0]
|
||||
LibraryClass.RecommendedInstance = NormPath(Key[1])
|
||||
LibraryClass.SupModuleList = SUP_MODULE_LIST
|
||||
LibraryClass.SupArchList = LibraryClasses[Key]
|
||||
self.Package.LibraryClassDeclarations.append(LibraryClass)
|
||||
|
||||
## GenPcds
|
||||
#
|
||||
# Gen Pcds of Dec
|
||||
# <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
|
||||
#
|
||||
# @param ContainerFile: The Dec file full path
|
||||
#
|
||||
def GenPcds(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % TAB_PCDS)
|
||||
Pcds = {}
|
||||
PcdToken = {}
|
||||
#
|
||||
# Get all Guids
|
||||
#
|
||||
RecordSet1 = self.RecordSet[MODEL_PCD_FIXED_AT_BUILD]
|
||||
RecordSet2 = self.RecordSet[MODEL_PCD_PATCHABLE_IN_MODULE]
|
||||
RecordSet3 = self.RecordSet[MODEL_PCD_FEATURE_FLAG]
|
||||
RecordSet4 = self.RecordSet[MODEL_PCD_DYNAMIC_EX]
|
||||
RecordSet5 = self.RecordSet[MODEL_PCD_DYNAMIC]
|
||||
|
||||
#
|
||||
# Go through each arch
|
||||
#
|
||||
for Arch in self.SupArchList:
|
||||
for Record in RecordSet1:
|
||||
if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
|
||||
(TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_FIXED_AT_BUILD, ContainerFile, Record[2])
|
||||
MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
|
||||
PcdToken[Record[3]] = (TokenGuidCName, TokenName)
|
||||
for Record in RecordSet2:
|
||||
if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
|
||||
(TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_PATCHABLE_IN_MODULE, ContainerFile, Record[2])
|
||||
MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
|
||||
PcdToken[Record[3]] = (TokenGuidCName, TokenName)
|
||||
for Record in RecordSet3:
|
||||
if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
|
||||
(TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_FEATURE_FLAG, ContainerFile, Record[2])
|
||||
MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
|
||||
PcdToken[Record[3]] = (TokenGuidCName, TokenName)
|
||||
for Record in RecordSet4:
|
||||
if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
|
||||
(TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_DYNAMIC_EX, ContainerFile, Record[2])
|
||||
MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
|
||||
PcdToken[Record[3]] = (TokenGuidCName, TokenName)
|
||||
for Record in RecordSet5:
|
||||
if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
|
||||
(TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_DYNAMIC, ContainerFile, Record[2])
|
||||
MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
|
||||
PcdToken[Record[3]] = (TokenGuidCName, TokenName)
|
||||
#
|
||||
# Update to database
|
||||
#
|
||||
if self.IsToDatabase:
|
||||
for Key in PcdToken.keys():
|
||||
SqlCommand = """update %s set Value2 = '%s' where ID = %s""" % (self.TblDec.Table, ".".join((PcdToken[Key][0], PcdToken[Key][1])), Key)
|
||||
self.TblDec.Exec(SqlCommand)
|
||||
|
||||
for Key in Pcds.keys():
|
||||
Pcd = PcdClass()
|
||||
Pcd.CName = Key[1]
|
||||
Pcd.Token = Key[4]
|
||||
Pcd.TokenSpaceGuidCName = Key[0]
|
||||
Pcd.DatumType = Key[3]
|
||||
Pcd.DefaultValue = Key[2]
|
||||
Pcd.ItemType = Key[5]
|
||||
Pcd.SupArchList = Pcds[Key]
|
||||
self.Package.PcdDeclarations.append(Pcd)
|
||||
|
||||
## Show detailed information of Package
|
||||
#
|
||||
# Print all members and their values of Package class
|
||||
#
|
||||
def ShowPackage(self):
|
||||
M = self.Package
|
||||
for Arch in M.Header.keys():
|
||||
print '\nArch =', Arch
|
||||
print 'Filename =', M.Header[Arch].FileName
|
||||
print 'FullPath =', M.Header[Arch].FullPath
|
||||
print 'BaseName =', M.Header[Arch].Name
|
||||
print 'Guid =', M.Header[Arch].Guid
|
||||
print 'Version =', M.Header[Arch].Version
|
||||
print 'DecSpecification =', M.Header[Arch].DecSpecification
|
||||
print '\nIncludes =', M.Includes
|
||||
for Item in M.Includes:
|
||||
print Item.FilePath, Item.SupArchList
|
||||
print '\nGuids =', M.GuidDeclarations
|
||||
for Item in M.GuidDeclarations:
|
||||
print Item.CName, Item.Guid, Item.SupArchList
|
||||
print '\nProtocols =', M.ProtocolDeclarations
|
||||
for Item in M.ProtocolDeclarations:
|
||||
print Item.CName, Item.Guid, Item.SupArchList
|
||||
print '\nPpis =', M.PpiDeclarations
|
||||
for Item in M.PpiDeclarations:
|
||||
print Item.CName, Item.Guid, Item.SupArchList
|
||||
print '\nLibraryClasses =', M.LibraryClassDeclarations
|
||||
for Item in M.LibraryClassDeclarations:
|
||||
print Item.LibraryClass, Item.RecommendedInstance, Item.SupModuleList, Item.SupArchList
|
||||
print '\nPcds =', M.PcdDeclarations
|
||||
for Item in M.PcdDeclarations:
|
||||
print 'CName=', Item.CName, 'TokenSpaceGuidCName=', Item.TokenSpaceGuidCName, 'DefaultValue=', Item.DefaultValue, 'ItemType=', Item.ItemType, 'Token=', Item.Token, 'DatumType=', Item.DatumType, Item.SupArchList
|
||||
|
||||
##
|
||||
#
|
||||
# This acts like the main() function for the script, unless it is 'import'ed into another
|
||||
# script.
|
||||
#
|
||||
if __name__ == '__main__':
|
||||
EdkLogger.Initialize()
|
||||
EdkLogger.SetLevel(EdkLogger.DEBUG_0)
|
||||
|
||||
W = os.getenv('WORKSPACE')
|
||||
F = os.path.join(W, 'Nt32Pkg/Nt32Pkg.dec')
|
||||
|
||||
Db = Database.Database('Dec.db')
|
||||
Db.InitDatabase()
|
||||
|
||||
P = Dec(os.path.normpath(F), True, True, W, Db)
|
||||
P.ShowPackage()
|
||||
|
||||
Db.Close()
|
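A worked example (illustrative only, mirroring the section-header handling in LoadDecFile above): the text between '[' and ']' is split on ',' into entries, each entry is split on '.' into section name, arch and an optional third field, every entry must name the same section, and an arch outside ARCH_LIST_FULL is rejected with PARSER_ERROR.

# Plain-Python sketch of the split performed for one header line.
Line = '[Guids.common, Guids.IA32]'
for Item in Line[1:-1].split(','):      # TAB_SECTION_START/END stripped, TAB_COMMA_SPLIT
    Parts = Item.strip().split('.')     # TAB_SPLIT
    Parts += [''] * (3 - len(Parts))    # pad to section / arch / optional third field
    print Parts                         # ['Guids', 'common', ''] then ['Guids', 'IA32', '']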
580
BaseTools/Source/Python/Common/DecClassObjectLight.py
Normal file
@@ -0,0 +1,580 @@
|
||||
## @file
|
||||
# This file is used to define each component of a DEC file in light mode
|
||||
#
|
||||
# Copyright (c) 2008, Intel Corporation
|
||||
# All rights reserved. This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.php
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
#
|
||||
|
||||
##
|
||||
# Import Modules
|
||||
#
|
||||
import os
|
||||
from Misc import GetFiles
|
||||
from String import *
|
||||
from DataType import *
|
||||
from CommonDataClass.PackageClass import *
|
||||
from CommonDataClass import CommonClass
|
||||
from BuildToolError import *
|
||||
from Parsing import *
|
||||
|
||||
# Global variable
|
||||
Section = {TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
|
||||
TAB_DEC_DEFINES.upper() : MODEL_META_DATA_HEADER,
|
||||
TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
|
||||
TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
|
||||
TAB_COMPONENTS.upper() : MODEL_META_DATA_COMPONENT,
|
||||
TAB_GUIDS.upper() : MODEL_EFI_GUID,
|
||||
TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
|
||||
TAB_PPIS.upper() : MODEL_EFI_PPI,
|
||||
TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
|
||||
TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
|
||||
TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
|
||||
TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX,
|
||||
TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC,
|
||||
TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
|
||||
}
|
||||
|
||||
## DecObject
|
||||
#
|
||||
# This class defines the basic Dec object that other Dec classes inherit from
|
||||
#
|
||||
# @param object: Inherited from object class
|
||||
#
|
||||
class DecObject(object):
|
||||
def __init__(self):
|
||||
object.__init__()
|
||||
|
||||
## Dec
|
||||
#
|
||||
# This class defines the structure used in a Dec object
|
||||
#
|
||||
# @param DecObject: Inherited from DecObject class
|
||||
# @param Filename: Input value for Filename of Dec file, default is None
|
||||
# @param IsMergeAllArches: Input value for IsMergeAllArches
|
||||
# True is to merge all arches
|
||||
# False is not to merge all arches
|
||||
# default is False
|
||||
# @param IsToPackage: Input value for IsToPackage
|
||||
# True is to transfer to PackageObject automatically
|
||||
# False is not to transfer to PackageObject automatically
|
||||
# default is False
|
||||
# @param WorkspaceDir: Input value for current workspace directory, default is None
|
||||
#
|
||||
# @var Identification: To store value for Identification, it is a structure as Identification
|
||||
# @var Defines: To store value for Defines, it is a structure as DecDefines
|
||||
# @var UserExtensions: To store value for UserExtensions
|
||||
# @var Package: To store value for Package, it is a structure as PackageClass
|
||||
# @var WorkspaceDir: To store value for WorkspaceDir
|
||||
# @var Contents: To store value for Contents, it is a structure as DecContents
|
||||
# @var KeyList: To store value for KeyList, a list for all Keys used in Dec
|
||||
#
|
||||
class Dec(DecObject):
|
||||
def __init__(self, Filename = None, IsToPackage = False, WorkspaceDir = None, AllGuidVersionDict = None, SupArchList = DataType.ARCH_LIST):
|
||||
self.Identification = IdentificationClass()
|
||||
self.Package = PackageClass()
|
||||
self.UserExtensions = ''
|
||||
self.WorkspaceDir = WorkspaceDir
|
||||
self.SupArchList = SupArchList
|
||||
self.AllGuidVersionDict = {}
|
||||
if AllGuidVersionDict:
|
||||
self.AllGuidVersionDict = AllGuidVersionDict
|
||||
|
||||
self.KeyList = [
|
||||
TAB_INCLUDES, TAB_GUIDS, TAB_PROTOCOLS, TAB_PPIS, TAB_LIBRARY_CLASSES, \
|
||||
TAB_PCDS_FIXED_AT_BUILD_NULL, TAB_PCDS_PATCHABLE_IN_MODULE_NULL, TAB_PCDS_FEATURE_FLAG_NULL, \
|
||||
TAB_PCDS_DYNAMIC_NULL, TAB_PCDS_DYNAMIC_EX_NULL, TAB_DEC_DEFINES
|
||||
]
|
||||
# Upper-case all keys so that parsing is case-insensitive
|
||||
self.KeyList = map(lambda c: c.upper(), self.KeyList)
|
||||
|
||||
# Init RecordSet
|
||||
self.RecordSet = {}
|
||||
for Key in self.KeyList:
|
||||
self.RecordSet[Section[Key]] = []
|
||||
|
||||
# Init Comment
|
||||
self.SectionHeaderCommentDict = {}
|
||||
|
||||
# Load Dec file if filename is not None
|
||||
if Filename != None:
|
||||
self.LoadDecFile(Filename)
|
||||
|
||||
# Transfer to Package Object if IsToPackage is True
|
||||
if IsToPackage:
|
||||
self.DecToPackage()
|
||||
|
||||
## Load Dec file
|
||||
#
|
||||
# Load the file if it exists
|
||||
#
|
||||
# @param Filename: Input value for filename of Dec file
|
||||
#
|
||||
def LoadDecFile(self, Filename):
|
||||
# Insert a record for file
|
||||
Filename = NormPath(Filename)
|
||||
self.Identification.FullPath = Filename
|
||||
(self.Identification.RelaPath, self.Identification.FileName) = os.path.split(Filename)
|
||||
if self.Identification.FullPath.find(self.WorkspaceDir) > -1:
|
||||
self.Identification.PackagePath = os.path.dirname(self.Identification.FullPath[len(self.WorkspaceDir) + 1:])
|
||||
|
||||
# Init common data
|
||||
IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
|
||||
[], [], TAB_UNKNOWN, [], [], []
|
||||
LineNo = 0
|
||||
|
||||
# Parse file content
|
||||
IsFindBlockComment = False
|
||||
ReservedLine = ''
|
||||
Comment = ''
|
||||
for Line in open(Filename, 'r'):
|
||||
LineNo = LineNo + 1
|
||||
# Remove comment block
|
||||
if Line.find(TAB_COMMENT_R8_START) > -1:
|
||||
ReservedLine = GetSplitValueList(Line, TAB_COMMENT_R8_START, 1)[0]
|
||||
if ReservedLine.strip().startswith(TAB_COMMENT_SPLIT):
|
||||
Comment = Comment + Line.strip() + '\n'
|
||||
ReservedLine = ''
|
||||
else:
|
||||
Comment = Comment + Line[len(ReservedLine):] + '\n'
|
||||
IsFindBlockComment = True
|
||||
if not ReservedLine:
|
||||
continue
|
||||
if Line.find(TAB_COMMENT_R8_END) > -1:
|
||||
Comment = Comment + Line[:Line.find(TAB_COMMENT_R8_END) + len(TAB_COMMENT_R8_END)] + '\n'
|
||||
Line = ReservedLine + GetSplitValueList(Line, TAB_COMMENT_R8_END, 1)[1]
|
||||
ReservedLine = ''
|
||||
IsFindBlockComment = False
|
||||
if IsFindBlockComment:
|
||||
Comment = Comment + Line.strip() + '\n'
|
||||
continue
|
||||
|
||||
# Remove comments at tail and remove spaces again
|
||||
if Line.strip().startswith(TAB_COMMENT_SPLIT) or Line.strip().startswith('--/'):
|
||||
Comment = Comment + Line.strip() + '\n'
|
||||
Line = CleanString(Line)
|
||||
if Line == '':
|
||||
continue
|
||||
|
||||
## Find a new section tab
|
||||
# First insert previous section items
|
||||
# And then parse the content of the new section
|
||||
#
|
||||
if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
|
||||
# Insert items data of previous section
|
||||
Model = Section[CurrentSection.upper()]
|
||||
InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, self.RecordSet)
|
||||
# Parse the new section
|
||||
SectionItemList = []
|
||||
ArchList = []
|
||||
ThirdList = []
|
||||
|
||||
CurrentSection = ''
|
||||
LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
|
||||
for Item in LineList:
|
||||
ItemList = GetSplitValueList(Item, TAB_SPLIT)
|
||||
if CurrentSection == '':
|
||||
CurrentSection = ItemList[0]
|
||||
else:
|
||||
if CurrentSection != ItemList[0]:
|
||||
EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
|
||||
if CurrentSection.upper() not in self.KeyList:
|
||||
RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
|
||||
ItemList.append('')
|
||||
ItemList.append('')
|
||||
if len(ItemList) > 5:
|
||||
RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
|
||||
else:
|
||||
if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
|
||||
EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
|
||||
ArchList.append(ItemList[1].upper())
|
||||
ThirdList.append(ItemList[2])
|
||||
|
||||
if Comment:
|
||||
if Comment.endswith('\n'):
|
||||
Comment = Comment[:len(Comment) - len('\n')]
|
||||
self.SectionHeaderCommentDict[Section[CurrentSection.upper()]] = Comment
|
||||
Comment = ''
|
||||
continue
|
||||
|
||||
# Not in any defined section
|
||||
if CurrentSection == TAB_UNKNOWN:
|
||||
ErrorMsg = "%s is not in any defined section" % Line
|
||||
EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
|
||||
|
||||
# Add a section item
|
||||
SectionItemList.append([Line, LineNo, Comment])
|
||||
Comment = ''
|
||||
# End of parse
|
||||
#End of For
|
||||
|
||||
#
|
||||
# Insert items data of last section
|
||||
#
|
||||
Model = Section[CurrentSection.upper()]
|
||||
InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, self.RecordSet)
|
||||
if Comment != '':
|
||||
self.SectionHeaderCommentDict[Model] = Comment
|
||||
Comment = ''
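# Illustrative sketch of what the section-header parsing in LoadDecFile accepts;
# the section and arch names below are only examples following the TAB_* constants:
#
#   [Includes]                                    -> CurrentSection = 'Includes', ArchList = ['']
#   [PcdsFixedAtBuild.IA32, PcdsFixedAtBuild.X64] -> CurrentSection = 'PcdsFixedAtBuild', ArchList = ['IA32', 'X64']
#   [Guids.IA32, Ppis.IA32]                       -> rejected: different section names in one header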
|
||||
|
||||
## Package Object to DEC file
|
||||
def PackageToDec(self, Package):
|
||||
Dec = ''
|
||||
DecList = sdict()
|
||||
SectionHeaderCommentDict = {}
|
||||
if Package == None:
|
||||
return Dec
|
||||
|
||||
PackageHeader = Package.PackageHeader
|
||||
TmpList = []
|
||||
if PackageHeader.Name:
|
||||
TmpList.append(TAB_DEC_DEFINES_PACKAGE_NAME + ' = ' + PackageHeader.Name)
|
||||
if PackageHeader.Guid:
|
||||
TmpList.append(TAB_DEC_DEFINES_PACKAGE_GUID + ' = ' + PackageHeader.Guid)
|
||||
if PackageHeader.Version:
|
||||
TmpList.append(TAB_DEC_DEFINES_PACKAGE_VERSION + ' = ' + PackageHeader.Version)
|
||||
if PackageHeader.DecSpecification:
|
||||
TmpList.append(TAB_DEC_DEFINES_DEC_SPECIFICATION + ' = ' + PackageHeader.DecSpecification)
|
||||
if Package.UserExtensions != None:
|
||||
for Item in Package.UserExtensions.Defines:
|
||||
TmpList.append(Item)
|
||||
DecList['Defines'] = TmpList
|
||||
if PackageHeader.Description != '':
|
||||
SectionHeaderCommentDict['Defines'] = PackageHeader.Description
|
||||
|
||||
for Item in Package.Includes:
|
||||
Key = 'Includes.' + Item.SupArchList
|
||||
Value = Item.FilePath
|
||||
GenMetaDatSectionItem(Key, Value, DecList)
|
||||
|
||||
for Item in Package.GuidDeclarations:
|
||||
Key = 'Guids.' + Item.SupArchList
|
||||
Value = Item.CName + '=' + Item.Guid
|
||||
GenMetaDatSectionItem(Key, Value, DecList)
|
||||
|
||||
for Item in Package.ProtocolDeclarations:
|
||||
Key = 'Protocols.' + Item.SupArchList
|
||||
Value = Item.CName + '=' + Item.Guid
|
||||
GenMetaDatSectionItem(Key, Value, DecList)
|
||||
|
||||
for Item in Package.PpiDeclarations:
|
||||
Key = 'Ppis.' + Item.SupArchList
|
||||
Value = Item.CName + '=' + Item.Guid
|
||||
GenMetaDatSectionItem(Key, Value, DecList)
|
||||
|
||||
for Item in Package.LibraryClassDeclarations:
|
||||
Key = 'LibraryClasses.' + Item.SupArchList
|
||||
Value = Item.LibraryClass + '|' + Item.RecommendedInstance
|
||||
GenMetaDatSectionItem(Key, Value, DecList)
|
||||
|
||||
for Item in Package.PcdDeclarations:
|
||||
Key = 'Pcds' + Item.ItemType + '.' + Item.SupArchList
|
||||
Value = Item.TokenSpaceGuidCName + '.' + Item.CName
|
||||
if Item.DefaultValue != '':
|
||||
Value = Value + '|' + Item.DefaultValue
|
||||
if Item.DatumType != '':
|
||||
Value = Value + '|' + Item.DatumType
|
||||
if Item.Token != '':
|
||||
Value = Value + '|' + Item.Token
|
||||
GenMetaDatSectionItem(Key, Value, DecList)
|
||||
|
||||
# Transfer Package to Dec file content
|
||||
for Key in DecList:
|
||||
if Key in SectionHeaderCommentDict:
|
||||
List = SectionHeaderCommentDict[Key].split('\r')
|
||||
for Item in List:
|
||||
Dec = Dec + Item + '\n'
|
||||
Dec = Dec + '[' + Key + ']' + '\n'
|
||||
for Value in DecList[Key]:
|
||||
if type(Value) == type([]):
|
||||
for SubValue in Value:
|
||||
Dec = Dec + ' ' + SubValue + '\n'
|
||||
else:
|
||||
Dec = Dec + ' ' + Value + '\n'
|
||||
Dec = Dec + '\n'
|
||||
|
||||
return Dec
|
||||
|
||||
## Transfer to Package Object
|
||||
#
|
||||
# Transfer all contents of a Dec file to a standard Package Object
|
||||
#
|
||||
def DecToPackage(self):
|
||||
# Init global information for the file
|
||||
ContainerFile = self.Identification.FullPath
|
||||
|
||||
# Generate Package Header
|
||||
self.GenPackageHeader(ContainerFile)
|
||||
|
||||
# Generate Includes
|
||||
# Only for R8
|
||||
self.GenIncludes(ContainerFile)
|
||||
|
||||
# Generate Guids
|
||||
self.GenGuidProtocolPpis(DataType.TAB_GUIDS, ContainerFile)
|
||||
|
||||
# Generate Protocols
|
||||
self.GenGuidProtocolPpis(DataType.TAB_PROTOCOLS, ContainerFile)
|
||||
|
||||
# Generate Ppis
|
||||
self.GenGuidProtocolPpis(DataType.TAB_PPIS, ContainerFile)
|
||||
|
||||
# Generate LibraryClasses
|
||||
self.GenLibraryClasses(ContainerFile)
|
||||
|
||||
# Generate Pcds
|
||||
self.GenPcds(ContainerFile)
|
||||
|
||||
# Init MiscFiles
|
||||
self.GenMiscFiles(ContainerFile)
|
||||
|
||||
## GenMiscFiles
|
||||
#
|
||||
def GenMiscFiles(self, ContainerFile):
|
||||
MiscFiles = MiscFileClass()
|
||||
MiscFiles.Name = 'ModuleFiles'
|
||||
for Item in GetFiles(os.path.dirname(ContainerFile), ['CVS', '.svn'], False):
|
||||
File = CommonClass.FileClass()
|
||||
File.Filename = Item
|
||||
MiscFiles.Files.append(File)
|
||||
self.Package.MiscFiles = MiscFiles
|
||||
|
||||
## GenPackageHeader
|
||||
#
|
||||
# Gen Package Header of Dec as <Key> = <Value>
|
||||
#
|
||||
# @param ContainerFile: The Dec file full path
|
||||
#
|
||||
def GenPackageHeader(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate PackageHeader ...")
|
||||
#
|
||||
# Update all defines item in database
|
||||
#
|
||||
RecordSet = self.RecordSet[MODEL_META_DATA_HEADER]
|
||||
PackageHeader = PackageHeaderClass()
|
||||
OtherDefines = []
|
||||
for Record in RecordSet:
|
||||
ValueList = GetSplitValueList(Record[0], TAB_EQUAL_SPLIT)
|
||||
if len(ValueList) != 2:
|
||||
OtherDefines.append(Record[0])
|
||||
else:
|
||||
Name = ValueList[0]
|
||||
Value = ValueList[1]
|
||||
if Name == TAB_DEC_DEFINES_PACKAGE_NAME:
|
||||
PackageHeader.Name = Value
|
||||
elif Name == TAB_DEC_DEFINES_PACKAGE_GUID:
|
||||
PackageHeader.Guid = Value
|
||||
elif Name == TAB_DEC_DEFINES_PACKAGE_VERSION:
|
||||
PackageHeader.Version = Value
|
||||
elif Name == TAB_DEC_DEFINES_DEC_SPECIFICATION:
|
||||
PackageHeader.DecSpecification = Value
|
||||
else:
|
||||
OtherDefines.append(Record[0])
|
||||
|
||||
PackageHeader.FileName = self.Identification.FileName
|
||||
PackageHeader.FullPath = self.Identification.FullPath
|
||||
PackageHeader.RelaPath = self.Identification.RelaPath
|
||||
PackageHeader.PackagePath = self.Identification.PackagePath
|
||||
PackageHeader.ModulePath = self.Identification.ModulePath
|
||||
PackageHeader.CombinePath = os.path.normpath(os.path.join(PackageHeader.PackagePath, PackageHeader.ModulePath, PackageHeader.FileName))
|
||||
|
||||
if MODEL_META_DATA_HEADER in self.SectionHeaderCommentDict:
|
||||
PackageHeader.Description = self.SectionHeaderCommentDict[MODEL_META_DATA_HEADER]
|
||||
|
||||
self.Package.PackageHeader = PackageHeader
|
||||
UE = UserExtensionsClass()
|
||||
UE.Defines = OtherDefines
|
||||
self.Package.UserExtensions = UE
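# Illustrative sketch (all values are placeholders): GenPackageHeader() consumes
# a DEC [Defines] section of the form below, where the key names correspond to
# the TAB_DEC_DEFINES_* constants; any other "Name = Value" pair is kept in
# Package.UserExtensions.Defines.
#
#   [Defines]
#     DEC_SPECIFICATION = 0x00010005
#     PACKAGE_NAME      = ExamplePkg
#     PACKAGE_GUID      = 00000000-0000-0000-0000-000000000000
#     PACKAGE_VERSION   = 0.1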
|
||||
|
||||
|
||||
## GenIncludes
|
||||
#
|
||||
# Gen Includes of Dec
|
||||
#
|
||||
# @param ContainerFile: The Dec file full path
|
||||
#
|
||||
def GenIncludes(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % TAB_INCLUDES)
|
||||
Includes = {}
|
||||
# Get all Includes
|
||||
RecordSet = self.RecordSet[MODEL_EFI_INCLUDE]
|
||||
|
||||
# Go through each arch
|
||||
for Record in RecordSet:
|
||||
Arch = Record[1]
|
||||
Key = Record[0]
|
||||
Include = IncludeClass()
|
||||
Include.FilePath = NormPath(Key)
|
||||
Include.SupArchList = Arch
|
||||
self.Package.Includes.append(Include)
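# Illustrative sketch (directory names are placeholders): each [Includes] record
# handled above is a directory path relative to the package, for example
# "Include" or "Include/Ia32"; NormPath() normalizes the path separators.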
|
||||
|
||||
## GenGuidProtocolPpis
|
||||
#
|
||||
# Gen Guids/Protocols/Ppis of Dec
|
||||
# <CName>=<GuidValue>
|
||||
#
|
||||
# @param ContainerFile: The Dec file full path
|
||||
#
|
||||
def GenGuidProtocolPpis(self, Type, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % Type)
|
||||
Lists = {}
|
||||
# Get all Items
|
||||
RecordSet = self.RecordSet[Section[Type.upper()]]
|
||||
|
||||
# Go through each arch
|
||||
for Record in RecordSet:
|
||||
Arch = Record[1]
|
||||
(Name, Value) = GetGuidsProtocolsPpisOfDec(Record[0], Type, ContainerFile, Record[2])
|
||||
|
||||
ListMember = None
|
||||
if Type == TAB_GUIDS:
|
||||
ListMember = self.Package.GuidDeclarations
|
||||
elif Type == TAB_PROTOCOLS:
|
||||
ListMember = self.Package.ProtocolDeclarations
|
||||
elif Type == TAB_PPIS:
|
||||
ListMember = self.Package.PpiDeclarations
|
||||
|
||||
ListClass = GuidProtocolPpiCommonClass()
|
||||
ListClass.CName = Name
|
||||
ListClass.Guid = Value
|
||||
ListClass.SupArchList = Arch
|
||||
ListMember.append(ListClass)
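# Illustrative sketch (the CName and GUID value are placeholders): a declaration
# in a [Guids], [Protocols] or [Ppis] section follows the <CName>=<GuidValue>
# form documented above, e.g.
#
#   gExampleProtocolGuid = { 0x11111111, 0x2222, 0x3333, { 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xAA, 0xBB }}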
|
||||
|
||||
## GenLibraryClasses
|
||||
#
|
||||
# Gen LibraryClasses of Dec
|
||||
# <LibraryClassName>|<RecommendedInstancePath>
|
||||
#
|
||||
# @param ContainerFile: The Dec file full path
|
||||
#
|
||||
def GenLibraryClasses(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
|
||||
LibraryClasses = {}
|
||||
# Get all LibraryClasses
|
||||
RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_CLASS]
|
||||
|
||||
# Go through each arch
|
||||
for Record in RecordSet:
|
||||
Arch = Record[1]
|
||||
List = GetSplitValueList(Record[0], DataType.TAB_VALUE_SPLIT)
|
||||
if len(List) != 2:
|
||||
continue
|
||||
LibraryClass = LibraryClassClass()
|
||||
LibraryClass.LibraryClass = List[0]
|
||||
LibraryClass.RecommendedInstance = NormPath(List[1])
|
||||
LibraryClass.SupArchList = Arch
|
||||
self.Package.LibraryClassDeclarations.append(LibraryClass)
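# Illustrative sketch (class and path names are placeholders): a [LibraryClasses]
# record is split on '|' above into the class name and the path that is stored
# as RecommendedInstance, e.g.
#
#   DebugLib|Include/Library/DebugLib.h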
|
||||
|
||||
def AddPcd(self, CName, Token, TokenSpaceGuidCName, DatumType, DefaultValue, ItemType, Arch):
|
||||
Pcd = CommonClass.PcdClass()
|
||||
Pcd.CName = CName
|
||||
Pcd.Token = Token
|
||||
Pcd.TokenSpaceGuidCName = TokenSpaceGuidCName
|
||||
Pcd.DatumType = DatumType
|
||||
Pcd.DefaultValue = DefaultValue
|
||||
Pcd.ItemType = ItemType
|
||||
Pcd.SupArchList = Arch
|
||||
self.Package.PcdDeclarations.append(Pcd)
|
||||
|
||||
## GenPcds
|
||||
#
|
||||
# Gen Pcds of Dec
|
||||
# <TokenSpaceGuidCName>.<TokenCName>|<Value>|<DatumType>|<Token>
|
||||
#
|
||||
# @param ContainerFile: The Dec file full path
|
||||
#
|
||||
def GenPcds(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % TAB_PCDS)
|
||||
Pcds = {}
|
||||
PcdToken = {}
|
||||
# Get all Pcds
|
||||
RecordSet1 = self.RecordSet[MODEL_PCD_FIXED_AT_BUILD]
|
||||
RecordSet2 = self.RecordSet[MODEL_PCD_PATCHABLE_IN_MODULE]
|
||||
RecordSet3 = self.RecordSet[MODEL_PCD_FEATURE_FLAG]
|
||||
RecordSet4 = self.RecordSet[MODEL_PCD_DYNAMIC_EX]
|
||||
RecordSet5 = self.RecordSet[MODEL_PCD_DYNAMIC]
|
||||
|
||||
# Go through each pcd
|
||||
for Record in RecordSet1:
|
||||
Arch = Record[1]
|
||||
(TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_FIXED_AT_BUILD, ContainerFile, Record[2])
|
||||
self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)
|
||||
for Record in RecordSet2:
|
||||
Arch = Record[1]
|
||||
(TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_PATCHABLE_IN_MODULE, ContainerFile, Record[2])
|
||||
self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)
|
||||
for Record in RecordSet3:
|
||||
Arch = Record[1]
|
||||
(TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_FEATURE_FLAG, ContainerFile, Record[2])
|
||||
self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)
|
||||
for Record in RecordSet4:
|
||||
Arch = Record[1]
|
||||
(TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_DYNAMIC_EX, ContainerFile, Record[2])
|
||||
self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)
|
||||
for Record in RecordSet5:
|
||||
Arch = Record[1]
|
||||
(TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_DYNAMIC, ContainerFile, Record[2])
|
||||
self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)
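# Illustrative sketch (all names and values are placeholders): each PCD record
# handled above follows the section format documented for GenPcds, e.g.
#
#   gExampleTokenSpaceGuid.PcdExample|0x0|UINT32|0x00010000
#
# which GetPcdOfDec() splits into the token space GUID CName, the token CName,
# the default value, the datum type and the token number.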
|
||||
|
||||
## Show detailed information of Package
|
||||
#
|
||||
# Print all members and their values of Package class
|
||||
#
|
||||
def ShowPackage(self):
|
||||
M = self.Package
|
||||
print 'Filename =', M.PackageHeader.FileName
|
||||
print 'FullPath =', M.PackageHeader.FullPath
|
||||
print 'RelaPath =', M.PackageHeader.RelaPath
|
||||
print 'PackagePath =', M.PackageHeader.PackagePath
|
||||
print 'ModulePath =', M.PackageHeader.ModulePath
|
||||
print 'CombinePath =', M.PackageHeader.CombinePath
|
||||
|
||||
print 'BaseName =', M.PackageHeader.Name
|
||||
print 'Guid =', M.PackageHeader.Guid
|
||||
print 'Version =', M.PackageHeader.Version
|
||||
print 'DecSpecification =', M.PackageHeader.DecSpecification
|
||||
|
||||
print '\nIncludes ='#, M.Includes
|
||||
for Item in M.Includes:
|
||||
print Item.FilePath, Item.SupArchList
|
||||
print '\nGuids ='#, M.GuidDeclarations
|
||||
for Item in M.GuidDeclarations:
|
||||
print Item.CName, Item.Guid, Item.SupArchList
|
||||
print '\nProtocols ='#, M.ProtocolDeclarations
|
||||
for Item in M.ProtocolDeclarations:
|
||||
print Item.CName, Item.Guid, Item.SupArchList
|
||||
print '\nPpis ='#, M.PpiDeclarations
|
||||
for Item in M.PpiDeclarations:
|
||||
print Item.CName, Item.Guid, Item.SupArchList
|
||||
print '\nLibraryClasses ='#, M.LibraryClassDeclarations
|
||||
for Item in M.LibraryClassDeclarations:
|
||||
print Item.LibraryClass, Item.RecommendedInstance, Item.SupModuleList, Item.SupArchList
|
||||
print '\nPcds ='#, M.PcdDeclarations
|
||||
for Item in M.PcdDeclarations:
|
||||
print 'CName=', Item.CName, 'TokenSpaceGuidCName=', Item.TokenSpaceGuidCName, 'DefaultValue=', Item.DefaultValue, 'ItemType=', Item.ItemType, 'Token=', Item.Token, 'DatumType=', Item.DatumType, Item.SupArchList
|
||||
print '\nUserExtensions =', M.UserExtensions.Defines
|
||||
print '\n*** FileList ***'
|
||||
for Item in M.MiscFiles.Files:
|
||||
print Item.Filename
|
||||
print '****************\n'
|
||||
|
||||
##
|
||||
#
|
||||
# This acts like the main() function for the script, unless it is 'import'ed into another
|
||||
# script.
|
||||
#
|
||||
if __name__ == '__main__':
|
||||
EdkLogger.Initialize()
|
||||
EdkLogger.SetLevel(EdkLogger.QUIET)
|
||||
|
||||
W = os.getenv('WORKSPACE')
|
||||
F = os.path.join(W, 'MdeModulePkg/MdeModulePkg.dec')
|
||||
|
||||
P = Dec(os.path.normpath(F), True, W)
|
||||
P.ShowPackage()
|
||||
print P.PackageToDec(P.Package)
|
75
BaseTools/Source/Python/Common/Dictionary.py
Normal file
75
BaseTools/Source/Python/Common/Dictionary.py
Normal file
@@ -0,0 +1,75 @@
|
||||
## @file
|
||||
# Define a dictionary structure
|
||||
#
|
||||
# Copyright (c) 2007, Intel Corporation
|
||||
# All rights reserved. This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.php
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
#
|
||||
|
||||
##
|
||||
# Import Modules
|
||||
#
|
||||
import EdkLogger
|
||||
from DataType import *
|
||||
|
||||
## Convert a text file to a dictionary
|
||||
#
|
||||
# Convert a text file to a dictionary of (name:value) pairs.
|
||||
#
|
||||
# @retval 0 Convert successful
|
||||
# @retval 1 Open file failed
|
||||
#
|
||||
def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
|
||||
try:
|
||||
F = open(FileName,'r')
|
||||
Keys = []
|
||||
for Line in F:
|
||||
if Line.startswith(CommentCharacter):
|
||||
continue
|
||||
LineList = Line.split(KeySplitCharacter,1)
|
||||
if len(LineList) >= 2:
|
||||
Key = LineList[0].split()
|
||||
if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:
|
||||
if ValueSplitFlag:
|
||||
Dictionary[Key[0]] = LineList[1].replace('\\','/').split(ValueSplitCharacter)
|
||||
else:
|
||||
Dictionary[Key[0]] = LineList[1].strip().replace('\\','/')
|
||||
Keys += [Key[0]]
|
||||
F.close()
|
||||
return 0
|
||||
except:
|
||||
EdkLogger.info('Open file failed')
|
||||
return 1
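## Usage sketch (the file name and its contents are hypothetical)
#
# Given a file 'Target.txt' containing the line "TOOL_CHAIN_TAG = MYTOOLS",
# the call below fills TargetDict with {'TOOL_CHAIN_TAG': 'MYTOOLS'}:
#
#   TargetDict = {}
#   ConvertTextFileToDictionary('Target.txt', TargetDict, '#', '=', False, '')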
|
||||
|
||||
## Print the dictionary
|
||||
#
|
||||
# Print all items of dictionary one by one
|
||||
#
|
||||
# @param Dict: The dictionary to be printed
|
||||
#
|
||||
def printDict(Dict):
|
||||
if Dict != None:
|
||||
KeyList = Dict.keys()
|
||||
for Key in KeyList:
|
||||
if Dict[Key] != '':
|
||||
print Key + ' = ' + str(Dict[Key])
|
||||
|
||||
## Print a list
|
||||
#
|
||||
# Print all items of a list, after printing the key they belong to
|
||||
#
|
||||
# @param Key: The key of the list to be printed
|
||||
# @param List: The list to be printed
|
||||
#
|
||||
def printList(Key, List):
|
||||
if type(List) == type([]):
|
||||
if len(List) > 0:
|
||||
if Key.find(TAB_SPLIT) != -1:
|
||||
print "\n" + Key
|
||||
for Item in List:
|
||||
print Item
|
1434
BaseTools/Source/Python/Common/DscClassObject.py
Normal file
1434
BaseTools/Source/Python/Common/DscClassObject.py
Normal file
File diff suppressed because it is too large
Load Diff
318
BaseTools/Source/Python/Common/EdkIIWorkspace.py
Normal file
318
BaseTools/Source/Python/Common/EdkIIWorkspace.py
Normal file
@@ -0,0 +1,318 @@
|
||||
## @file
|
||||
# This is the base class for applications that operate on an EDK II Workspace
|
||||
#
|
||||
# Copyright (c) 2007, Intel Corporation
|
||||
# All rights reserved. This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.php
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
#
|
||||
|
||||
##
|
||||
# Import Modules
|
||||
#
|
||||
import os, sys, time
|
||||
from DataType import *
|
||||
|
||||
## EdkIIWorkspace
|
||||
#
|
||||
# Collect WorkspaceDir from the environment, check the Verbose command line flag, and try to load an icon bitmap file.
|
||||
#
|
||||
# @var StartTime: Time of build system starting
|
||||
# @var PrintRunTime: Printable time of build system running
|
||||
# @var PrintRunStatus: Printable status of build system running
|
||||
# @var RunStatus: Status of build system running
|
||||
#
|
||||
class EdkIIWorkspace:
|
||||
def __init__(self):
|
||||
self.StartTime = time.time()
|
||||
self.PrintRunTime = False
|
||||
self.PrintRunStatus = False
|
||||
self.RunStatus = ''
|
||||
|
||||
#
|
||||
# Check environment variable 'WORKSPACE'
|
||||
#
|
||||
if os.environ.get('WORKSPACE') == None:
|
||||
print 'ERROR: WORKSPACE not defined. Please run EdkSetup from the EDK II install directory.'
|
||||
sys.exit(1)    # Abort: __init__ must not return a value and the workspace is unusable without WORKSPACE
|
||||
|
||||
self.CurrentWorkingDir = os.getcwd()
|
||||
|
||||
self.WorkspaceDir = os.path.realpath(os.environ.get('WORKSPACE'))
|
||||
(Drive, Path) = os.path.splitdrive(self.WorkspaceDir)
|
||||
if Drive == '':
|
||||
(Drive, CwdPath) = os.path.splitdrive(self.CurrentWorkingDir)
|
||||
if Drive != '':
|
||||
self.WorkspaceDir = Drive + Path
|
||||
else:
|
||||
self.WorkspaceDir = Drive.upper() + Path
|
||||
|
||||
self.WorkspaceRelativeWorkingDir = self.WorkspaceRelativePath (self.CurrentWorkingDir)
|
||||
|
||||
try:
|
||||
#
|
||||
# Load TianoCoreOrgLogo, used for GUI tool
|
||||
#
|
||||
self.Icon = wx.Icon(self.WorkspaceFile('tools/Python/TianoCoreOrgLogo.gif'),wx.BITMAP_TYPE_GIF)
|
||||
except:
|
||||
self.Icon = None
|
||||
|
||||
self.Verbose = False
|
||||
for Arg in sys.argv:
|
||||
if Arg.lower() == '-v':
|
||||
self.Verbose = True
|
||||
|
||||
## Close build system
|
||||
#
|
||||
# Close build system and print running time and status
|
||||
#
|
||||
def Close(self):
|
||||
if self.PrintRunTime:
|
||||
Seconds = int(time.time() - self.StartTime)
|
||||
if Seconds < 60:
|
||||
print 'Run Time: %d seconds' % (Seconds)
|
||||
else:
|
||||
Minutes = Seconds / 60
|
||||
Seconds = Seconds % 60
|
||||
if Minutes < 60:
|
||||
print 'Run Time: %d minutes %d seconds' % (Minutes, Seconds)
|
||||
else:
|
||||
Hours = Minutes / 60
|
||||
Minutes = Minutes % 60
|
||||
print 'Run Time: %d hours %d minutes %d seconds' % (Hours, Minutes, Seconds)
|
||||
if self.RunStatus != '':
|
||||
print self.RunStatus
|
||||
|
||||
## Convert to a workspace relative filename
|
||||
#
|
||||
# Convert a full path filename to a workspace relative filename.
|
||||
#
|
||||
# @param FileName: The filename to be Converted
|
||||
#
|
||||
# @retval None Workspace dir is not found in the full path
|
||||
# @retval string The relative filename
|
||||
#
|
||||
def WorkspaceRelativePath(self, FileName):
|
||||
FileName = os.path.realpath(FileName)
|
||||
if FileName.find(self.WorkspaceDir) != 0:
|
||||
return None
|
||||
return FileName.replace (self.WorkspaceDir, '').strip('\\').strip('/')
|
||||
|
||||
## Convert to a full path filename
|
||||
#
|
||||
# Convert a workspace relative filename to a full path filename.
|
||||
#
|
||||
# @param FileName: The filename to be Converted
|
||||
#
|
||||
# @retval string The full path filename
|
||||
#
|
||||
def WorkspaceFile(self, FileName):
|
||||
return os.path.realpath(os.path.join(self.WorkspaceDir,FileName))
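# Illustrative sketch (paths are hypothetical, assuming no symlinks): with
# WORKSPACE set to /work/edk2, WorkspaceFile('MdePkg/MdePkg.dec') returns
# '/work/edk2/MdePkg/MdePkg.dec', and WorkspaceRelativePath() maps that full
# path back to 'MdePkg/MdePkg.dec'.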
|
||||
|
||||
## Convert to a real path filename
|
||||
#
|
||||
# Convert ${WORKSPACE} to real path
|
||||
#
|
||||
# @param FileName: The filename to be Converted
|
||||
#
|
||||
# @retval string The full path filename
|
||||
#
|
||||
def WorkspacePathConvert(self, FileName):
|
||||
return os.path.realpath(FileName.replace(TAB_WORKSPACE, self.WorkspaceDir))
|
||||
|
||||
## Convert XML into a DOM
|
||||
#
|
||||
# Parse an XML file into a DOM and return the DOM.
|
||||
#
|
||||
# @param FileName: The filename to be parsed
|
||||
#
|
||||
# @retval XmlParseFile (self.WorkspaceFile(FileName))
|
||||
#
|
||||
def XmlParseFile (self, FileName):
|
||||
if self.Verbose:
|
||||
print FileName
|
||||
return XmlParseFile (self.WorkspaceFile(FileName))
|
||||
|
||||
## Convert a XML section
|
||||
#
|
||||
# Parse a section of an XML file into a DOM(Document Object Model) and return the DOM.
|
||||
#
|
||||
# @param FileName: The filename to be parsed
|
||||
# @param SectionTag: The tag name of the section to be parsed
|
||||
#
|
||||
# @retval XmlParseFileSection (self.WorkspaceFile(FileName), SectionTag)
|
||||
#
|
||||
def XmlParseFileSection (self, FileName, SectionTag):
|
||||
if self.Verbose:
|
||||
print FileName
|
||||
return XmlParseFileSection (self.WorkspaceFile(FileName), SectionTag)
|
||||
|
||||
## Save a XML file
|
||||
#
|
||||
# Save a DOM(Document Object Model) into an XML file.
|
||||
#
|
||||
# @param Dom: The Dom to be saved
|
||||
# @param FileName: The filename
|
||||
#
|
||||
# @retval XmlSaveFile (Dom, self.WorkspaceFile(FileName))
|
||||
#
|
||||
def XmlSaveFile (self, Dom, FileName):
|
||||
if self.Verbose:
|
||||
print FileName
|
||||
return XmlSaveFile (Dom, self.WorkspaceFile(FileName))
|
||||
|
||||
## Convert Text File To Dictionary
|
||||
#
|
||||
# Convert a workspace relative text file to a dictionary of (name:value) pairs.
|
||||
#
|
||||
# @param FileName: Text filename
|
||||
# @param Dictionary: Dictionary to store data
|
||||
# @param CommentCharacter: Comment character, used to ignore comment content
|
||||
# @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
|
||||
# @param ValueSplitFlag: Value split flag, used to decide if there are multiple values
|
||||
# @param ValueSplitCharacter: Value split character, used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
|
||||
#
|
||||
# @retval ConvertTextFileToDictionary(self.WorkspaceFile(FileName), Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter)
|
||||
#
|
||||
def ConvertTextFileToDictionary(self, FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
|
||||
if self.Verbose:
|
||||
print FileName
|
||||
return ConvertTextFileToDictionary(self.WorkspaceFile(FileName), Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter)
|
||||
|
||||
## Convert Dictionary To Text File
|
||||
#
|
||||
# Convert a dictionary of (name:value) pairs to a workspace relative text file.
|
||||
#
|
||||
# @param FileName: Text filename
|
||||
# @param Dictionary: Dictionary to store data
|
||||
# @param CommentCharacter: Comment character, used to ignore comment content
|
||||
# @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
|
||||
# @param ValueSplitFlag: Value split flag, used to decide if there are multiple values
|
||||
# @param ValueSplitCharacter: Value split character, used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
|
||||
#
|
||||
# @retval ConvertDictionaryToTextFile(self.WorkspaceFile(FileName), Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter)
|
||||
#
|
||||
def ConvertDictionaryToTextFile(self, FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
|
||||
if self.Verbose:
|
||||
print FileName
|
||||
return ConvertDictionaryToTextFile(self.WorkspaceFile(FileName), Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter)
|
||||
|
||||
## Convert Text File To Dictionary
|
||||
#
|
||||
# Convert a text file to a dictionary of (name:value) pairs.
|
||||
#
|
||||
# @param FileName: Text filename
|
||||
# @param Dictionary: Dictionary to store data
|
||||
# @param CommentCharacter: Comment character, used to ignore comment content
|
||||
# @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
|
||||
# @param ValueSplitFlag: Value split flag, used to decide if there are multiple values
|
||||
# @param ValueSplitCharacter: Value split character, used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
|
||||
#
|
||||
# @retval True Convert successfully
|
||||
# @retval False Open file failed
|
||||
#
|
||||
def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
|
||||
try:
|
||||
F = open(FileName,'r')
|
||||
except:
|
||||
return False
|
||||
Keys = []
|
||||
for Line in F:
|
||||
LineList = Line.split(KeySplitCharacter,1)
|
||||
if len(LineList) >= 2:
|
||||
Key = LineList[0].split()
|
||||
if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:
|
||||
if ValueSplitFlag:
|
||||
Dictionary[Key[0]] = LineList[1].replace('\\','/').split(ValueSplitCharacter)
|
||||
else:
|
||||
Dictionary[Key[0]] = LineList[1].strip().replace('\\','/')
|
||||
Keys += [Key[0]]
|
||||
F.close()
|
||||
return True
|
||||
|
||||
## Convert Dictionary To Text File
|
||||
#
|
||||
# Convert a dictionary of (name:value) pairs to a text file.
|
||||
#
|
||||
# @param FileName: Text filename
|
||||
# @param Dictionary: Dictionary to store data
|
||||
# @param CommentCharacter: Comment character, used to ignore comment content
|
||||
# @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
|
||||
# @param ValueSplitFlag: Value split flag, used to decide if there are multiple values
|
||||
# @param ValueSplitCharacter: Value split character, used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
|
||||
#
|
||||
# @retval True Convert successfully
|
||||
# @retval False Open file failed
|
||||
#
|
||||
def ConvertDictionaryToTextFile(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
|
||||
try:
|
||||
F = open(FileName,'r')
|
||||
Lines = []
|
||||
Lines = F.readlines()
|
||||
F.close()
|
||||
except:
|
||||
Lines = []
|
||||
Keys = Dictionary.keys()
|
||||
MaxLength = 0
|
||||
for Key in Keys:
|
||||
if len(Key) > MaxLength:
|
||||
MaxLength = len(Key)
|
||||
Index = 0
|
||||
for Line in Lines:
|
||||
LineList = Line.split(KeySplitCharacter,1)
|
||||
if len(LineList) >= 2:
|
||||
Key = LineList[0].split()
|
||||
if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] in Dictionary:
|
||||
if ValueSplitFlag:
|
||||
Line = '%-*s %c %s\n' % (MaxLength, Key[0], KeySplitCharacter, ' '.join(Dictionary[Key[0]]))
|
||||
else:
|
||||
Line = '%-*s %c %s\n' % (MaxLength, Key[0], KeySplitCharacter, Dictionary[Key[0]])
|
||||
Lines.pop(Index)
|
||||
if Key[0] in Keys:
|
||||
Lines.insert(Index,Line)
|
||||
Keys.remove(Key[0])
|
||||
Index += 1
|
||||
for RemainingKey in Keys:
|
||||
if ValueSplitFlag:
|
||||
Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter,' '.join(Dictionary[RemainingKey]))
|
||||
else:
|
||||
Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, Dictionary[RemainingKey])
|
||||
Lines.append(Line)
|
||||
try:
|
||||
F = open(FileName,'w')
|
||||
except:
|
||||
return False
|
||||
F.writelines(Lines)
|
||||
F.close()
|
||||
return True
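## Round-trip sketch (the file name and dictionary contents are hypothetical)
#
# Existing "Key = Value" lines in the file are rewritten in place and keys not
# yet present are appended, padded to the width of the longest key:
#
#   D = {'TOOL_CHAIN_TAG': 'MYTOOLS'}
#   ConvertDictionaryToTextFile('Conf/target.txt', D, '#', '=', False, '')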
|
||||
|
||||
## Create a new directory
|
||||
#
|
||||
# @param Directory: Directory to be created
|
||||
#
|
||||
def CreateDirectory(Directory):
|
||||
if not os.access(Directory, os.F_OK):
|
||||
os.makedirs (Directory)
|
||||
|
||||
## Create a new file
|
||||
#
|
||||
# @param Directory: Directory to be created
|
||||
# @param FileName: Filename to be created
|
||||
# @param Mode: The mode of open file, default is 'w'
|
||||
#
|
||||
def CreateFile(Directory, FileName, Mode='w'):
|
||||
CreateDirectory (Directory)
|
||||
return open(os.path.join(Directory, FileName), Mode)
|
||||
|
||||
##
|
||||
#
|
||||
# This acts like the main() function for the script, unless it is 'import'ed into another
|
||||
# script.
|
||||
#
|
||||
if __name__ == '__main__':
|
||||
# Nothing to do here. Could do some unit tests
|
||||
pass
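# A minimal self-test sketch, kept commented out: it assumes WORKSPACE is set
# and that the workspace contains the (hypothetical) file used below.
#
#   Ws = EdkIIWorkspace()
#   D = {}
#   Ws.ConvertTextFileToDictionary('Conf/target.txt', D, '#', '=', False, '')
#   print D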
|
1669
BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py
Normal file
1669
BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py
Normal file
File diff suppressed because it is too large
Load Diff
269
BaseTools/Source/Python/Common/EdkLogger.py
Normal file
269
BaseTools/Source/Python/Common/EdkLogger.py
Normal file
@@ -0,0 +1,269 @@
|
||||
## @file
|
||||
# This file implements the log mechanism for Python tools.
|
||||
#
|
||||
# Copyright (c) 2007, Intel Corporation
|
||||
# All rights reserved. This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.php
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
#
|
||||
|
||||
## Import modules
|
||||
import sys, os, logging
|
||||
import traceback
|
||||
from BuildToolError import *
|
||||
|
||||
## Log level constants
|
||||
DEBUG_0 = 1
|
||||
DEBUG_1 = 2
|
||||
DEBUG_2 = 3
|
||||
DEBUG_3 = 4
|
||||
DEBUG_4 = 5
|
||||
DEBUG_5 = 6
|
||||
DEBUG_6 = 7
|
||||
DEBUG_7 = 8
|
||||
DEBUG_8 = 9
|
||||
DEBUG_9 = 10
|
||||
VERBOSE = 15
|
||||
INFO = 20
|
||||
WARN = 30
|
||||
QUIET = 40
|
||||
ERROR = 50
|
||||
|
||||
IsRaiseError = True
|
||||
|
||||
# Tool name
|
||||
_ToolName = os.path.basename(sys.argv[0])
|
||||
|
||||
# For validation purpose
|
||||
_LogLevels = [DEBUG_0, DEBUG_1, DEBUG_2, DEBUG_3, DEBUG_4, DEBUG_5, DEBUG_6, DEBUG_7, DEBUG_8, DEBUG_9, VERBOSE, WARN, INFO, ERROR, QUIET]
|
||||
|
||||
# For DEBUG level (All DEBUG_0~9 are applicable)
|
||||
_DebugLogger = logging.getLogger("tool_debug")
|
||||
_DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
|
||||
|
||||
# For VERBOSE, INFO, WARN level
|
||||
_InfoLogger = logging.getLogger("tool_info")
|
||||
_InfoFormatter = logging.Formatter("%(message)s")
|
||||
|
||||
# For ERROR level
|
||||
_ErrorLogger = logging.getLogger("tool_error")
|
||||
_ErrorFormatter = logging.Formatter("%(message)s")
|
||||
|
||||
# String templates for ERROR/WARN/DEBUG log message
|
||||
_ErrorMessageTemplate = '\n\n%(tool)s...\n%(file)s(%(line)s): error %(errorcode)04X: %(msg)s\n\t%(extra)s'
|
||||
_ErrorMessageTemplateWithoutFile = '\n\n%(tool)s...\n : error %(errorcode)04X: %(msg)s\n\t%(extra)s'
|
||||
_WarningMessageTemplate = '%(tool)s...\n%(file)s(%(line)s): warning: %(msg)s'
|
||||
_WarningMessageTemplateWithoutFile = '%(tool)s: : warning: %(msg)s'
|
||||
_DebugMessageTemplate = '%(file)s(%(line)s): debug: \n %(msg)s'
|
||||
|
||||
#
|
||||
# Flag used to take WARN as ERROR.
|
||||
# By default, only an ERROR message will break the tool's execution.
|
||||
#
|
||||
_WarningAsError = False
|
||||
|
||||
## Log debug message
|
||||
#
|
||||
# @param Level DEBUG level (DEBUG0~9)
|
||||
# @param Message Debug information
|
||||
# @param ExtraData More information associated with "Message"
|
||||
#
|
||||
def debug(Level, Message, ExtraData=None):
|
||||
if _DebugLogger.level > Level:
|
||||
return
|
||||
if Level > DEBUG_9:
|
||||
return
|
||||
|
||||
# Find out the caller method information
|
||||
CallerStack = traceback.extract_stack()[-2]
|
||||
TemplateDict = {
|
||||
"file" : CallerStack[0],
|
||||
"line" : CallerStack[1],
|
||||
"msg" : Message,
|
||||
}
|
||||
|
||||
if ExtraData != None:
|
||||
LogText = _DebugMessageTemplate % TemplateDict + "\n %s" % ExtraData
|
||||
else:
|
||||
LogText = _DebugMessageTemplate % TemplateDict
|
||||
|
||||
_DebugLogger.log(Level, LogText)
|
||||
|
||||
## Log verbose message
|
||||
#
|
||||
# @param Message Verbose information
|
||||
#
|
||||
def verbose(Message):
|
||||
return _InfoLogger.log(VERBOSE, Message)
|
||||
|
||||
## Log warning message
|
||||
#
|
||||
# Warning messages indicate something that might be wrong but will not fail the tool.
|
||||
#
|
||||
# @param ToolName The name of the tool. If not given, the name of caller
|
||||
# method will be used.
|
||||
# @param Message Warning information
|
||||
# @param File The name of file which caused the warning.
|
||||
# @param Line The line number in the "File" which caused the warning.
|
||||
# @param ExtraData More information associated with "Message"
|
||||
#
|
||||
def warn(ToolName, Message, File=None, Line=None, ExtraData=None):
|
||||
if _InfoLogger.level > WARN:
|
||||
return
|
||||
|
||||
# if no tool name given, use caller's source file name as tool name
|
||||
if ToolName == None or ToolName == "":
|
||||
ToolName = os.path.basename(traceback.extract_stack()[-2][0])
|
||||
|
||||
if Line == None:
|
||||
Line = "..."
|
||||
else:
|
||||
Line = "%d" % Line
|
||||
|
||||
TemplateDict = {
|
||||
"tool" : ToolName,
|
||||
"file" : File,
|
||||
"line" : Line,
|
||||
"msg" : Message,
|
||||
}
|
||||
|
||||
if File != None:
|
||||
LogText = _WarningMessageTemplate % TemplateDict
|
||||
else:
|
||||
LogText = _WarningMessageTemplateWithoutFile % TemplateDict
|
||||
|
||||
if ExtraData != None:
|
||||
LogText += "\n %s" % ExtraData
|
||||
|
||||
_InfoLogger.log(WARN, LogText)
|
||||
|
||||
# Raise an exception if indicated
|
||||
if _WarningAsError == True:
|
||||
raise FatalError(WARNING_AS_ERROR)
|
||||
|
||||
## Log INFO message
|
||||
info = _InfoLogger.info
|
||||
|
||||
## Log ERROR message
|
||||
#
|
||||
# Once an error message is logged, the tool's execution will be stopped by raising
|
||||
# an exception. If you don't want to stop the execution, you can pass
|
||||
# "RaiseError" with "False" value.
|
||||
#
|
||||
# @param ToolName The name of the tool. If not given, the name of caller
|
||||
# method will be used.
|
||||
# @param ErrorCode The error code
|
||||
# @param Message Error information
|
||||
# @param File The name of file which caused the error.
|
||||
# @param Line The line number in the "File" which caused the error.
|
||||
# @param ExtraData More information associated with "Message"
|
||||
# @param RaiseError Raise an exception to break the tool's execution if
|
||||
# it's True. This is the default behavior.
|
||||
#
|
||||
def error(ToolName, ErrorCode, Message=None, File=None, Line=None, ExtraData=None, RaiseError=IsRaiseError):
|
||||
if Line == None:
|
||||
Line = "..."
|
||||
else:
|
||||
Line = "%d" % Line
|
||||
|
||||
if Message == None:
|
||||
if ErrorCode in gErrorMessage:
|
||||
Message = gErrorMessage[ErrorCode]
|
||||
else:
|
||||
Message = gErrorMessage[UNKNOWN_ERROR]
|
||||
|
||||
if ExtraData == None:
|
||||
ExtraData = ""
|
||||
|
||||
TemplateDict = {
|
||||
"tool" : _ToolName,
|
||||
"file" : File,
|
||||
"line" : Line,
|
||||
"errorcode" : ErrorCode,
|
||||
"msg" : Message,
|
||||
"extra" : ExtraData
|
||||
}
|
||||
|
||||
if File != None:
|
||||
LogText = _ErrorMessageTemplate % TemplateDict
|
||||
else:
|
||||
LogText = _ErrorMessageTemplateWithoutFile % TemplateDict
|
||||
|
||||
_ErrorLogger.log(ERROR, LogText)
|
||||
if RaiseError:
|
||||
raise FatalError(ErrorCode)
|
||||
|
||||
# Log information which should always be put out
|
||||
quiet = _ErrorLogger.error
|
||||
|
||||
## Initialize log system
|
||||
def Initialize():
|
||||
#
|
||||
# Since we use different format to log different levels of message into different
|
||||
# place (stdout or stderr), we have to use different "Logger" objects to do this.
|
||||
#
|
||||
# For DEBUG level (All DEBUG_0~9 are applicable)
|
||||
_DebugLogger.setLevel(INFO)
|
||||
_DebugChannel = logging.StreamHandler(sys.stdout)
|
||||
_DebugChannel.setFormatter(_DebugFormatter)
|
||||
_DebugLogger.addHandler(_DebugChannel)
|
||||
|
||||
# For VERBOSE, INFO, WARN level
|
||||
_InfoLogger.setLevel(INFO)
|
||||
_InfoChannel = logging.StreamHandler(sys.stdout)
|
||||
_InfoChannel.setFormatter(_InfoFormatter)
|
||||
_InfoLogger.addHandler(_InfoChannel)
|
||||
|
||||
# For ERROR level
|
||||
_ErrorLogger.setLevel(INFO)
|
||||
_ErrorCh = logging.StreamHandler(sys.stderr)
|
||||
_ErrorCh.setFormatter(_ErrorFormatter)
|
||||
_ErrorLogger.addHandler(_ErrorCh)
|
||||
|
||||
## Set log level
|
||||
#
|
||||
# @param Level One of log level in _LogLevel
|
||||
def SetLevel(Level):
|
||||
if Level not in _LogLevels:
|
||||
info("Not supported log level (%d). Use default level instead." % Level)
|
||||
Level = INFO
|
||||
_DebugLogger.setLevel(Level)
|
||||
_InfoLogger.setLevel(Level)
|
||||
_ErrorLogger.setLevel(Level)
|
||||
|
||||
## Get current log level
|
||||
def GetLevel():
|
||||
return _InfoLogger.getEffectiveLevel()
|
||||
|
||||
## Raise up warning as error
|
||||
def SetWarningAsError():
|
||||
global _WarningAsError
|
||||
_WarningAsError = True
|
||||
|
||||
## Specify a file to store the log messages in addition to the console
|
||||
#
|
||||
# @param LogFile The file path used to store the log message
|
||||
#
|
||||
def SetLogFile(LogFile):
|
||||
if os.path.exists(LogFile):
|
||||
os.remove(LogFile)
|
||||
|
||||
_Ch = logging.FileHandler(LogFile)
|
||||
_Ch.setFormatter(_DebugFormatter)
|
||||
_DebugLogger.addHandler(_Ch)
|
||||
|
||||
_Ch= logging.FileHandler(LogFile)
|
||||
_Ch.setFormatter(_InfoFormatter)
|
||||
_InfoLogger.addHandler(_Ch)
|
||||
|
||||
_Ch = logging.FileHandler(LogFile)
|
||||
_Ch.setFormatter(_ErrorFormatter)
|
||||
_ErrorLogger.addHandler(_Ch)
|
||||
|
||||
if __name__ == '__main__':
|
||||
pass
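    # A small smoke-test sketch: the tool name, file name and messages below are
    # placeholders, and error() is given RaiseError=False so the demo does not
    # abort with a FatalError.
    Initialize()
    SetLevel(DEBUG_0)
    info("example: build started")
    warn("MyTool", "example warning", File="example.dsc", Line=10)
    error("MyTool", FILE_NOT_FOUND, File="example.dsc", RaiseError=False)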
|
||||
|
116
BaseTools/Source/Python/Common/FdfClassObject.py
Normal file
116
BaseTools/Source/Python/Common/FdfClassObject.py
Normal file
@@ -0,0 +1,116 @@
|
||||
## @file
|
||||
# This file is used to define each component of FDF file
|
||||
#
|
||||
# Copyright (c) 2008, Intel Corporation
|
||||
# All rights reserved. This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.php
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
#
|
||||
|
||||
##
|
||||
# Import Modules
|
||||
#
|
||||
from FdfParserLite import FdfParser
|
||||
from Table.TableFdf import TableFdf
|
||||
from CommonDataClass.DataClass import MODEL_FILE_FDF, MODEL_PCD, MODEL_META_DATA_COMPONENT
|
||||
from String import NormPath
|
||||
|
||||
## FdfObject
|
||||
#
|
||||
# This class defines the basic Fdf object which is used for inheriting
|
||||
#
|
||||
# @param object: Inherited from object class
|
||||
#
|
||||
class FdfObject(object):
|
||||
def __init__(self):
|
||||
object.__init__()
|
||||
|
||||
## Fdf
|
||||
#
|
||||
# This class defines the structure used in the Fdf object
|
||||
#
|
||||
# @param FdfObject: Inherited from FdfObject class
|
||||
# @param Filename: Input value for Filename of Fdf file, default is None
|
||||
# @param WorkspaceDir: Input value for current workspace directory, default is None
|
||||
#
|
||||
class Fdf(FdfObject):
|
||||
def __init__(self, Filename = None, IsToDatabase = False, WorkspaceDir = None, Database = None):
|
||||
self.WorkspaceDir = WorkspaceDir
|
||||
self.IsToDatabase = IsToDatabase
|
||||
|
||||
self.Cur = Database.Cur
|
||||
self.TblFile = Database.TblFile
|
||||
self.TblFdf = Database.TblFdf
|
||||
self.FileID = -1
|
||||
self.FileList = {}
|
||||
|
||||
#
|
||||
# Load Fdf file if filename is not None
|
||||
#
|
||||
if Filename != None:
|
||||
self.LoadFdfFile(Filename)
|
||||
|
||||
#
|
||||
# Insert a FDF file record into database
|
||||
#
|
||||
def InsertFile(self, Filename):
|
||||
FileID = -1
|
||||
Filename = NormPath(Filename)
|
||||
if Filename not in self.FileList:
|
||||
FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_FDF)
|
||||
self.FileList[Filename] = FileID
|
||||
|
||||
return self.FileList[Filename]
|
||||
|
||||
|
||||
## Load Fdf file
|
||||
#
|
||||
# Load the file if it exists
|
||||
#
|
||||
# @param Filename: Input value for filename of Fdf file
|
||||
#
|
||||
def LoadFdfFile(self, Filename):
|
||||
FileList = []
|
||||
#
|
||||
# Parse Fdf file
|
||||
#
|
||||
Filename = NormPath(Filename)
|
||||
Fdf = FdfParser(Filename)
|
||||
Fdf.ParseFile()
|
||||
|
||||
#
|
||||
# Insert inf file and pcd information
|
||||
#
|
||||
if self.IsToDatabase:
|
||||
(Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled) = \
|
||||
(0, '', '', '', 'COMMON', -1, -1, -1, -1, -1, -1, 0)
|
||||
for Index in range(0, len(Fdf.Profile.PcdDict)):
|
||||
pass
|
||||
for Key in Fdf.Profile.PcdDict.keys():
|
||||
Model = MODEL_PCD
|
||||
Value1 = ''
|
||||
Value2 = ".".join((Key[1], Key[0]))
|
||||
FileName = Fdf.Profile.PcdFileLineDict[Key][0]
|
||||
StartLine = Fdf.Profile.PcdFileLineDict[Key][1]
|
||||
BelongsToFile = self.InsertFile(FileName)
|
||||
self.TblFdf.Insert(Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
|
||||
for Index in range(0, len(Fdf.Profile.InfList)):
|
||||
Model = MODEL_META_DATA_COMPONENT
|
||||
Value1 = Fdf.Profile.InfList[Index]
|
||||
Value2 = ''
|
||||
FileName = Fdf.Profile.InfFileLineList[Index][0]
|
||||
StartLine = Fdf.Profile.InfFileLineList[Index][1]
|
||||
BelongsToFile = self.InsertFile(FileName)
|
||||
self.TblFdf.Insert(Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
|
||||
|
||||
##
|
||||
#
|
||||
# This acts like the main() function for the script, unless it is 'import'ed into another
|
||||
# script.
|
||||
#
|
||||
if __name__ == '__main__':
|
||||
pass
|
3603
BaseTools/Source/Python/Common/FdfParserLite.py
Normal file
3603
BaseTools/Source/Python/Common/FdfParserLite.py
Normal file
File diff suppressed because it is too large
Load Diff
37
BaseTools/Source/Python/Common/GlobalData.py
Normal file
37
BaseTools/Source/Python/Common/GlobalData.py
Normal file
@@ -0,0 +1,37 @@
|
||||
## @file
|
||||
# This file is used to define common static strings used by INF/DEC/DSC files
|
||||
#
|
||||
# Copyright (c) 2007, Intel Corporation
|
||||
# All rights reserved. This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.php
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
|
||||
import re
|
||||
|
||||
gIsWindows = None
|
||||
|
||||
gEdkCompatibilityPkg = "EdkCompatibilityPkg"
|
||||
gWorkspace = "."
|
||||
gEdkSource = "EdkCompatibilityPkg"
|
||||
gEfiSource = "."
|
||||
gEcpSource = "EdkCompatibilityPkg"
|
||||
|
||||
gOptions = None
|
||||
gCaseInsensitive = False
|
||||
gGlobalDefines = {}
|
||||
gAllFiles = None
|
||||
|
||||
gEdkGlobal = {}
|
||||
gOverrideDir = {}
|
||||
|
||||
# for debug trace purpose when problem occurs
|
||||
gProcessingFile = ''
|
||||
gBuildingModule = ''
|
||||
|
||||
## Regular expression for matching macro used in DSC/DEC/INF file inclusion
|
||||
gMacroPattern = re.compile("\$\(([_A-Z][_A-Z0-9]*)\)", re.UNICODE)
|
||||
|
58
BaseTools/Source/Python/Common/Identification.py
Normal file
58
BaseTools/Source/Python/Common/Identification.py
Normal file
@@ -0,0 +1,58 @@
|
||||
## @file
|
||||
# This file is used to define the identification of INF/DEC/DSC files
|
||||
#
|
||||
# Copyright (c) 2007, Intel Corporation
|
||||
# All rights reserved. This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.php
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
|
||||
## Identification
|
||||
#
|
||||
# This class defines the basic Identification information structure which is used by INF/DEC/DSC files
|
||||
#
|
||||
# @param object: Inherited from object class
|
||||
#
|
||||
# @var FileName: To store data for Filename
|
||||
# @var FileFullPath: To store data for full path of the file
|
||||
# @var FileRelativePath: To store data for relative path of the file
|
||||
# @var PackagePath: To store data for the package path of the file
|
||||
#
|
||||
class Identification(object):
|
||||
def __init__(self):
|
||||
self.FileName = ''
|
||||
self.FileFullPath = ''
|
||||
self.FileRelativePath = ''
|
||||
self.PackagePath = ''
|
||||
|
||||
## GetFileName
|
||||
#
|
||||
# Reserved
|
||||
#
|
||||
def GetFileName(self, FileFullPath, FileRelativePath):
|
||||
pass
|
||||
|
||||
## GetFileFullPath
|
||||
#
|
||||
# Reserved
|
||||
#
|
||||
def GetFileFullPath(self, FileName, FileRelativePath):
|
||||
pass
|
||||
|
||||
## GetFileRelativePath
|
||||
#
|
||||
# Reserved
|
||||
#
|
||||
def GetFileRelativePath(self, FileName, FileFullPath):
|
||||
pass
|
||||
|
||||
##
|
||||
#
|
||||
# This acts like the main() function for the script, unless it is 'import'ed into another
|
||||
# script.
|
||||
#
|
||||
if __name__ == '__main__':
|
||||
id = Identification()
|
1116
BaseTools/Source/Python/Common/InfClassObject.py
Normal file
1116
BaseTools/Source/Python/Common/InfClassObject.py
Normal file
File diff suppressed because it is too large
Load Diff
876
BaseTools/Source/Python/Common/InfClassObjectLight.py
Normal file
876
BaseTools/Source/Python/Common/InfClassObjectLight.py
Normal file
@@ -0,0 +1,876 @@
|
||||
## @file
|
||||
# This file is used to define each component of INF file
|
||||
#
|
||||
# Copyright (c) 2007, Intel Corporation
|
||||
# All rights reserved. This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.php
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
#
|
||||
|
||||
##
|
||||
# Import Modules
|
||||
#
|
||||
import os
|
||||
import re
|
||||
import EdkLogger
|
||||
|
||||
from CommonDataClass.ModuleClass import *
|
||||
from CommonDataClass import CommonClass
|
||||
from String import *
|
||||
from DataType import *
|
||||
from BuildToolError import *
|
||||
from Misc import sdict
|
||||
from Misc import GetFiles
|
||||
from Parsing import *
|
||||
|
||||
# Global variable
|
||||
Section = {TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
|
||||
TAB_INF_DEFINES.upper() : MODEL_META_DATA_HEADER,
|
||||
TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
|
||||
TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
|
||||
TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
|
||||
TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
|
||||
TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE,
|
||||
TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE,
|
||||
TAB_INF_FIXED_PCD.upper() : MODEL_PCD_FIXED_AT_BUILD,
|
||||
TAB_INF_PATCH_PCD.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
|
||||
TAB_INF_FEATURE_PCD.upper() : MODEL_PCD_FEATURE_FLAG,
|
||||
TAB_INF_PCD_EX.upper() : MODEL_PCD_DYNAMIC_EX,
|
||||
TAB_INF_PCD.upper() : MODEL_PCD_DYNAMIC,
|
||||
TAB_SOURCES.upper() : MODEL_EFI_SOURCE_FILE,
|
||||
TAB_GUIDS.upper() : MODEL_EFI_GUID,
|
||||
TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
|
||||
TAB_PPIS.upper() : MODEL_EFI_PPI,
|
||||
TAB_DEPEX.upper() : MODEL_EFI_DEPEX,
|
||||
TAB_BINARIES.upper() : MODEL_EFI_BINARY_FILE,
|
||||
TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
|
||||
}
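# For example, Section[TAB_SOURCES.upper()] yields MODEL_EFI_SOURCE_FILE, so
# every item parsed from a [Sources] block is stored under that model key in
# self.RecordSet below.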
|
||||
|
||||
gComponentType2ModuleType = {
|
||||
"LIBRARY" : "BASE",
|
||||
"SECURITY_CORE" : "SEC",
|
||||
"PEI_CORE" : "PEI_CORE",
|
||||
"COMBINED_PEIM_DRIVER" : "PEIM",
|
||||
"PIC_PEIM" : "PEIM",
|
||||
"RELOCATABLE_PEIM" : "PEIM",
|
||||
"PE32_PEIM" : "PEIM",
|
||||
"BS_DRIVER" : "DXE_DRIVER",
|
||||
"RT_DRIVER" : "DXE_RUNTIME_DRIVER",
|
||||
"SAL_RT_DRIVER" : "DXE_SAL_DRIVER",
|
||||
"APPLICATION" : "UEFI_APPLICATION",
|
||||
"LOGO" : "BASE",
|
||||
}
|
||||
|
||||
class InfHeader(ModuleHeaderClass):
|
||||
_Mapping_ = {
|
||||
# Required Fields
|
||||
TAB_INF_DEFINES_BASE_NAME : "Name",
|
||||
TAB_INF_DEFINES_FILE_GUID : "Guid",
|
||||
TAB_INF_DEFINES_MODULE_TYPE : "ModuleType",
|
||||
TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION : "EfiSpecificationVersion",
|
||||
TAB_INF_DEFINES_EDK_RELEASE_VERSION : "EdkReleaseVersion",
|
||||
|
||||
# Optional Fields
|
||||
TAB_INF_DEFINES_INF_VERSION : "InfVersion",
|
||||
TAB_INF_DEFINES_BINARY_MODULE : "BinaryModule",
|
||||
TAB_INF_DEFINES_COMPONENT_TYPE : "ComponentType",
|
||||
TAB_INF_DEFINES_MAKEFILE_NAME : "MakefileName",
|
||||
TAB_INF_DEFINES_BUILD_NUMBER : "BuildNumber",
|
||||
TAB_INF_DEFINES_BUILD_TYPE : "BuildType",
|
||||
TAB_INF_DEFINES_FFS_EXT : "FfsExt",
|
||||
TAB_INF_DEFINES_FV_EXT : "FvExt",
|
||||
TAB_INF_DEFINES_SOURCE_FV : "SourceFv",
|
||||
TAB_INF_DEFINES_VERSION_NUMBER : "VersionNumber",
|
||||
TAB_INF_DEFINES_VERSION_STRING : "VersionString",
|
||||
TAB_INF_DEFINES_VERSION : "Version",
|
||||
TAB_INF_DEFINES_PCD_IS_DRIVER : "PcdIsDriver",
|
||||
TAB_INF_DEFINES_TIANO_R8_FLASHMAP_H : "TianoR8FlashMap_h",
|
||||
TAB_INF_DEFINES_SHADOW : "Shadow",
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
ModuleHeaderClass.__init__(self)
|
||||
self.VersionNumber = ''
|
||||
self.VersionString = ''
|
||||
#print self.__dict__
|
||||
def __setitem__(self, key, value):
|
||||
self.__dict__[self._Mapping_[key]] = value
|
||||
def __getitem__(self, key):
|
||||
return self.__dict__[self._Mapping_[key]]
|
||||
## "in" test support
|
||||
def __contains__(self, key):
|
||||
return key in self._Mapping_
|
||||
|
||||
## InfObject
|
||||
#
|
||||
# This class defines the basic Inf object which is used for inheriting
|
||||
#
|
||||
# @param object: Inherited from object class
|
||||
#
|
||||
class InfObject(object):
|
||||
def __init__(self):
|
||||
object.__init__()
|
||||
|
||||
## Inf
|
||||
#
|
||||
# This class defines the structure used in the Inf object
|
||||
#
|
||||
# @param InfObject: Inherited from InfObject class
|
||||
# @param Filename: Input value for Filename of Inf file, default is None
|
||||
# @param IsMergeAllArches: Input value for IsMergeAllArches
|
||||
# True is to merge all arches
|
||||
# False is not to merge all arches
|
||||
# default is False
|
||||
# @param IsToModule: Input value for IsToModule
|
||||
# True is to transfer to ModuleObject automatically
|
||||
# False is not to transfer to ModuleObject automatically
|
||||
# default is False
|
||||
# @param WorkspaceDir: Input value for current workspace directory, default is None
|
||||
#
|
||||
# @var Identification: To store value for Identification, it is a structure as Identification
|
||||
# @var UserExtensions: To store value for UserExtensions
|
||||
# @var Module: To store value for Module, it is a structure as ModuleClass
|
||||
# @var WorkspaceDir: To store value for WorkspaceDir
|
||||
# @var KeyList: To store value for KeyList, a list for all Keys used in Inf
|
||||
#
|
||||
class Inf(InfObject):
|
||||
def __init__(self, Filename = None, IsToModule = False, WorkspaceDir = None, PackageDir = None, SupArchList = DataType.ARCH_LIST):
|
||||
self.Identification = IdentificationClass()
|
||||
self.Module = ModuleClass()
|
||||
self.WorkspaceDir = WorkspaceDir
|
||||
self.PackageDir = PackageDir
|
||||
self.SupArchList = SupArchList
|
||||
|
||||
self.KeyList = [
|
||||
TAB_SOURCES, TAB_BUILD_OPTIONS, TAB_BINARIES, TAB_INCLUDES, TAB_GUIDS,
|
||||
TAB_PROTOCOLS, TAB_PPIS, TAB_LIBRARY_CLASSES, TAB_PACKAGES, TAB_INF_FIXED_PCD,
|
||||
TAB_INF_PATCH_PCD, TAB_INF_FEATURE_PCD, TAB_INF_PCD, TAB_INF_PCD_EX,
|
||||
TAB_DEPEX, TAB_INF_DEFINES
|
||||
]
|
||||
# Uppercase all keys so that parsing is case-insensitive
|
||||
self.KeyList = map(lambda c: c.upper(), self.KeyList)
|
||||
|
||||
# Init RecordSet
|
||||
self.RecordSet = {}
|
||||
for Key in self.KeyList:
|
||||
self.RecordSet[Section[Key]] = []
|
||||
|
||||
# Init Comment
|
||||
self.SectionHeaderCommentDict = {}
|
||||
|
||||
# Load Inf file if filename is not None
|
||||
if Filename != None:
|
||||
self.LoadInfFile(Filename)
|
||||
|
||||
# Transfer to Module Object if IsToModule is True
|
||||
if IsToModule:
|
||||
self.InfToModule()
|
||||
|
||||
## Module Object to INF file
|
||||
def ModuleToInf(self, Module):
|
||||
Inf = ''
|
||||
InfList = sdict()
|
||||
SectionHeaderCommentDict = {}
|
||||
if Module == None:
|
||||
return Inf
|
||||
|
||||
ModuleHeader = Module.ModuleHeader
|
||||
TmpList = []
|
||||
# Common define items
|
||||
if ModuleHeader.Name:
|
||||
TmpList.append(TAB_INF_DEFINES_BASE_NAME + ' = ' + ModuleHeader.Name)
|
||||
if ModuleHeader.Guid:
|
||||
TmpList.append(TAB_INF_DEFINES_FILE_GUID + ' = ' + ModuleHeader.Guid)
|
||||
if ModuleHeader.Version:
|
||||
TmpList.append(TAB_INF_DEFINES_VERSION_STRING + ' = ' + ModuleHeader.Version)
|
||||
if ModuleHeader.ModuleType:
|
||||
TmpList.append(TAB_INF_DEFINES_MODULE_TYPE + ' = ' + ModuleHeader.ModuleType)
|
||||
if ModuleHeader.PcdIsDriver:
|
||||
TmpList.append(TAB_INF_DEFINES_PCD_IS_DRIVER + ' = ' + ModuleHeader.PcdIsDriver)
|
||||
# Externs
|
||||
for Item in Module.Externs:
|
||||
if Item.EntryPoint:
|
||||
TmpList.append(TAB_INF_DEFINES_ENTRY_POINT + ' = ' + Item.EntryPoint)
|
||||
if Item.UnloadImage:
|
||||
TmpList.append(TAB_INF_DEFINES_UNLOAD_IMAGE + ' = ' + Item.UnloadImage)
|
||||
if Item.Constructor:
|
||||
TmpList.append(TAB_INF_DEFINES_CONSTRUCTOR + ' = ' + Item.Constructor)
|
||||
if Item.Destructor:
|
||||
TmpList.append(TAB_INF_DEFINES_DESTRUCTOR + ' = ' + Item.Destructor)
|
||||
# Other define items
|
||||
if Module.UserExtensions != None:
|
||||
for Item in Module.UserExtensions.Defines:
|
||||
TmpList.append(Item)
|
||||
InfList['Defines'] = TmpList
|
||||
if ModuleHeader.Description != '':
|
||||
SectionHeaderCommentDict['Defines'] = ModuleHeader.Description
|
||||
|
||||
if Module.UserExtensions != None:
|
||||
InfList['BuildOptions'] = Module.UserExtensions.BuildOptions
|
||||
|
||||
for Item in Module.Includes:
|
||||
Key = 'Includes.' + GetStringOfList(Item.SupArchList)
|
||||
Value = GetHelpTextList(Item.HelpTextList)
|
||||
Value.append(Item.FilePath)
|
||||
GenMetaDatSectionItem(Key, Value, InfList)
|
||||
|
||||
for Item in Module.LibraryClasses:
|
||||
Key = 'LibraryClasses.' + GetStringOfList(Item.SupArchList)
|
||||
Value = GetHelpTextList(Item.HelpTextList)
|
||||
NewValue = Item.LibraryClass
|
||||
if Item.RecommendedInstance:
|
||||
NewValue = NewValue + '|' + Item.RecommendedInstance
|
||||
if Item.FeatureFlag:
|
||||
NewValue = NewValue + '|' + Item.FeatureFlag
|
||||
Value.append(NewValue)
|
||||
GenMetaDatSectionItem(Key, Value, InfList)
|
||||
|
||||
for Item in Module.PackageDependencies:
|
||||
Key = 'Packages.' + GetStringOfList(Item.SupArchList)
|
||||
Value = GetHelpTextList(Item.HelpTextList)
|
||||
Value.append(Item.FilePath)
|
||||
GenMetaDatSectionItem(Key, Value, InfList)
|
||||
|
||||
for Item in Module.PcdCodes:
|
||||
Key = 'Pcds' + Item.ItemType + '.' + GetStringOfList(Item.SupArchList)
|
||||
Value = GetHelpTextList(Item.HelpTextList)
|
||||
NewValue = Item.TokenSpaceGuidCName + '.' + Item.CName
|
||||
if Item.DefaultValue != '':
|
||||
NewValue = NewValue + '|' + Item.DefaultValue
|
||||
Value.append(NewValue)
|
||||
GenMetaDatSectionItem(Key, Value, InfList)
|
||||
|
||||
for Item in Module.Sources:
|
||||
Key = 'Sources.' + GetStringOfList(Item.SupArchList)
|
||||
Value = GetHelpTextList(Item.HelpTextList)
|
||||
NewValue = Item.SourceFile
|
||||
if Item.ToolChainFamily != '':
|
||||
NewValue = NewValue + '|' + Item.ToolChainFamily
|
||||
if Item.TagName != '':
|
||||
NewValue = NewValue + '|' + Item.TagName
|
||||
if Item.ToolCode != '':
|
||||
NewValue = NewValue + '|' + Item.ToolCode
|
||||
if Item.FeatureFlag != '':
|
||||
NewValue = NewValue + '|' + Item.FeatureFlag
|
||||
Value.append(NewValue)
|
||||
if Item.HelpText != '':
|
||||
SectionHeaderCommentDict[Key] = Item.HelpText
|
||||
GenMetaDatSectionItem(Key, Value, InfList)
|
||||
|
||||
for Item in Module.Guids:
|
||||
Key = 'Guids.' + GetStringOfList(Item.SupArchList)
|
||||
Value = GetHelpTextList(Item.HelpTextList)
|
||||
Value.append(Item.CName)
|
||||
GenMetaDatSectionItem(Key, Value, InfList)
|
||||
|
||||
for Item in Module.Protocols:
|
||||
Key = 'Protocols.' + GetStringOfList(Item.SupArchList)
|
||||
Value = GetHelpTextList(Item.HelpTextList)
|
||||
Value.append(Item.CName)
|
||||
GenMetaDatSectionItem(Key, Value, InfList)
|
||||
|
||||
for Item in Module.Ppis:
|
||||
Key = 'Ppis.' + GetStringOfList(Item.SupArchList)
|
||||
Value = GetHelpTextList(Item.HelpTextList)
|
||||
Value.append(Item.CName)
|
||||
GenMetaDatSectionItem(Key, Value, InfList)
|
||||
|
||||
if Module.PeiDepex:
|
||||
Key = 'Depex'
|
||||
Value = Module.PeiDepex.Depex
|
||||
GenMetaDatSectionItem(Key, Value, InfList)
|
||||
|
||||
if Module.DxeDepex:
|
||||
Key = 'Depex'
|
||||
Value = Module.DxeDepex.Depex
|
||||
GenMetaDatSectionItem(Key, Value, InfList)
|
||||
|
||||
if Module.SmmDepex:
|
||||
Key = 'Depex'
|
||||
Value = Module.SmmDepex.Depex
|
||||
GenMetaDatSectionItem(Key, Value, InfList)
|
||||
|
||||
for Item in Module.Binaries:
|
||||
Key = 'Binaries.' + GetStringOfList(Item.SupArchList)
|
||||
Value = GetHelpTextList(Item.HelpTextList)
|
||||
NewValue = Item.FileType + '|' + Item.BinaryFile + '|' + Item.Target
|
||||
if Item.FeatureFlag != '':
|
||||
NewValue = NewValue + '|' + Item.FeatureFlag
|
||||
Value.append(NewValue)
|
||||
GenMetaDatSectionItem(Key, Value, InfList)
|
||||
|
||||
# Transfer Module to Inf
|
||||
for Key in InfList:
|
||||
if Key in SectionHeaderCommentDict:
|
||||
List = SectionHeaderCommentDict[Key].split('\r')
|
||||
for Item in List:
|
||||
Inf = Inf + Item + '\n'
|
||||
Inf = Inf + '[' + Key + ']' + '\n'
|
||||
for Value in InfList[Key]:
|
||||
if type(Value) == type([]):
|
||||
for SubValue in Value:
|
||||
Inf = Inf + ' ' + SubValue + '\n'
|
||||
else:
|
||||
Inf = Inf + ' ' + Value + '\n'
|
||||
Inf = Inf + '\n'
|
||||
|
||||
return Inf
|
||||
|
||||
|
||||
## Transfer to Module Object
|
||||
#
|
||||
# Transfer all contents of an Inf file to a standard Module Object
|
||||
#
|
||||
def InfToModule(self):
|
||||
# Init global information for the file
|
||||
ContainerFile = self.Identification.FullPath
|
||||
|
||||
# Generate Module Header
|
||||
self.GenModuleHeader(ContainerFile)
|
||||
|
||||
# Generate BuildOptions
|
||||
self.GenBuildOptions(ContainerFile)
|
||||
|
||||
# Generate Includes
|
||||
self.GenIncludes(ContainerFile)
|
||||
|
||||
# Generate LibraryClasses
|
||||
self.GenLibraryClasses(ContainerFile)
|
||||
|
||||
# Generate Packages
|
||||
self.GenPackages(ContainerFile)
|
||||
|
||||
# Generate Pcds
|
||||
self.GenPcds(ContainerFile)
|
||||
|
||||
# Generate Sources
|
||||
self.GenSources(ContainerFile)
|
||||
|
||||
# Generate Guids
|
||||
self.GenGuidProtocolPpis(DataType.TAB_GUIDS, ContainerFile)
|
||||
|
||||
# Generate Protocols
|
||||
self.GenGuidProtocolPpis(DataType.TAB_PROTOCOLS, ContainerFile)
|
||||
|
||||
# Generate Ppis
|
||||
self.GenGuidProtocolPpis(DataType.TAB_PPIS, ContainerFile)
|
||||
|
||||
# Generate Depexes
|
||||
self.GenDepexes(ContainerFile)
|
||||
|
||||
# Generate Binaries
|
||||
self.GenBinaries(ContainerFile)
|
||||
|
||||
# Init MiscFiles
|
||||
self.GenMiscFiles(ContainerFile)
|
||||
|
||||
## GenMiscFiles
|
||||
#
|
||||
def GenMiscFiles(self, ContainerFile):
|
||||
MiscFiles = MiscFileClass()
|
||||
MiscFiles.Name = 'ModuleFiles'
|
||||
for Item in GetFiles(os.path.dirname(ContainerFile), ['CVS', '.svn'], False):
|
||||
File = CommonClass.FileClass()
|
||||
File.Filename = Item
|
||||
MiscFiles.Files.append(File)
|
||||
self.Module.MiscFiles = MiscFiles
|
||||
|
||||
## Load Inf file
|
||||
#
|
||||
# Load the file if it exists
|
||||
#
|
||||
# @param Filename: Input value for filename of Inf file
|
||||
#
|
||||
def LoadInfFile(self, Filename):
|
||||
# Insert a record for file
|
||||
Filename = NormPath(Filename)
|
||||
|
||||
self.Identification.FullPath = Filename
|
||||
(self.Identification.RelaPath, self.Identification.FileName) = os.path.split(Filename)
|
||||
if self.Identification.FullPath.find(self.WorkspaceDir) > -1:
|
||||
self.Identification.ModulePath = os.path.dirname(self.Identification.FullPath[len(self.WorkspaceDir) + 1:])
|
||||
if self.PackageDir:
|
||||
self.Identification.PackagePath = self.PackageDir
|
||||
if self.Identification.ModulePath.find(self.PackageDir) == 0:
|
||||
self.Identification.ModulePath = self.Identification.ModulePath[len(self.PackageDir) + 1:]
|
||||
|
||||
# Init common data
|
||||
IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
|
||||
[], [], TAB_UNKNOWN, [], [], []
|
||||
LineNo = 0
|
||||
|
||||
# Parse file content
|
||||
IsFindBlockComment = False
|
||||
ReservedLine = ''
|
||||
Comment = ''
|
||||
for Line in open(Filename, 'r'):
|
||||
LineNo = LineNo + 1
|
||||
# Remove comment block
|
||||
if Line.find(TAB_COMMENT_R8_START) > -1:
|
||||
ReservedLine = GetSplitValueList(Line, TAB_COMMENT_R8_START, 1)[0]
|
||||
if ReservedLine.strip().startswith(TAB_COMMENT_SPLIT):
|
||||
Comment = Comment + Line.strip() + '\n'
|
||||
ReservedLine = ''
|
||||
else:
|
||||
Comment = Comment + Line[len(ReservedLine):] + '\n'
|
||||
IsFindBlockComment = True
|
||||
if not ReservedLine:
|
||||
continue
|
||||
if Line.find(TAB_COMMENT_R8_END) > -1:
|
||||
Comment = Comment + Line[:Line.find(TAB_COMMENT_R8_END) + len(TAB_COMMENT_R8_END)] + '\n'
|
||||
Line = ReservedLine + GetSplitValueList(Line, TAB_COMMENT_R8_END, 1)[1]
|
||||
ReservedLine = ''
|
||||
IsFindBlockComment = False
|
||||
if IsFindBlockComment:
|
||||
Comment = Comment + Line.strip() + '\n'
|
||||
continue
|
||||
|
||||
# Remove comments at tail and remove spaces again
|
||||
if Line.strip().startswith(TAB_COMMENT_SPLIT) or Line.strip().startswith('--/'):
|
||||
Comment = Comment + Line.strip() + '\n'
|
||||
Line = CleanString(Line)
|
||||
if Line == '':
|
||||
continue
|
||||
|
||||
## Find a new section tab
|
||||
# First insert previous section items
|
||||
# And then parse the content of the new section
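# e.g. a header line "[Sources.IA32, Sources.X64]" yields CurrentSection 'Sources'
# with ArchList ['IA32', 'X64']; mixing different section names in one header is
# reported as a parser error below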
|
||||
if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
|
||||
if Line[1:3] == "--":
|
||||
continue
|
||||
Model = Section[CurrentSection.upper()]
|
||||
# Insert items data of previous section
|
||||
InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, self.RecordSet)
|
||||
|
||||
# Parse the new section
|
||||
SectionItemList = []
|
||||
ArchList = []
|
||||
ThirdList = []
|
||||
|
||||
CurrentSection = ''
|
||||
LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
|
||||
for Item in LineList:
|
||||
ItemList = GetSplitValueList(Item, TAB_SPLIT)
|
||||
if CurrentSection == '':
|
||||
CurrentSection = ItemList[0]
|
||||
else:
|
||||
if CurrentSection != ItemList[0]:
|
||||
EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
|
||||
if CurrentSection.upper() not in self.KeyList:
|
||||
RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
|
||||
ItemList.append('')
|
||||
ItemList.append('')
|
||||
if len(ItemList) > 5:
|
||||
RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
|
||||
else:
|
||||
if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
|
||||
EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
|
||||
ArchList.append(ItemList[1].upper())
|
||||
ThirdList.append(ItemList[2])
|
||||
|
||||
if Comment:
|
||||
if Comment.endswith('\n'):
|
||||
Comment = Comment[:len(Comment) - len('\n')]
|
||||
self.SectionHeaderCommentDict[Section[CurrentSection.upper()]] = Comment
|
||||
Comment = ''
|
||||
continue
|
||||
|
||||
# Not in any defined section
|
||||
if CurrentSection == TAB_UNKNOWN:
|
||||
ErrorMsg = "%s is not in any defined section" % Line
|
||||
EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
|
||||
|
||||
# Add a section item
|
||||
SectionItemList.append([Line, LineNo, Comment])
|
||||
Comment = ''
|
||||
# End of parse
|
||||
#End of For
|
||||
|
||||
# Insert items data of last section
|
||||
Model = Section[CurrentSection.upper()]
|
||||
InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, self.RecordSet)
|
||||
if Comment != '':
|
||||
self.SectionHeaderCommentDict[Model] = Comment
|
||||
Comment = ''
|
||||
|
||||
## Show detailed information of Module
|
||||
#
|
||||
# Print all members and their values of Module class
|
||||
#
|
||||
def ShowModule(self):
|
||||
M = self.Module
|
||||
print 'Filename =', M.ModuleHeader.FileName
|
||||
print 'FullPath =', M.ModuleHeader.FullPath
|
||||
print 'RelaPath =', M.ModuleHeader.RelaPath
|
||||
print 'PackagePath =', M.ModuleHeader.PackagePath
|
||||
print 'ModulePath =', M.ModuleHeader.ModulePath
|
||||
print 'CombinePath =', M.ModuleHeader.CombinePath
|
||||
|
||||
print 'BaseName =', M.ModuleHeader.Name
|
||||
print 'Guid =', M.ModuleHeader.Guid
|
||||
print 'Version =', M.ModuleHeader.Version
|
||||
|
||||
print '\nIncludes ='
|
||||
for Item in M.Includes:
|
||||
print Item.FilePath, Item.SupArchList
|
||||
print '\nLibraryClasses ='
|
||||
for Item in M.LibraryClasses:
|
||||
print Item.LibraryClass, Item.RecommendedInstance, Item.RecommendedInstanceGuid, Item.RecommendedInstanceVersion, Item.FeatureFlag, Item.SupModuleList, Item.SupArchList, Item.Define
|
||||
print '\nPackageDependencies ='
|
||||
for Item in M.PackageDependencies:
|
||||
print Item.FilePath, Item.SupArchList, Item.FeatureFlag
|
||||
print '\nPcds ='
|
||||
for Item in M.PcdCodes:
|
||||
print '\tCName=',Item.CName, 'TokenSpaceGuidCName=', Item.TokenSpaceGuidCName, 'DefaultValue=', Item.DefaultValue, 'ItemType=', Item.ItemType, Item.SupArchList
|
||||
print '\nSources ='
|
||||
for Source in M.Sources:
|
||||
print Source.SourceFile, 'Fam=', Source.ToolChainFamily, 'Pcd=', Source.FeatureFlag, 'Tag=', Source.TagName, 'ToolCode=', Source.ToolCode, Source.SupArchList
|
||||
print '\nGuids ='
|
||||
for Item in M.Guids:
|
||||
print Item.CName, Item.SupArchList, Item.FeatureFlag
|
||||
print '\nProtocols ='
|
||||
for Item in M.Protocols:
|
||||
print Item.CName, Item.SupArchList, Item.FeatureFlag
|
||||
print '\nPpis ='
|
||||
for Item in M.Ppis:
|
||||
print Item.CName, Item.SupArchList, Item.FeatureFlag
|
||||
print '\nDepex ='
|
||||
for Item in M.Depex:
|
||||
print Item.Depex, Item.SupArchList, Item.Define
|
||||
print '\nBinaries ='
|
||||
for Binary in M.Binaries:
|
||||
print 'Type=', Binary.FileType, 'Target=', Binary.Target, 'Name=', Binary.BinaryFile, 'FeatureFlag=', Binary.FeatureFlag, 'SupArchList=', Binary.SupArchList
|
||||
print '\n*** FileList ***'
|
||||
for Item in M.MiscFiles.Files:
|
||||
print Item.Filename
|
||||
print '****************\n'
|
||||
|
||||
## Convert [Defines] section content to ModuleHeaderClass
|
||||
#
|
||||
# Convert [Defines] section content to ModuleHeaderClass
|
||||
#
|
||||
# @param Defines The content under [Defines] section
|
||||
# @param ModuleHeader An object of ModuleHeaderClass
|
||||
# @param Arch The supported ARCH
|
||||
#
|
||||
def GenModuleHeader(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate ModuleHeader ...")
|
||||
# Update all defines item in database
|
||||
RecordSet = self.RecordSet[MODEL_META_DATA_HEADER]
|
||||
|
||||
ModuleHeader = ModuleHeaderClass()
|
||||
ModuleExtern = ModuleExternClass()
|
||||
OtherDefines = []
|
||||
for Record in RecordSet:
|
||||
ValueList = GetSplitValueList(Record[0], TAB_EQUAL_SPLIT)
|
||||
if len(ValueList) != 2:
|
||||
OtherDefines.append(Record[0])
|
||||
else:
|
||||
Name = ValueList[0]
|
||||
Value = ValueList[1]
|
||||
if Name == TAB_INF_DEFINES_BASE_NAME:
|
||||
ModuleHeader.Name = Value
|
||||
ModuleHeader.BaseName = Value
|
||||
elif Name == TAB_INF_DEFINES_FILE_GUID:
|
||||
ModuleHeader.Guid = Value
|
||||
elif Name == TAB_INF_DEFINES_VERSION_STRING:
|
||||
ModuleHeader.Version = Value
|
||||
elif Name == TAB_INF_DEFINES_PCD_IS_DRIVER:
|
||||
ModuleHeader.PcdIsDriver = Value
|
||||
elif Name == TAB_INF_DEFINES_MODULE_TYPE:
|
||||
ModuleHeader.ModuleType = Value
|
||||
elif Name == TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION:
|
||||
ModuleHeader.UefiSpecificationVersion = Value
|
||||
elif Name == TAB_INF_DEFINES_PI_SPECIFICATION_VERSION:
|
||||
ModuleHeader.PiSpecificationVersion = Value
|
||||
elif Name == TAB_INF_DEFINES_ENTRY_POINT:
|
||||
ModuleExtern.EntryPoint = Value
|
||||
elif Name == TAB_INF_DEFINES_UNLOAD_IMAGE:
|
||||
ModuleExtern.UnloadImage = Value
|
||||
elif Name == TAB_INF_DEFINES_CONSTRUCTOR:
|
||||
ModuleExtern.Constructor = Value
|
||||
elif Name == TAB_INF_DEFINES_DESTRUCTOR:
|
||||
ModuleExtern.Destructor = Value
|
||||
else:
|
||||
OtherDefines.append(Record[0])
|
||||
ModuleHeader.FileName = self.Identification.FileName
|
||||
ModuleHeader.FullPath = self.Identification.FullPath
|
||||
ModuleHeader.RelaPath = self.Identification.RelaPath
|
||||
ModuleHeader.PackagePath = self.Identification.PackagePath
|
||||
ModuleHeader.ModulePath = self.Identification.ModulePath
|
||||
ModuleHeader.CombinePath = os.path.normpath(os.path.join(ModuleHeader.PackagePath, ModuleHeader.ModulePath, ModuleHeader.FileName))
|
||||
|
||||
if MODEL_META_DATA_HEADER in self.SectionHeaderCommentDict:
|
||||
ModuleHeader.Description = self.SectionHeaderCommentDict[MODEL_META_DATA_HEADER]
|
||||
self.Module.ModuleHeader = ModuleHeader
|
||||
self.Module.Externs.append(ModuleExtern)
|
||||
UE = self.Module.UserExtensions
|
||||
if UE == None:
|
||||
UE = UserExtensionsClass()
|
||||
UE.Defines = OtherDefines
|
||||
self.Module.UserExtensions = UE
|
||||
|
||||
## GenBuildOptions
|
||||
#
|
||||
# Gen BuildOptions of Inf
|
||||
# [<Family>:]<ToolFlag>=Flag
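# e.g. (illustrative value only): MSFT:*_*_IA32_CC_FLAGS = /D MDEPKG_NDEBUG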
|
||||
#
|
||||
# @param ContainerFile: The Inf file full path
|
||||
#
|
||||
def GenBuildOptions(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % TAB_BUILD_OPTIONS)
|
||||
BuildOptions = {}
|
||||
# Get all BuildOptions
|
||||
RecordSet = self.RecordSet[MODEL_META_DATA_BUILD_OPTION]
|
||||
UE = self.Module.UserExtensions
|
||||
if UE == None:
|
||||
UE = UserExtensionsClass()
|
||||
for Record in RecordSet:
|
||||
UE.BuildOptions.append(Record[0])
|
||||
self.Module.UserExtensions = UE
|
||||
|
||||
## GenIncludes
|
||||
#
|
||||
# Gen Includes of Inf
|
||||
#
|
||||
# @param ContainerFile: The Inf file full path
|
||||
#
|
||||
def GenIncludes(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % TAB_INCLUDES)
|
||||
Includes = sdict()
|
||||
# Get all Includes
|
||||
RecordSet = self.RecordSet[MODEL_EFI_INCLUDE]
|
||||
for Record in RecordSet:
|
||||
Include = IncludeClass()
|
||||
Include.FilePath = Record[0]
|
||||
Include.SupArchList = Record[1]
|
||||
if GenerateHelpText(Record[5], ''):
|
||||
Include.HelpTextList.append(GenerateHelpText(Record[5], ''))
|
||||
self.Module.Includes.append(Include)
|
||||
#self.Module.FileList.extend(GetFiles(os.path.normpath(os.path.join(self.Identification.FileRelativePath, Include.FilePath)), ['CVS', '.svn']))
|
||||
|
||||
## GenLibraryClasses
|
||||
#
|
||||
# Get LibraryClass of Inf
|
||||
# <LibraryClassKeyWord>|<LibraryInstance>
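# e.g. (illustrative value only): DebugLib|MdePkg/Library/BaseDebugLibNull/BaseDebugLibNull.inf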
|
||||
#
|
||||
# @param ContainerFile: The Inf file full path
|
||||
#
|
||||
def GenLibraryClasses(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
|
||||
LibraryClasses = {}
|
||||
# Get all LibraryClasses
|
||||
RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_CLASS]
|
||||
for Record in RecordSet:
|
||||
(LibClassName, LibClassIns, Pcd, SupModelList) = GetLibraryClassOfInf([Record[0], Record[4]], ContainerFile, self.WorkspaceDir, Record[2])
|
||||
LibraryClass = CommonClass.LibraryClassClass()
|
||||
LibraryClass.LibraryClass = LibClassName
|
||||
LibraryClass.RecommendedInstance = LibClassIns
|
||||
LibraryClass.FeatureFlag = Pcd
|
||||
LibraryClass.SupArchList = Record[1]
|
||||
LibraryClass.SupModuleList = Record[4]
|
||||
if GenerateHelpText(Record[5], ''):
|
||||
LibraryClass.HelpTextList.append(GenerateHelpText(Record[5], ''))
|
||||
self.Module.LibraryClasses.append(LibraryClass)
|
||||
|
||||
## GenPackages
|
||||
#
|
||||
# Gen Packages of Inf
|
||||
#
|
||||
# @param ContainerFile: The Inf file full path
|
||||
#
|
||||
def GenPackages(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % TAB_PACKAGES)
|
||||
Packages = {}
|
||||
# Get all Packages
|
||||
RecordSet = self.RecordSet[MODEL_META_DATA_PACKAGE]
|
||||
for Record in RecordSet:
|
||||
(PackagePath, Pcd) = GetPackage(Record[0], ContainerFile, self.WorkspaceDir, Record[2])
|
||||
Package = ModulePackageDependencyClass()
|
||||
Package.FilePath = NormPath(PackagePath)
|
||||
Package.SupArchList = Record[1]
|
||||
Package.FeatureFlag = Pcd
|
||||
if GenerateHelpText(Record[5], ''):
|
||||
Package.HelpTextList.append(GenerateHelpText(Record[5], ''))
|
||||
self.Module.PackageDependencies.append(Package)
|
||||
|
||||
def AddPcd(self, CName, TokenSpaceGuidCName, DefaultValue, ItemType, Arch, HelpTextList):
|
||||
Pcd = PcdClass()
|
||||
Pcd.CName = CName
|
||||
Pcd.TokenSpaceGuidCName = TokenSpaceGuidCName
|
||||
Pcd.DefaultValue = DefaultValue
|
||||
Pcd.ItemType = ItemType
|
||||
Pcd.SupArchList = Arch
|
||||
if GenerateHelpText(HelpTextList, ''):
|
||||
Pcd.HelpTextList.append(GenerateHelpText(HelpTextList, ''))
|
||||
self.Module.PcdCodes.append(Pcd)
|
||||
|
||||
## GenPcds
|
||||
#
|
||||
# Gen Pcds of Inf
|
||||
# <TokenSpaceGuidCName>.<PcdCName>[|<Value>]
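# e.g. (illustrative value only): gEfiMdePkgTokenSpaceGuid.PcdDebugPropertyMask|0x0f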
|
||||
#
|
||||
# @param ContainerFile: The Dec file full path
|
||||
#
|
||||
def GenPcds(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % TAB_PCDS)
|
||||
Pcds = {}
|
||||
PcdToken = {}
|
||||
|
||||
# Get all Pcds
|
||||
RecordSet1 = self.RecordSet[MODEL_PCD_FIXED_AT_BUILD]
|
||||
RecordSet2 = self.RecordSet[MODEL_PCD_PATCHABLE_IN_MODULE]
|
||||
RecordSet3 = self.RecordSet[MODEL_PCD_FEATURE_FLAG]
|
||||
RecordSet4 = self.RecordSet[MODEL_PCD_DYNAMIC_EX]
|
||||
RecordSet5 = self.RecordSet[MODEL_PCD_DYNAMIC]
|
||||
|
||||
# Go through each arch
|
||||
for Record in RecordSet1:
|
||||
(TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_FIXED_AT_BUILD, ContainerFile, Record[2])
|
||||
self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])
|
||||
for Record in RecordSet2:
|
||||
(TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_PATCHABLE_IN_MODULE, ContainerFile, Record[2])
|
||||
self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])
|
||||
for Record in RecordSet3:
|
||||
(TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_FEATURE_FLAG, ContainerFile, Record[2])
|
||||
self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])
|
||||
for Record in RecordSet4:
|
||||
(TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_DYNAMIC_EX, ContainerFile, Record[2])
|
||||
self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])
|
||||
for Record in RecordSet5:
|
||||
(TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], '', ContainerFile, Record[2])
|
||||
self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])
|
||||
|
||||
## GenSources
|
||||
#
|
||||
# Gen Sources of Inf
|
||||
# <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
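# e.g. (illustrative values only): HelloWorld.c  or  Ia32/CpuAsm.asm|MSFT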
|
||||
#
|
||||
# @param ContainerFile: The Dec file full path
|
||||
#
|
||||
def GenSources(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % TAB_SOURCES)
|
||||
Sources = {}
|
||||
|
||||
# Get all Sources
|
||||
RecordSet = self.RecordSet[MODEL_EFI_SOURCE_FILE]
|
||||
for Record in RecordSet:
|
||||
(Filename, Family, TagName, ToolCode, Pcd) = GetSource(Record[0], ContainerFile, self.Identification.RelaPath, Record[2])
|
||||
Source = ModuleSourceFileClass(Filename, TagName, ToolCode, Family, Pcd, Record[1])
|
||||
if GenerateHelpText(Record[5], ''):
|
||||
Source.HelpTextList.append(GenerateHelpText(Record[5], ''))
|
||||
if MODEL_EFI_SOURCE_FILE in self.SectionHeaderCommentDict:
|
||||
Source.HelpText = self.SectionHeaderCommentDict[MODEL_EFI_SOURCE_FILE]
|
||||
self.Module.Sources.append(Source)
|
||||
#self.Module.FileList.append(os.path.normpath(os.path.join(self.Identification.RelaPath, Filename)))
|
||||
|
||||
## GenDepexes
|
||||
#
|
||||
# Gen Depex of Inf
|
||||
#
|
||||
# @param ContainerFile: The Inf file full path
|
||||
#
|
||||
def GenDepexes(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % TAB_DEPEX)
|
||||
Depex = {}
|
||||
# Get all Depexes
|
||||
RecordSet = self.RecordSet[MODEL_EFI_DEPEX]
|
||||
DepexString = ''
|
||||
for Record in RecordSet:
|
||||
DepexString = DepexString + Record[0] + '\n'
|
||||
Dep = ModuleDepexClass()
|
||||
if DepexString.endswith('\n'):
|
||||
DepexString = DepexString[:len(DepexString) - len('\n')]
|
||||
Dep.Depex = DepexString
|
||||
if self.Module.ModuleHeader.ModuleType in ['DXE_SMM_DRIVER']:
|
||||
self.Module.SmmDepex = Dep
|
||||
elif self.Module.ModuleHeader.ModuleType in ['PEI_CORE', 'PEIM']:
|
||||
self.Module.PeiDepex = Dep
|
||||
else:
|
||||
self.Module.DxeDepex = Dep
|
||||
# for Record in RecordSet:
|
||||
#
|
||||
# Dep = ModuleDepexClass()
|
||||
# Dep.Depex = Record[0]
|
||||
# Dep.SupArchList = Record[1]
|
||||
# if GenerateHelpText(Record[5], ''):
|
||||
# Dep.HelpTextList.append(GenerateHelpText(Record[5], ''))
|
||||
# DepexString = DepexString + Dep
|
||||
# List.append(Dep)
|
||||
# self.Module.Depex = List
|
||||
# if self.Module.ModuleHeader.ModuleType in ['DXE_SMM_DRIVER']:
|
||||
# self.Module.SmmDepex = List
|
||||
# elif self.Module.ModuleHeader.ModuleType in ['PEI_CORE', 'PEIM']:
|
||||
# self.Module.PeiDepex = List
|
||||
# else:
|
||||
# self.Module.DxeDepex = List
|
||||
|
||||
## GenBinaries
|
||||
#
|
||||
# Gen Binary of Inf
|
||||
# <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]
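# e.g. (illustrative value only): PE32|HelloWorld.efi|DEBUG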
|
||||
#
|
||||
# @param ContainerFile: The Dec file full path
|
||||
#
|
||||
def GenBinaries(self, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % TAB_BINARIES)
|
||||
Binaries = {}
|
||||
|
||||
# Get all Guids
|
||||
RecordSet = self.RecordSet[MODEL_EFI_BINARY_FILE]
|
||||
for Record in RecordSet:
|
||||
(FileType, Filename, Target, Pcd) = GetBinary(Record[0], ContainerFile, self.Identification.RelaPath, Record[2])
|
||||
Binary = ModuleBinaryFileClass(Filename, FileType, Target, Pcd, Record[1])
|
||||
if GenerateHelpText(Record[5], ''):
|
||||
Binary.HelpTextList.append(GenerateHelpText(Record[5], ''))
|
||||
self.Module.Binaries.append(Binary)
|
||||
#self.Module.FileList.append(os.path.normpath(os.path.join(self.Identification.RelaPath, Filename)))
|
||||
|
||||
## GenGuids
|
||||
#
|
||||
# Gen Guids of Inf
|
||||
# <CName>=<GuidValue>
|
||||
#
|
||||
# @param ContainerFile: The Inf file full path
|
||||
#
|
||||
def GenGuidProtocolPpis(self, Type, ContainerFile):
|
||||
EdkLogger.debug(2, "Generate %s ..." % Type)
|
||||
Lists = {}
|
||||
# Get all Items
|
||||
if Type == TAB_GUIDS:
|
||||
ListMember = self.Module.Guids
|
||||
elif Type == TAB_PROTOCOLS:
|
||||
ListMember = self.Module.Protocols
|
||||
elif Type == TAB_PPIS:
|
||||
ListMember = self.Module.Ppis
|
||||
|
||||
RecordSet = self.RecordSet[Section[Type.upper()]]
|
||||
for Record in RecordSet:
|
||||
(Name, Value) = GetGuidsProtocolsPpisOfInf(Record[0], Type, ContainerFile, Record[2])
|
||||
ListClass = GuidProtocolPpiCommonClass()
|
||||
ListClass.CName = Name
|
||||
ListClass.SupArchList = Record[1]
|
||||
ListClass.FeatureFlag = Value
|
||||
if GenerateHelpText(Record[5], ''):
|
||||
ListClass.HelpTextList.append(GenerateHelpText(Record[5], ''))
|
||||
ListMember.append(ListClass)
|
||||
|
||||
##
|
||||
#
|
||||
# This acts like the main() function for the script, unless it is 'import'ed into another
|
||||
# script.
|
||||
#
|
||||
if __name__ == '__main__':
|
||||
EdkLogger.Initialize()
|
||||
EdkLogger.SetLevel(EdkLogger.QUIET)
|
||||
|
||||
W = os.getenv('WORKSPACE')
|
||||
F = os.path.join(W, 'MdeModulePkg/Application/HelloWorld/HelloWorld.inf')
|
||||
|
||||
P = Inf(os.path.normpath(F), True, W, 'MdeModulePkg')
|
||||
P.ShowModule()
|
||||
print P.ModuleToInf(P.Module)
|
567
BaseTools/Source/Python/Common/MigrationUtilities.py
Normal file
@@ -0,0 +1,567 @@
|
||||
## @file
|
||||
# Contains several utilities shared by migration tools.
|
||||
#
|
||||
# Copyright (c) 2007, Intel Corporation
|
||||
# All rights reserved. This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.php
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
#
|
||||
|
||||
##
|
||||
# Import Modules
|
||||
#
|
||||
import os
|
||||
import re
|
||||
import EdkLogger
|
||||
from optparse import OptionParser
|
||||
from Common.BuildToolError import *
|
||||
from XmlRoutines import *
|
||||
from CommonDataClass.CommonClass import *
|
||||
|
||||
## Set all fields of CommonClass object.
|
||||
#
|
||||
# Set all attributes of CommonClass object from XML Dom object of XmlCommon.
|
||||
#
|
||||
# @param Common The destination CommonClass object.
|
||||
# @param XmlCommon The source XML Dom object.
|
||||
#
|
||||
def SetCommon(Common, XmlCommon):
|
||||
XmlTag = "Usage"
|
||||
Common.Usage = XmlAttribute(XmlCommon, XmlTag).split()
|
||||
|
||||
XmlTag = "FeatureFlag"
|
||||
Common.FeatureFlag = XmlAttribute(XmlCommon, XmlTag)
|
||||
|
||||
XmlTag = "SupArchList"
|
||||
Common.SupArchList = XmlAttribute(XmlCommon, XmlTag).split()
|
||||
|
||||
XmlTag = XmlNodeName(XmlCommon) + "/" + "HelpText"
|
||||
Common.HelpText = XmlElement(XmlCommon, XmlTag)
|
||||
|
||||
|
||||
## Set some fields of CommonHeaderClass object.
|
||||
#
|
||||
# Set Name, Guid, FileName and FullPath fields of CommonHeaderClass object from
|
||||
# XML Dom object of XmlCommonHeader, NameTag and FileName.
|
||||
#
|
||||
# @param CommonHeader The destination CommonClass object.
|
||||
# @param XmlCommonHeader The source XML Dom object.
|
||||
# @param NameTag The name tag in XML Dom object.
|
||||
# @param FileName The file name of the XML file.
|
||||
#
|
||||
def SetIdentification(CommonHeader, XmlCommonHeader, NameTag, FileName):
|
||||
XmlParentTag = XmlNodeName(XmlCommonHeader)
|
||||
|
||||
XmlTag = XmlParentTag + "/" + NameTag
|
||||
CommonHeader.Name = XmlElement(XmlCommonHeader, XmlTag)
|
||||
|
||||
XmlTag = XmlParentTag + "/" + "GuidValue"
|
||||
CommonHeader.Guid = XmlElement(XmlCommonHeader, XmlTag)
|
||||
|
||||
XmlTag = XmlParentTag + "/" + "Version"
|
||||
CommonHeader.Version = XmlElement(XmlCommonHeader, XmlTag)
|
||||
|
||||
CommonHeader.FileName = os.path.basename(FileName)
|
||||
CommonHeader.FullPath = os.path.abspath(FileName)
|
||||
|
||||
|
||||
## Regular expression to match specification and value.
|
||||
mReSpecification = re.compile(r"(?P<Specification>\w+)\s+(?P<Value>\w*)")
|
||||
|
||||
## Add specification to specification dictionary.
|
||||
#
|
||||
# Extract specification name/value pairs from the Specification String and add them
|
||||
# to specification dictionary.
|
||||
#
|
||||
# @param SpecificationDict The destination Specification dictionary.
|
||||
# @param SpecificationString The source Specification String from which the
|
||||
# specification name and value pairs are extracted.
|
||||
#
|
||||
def AddToSpecificationDict(SpecificationDict, SpecificationString):
|
||||
"""Abstract specification name, value pair from Specification String"""
|
||||
for SpecificationMatch in mReSpecification.finditer(SpecificationString):
|
||||
Specification = SpecificationMatch.group("Specification")
|
||||
Value = SpecificationMatch.group("Value")
|
||||
SpecificationDict[Specification] = Value
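# A minimal illustration of the parsing above; the specification names and values
# are typical examples only:
#
#   SpecDict = {}
#   AddToSpecificationDict(SpecDict, "EFI_SPECIFICATION_VERSION 0x00020000 EDK_RELEASE_VERSION 0x00090000")
#   # SpecDict == {'EFI_SPECIFICATION_VERSION': '0x00020000', 'EDK_RELEASE_VERSION': '0x00090000'}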
|
||||
|
||||
## Set all fields of CommonHeaderClass object.
|
||||
#
|
||||
# Set all attributes of CommonHeaderClass object from XML Dom object of
|
||||
# XmlCommonHeader, NameTag and FileName.
|
||||
#
|
||||
# @param CommonHeader The destination CommonClass object.
|
||||
# @param XmlCommonHeader The source XML Dom object.
|
||||
# @param NameTag The name tag in XML Dom object.
|
||||
# @param FileName The file name of the XML file.
|
||||
#
|
||||
def SetCommonHeader(CommonHeader, XmlCommonHeader):
|
||||
"""Set all attributes of CommonHeaderClass object from XmlCommonHeader"""
|
||||
XmlParent = XmlNodeName(XmlCommonHeader)
|
||||
|
||||
XmlTag = XmlParent + "/" + "Abstract"
|
||||
CommonHeader.Abstract = XmlElement(XmlCommonHeader, XmlTag)
|
||||
|
||||
XmlTag = XmlParent + "/" + "Description"
|
||||
CommonHeader.Description = XmlElement(XmlCommonHeader, XmlTag)
|
||||
|
||||
XmlTag = XmlParent + "/" + "Copyright"
|
||||
CommonHeader.Copyright = XmlElement(XmlCommonHeader, XmlTag)
|
||||
|
||||
XmlTag = XmlParent + "/" + "License"
|
||||
CommonHeader.License = XmlElement(XmlCommonHeader, XmlTag)
|
||||
|
||||
XmlTag = XmlParent + "/" + "Specification"
|
||||
Specification = XmlElement(XmlCommonHeader, XmlTag)
|
||||
|
||||
AddToSpecificationDict(CommonHeader.Specification, Specification)
|
||||
|
||||
XmlTag = XmlParent + "/" + "ModuleType"
|
||||
CommonHeader.ModuleType = XmlElement(XmlCommonHeader, XmlTag)
|
||||
|
||||
|
||||
## Load a new Cloned Record class object.
|
||||
#
|
||||
# Read an input XML ClonedRecord DOM object and return an object of Cloned Record
|
||||
# contained in the DOM object.
|
||||
#
|
||||
# @param XmlCloned A child XML DOM object in a Common XML DOM.
|
||||
#
|
||||
# @retval ClonedRecord A new Cloned Record object created by XmlCloned.
|
||||
#
|
||||
def LoadClonedRecord(XmlCloned):
|
||||
ClonedRecord = ClonedRecordClass()
|
||||
|
||||
XmlTag = "Id"
|
||||
ClonedRecord.Id = int(XmlAttribute(XmlCloned, XmlTag))
|
||||
|
||||
XmlTag = "FarGuid"
|
||||
ClonedRecord.FarGuid = XmlAttribute(XmlCloned, XmlTag)
|
||||
|
||||
XmlTag = "Cloned/PackageGuid"
|
||||
ClonedRecord.PackageGuid = XmlElement(XmlCloned, XmlTag)
|
||||
|
||||
XmlTag = "Cloned/PackageVersion"
|
||||
ClonedRecord.PackageVersion = XmlElement(XmlCloned, XmlTag)
|
||||
|
||||
XmlTag = "Cloned/ModuleGuid"
|
||||
ClonedRecord.ModuleGuid = XmlElement(XmlCloned, XmlTag)
|
||||
|
||||
XmlTag = "Cloned/ModuleVersion"
|
||||
ClonedRecord.ModuleVersion = XmlElement(XmlCloned, XmlTag)
|
||||
|
||||
return ClonedRecord
|
||||
|
||||
|
||||
## Load a new Guid/Protocol/Ppi common class object.
|
||||
#
|
||||
# Read an input XML Guid/Protocol/Ppi DOM object and return an object of
|
||||
# Guid/Protocol/Ppi contained in the DOM object.
|
||||
#
|
||||
# @param XmlGuidProtocolPpiCommon A child XML DOM object in a Common XML DOM.
|
||||
#
|
||||
# @retval GuidProtocolPpiCommon A new GuidProtocolPpiCommon class object
|
||||
# created by XmlGuidProtocolPpiCommon.
|
||||
#
|
||||
def LoadGuidProtocolPpiCommon(XmlGuidProtocolPpiCommon):
|
||||
GuidProtocolPpiCommon = GuidProtocolPpiCommonClass()
|
||||
|
||||
XmlTag = "Name"
|
||||
GuidProtocolPpiCommon.Name = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
|
||||
|
||||
XmlParent = XmlNodeName(XmlGuidProtocolPpiCommon)
|
||||
if XmlParent == "Entry":
|
||||
XmlTag = "%s/C_Name" % XmlParent
|
||||
elif XmlParent == "GuidCNames":
|
||||
XmlTag = "%s/GuidCName" % XmlParent
|
||||
else:
|
||||
XmlTag = "%s/%sCName" % (XmlParent, XmlParent)
|
||||
|
||||
GuidProtocolPpiCommon.CName = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)
|
||||
|
||||
XmlTag = XmlParent + "/" + "GuidValue"
|
||||
GuidProtocolPpiCommon.Guid = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)
|
||||
|
||||
if XmlParent.endswith("Notify"):
|
||||
GuidProtocolPpiCommon.Notify = True
|
||||
|
||||
XmlTag = "GuidTypeList"
|
||||
GuidTypes = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
|
||||
GuidProtocolPpiCommon.GuidTypeList = GuidTypes.split()
|
||||
|
||||
XmlTag = "SupModuleList"
|
||||
SupModules = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
|
||||
GuidProtocolPpiCommon.SupModuleList = SupModules.split()
|
||||
|
||||
SetCommon(GuidProtocolPpiCommon, XmlGuidProtocolPpiCommon)
|
||||
|
||||
return GuidProtocolPpiCommon
|
||||
|
||||
|
||||
## Load a new Pcd class object.
|
||||
#
|
||||
# Read an input XML Pcd DOM object and return an object of Pcd
|
||||
# contained in the DOM object.
|
||||
#
|
||||
# @param XmlPcd A child XML DOM object in a Common XML DOM.
|
||||
#
|
||||
# @retval Pcd A new Pcd object created by XmlPcd.
|
||||
#
|
||||
def LoadPcd(XmlPcd):
|
||||
"""Return a new PcdClass object equivalent to XmlPcd"""
|
||||
Pcd = PcdClass()
|
||||
|
||||
XmlTag = "PcdEntry/C_Name"
|
||||
Pcd.CName = XmlElement(XmlPcd, XmlTag)
|
||||
|
||||
XmlTag = "PcdEntry/Token"
|
||||
Pcd.Token = XmlElement(XmlPcd, XmlTag)
|
||||
|
||||
XmlTag = "PcdEntry/TokenSpaceGuidCName"
|
||||
Pcd.TokenSpaceGuidCName = XmlElement(XmlPcd, XmlTag)
|
||||
|
||||
XmlTag = "PcdEntry/DatumType"
|
||||
Pcd.DatumType = XmlElement(XmlPcd, XmlTag)
|
||||
|
||||
XmlTag = "PcdEntry/MaxDatumSize"
|
||||
Pcd.MaxDatumSize = XmlElement(XmlPcd, XmlTag)
|
||||
|
||||
XmlTag = "PcdEntry/DefaultValue"
|
||||
Pcd.DefaultValue = XmlElement(XmlPcd, XmlTag)
|
||||
|
||||
XmlTag = "PcdItemType"
|
||||
Pcd.ItemType = XmlAttribute(XmlPcd, XmlTag)
|
||||
|
||||
XmlTag = "PcdEntry/ValidUsage"
|
||||
Pcd.ValidUsage = XmlElement(XmlPcd, XmlTag).split()
|
||||
|
||||
XmlTag = "SupModuleList"
|
||||
Pcd.SupModuleList = XmlAttribute(XmlPcd, XmlTag).split()
|
||||
|
||||
SetCommon(Pcd, XmlPcd)
|
||||
|
||||
return Pcd
|
||||
|
||||
|
||||
## Load a new LibraryClass class object.
|
||||
#
|
||||
# Read an input XML LibraryClass DOM object and return an object of LibraryClass
|
||||
# contained in the DOM object.
|
||||
#
|
||||
# @param XmlLibraryClass A child XML DOM object in a Common XML DOM.
|
||||
#
|
||||
# @retval LibraryClass A new LibraryClass object created by XmlLibraryClass.
|
||||
#
|
||||
def LoadLibraryClass(XmlLibraryClass):
|
||||
LibraryClass = LibraryClassClass()
|
||||
|
||||
XmlTag = "LibraryClass/Keyword"
|
||||
LibraryClass.LibraryClass = XmlElement(XmlLibraryClass, XmlTag)
|
||||
if LibraryClass.LibraryClass == "":
|
||||
XmlTag = "Name"
|
||||
LibraryClass.LibraryClass = XmlAttribute(XmlLibraryClass, XmlTag)
|
||||
|
||||
XmlTag = "LibraryClass/IncludeHeader"
|
||||
LibraryClass.IncludeHeader = XmlElement(XmlLibraryClass, XmlTag)
|
||||
|
||||
XmlTag = "RecommendedInstanceVersion"
|
||||
RecommendedInstanceVersion = XmlAttribute(XmlLibraryClass, XmlTag)
|
||||
LibraryClass.RecommendedInstanceVersion = RecommendedInstanceVersion
|
||||
|
||||
XmlTag = "RecommendedInstanceGuid"
|
||||
RecommendedInstanceGuid = XmlAttribute(XmlLibraryClass, XmlTag)
|
||||
LibraryClass.RecommendedInstanceGuid = RecommendedInstanceGuid
|
||||
|
||||
XmlTag = "SupModuleList"
|
||||
SupModules = XmlAttribute(XmlLibraryClass, XmlTag)
|
||||
LibraryClass.SupModuleList = SupModules.split()
|
||||
|
||||
SetCommon(LibraryClass, XmlLibraryClass)
|
||||
|
||||
return LibraryClass
|
||||
|
||||
|
||||
## Load a new Build Option class object.
|
||||
#
|
||||
# Read an input XML BuildOption DOM object and return an object of Build Option
|
||||
# contained in the DOM object.
|
||||
#
|
||||
# @param XmlBuildOption A child XML DOM object in a Common XML DOM.
|
||||
#
|
||||
# @retval BuildOption A new Build Option object created by XmlBuildOption.
|
||||
#
|
||||
def LoadBuildOption(XmlBuildOption):
|
||||
"""Return a new BuildOptionClass object equivalent to XmlBuildOption"""
|
||||
BuildOption = BuildOptionClass()
|
||||
|
||||
BuildOption.Option = XmlElementData(XmlBuildOption)
|
||||
|
||||
XmlTag = "BuildTargets"
|
||||
BuildOption.BuildTargetList = XmlAttribute(XmlBuildOption, XmlTag).split()
|
||||
|
||||
XmlTag = "ToolChainFamily"
|
||||
BuildOption.ToolChainFamily = XmlAttribute(XmlBuildOption, XmlTag)
|
||||
|
||||
XmlTag = "TagName"
|
||||
BuildOption.TagName = XmlAttribute(XmlBuildOption, XmlTag)
|
||||
|
||||
XmlTag = "ToolCode"
|
||||
BuildOption.ToolCode = XmlAttribute(XmlBuildOption, XmlTag)
|
||||
|
||||
XmlTag = "SupArchList"
|
||||
BuildOption.SupArchList = XmlAttribute(XmlBuildOption, XmlTag).split()
|
||||
|
||||
return BuildOption
|
||||
|
||||
|
||||
## Load a new User Extensions class object.
|
||||
#
|
||||
# Read an input XML UserExtensions DOM object and return an object of User
|
||||
# Extensions contained in the DOM object.
|
||||
#
|
||||
# @param XmlUserExtensions A child XML DOM object in a Common XML DOM.
|
||||
#
|
||||
# @retval UserExtensions A new User Extensions object created by
|
||||
# XmlUserExtensions.
|
||||
#
|
||||
def LoadUserExtensions(XmlUserExtensions):
|
||||
UserExtensions = UserExtensionsClass()
|
||||
|
||||
XmlTag = "UserID"
|
||||
UserExtensions.UserID = XmlAttribute(XmlUserExtensions, XmlTag)
|
||||
|
||||
XmlTag = "Identifier"
|
||||
UserExtensions.Identifier = XmlAttribute(XmlUserExtensions, XmlTag)
|
||||
|
||||
UserExtensions.Content = XmlElementData(XmlUserExtensions)
|
||||
|
||||
return UserExtensions
|
||||
|
||||
|
||||
## Store content to a text file object.
|
||||
#
|
||||
# Write some text content to a text file object. The content may also be echoed
|
||||
# on screen in verbose mode.
|
||||
#
|
||||
# @param TextFile The text file object.
|
||||
# @param Content The string object to be written to a text file.
|
||||
#
|
||||
def StoreTextFile(TextFile, Content):
|
||||
EdkLogger.verbose(Content)
|
||||
TextFile.write(Content)
|
||||
|
||||
|
||||
## Add item to a section.
|
||||
#
|
||||
# Add an Item with specific CPU architecture to section dictionary.
|
||||
# Any possible duplication is removed.
|
||||
#
|
||||
# @param Section Section dictionary indexed by CPU architecture.
|
||||
# @param Arch CPU architecture: Ia32, X64, Ipf, ARM, Ebc or Common.
|
||||
# @param Item The Item to be added to section dictionary.
|
||||
#
|
||||
def AddToSection(Section, Arch, Item):
|
||||
SectionArch = Section.get(Arch, [])
|
||||
if Item not in SectionArch:
|
||||
SectionArch.append(Item)
|
||||
Section[Arch] = SectionArch
|
||||
|
||||
|
||||
## Get section contents.
|
||||
#
|
||||
# Return the content of section named SectionName.
|
||||
# The content is based on Method and ObjectList.
|
||||
#
|
||||
# @param SectionName The name of the section.
|
||||
# @param Method A function returning a string item of an object.
|
||||
# @param ObjectList The list of objects.
|
||||
#
|
||||
# @retval Section The string content of a section.
|
||||
#
|
||||
def GetSection(SectionName, Method, ObjectList):
|
||||
SupportedArches = ["common", "Ia32", "X64", "Ipf", "Ebc", "ARM"]
|
||||
SectionDict = {}
|
||||
for Object in ObjectList:
|
||||
Item = Method(Object)
|
||||
if Item == "":
|
||||
continue
|
||||
Item = " %s" % Item
|
||||
Arches = Object.SupArchList
|
||||
if len(Arches) == 0:
|
||||
AddToSection(SectionDict, "common", Item)
|
||||
else:
|
||||
for Arch in SupportedArches:
|
||||
if Arch.upper() in Arches:
|
||||
AddToSection(SectionDict, Arch, Item)
|
||||
|
||||
Section = ""
|
||||
for Arch in SupportedArches:
|
||||
SectionArch = "\n".join(SectionDict.get(Arch, []))
|
||||
if SectionArch != "":
|
||||
Section += "[%s.%s]\n%s\n" % (SectionName, Arch, SectionArch)
|
||||
Section += "\n"
|
||||
if Section != "":
|
||||
Section += "\n"
|
||||
return Section
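# A minimal sketch of how GetSection is typically driven; the accessor and module
# object names are illustrative, not part of this file:
#
#   def GetSourceFileItem(Source):
#       return Source.SourceFile
#   SourcesSection = GetSection("Sources", GetSourceFileItem, Module.Sources)
#   # produces a "[Sources.common]" block listing each source file when the objects
#   # carry an empty SupArchList, or per-arch blocks such as "[Sources.Ia32]" otherwise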
|
||||
|
||||
|
||||
## Store file header to a text file.
|
||||
#
|
||||
# Write standard file header to a text file. The content includes copyright,
|
||||
# abstract, description and license extracted from CommonHeader class object.
|
||||
#
|
||||
# @param TextFile The text file object.
|
||||
# @param CommonHeader The source CommonHeader class object.
|
||||
#
|
||||
def StoreHeader(TextFile, CommonHeader):
|
||||
CopyRight = CommonHeader.Copyright
|
||||
Abstract = CommonHeader.Abstract
|
||||
Description = CommonHeader.Description
|
||||
License = CommonHeader.License
|
||||
|
||||
Header = "#/** @file\n#\n"
|
||||
Header += "# " + Abstract + "\n#\n"
|
||||
Header += "# " + Description.strip().replace("\n", "\n# ") + "\n"
|
||||
Header += "# " + CopyRight + "\n#\n"
|
||||
Header += "# " + License.replace("\n", "\n# ").replace(" ", " ")
|
||||
Header += "\n#\n#**/\n\n"
|
||||
|
||||
StoreTextFile(TextFile, Header)
|
||||
|
||||
## Store file header to a text file.
|
||||
#
|
||||
# Write Defines section to a text file. DefinesTupleList determines the content.
|
||||
#
|
||||
# @param TextFile The text file object.
|
||||
# @param DefinesTupleList The list of (Tag, Value) to be added as one item.
|
||||
#
|
||||
def StoreDefinesSection(TextFile, DefinesTupleList):
|
||||
Section = "[Defines]\n"
|
||||
for DefineItem in DefinesTupleList:
|
||||
Section += " %-30s = %s\n" % DefineItem
|
||||
|
||||
Section += "\n\n"
|
||||
StoreTextFile(TextFile, Section)
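# e.g. (illustrative tuple only): StoreDefinesSection(TextFile, [("INF_VERSION", "0x00010005")])
# writes a "[Defines]" section with INF_VERSION padded to a 30-character column,
# followed by "= 0x00010005"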
|
||||
|
||||
|
||||
## Return one User Extension section.
|
||||
#
|
||||
# Read the input UserExtensions class object and return one section.
|
||||
#
|
||||
# @param UserExtensions An input UserExtensions class object.
|
||||
#
|
||||
# @retval UserExtensionSection A section representing UserExtensions object.
|
||||
#
|
||||
def GetUserExtensions(UserExtensions):
|
||||
UserId = UserExtensions.UserID
|
||||
Identifier = UserExtensions.Identifier
|
||||
Content = UserExtensions.Content
|
||||
|
||||
return "[UserExtensions.%s.%s]\n %s\n\n" % (UserId, Identifier, Content)
|
||||
|
||||
## Regular expression to match an equation.
|
||||
mReEquation = re.compile(r"\s*(\S+)\s*=\s*(\S*)\s*")
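# e.g. a line "INF_VERSION = 0x00010005" matches with group(1) == 'INF_VERSION' and
# group(2) == '0x00010005' (tag and value here are illustrative only)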
|
||||
|
||||
## Return a value tuple matching information in a text file.
|
||||
#
|
||||
# Parse the text file and return a value tuple corresponding to an input tag
|
||||
# tuple. In case of any error, a tuple of empty strings is returned.
|
||||
#
|
||||
# @param FileName The file name of the text file.
|
||||
# @param TagTuple A tuple of tags as the key to the value.
|
||||
#
|
||||
# @retval ValueTuple The returned tuple corresponding to the tag tuple.
|
||||
#
|
||||
def GetTextFileInfo(FileName, TagTuple):
|
||||
ValueTuple = [""] * len(TagTuple)
|
||||
try:
|
||||
for Line in open(FileName):
|
||||
Line = Line.split("#", 1)[0]
|
||||
MatchEquation = mReEquation.match(Line)
|
||||
if MatchEquation:
|
||||
Tag = MatchEquation.group(1).upper()
|
||||
Value = MatchEquation.group(2)
|
||||
for Index in range(len(TagTuple)):
|
||||
if TagTuple[Index] == Tag:
|
||||
ValueTuple[Index] = Value
|
||||
except:
|
||||
EdkLogger.info("IO Error in reading file %s" % FileName)
|
||||
|
||||
return ValueTuple
|
||||
|
||||
|
||||
## Return a value tuple matching information in an XML file.
|
||||
#
|
||||
# Parse the XML file and return a value tuple corresponding to an input tag
|
||||
# tuple. In case of any error, a tuple of empty strings is returned.
|
||||
#
|
||||
# @param FileName The file name of the XML file.
|
||||
# @param TagTuple A tuple of tags as the key to the value.
|
||||
#
|
||||
# @retval ValueTuple The returned tuple corresponding to the tag tuple.
|
||||
#
|
||||
def GetXmlFileInfo(FileName, TagTuple):
|
||||
XmlDom = XmlParseFile(FileName)
|
||||
return tuple([XmlElement(XmlDom, XmlTag) for XmlTag in TagTuple])
|
||||
|
||||
|
||||
## Parse migration command line options
|
||||
#
|
||||
# Use standard Python module optparse to parse command line option of this tool.
|
||||
#
|
||||
# @param Source The source file type.
|
||||
# @param Destinate The destination file type.
|
||||
#
|
||||
# @retval Options An optparse object containing the parsed options.
|
||||
# @retval InputFile Path of a source file to be migrated.
|
||||
#
|
||||
def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber = 1.0):
|
||||
# use clearer usage to override default usage message
|
||||
UsageString = "%s [-a] [-v|-q] [-o <output_file>] <input_file>" % ToolName
|
||||
Version = "%s Version %.2f" % (ToolName, VersionNumber)
|
||||
Copyright = "Copyright (c) 2007, Intel Corporation. All rights reserved."
|
||||
|
||||
Parser = OptionParser(description=Copyright, version=Version, usage=UsageString)
|
||||
Parser.add_option("-o", "--output", dest="OutputFile", help="The name of the %s file to be created." % Destinate)
|
||||
Parser.add_option("-a", "--auto", dest="AutoWrite", action="store_true", default=False, help="Automatically create the %s file using the name of the %s file and replacing file extension" % (Source, Destinate))
|
||||
Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
|
||||
Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed.")
|
||||
|
||||
Options, Args = Parser.parse_args()
|
||||
|
||||
# Set logging level
|
||||
if Options.verbose:
|
||||
EdkLogger.setLevel(EdkLogger.VERBOSE)
|
||||
elif Options.quiet:
|
||||
EdkLogger.setLevel(EdkLogger.QUIET)
|
||||
else:
|
||||
EdkLogger.setLevel(EdkLogger.INFO)
|
||||
|
||||
# error check
|
||||
if len(Args) == 0:
|
||||
raise MigrationError(PARAMETER_MISSING, name="Input file", usage=Parser.get_usage())
|
||||
if len(Args) > 1:
|
||||
raise MigrationError(PARAMETER_INVALID, name="Too many input files", usage=Parser.get_usage())
|
||||
|
||||
InputFile = Args[0]
|
||||
if not os.path.exists(InputFile):
|
||||
raise MigrationError(FILE_NOT_FOUND, name=InputFile)
|
||||
|
||||
if Options.OutputFile:
|
||||
if Options.AutoWrite:
|
||||
raise MigrationError(OPTION_CONFLICT, arg1="-o", arg2="-a", usage=Parser.get_usage())
|
||||
else:
|
||||
if Options.AutoWrite:
|
||||
Options.OutputFile = os.path.splitext(InputFile)[0] + "." + Destinate.lower()
|
||||
else:
|
||||
raise MigrationError(OPTION_MISSING, name="-o", usage=Parser.get_usage())
|
||||
|
||||
return Options, InputFile
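# A minimal sketch of how a migration tool is expected to call this parser; the tool
# name, file types and file names are illustrative only:
#
#   Options, InputFile = MigrationOptionParser("MSA", "INF", "MigrationMsa2Inf")
#   # invoked as "MigrationMsa2Inf -a Module.msa", Options.OutputFile becomes "Module.inf"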
|
||||
|
||||
# This acts like the main() function for the script, unless it is 'import'ed
|
||||
# into another script.
|
||||
if __name__ == '__main__':
|
||||
pass
|
1327
BaseTools/Source/Python/Common/Misc.py
Normal file
File diff suppressed because it is too large
935
BaseTools/Source/Python/Common/Parsing.py
Normal file
@@ -0,0 +1,935 @@
|
||||
## @file
|
||||
# This file is used to define common parsing-related functions used in the INF/DEC/DSC parsing process
|
||||
#
|
||||
# Copyright (c) 2008, Intel Corporation
|
||||
# All rights reserved. This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.php
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
#
|
||||
|
||||
##
|
||||
# Import Modules
|
||||
#
|
||||
from String import *
|
||||
from CommonDataClass.DataClass import *
|
||||
from DataType import *
|
||||
|
||||
## ParseContent
|
||||
#
|
||||
# Parse content of a DSC/INF/DEC file
|
||||
#
|
||||
def ParseContent(Lines):
|
||||
for Line in Lines:
|
||||
LineNo = LineNo + 1
|
||||
#
|
||||
# Remove comments at tail and remove spaces again
|
||||
#
|
||||
Line = CleanString(Line)
|
||||
if Line == '':
|
||||
continue
|
||||
|
||||
#
|
||||
# Find a new section tab
|
||||
# First insert previous section items
|
||||
# And then parse the content of the new section
|
||||
#
|
||||
if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
|
||||
#
|
||||
# Insert items data of previous section
|
||||
#
|
||||
self.InsertSectionItemsIntoDatabase(FileID, Filename, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList)
|
||||
#
|
||||
# Parse the new section
|
||||
#
|
||||
SectionItemList = []
|
||||
ArchList = []
|
||||
ThirdList = []
|
||||
|
||||
LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
|
||||
for Item in LineList:
|
||||
ItemList = GetSplitValueList(Item, TAB_SPLIT)
|
||||
CurrentSection = ItemList[0]
|
||||
if CurrentSection.upper() not in self.KeyList:
|
||||
RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
|
||||
ItemList.append('')
|
||||
ItemList.append('')
|
||||
if len(ItemList) > 5:
|
||||
RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
|
||||
else:
|
||||
if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
|
||||
EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo)
|
||||
ArchList.append(ItemList[1].upper())
|
||||
ThirdList.append(ItemList[2])
|
||||
|
||||
continue
|
||||
|
||||
#
|
||||
# Not in any defined section
|
||||
#
|
||||
if CurrentSection == TAB_UNKNOWN:
|
||||
ErrorMsg = "%s is not in any defined section" % Line
|
||||
EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo)
|
||||
|
||||
#
|
||||
# Add a section item
|
||||
#
|
||||
SectionItemList.append([Line, LineNo])
|
||||
# End of parse
|
||||
#End of For
|
||||
|
||||
|
||||
## ParseDefineMacro
|
||||
#
|
||||
# Search the whole table to find all defined macros and replace them with the real values
|
||||
#
|
||||
def ParseDefineMacro2(Table, RecordSets, GlobalMacro):
|
||||
Macros = {}
|
||||
#
|
||||
# Find all DEFINE macros in section [Header] and its section
|
||||
#
|
||||
SqlCommand = """select Value1, Value2, BelongsToItem, StartLine, Arch from %s
|
||||
where Model = %s
|
||||
and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
|
||||
RecordSet = Table.Exec(SqlCommand)
|
||||
for Record in RecordSet:
|
||||
Macros[Record[0]] = Record[1]
|
||||
|
||||
#
|
||||
# Overridden by Global Macros
|
||||
#
|
||||
for Key in GlobalMacro.keys():
|
||||
Macros[Key] = GlobalMacro[Key]
|
||||
|
||||
#
|
||||
# Replace the Macros
|
||||
#
|
||||
for Key in RecordSets.keys():
|
||||
if RecordSets[Key] != []:
|
||||
for Item in RecordSets[Key]:
|
||||
Item[0] = ReplaceMacro(Item[0], Macros)
|
||||
|
||||
## ParseDefineMacro
|
||||
#
|
||||
# Search the whole table to find all defined macros and replace them with the real values
|
||||
#
|
||||
def ParseDefineMacro(Table, GlobalMacro):
|
||||
Macros = {}
|
||||
#
|
||||
# Find all DEFINE macros
|
||||
#
|
||||
SqlCommand = """select Value1, Value2, BelongsToItem, StartLine, Arch from %s
|
||||
where Model = %s
|
||||
and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
|
||||
RecordSet = Table.Exec(SqlCommand)
|
||||
for Record in RecordSet:
|
||||
#***************************************************************************************************************************************************
|
||||
# The following SqlCommand (expr replace) is not supported in Sqlite 3.3.4, which is used in Python 2.5 *
|
||||
# Reserved Only *
|
||||
# SqlCommand = """update %s set Value1 = replace(Value1, '%s', '%s') *
|
||||
# where ID in (select ID from %s *
|
||||
# where Model = %s *
|
||||
# and Value1 like '%%%s%%' *
|
||||
# and StartLine > %s *
|
||||
# and Enabled > -1 *
|
||||
# and Arch = '%s')""" % \ *
|
||||
# (self.TblDsc.Table, Record[0], Record[1], self.TblDsc.Table, Record[2], Record[1], Record[3], Record[4]) *
|
||||
#***************************************************************************************************************************************************
|
||||
Macros[Record[0]] = Record[1]
|
||||
|
||||
#
|
||||
# Overridden by global macros
|
||||
#
|
||||
for Key in GlobalMacro.keys():
|
||||
Macros[Key] = GlobalMacro[Key]
|
||||
|
||||
#
|
||||
# Find all records that reference a macro and replace it
|
||||
#
|
||||
SqlCommand = """select ID, Value1 from %s
|
||||
where Model != %s
|
||||
and Value1 like '%%$(%%' and Value1 like '%%)%%'
|
||||
and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
|
||||
FoundRecords = Table.Exec(SqlCommand)
|
||||
for FoundRecord in FoundRecords:
|
||||
NewValue = ReplaceMacro(FoundRecord[1], Macros)
|
||||
SqlCommand = """update %s set Value1 = '%s'
|
||||
where ID = %s""" % (Table.Table, ConvertToSqlString2(NewValue), FoundRecord[0])
|
||||
Table.Exec(SqlCommand)
|
||||
|
||||
##QueryDefinesItem
|
||||
#
|
||||
# Search item of section [Defines] by name, return its values
|
||||
#
|
||||
# @param Table: The Table to be executed
|
||||
# @param Name: The Name of item of section [Defines]
|
||||
# @param Arch: The Arch of item of section [Defines]
|
||||
#
|
||||
# @retval RecordSet: A list of all matched records
|
||||
#
|
||||
def QueryDefinesItem(Table, Name, Arch, BelongsToFile):
|
||||
SqlCommand = """select Value2 from %s
|
||||
where Model = %s
|
||||
and Value1 = '%s'
|
||||
and Arch = '%s'
|
||||
and BelongsToFile = %s
|
||||
and Enabled > -1""" % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(Name), ConvertToSqlString2(Arch), BelongsToFile)
|
||||
RecordSet = Table.Exec(SqlCommand)
|
||||
if len(RecordSet) < 1:
|
||||
SqlCommand = """select Value2 from %s
|
||||
where Model = %s
|
||||
and Value1 = '%s'
|
||||
and Arch = '%s'
|
||||
and BelongsToFile = %s
|
||||
and Enabled > -1""" % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(Name), ConvertToSqlString2(TAB_ARCH_COMMON.upper()), BelongsToFile)
|
||||
RecordSet = Table.Exec(SqlCommand)
|
||||
if len(RecordSet) == 1:
|
||||
if Name == TAB_INF_DEFINES_LIBRARY_CLASS:
|
||||
return [RecordSet[0][0]]
|
||||
else:
|
||||
return GetSplitValueList(RecordSet[0][0])
|
||||
elif len(RecordSet) < 1:
|
||||
return ['']
|
||||
elif len(RecordSet) > 1:
|
||||
RetVal = []
|
||||
for Record in RecordSet:
|
||||
if Name == TAB_INF_DEFINES_LIBRARY_CLASS:
|
||||
RetVal.append(Record[0])
|
||||
else:
|
||||
Items = GetSplitValueList(Record[0])
|
||||
for Item in Items:
|
||||
RetVal.append(Item)
|
||||
return RetVal
|
||||
|
||||
##QueryDefinesItem2
|
||||
#
|
||||
# Search all items of section [Defines], return their names and values
|
||||
#
|
||||
# @param Table: The Table to be executed
|
||||
# @param BelongsToFile: The ID of the file the items belong to
|
||||
# @param Arch: The Arch of item of section [Defines]
|
||||
#
|
||||
# @retval RecordSet: A list of all matched records
|
||||
#
|
||||
def QueryDefinesItem2(Table, Arch, BelongsToFile):
|
||||
SqlCommand = """select Value1, Value2, StartLine from %s
|
||||
where Model = %s
|
||||
and Arch = '%s'
|
||||
and BelongsToFile = %s
|
||||
and Enabled > -1""" % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(Arch), BelongsToFile)
|
||||
RecordSet = Table.Exec(SqlCommand)
|
||||
if len(RecordSet) < 1:
|
||||
SqlCommand = """select Value1, Value2, StartLine from %s
|
||||
where Model = %s
|
||||
and Arch = '%s'
|
||||
and BelongsToFile = %s
|
||||
and Enabled > -1""" % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(TAB_ARCH_COMMON), BelongsToFile)
|
||||
RecordSet = Table.Exec(SqlCommand)
|
||||
|
||||
return RecordSet
|
||||
|
||||
##QueryDscItem
|
||||
#
|
||||
# Search all DSC items of a specific section
|
||||
#
|
||||
# @param Table: The Table to be executed
|
||||
# @param Model: The type of section
|
||||
#
|
||||
# @retval RecordSet: A list of all matched records
|
||||
#
|
||||
def QueryDscItem(Table, Model, BelongsToItem, BelongsToFile):
|
||||
SqlCommand = """select Value1, Arch, StartLine, ID, Value2 from %s
|
||||
where Model = %s
|
||||
and BelongsToItem = %s
|
||||
and BelongsToFile = %s
|
||||
and Enabled > -1""" % (Table.Table, Model, BelongsToItem, BelongsToFile)
|
||||
return Table.Exec(SqlCommand)
|
||||
|
||||
##QueryDecItem
|
||||
#
|
||||
# Search all DEC items of a specific section
|
||||
#
|
||||
# @param Table: The Table to be executed
|
||||
# @param Model: The type of section
|
||||
#
|
||||
# @retval RecordSet: A list of all matched records
|
||||
#
|
||||
def QueryDecItem(Table, Model, BelongsToItem):
|
||||
SqlCommand = """select Value1, Arch, StartLine, ID, Value2 from %s
|
||||
where Model = %s
|
||||
and BelongsToItem = %s
|
||||
and Enabled > -1""" % (Table.Table, Model, BelongsToItem)
|
||||
return Table.Exec(SqlCommand)
|
||||
|
||||
##QueryInfItem
|
||||
#
|
||||
# Search all INF items of a specific section
|
||||
#
|
||||
# @param Table: The Table to be executed
|
||||
# @param Model: The type of section
|
||||
#
|
||||
# @retval RecordSet: A list of all matched records
|
||||
#
|
||||
def QueryInfItem(Table, Model, BelongsToItem):
|
||||
SqlCommand = """select Value1, Arch, StartLine, ID, Value2 from %s
|
||||
where Model = %s
|
||||
and BelongsToItem = %s
|
||||
and Enabled > -1""" % (Table.Table, Model, BelongsToItem)
|
||||
return Table.Exec(SqlCommand)
|
||||
|
||||
## GetBuildOption
|
||||
#
|
||||
# Parse a string with format "[<Family>:]<ToolFlag>=Flag"
|
||||
# Return (Family, ToolChain, Flag)
|
||||
#
|
||||
# @param String: String with BuildOption statement
|
||||
# @param File: The file which defines build option, used in error report
|
||||
#
|
||||
# @retval tuple() A tuple structured as (Family, ToolChain, Flag)
|
||||
#
|
||||
def GetBuildOption(String, File, LineNo = -1):
|
||||
if String.find(TAB_EQUAL_SPLIT) < 0:
|
||||
RaiseParserError(String, 'BuildOptions', File, '[<Family>:]<ToolFlag>=Flag', LineNo)
|
||||
(Family, ToolChain, Flag) = ('', '', '')
|
||||
List = GetSplitValueList(String, TAB_EQUAL_SPLIT, MaxSplit = 1)
|
||||
if List[0].find(':') > -1:
|
||||
Family = List[0][ : List[0].find(':')].strip()
|
||||
ToolChain = List[0][List[0].find(':') + 1 : ].strip()
|
||||
else:
|
||||
ToolChain = List[0].strip()
|
||||
Flag = List[1].strip()
|
||||
|
||||
return (Family, ToolChain, Flag)
|
||||
|
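#
# Illustrative usage (editor's sketch with hypothetical values, not part of the original tool):
#   GetBuildOption("MSFT:*_*_IA32_CC_FLAGS = /Od", "Platform.dsc")
#     -> ('MSFT', '*_*_IA32_CC_FLAGS', '/Od')
#   GetBuildOption("*_*_*_CC_FLAGS = -O2", "Platform.dsc")
#     -> ('', '*_*_*_CC_FLAGS', '-O2')
#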
||||
## Get Library Class
|
||||
#
|
||||
# Get Library of Dsc as <LibraryClassKeyWord>|<LibraryInstance>
|
||||
#
|
||||
# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
|
||||
# @param ContainerFile: The file which describes the library class, used for error report
|
||||
#
|
||||
# @retval (LibraryClassKeyWord, LibraryInstance, [SUP_MODULE_LIST]) Formatted Library Item
|
||||
#
|
||||
def GetLibraryClass(Item, ContainerFile, WorkspaceDir, LineNo = -1):
|
||||
List = GetSplitValueList(Item[0])
|
||||
SupMod = SUP_MODULE_LIST_STRING
|
||||
if len(List) != 2:
|
||||
RaiseParserError(Item[0], 'LibraryClasses', ContainerFile, '<LibraryClassKeyWord>|<LibraryInstance>')
|
||||
else:
|
||||
CheckFileType(List[1], '.Inf', ContainerFile, 'library class instance', Item[0], LineNo)
|
||||
CheckFileExist(WorkspaceDir, List[1], ContainerFile, 'LibraryClasses', Item[0], LineNo)
|
||||
if Item[1] != '':
|
||||
SupMod = Item[1]
|
||||
|
||||
return (List[0], List[1], SupMod)
|
||||
|
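#
# Illustrative usage (editor's sketch; the library class line and .inf path are hypothetical,
# and CheckFileType/CheckFileExist validate the instance path against the real workspace):
#   GetLibraryClass(['UefiLib|MdePkg/Library/UefiLib/UefiLib.inf', ''], 'Platform.dsc', WorkspaceDir)
#     -> ('UefiLib', 'MdePkg/Library/UefiLib/UefiLib.inf', SUP_MODULE_LIST_STRING)
#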
||||
## Get Library Class
|
||||
#
|
||||
# Get Library of Dsc as <LibraryClassKeyWord>[|<LibraryInstance>][|<TokenSpaceGuidCName>.<PcdCName>]
|
||||
#
|
||||
# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
|
||||
# @param ContainerFile: The file which describes the library class, used for error report
|
||||
#
|
||||
# @retval (LibraryClassKeyWord, LibraryInstance, [SUP_MODULE_LIST]) Formatted Library Item
|
||||
#
|
||||
def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo = -1):
|
||||
ItemList = GetSplitValueList((Item[0] + DataType.TAB_VALUE_SPLIT * 2))
|
||||
SupMod = SUP_MODULE_LIST_STRING
|
||||
|
||||
if len(ItemList) > 5:
|
||||
RaiseParserError(Item[0], 'LibraryClasses', ContainerFile, '<LibraryClassKeyWord>[|<LibraryInstance>][|<TokenSpaceGuidCName>.<PcdCName>]')
|
||||
else:
|
||||
CheckFileType(ItemList[1], '.Inf', ContainerFile, 'LibraryClasses', Item[0], LineNo)
|
||||
CheckFileExist(WorkspaceDir, ItemList[1], ContainerFile, 'LibraryClasses', Item[0], LineNo)
|
||||
if ItemList[2] != '':
|
||||
CheckPcdTokenInfo(ItemList[2], 'LibraryClasses', ContainerFile, LineNo)
|
||||
if Item[1] != '':
|
||||
SupMod = Item[1]
|
||||
|
||||
return (ItemList[0], ItemList[1], ItemList[2], SupMod)
|
||||
|
||||
## CheckPcdTokenInfo
|
||||
#
|
||||
# Check if PcdTokenInfo is following <TokenSpaceGuidCName>.<PcdCName>
|
||||
#
|
||||
# @param TokenInfoString: String to be checked
|
||||
# @param Section: Used for error report
|
||||
# @param File: Used for error report
|
||||
#
|
||||
# @retval True PcdTokenInfo is in correct format
|
||||
#
|
||||
def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo = -1):
|
||||
Format = '<TokenSpaceGuidCName>.<PcdCName>'
|
||||
if TokenInfoString != '' and TokenInfoString != None:
|
||||
TokenInfoList = GetSplitValueList(TokenInfoString, TAB_SPLIT)
|
||||
if len(TokenInfoList) == 2:
|
||||
return True
|
||||
|
||||
RaiseParserError(TokenInfoString, Section, File, Format, LineNo)
|
||||
|
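#
# Illustrative usage (editor's sketch with hypothetical values):
#   CheckPcdTokenInfo('gEfiMdePkgTokenSpaceGuid.PcdDebugPropertyMask', 'PcdsFixedAtBuild', 'Platform.dsc')
#     -> True
#   A string without the '.' separator raises a parser error instead of returning.
#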
||||
## Get Pcd
|
||||
#
|
||||
# Get Pcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]
|
||||
#
|
||||
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]
|
||||
# @param ContainerFile: The file which describes the pcd, used for error report
|
||||
#
|
||||
# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], Type)
|
||||
#
|
||||
def GetPcd(Item, Type, ContainerFile, LineNo = -1):
|
||||
TokenGuid, TokenName, Value, MaximumDatumSize, Token = '', '', '', '', ''
|
||||
List = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)
|
||||
|
||||
if len(List) < 4 or len(List) > 6:
|
||||
RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]', LineNo)
|
||||
else:
|
||||
Value = List[1]
|
||||
MaximumDatumSize = List[2]
|
||||
Token = List[3]
|
||||
|
||||
if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
|
||||
(TokenGuid, TokenName) = GetSplitValueList(List[0], TAB_SPLIT)
|
||||
|
||||
return (TokenName, TokenGuid, Value, MaximumDatumSize, Token, Type)
|
||||
|
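#
# Illustrative usage (editor's sketch with hypothetical values; omitted optional fields come back as ''):
#   GetPcd('gEfiMdePkgTokenSpaceGuid.PcdDebugPropertyMask|0x0f', 'FixedAtBuild', 'Platform.dsc')
#     -> ('PcdDebugPropertyMask', 'gEfiMdePkgTokenSpaceGuid', '0x0f', '', '', 'FixedAtBuild')
#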
||||
## Get FeatureFlagPcd
|
||||
#
|
||||
# Get FeatureFlagPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
|
||||
#
|
||||
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
|
||||
# @param ContainerFile: The file which describes the pcd, used for error report
|
||||
#
|
||||
# @retval (TokenInfo[1], TokenInfo[0], List[1], Type)
|
||||
#
|
||||
def GetFeatureFlagPcd(Item, Type, ContainerFile, LineNo = -1):
|
||||
TokenGuid, TokenName, Value = '', '', ''
|
||||
List = GetSplitValueList(Item)
|
||||
if len(List) != 2:
|
||||
RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE', LineNo)
|
||||
else:
|
||||
Value = List[1]
|
||||
if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
|
||||
(TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
|
||||
|
||||
return (TokenName, TokenGuid, Value, Type)
|
||||
|
||||
## Get DynamicDefaultPcd
|
||||
#
|
||||
# Get DynamicDefaultPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<DatumType>[|<MaxDatumSize>]]
|
||||
#
|
||||
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<DatumType>[|<MaxDatumSize>]]
|
||||
# @param ContainerFile: The file which describes the pcd, used for error report
|
||||
#
|
||||
# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], Type)
|
||||
#
|
||||
def GetDynamicDefaultPcd(Item, Type, ContainerFile, LineNo = -1):
|
||||
TokenGuid, TokenName, Value, DatumTyp, MaxDatumSize = '', '', '', '', ''
|
||||
List = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)
|
||||
if len(List) < 4 or len(List) > 8:
|
||||
RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<DatumType>[|<MaxDatumSize>]]', LineNo)
|
||||
else:
|
||||
Value = List[1]
|
||||
DatumTyp = List[2]
|
||||
MaxDatumSize = List[3]
|
||||
if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
|
||||
(TokenGuid, TokenName) = GetSplitValueList(List[0], TAB_SPLIT)
|
||||
|
||||
return (TokenName, TokenGuid, Value, DatumTyp, MaxDatumSize, Type)
|
||||
|
||||
## Get DynamicHiiPcd
|
||||
#
|
||||
# Get DynamicHiiPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]
|
||||
#
|
||||
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]
|
||||
# @param ContainerFile: The file which describes the pcd, used for error report
|
||||
#
|
||||
# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], List[4], List[5], Type)
|
||||
#
|
||||
def GetDynamicHiiPcd(Item, Type, ContainerFile, LineNo = -1):
|
||||
TokenGuid, TokenName, L1, L2, L3, L4, L5 = '', '', '', '', '', '', ''
|
||||
List = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)
|
||||
if len(List) < 6 or len(List) > 8:
|
||||
RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]', LineNo)
|
||||
else:
|
||||
L1, L2, L3, L4, L5 = List[1], List[2], List[3], List[4], List[5]
|
||||
if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
|
||||
(TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
|
||||
|
||||
return (TokenName, TokenGuid, L1, L2, L3, L4, L5, Type)
|
||||
|
||||
## Get DynamicVpdPcd
|
||||
#
|
||||
# Get DynamicVpdPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>[|<MaximumDatumSize>]
|
||||
#
|
||||
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>[|<MaximumDatumSize>]
|
||||
# @param ContainerFile: The file which describes the pcd, used for error report
|
||||
#
|
||||
# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], Type)
|
||||
#
|
||||
def GetDynamicVpdPcd(Item, Type, ContainerFile, LineNo = -1):
|
||||
TokenGuid, TokenName, L1, L2 = '', '', '', ''
|
||||
List = GetSplitValueList(Item + TAB_VALUE_SPLIT)
|
||||
if len(List) < 3 or len(List) > 4:
|
||||
RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>[|<MaximumDatumSize>]', LineNo)
|
||||
else:
|
||||
L1, L2 = List[1], List[2]
|
||||
if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
|
||||
(TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
|
||||
|
||||
return (TokenName, TokenGuid, L1, L2, Type)
|
||||
|
||||
## GetComponent
|
||||
#
|
||||
# Parse block of the components defined in dsc file
|
||||
# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3], [pcd1, pcd2, pcd3]], ...]
|
||||
#
|
||||
# @param Lines: The content to be parsed
|
||||
# @param KeyValues: To store data after parsing
|
||||
#
|
||||
# @retval True Get component successfully
|
||||
#
|
||||
def GetComponent(Lines, KeyValues):
|
||||
(findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
|
||||
ListItem = None
|
||||
LibraryClassItem = []
|
||||
BuildOption = []
|
||||
Pcd = []
|
||||
|
||||
for Line in Lines:
|
||||
Line = Line[0]
|
||||
|
||||
#
|
||||
# Ignore !include and DEFINE statements
|
||||
#
|
||||
if Line.upper().find(TAB_INCLUDE.upper() + ' ') > -1 or Line.upper().find(TAB_DEFINE + ' ') > -1:
|
||||
continue
|
||||
|
||||
if findBlock == False:
|
||||
ListItem = Line
|
||||
#
|
||||
# find '{' at line tail
|
||||
#
|
||||
if Line.endswith('{'):
|
||||
findBlock = True
|
||||
ListItem = CleanString(Line.rsplit('{', 1)[0], DataType.TAB_COMMENT_SPLIT)
|
||||
|
||||
#
|
||||
# Parse a block content
|
||||
#
|
||||
if findBlock:
|
||||
if Line.find('<LibraryClasses>') != -1:
|
||||
(findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (True, False, False, False, False, False, False)
|
||||
continue
|
||||
if Line.find('<BuildOptions>') != -1:
|
||||
(findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, True, False, False, False, False, False)
|
||||
continue
|
||||
if Line.find('<PcdsFeatureFlag>') != -1:
|
||||
(findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, True, False, False, False, False)
|
||||
continue
|
||||
if Line.find('<PcdsPatchableInModule>') != -1:
|
||||
(findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, True, False, False, False)
|
||||
continue
|
||||
if Line.find('<PcdsFixedAtBuild>') != -1:
|
||||
(findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, True, False, False)
|
||||
continue
|
||||
if Line.find('<PcdsDynamic>') != -1:
|
||||
(findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, True, False)
|
||||
continue
|
||||
if Line.find('<PcdsDynamicEx>') != -1:
|
||||
(findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, True)
|
||||
continue
|
||||
if Line.endswith('}'):
|
||||
#
|
||||
# find '}' at line tail
|
||||
#
|
||||
KeyValues.append([ListItem, LibraryClassItem, BuildOption, Pcd])
|
||||
(findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
|
||||
LibraryClassItem, BuildOption, Pcd = [], [], []
|
||||
continue
|
||||
|
||||
if findBlock:
|
||||
if findLibraryClass:
|
||||
LibraryClassItem.append(Line)
|
||||
elif findBuildOption:
|
||||
BuildOption.append(Line)
|
||||
elif findPcdsFeatureFlag:
|
||||
Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG_NULL, Line))
|
||||
elif findPcdsPatchableInModule:
|
||||
Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE_NULL, Line))
|
||||
elif findPcdsFixedAtBuild:
|
||||
Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD_NULL, Line))
|
||||
elif findPcdsDynamic:
|
||||
Pcd.append((DataType.TAB_PCDS_DYNAMIC_DEFAULT_NULL, Line))
|
||||
elif findPcdsDynamicEx:
|
||||
Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, Line))
|
||||
else:
|
||||
KeyValues.append([ListItem, [], [], []])
|
||||
|
||||
return True
|
||||
|
||||
## GetExec
|
||||
#
|
||||
# Parse a string with format "InfFilename [EXEC = ExecFilename]"
|
||||
# Return (InfFilename, ExecFilename)
|
||||
#
|
||||
# @param String: String with EXEC statement
|
||||
#
|
||||
# @retval tuple() A pair (InfFilename, ExecFilename)
|
||||
#
|
||||
def GetExec(String):
|
||||
InfFilename = ''
|
||||
ExecFilename = ''
|
||||
if String.find('EXEC') > -1:
|
||||
InfFilename = String[ : String.find('EXEC')].strip()
|
||||
ExecFilename = String[String.find('EXEC') + len('EXEC') : ].strip()
|
||||
else:
|
||||
InfFilename = String.strip()
|
||||
|
||||
return (InfFilename, ExecFilename)
|
||||
|
||||
## GetComponents
|
||||
#
|
||||
# Parse block of the components defined in dsc file
|
||||
# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3], [pcd1, pcd2, pcd3]], ...]
|
||||
#
|
||||
# @param Lines: The content to be parsed
|
||||
# @param Key: Reserved
|
||||
# @param KeyValues: To store data after parsing
|
||||
# @param CommentCharacter: Comment char, used to ignore comment content
|
||||
#
|
||||
# @retval True Get component successfully
|
||||
#
|
||||
def GetComponents(Lines, Key, KeyValues, CommentCharacter):
|
||||
if Lines.find(DataType.TAB_SECTION_END) > -1:
|
||||
Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
|
||||
(findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
|
||||
ListItem = None
|
||||
LibraryClassItem = []
|
||||
BuildOption = []
|
||||
Pcd = []
|
||||
|
||||
LineList = Lines.split('\n')
|
||||
for Line in LineList:
|
||||
Line = CleanString(Line, CommentCharacter)
|
||||
if Line == None or Line == '':
|
||||
continue
|
||||
|
||||
if findBlock == False:
|
||||
ListItem = Line
|
||||
#
|
||||
# find '{' at line tail
|
||||
#
|
||||
if Line.endswith('{'):
|
||||
findBlock = True
|
||||
ListItem = CleanString(Line.rsplit('{', 1)[0], CommentCharacter)
|
||||
|
||||
#
|
||||
# Parse a block content
|
||||
#
|
||||
if findBlock:
|
||||
if Line.find('<LibraryClasses>') != -1:
|
||||
(findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (True, False, False, False, False, False, False)
|
||||
continue
|
||||
if Line.find('<BuildOptions>') != -1:
|
||||
(findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, True, False, False, False, False, False)
|
||||
continue
|
||||
if Line.find('<PcdsFeatureFlag>') != -1:
|
||||
(findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, True, False, False, False, False)
|
||||
continue
|
||||
if Line.find('<PcdsPatchableInModule>') != -1:
|
||||
(findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, True, False, False, False)
|
||||
continue
|
||||
if Line.find('<PcdsFixedAtBuild>') != -1:
|
||||
(findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, True, False, False)
|
||||
continue
|
||||
if Line.find('<PcdsDynamic>') != -1:
|
||||
(findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, True, False)
|
||||
continue
|
||||
if Line.find('<PcdsDynamicEx>') != -1:
|
||||
(findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, True)
|
||||
continue
|
||||
if Line.endswith('}'):
|
||||
#
|
||||
# find '}' at line tail
|
||||
#
|
||||
KeyValues.append([ListItem, LibraryClassItem, BuildOption, Pcd])
|
||||
(findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
|
||||
LibraryClassItem, BuildOption, Pcd = [], [], []
|
||||
continue
|
||||
|
||||
if findBlock:
|
||||
if findLibraryClass:
|
||||
LibraryClassItem.append(Line)
|
||||
elif findBuildOption:
|
||||
BuildOption.append(Line)
|
||||
elif findPcdsFeatureFlag:
|
||||
Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG, Line))
|
||||
elif findPcdsPatchableInModule:
|
||||
Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE, Line))
|
||||
elif findPcdsFixedAtBuild:
|
||||
Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD, Line))
|
||||
elif findPcdsDynamic:
|
||||
Pcd.append((DataType.TAB_PCDS_DYNAMIC, Line))
|
||||
elif findPcdsDynamicEx:
|
||||
Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX, Line))
|
||||
else:
|
||||
KeyValues.append([ListItem, [], [], []])
|
||||
|
||||
return True
|
||||
|
||||
## Get Source
|
||||
#
|
||||
# Get Source of Inf as <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
|
||||
#
|
||||
# @param Item: String as <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
|
||||
# @param ContainerFile: The file which describes the library class, used for error report
|
||||
#
|
||||
# @retval (List[0], List[1], List[2], List[3], List[4])
|
||||
#
|
||||
def GetSource(Item, ContainerFile, FileRelativePath, LineNo = -1):
|
||||
ItemNew = Item + DataType.TAB_VALUE_SPLIT * 4
|
||||
List = GetSplitValueList(ItemNew)
|
||||
if len(List) < 5 or len(List) > 9:
|
||||
RaiseParserError(Item, 'Sources', ContainerFile, '<Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]', LineNo)
|
||||
List[0] = NormPath(List[0])
|
||||
CheckFileExist(FileRelativePath, List[0], ContainerFile, 'Sources', Item, LineNo)
|
||||
if List[4] != '':
|
||||
CheckPcdTokenInfo(List[4], 'Sources', ContainerFile, LineNo)
|
||||
|
||||
return (List[0], List[1], List[2], List[3], List[4])
|
||||
|
||||
## Get Binary
|
||||
#
|
||||
# Get Binary of Inf as <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]
|
||||
#
|
||||
# @param Item: String as <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]
|
||||
# @param ContainerFile: The file which describes the library class, used for error report
|
||||
#
|
||||
# @retval (List[0], List[1], List[2], List[3])
|
||||
#
|
||||
def GetBinary(Item, ContainerFile, FileRelativePath, LineNo = -1):
|
||||
ItemNew = Item + DataType.TAB_VALUE_SPLIT
|
||||
List = GetSplitValueList(ItemNew)
|
||||
if len(List) != 4 and len(List) != 5:
|
||||
RaiseParserError(Item, 'Binaries', ContainerFile, "<FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]", LineNo)
|
||||
else:
|
||||
if List[3] != '':
|
||||
CheckPcdTokenInfo(List[3], 'Binaries', ContainerFile, LineNo)
|
||||
|
||||
return (List[0], List[1], List[2], List[3])
|
||||
|
||||
## Get Guids/Protocols/Ppis
|
||||
#
|
||||
# Get Guids/Protocols/Ppis of Inf as <GuidCName>[|<PcdFeatureFlag>]
|
||||
#
|
||||
# @param Item: String as <GuidCName>[|<PcdFeatureFlag>]
|
||||
# @param Type: Type of parsing string
|
||||
# @param ContainerFile: The file which describes the library class, used for error report
|
||||
#
|
||||
# @retval (List[0], List[1])
|
||||
#
|
||||
def GetGuidsProtocolsPpisOfInf(Item, Type, ContainerFile, LineNo = -1):
|
||||
ItemNew = Item + TAB_VALUE_SPLIT
|
||||
List = GetSplitValueList(ItemNew)
|
||||
if List[1] != '':
|
||||
CheckPcdTokenInfo(List[1], Type, ContainerFile, LineNo)
|
||||
|
||||
return (List[0], List[1])
|
||||
|
||||
## Get Guids/Protocols/Ppis
|
||||
#
|
||||
# Get Guids/Protocols/Ppis of Dec as <GuidCName>=<GuidValue>
|
||||
#
|
||||
# @param Item: String as <GuidCName>=<GuidValue>
|
||||
# @param Type: Type of parsing string
|
||||
# @param ContainerFile: The file which describes the library class, used for error report
|
||||
#
|
||||
# @retval (List[0], List[1])
|
||||
#
|
||||
def GetGuidsProtocolsPpisOfDec(Item, Type, ContainerFile, LineNo = -1):
|
||||
List = GetSplitValueList(Item, DataType.TAB_EQUAL_SPLIT)
|
||||
if len(List) != 2:
|
||||
RaiseParserError(Item, Type, ContainerFile, '<CName>=<GuidValue>', LineNo)
|
||||
|
||||
return (List[0], List[1])
|
||||
|
||||
## GetPackage
|
||||
#
|
||||
# Get Package of Inf as <PackagePath>[|<PcdFeatureFlag>]
|
||||
#
|
||||
# @param Item: String as <PackagePath>[|<PcdFeatureFlag>]
|
||||
# @param Type: Type of parsing string
|
||||
# @param ContainerFile: The file which describes the library class, used for error report
|
||||
#
|
||||
# @retval (List[0], List[1])
|
||||
#
|
||||
def GetPackage(Item, ContainerFile, FileRelativePath, LineNo = -1):
|
||||
ItemNew = Item + TAB_VALUE_SPLIT
|
||||
List = GetSplitValueList(ItemNew)
|
||||
CheckFileType(List[0], '.Dec', ContainerFile, 'package', List[0], LineNo)
|
||||
CheckFileExist(FileRelativePath, List[0], ContainerFile, 'Packages', List[0], LineNo)
|
||||
|
||||
if List[1] != '':
|
||||
CheckPcdTokenInfo(List[1], 'Packages', ContainerFile, LineNo)
|
||||
|
||||
return (List[0], List[1])
|
||||
|
||||
## Get Pcd Values of Inf
|
||||
#
|
||||
# Get Pcd of Inf as <TokenSpaceGuidCName>.<PcdCName>[|<Value>]
|
||||
#
|
||||
# @param Item: The string describes pcd
|
||||
# @param Type: The type of Pcd
|
||||
# @param File: The file which describes the pcd, used for error report
|
||||
#
|
||||
# @retval (TokenSpcCName, TokenCName, Value, ItemType) Formatted Pcd Item
|
||||
#
|
||||
def GetPcdOfInf(Item, Type, File, LineNo):
|
||||
Format = '<TokenSpaceGuidCName>.<PcdCName>[|<Value>]'
|
||||
TokenGuid, TokenName, Value, InfType = '', '', '', ''
|
||||
|
||||
if Type == TAB_PCDS_FIXED_AT_BUILD:
|
||||
InfType = TAB_INF_FIXED_PCD
|
||||
elif Type == TAB_PCDS_PATCHABLE_IN_MODULE:
|
||||
InfType = TAB_INF_PATCH_PCD
|
||||
elif Type == TAB_PCDS_FEATURE_FLAG:
|
||||
InfType = TAB_INF_FEATURE_PCD
|
||||
elif Type == TAB_PCDS_DYNAMIC_EX:
|
||||
InfType = TAB_INF_PCD_EX
|
||||
elif Type == TAB_PCDS_DYNAMIC:
|
||||
InfType = TAB_INF_PCD
|
||||
List = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT)
|
||||
if len(List) < 2 or len(List) > 3:
|
||||
RaiseParserError(Item, InfType, File, Format, LineNo)
|
||||
else:
|
||||
Value = List[1]
|
||||
TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
|
||||
if len(TokenInfo) != 2:
|
||||
RaiseParserError(Item, InfType, File, Format, LineNo)
|
||||
else:
|
||||
TokenGuid = TokenInfo[0]
|
||||
TokenName = TokenInfo[1]
|
||||
|
||||
return (TokenGuid, TokenName, Value, Type)
|
||||
|
||||
|
||||
## Get Pcd Values of Dec
|
||||
#
|
||||
# Get Pcd of Dec as <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
|
||||
# @retval (TokenSpcCName, TokenCName, Value, DatumType, Token, ItemType) Formatted Pcd Item
|
||||
#
|
||||
def GetPcdOfDec(Item, Type, File, LineNo = -1):
|
||||
Format = '<TokenSpaceGuidCName>.<PcdCName>|<Value>|<DatumType>|<Token>'
|
||||
TokenGuid, TokenName, Value, DatumType, Token = '', '', '', '', ''
|
||||
List = GetSplitValueList(Item)
|
||||
if len(List) != 4:
|
||||
RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)
|
||||
else:
|
||||
Value = List[1]
|
||||
DatumType = List[2]
|
||||
Token = List[3]
|
||||
TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
|
||||
if len(TokenInfo) != 2:
|
||||
RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)
|
||||
else:
|
||||
TokenGuid = TokenInfo[0]
|
||||
TokenName = TokenInfo[1]
|
||||
|
||||
return (TokenGuid, TokenName, Value, DatumType, Token, Type)
|
||||
|
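#
# Illustrative usage (editor's sketch with hypothetical values):
#   GetPcdOfDec('gEfiMdePkgTokenSpaceGuid.PcdDebugPropertyMask|0x0f|UINT8|0x00000005', 'FixedAtBuild', 'Package.dec')
#     -> ('gEfiMdePkgTokenSpaceGuid', 'PcdDebugPropertyMask', '0x0f', 'UINT8', '0x00000005', 'FixedAtBuild')
#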
||||
## Parse DEFINE statement
|
||||
#
|
||||
# Get DEFINE macros
|
||||
#
|
||||
# 1. Insert a record into the given table
|
||||
# Value1: Macro Name
|
||||
# Value2: Macro Value
|
||||
#
|
||||
def ParseDefine(LineValue, StartLine, Table, FileID, Filename, SectionName, SectionModel, Arch):
|
||||
EdkLogger.debug(EdkLogger.DEBUG_2, "DEFINE statement '%s' found in section %s" % (LineValue, SectionName))
|
||||
Define = GetSplitValueList(CleanString(LineValue[LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') + len(DataType.TAB_DEFINE + ' ') : ]), TAB_EQUAL_SPLIT, 1)
|
||||
Table.Insert(MODEL_META_DATA_DEFINE, Define[0], Define[1], '', '', '', Arch, SectionModel, FileID, StartLine, -1, StartLine, -1, 0)
|
||||
|
||||
## InsertSectionItems
|
||||
#
|
||||
# Insert item data of a section to a dict
|
||||
#
|
||||
def InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, RecordSet):
|
||||
# Insert each item data of a section
|
||||
for Index in range(0, len(ArchList)):
|
||||
Arch = ArchList[Index]
|
||||
Third = ThirdList[Index]
|
||||
if Arch == '':
|
||||
Arch = TAB_ARCH_COMMON
|
||||
|
||||
Records = RecordSet[Model]
|
||||
for SectionItem in SectionItemList:
|
||||
BelongsToItem, EndLine, EndColumn = -1, -1, -1
|
||||
LineValue, StartLine, EndLine, Comment = SectionItem[0], SectionItem[1], SectionItem[1], SectionItem[2]
|
||||
|
||||
EdkLogger.debug(4, "Parsing %s ..." %LineValue)
|
||||
# Skip DEFINE statements here; they are handled separately
|
||||
if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
|
||||
continue
|
||||
|
||||
# At last parse other sections
|
||||
ID = -1
|
||||
Records.append([LineValue, Arch, StartLine, ID, Third, Comment])
|
||||
|
||||
if RecordSet != {}:
|
||||
RecordSet[Model] = Records
|
||||
|
||||
## Insert records to database
|
||||
#
|
||||
# Insert item data of a section to database
|
||||
# @param Table: The Table to be inserted
|
||||
# @param FileID: The ID of belonging file
|
||||
# @param Filename: The name of belonging file
|
||||
# @param CurrentSection: The name of the current section
|
||||
# @param SectionItemList: A list of items of the section
|
||||
# @param ArchList: A list of arches
|
||||
# @param ThirdList: A list of third parameters, ModuleType for LibraryClass and SkuId for Dynamic Pcds
|
||||
# @param IfDefList: A list of all conditional statements
|
||||
# @param RecordSet: A dict of all parsed records
|
||||
#
|
||||
def InsertSectionItemsIntoDatabase(Table, FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, RecordSet):
|
||||
#
|
||||
# Insert each item data of a section
|
||||
#
|
||||
for Index in range(0, len(ArchList)):
|
||||
Arch = ArchList[Index]
|
||||
Third = ThirdList[Index]
|
||||
if Arch == '':
|
||||
Arch = TAB_ARCH_COMMON
|
||||
|
||||
Records = RecordSet[Model]
|
||||
for SectionItem in SectionItemList:
|
||||
BelongsToItem, EndLine, EndColumn = -1, -1, -1
|
||||
LineValue, StartLine, EndLine = SectionItem[0], SectionItem[1], SectionItem[1]
|
||||
|
||||
EdkLogger.debug(4, "Parsing %s ..." %LineValue)
|
||||
#
|
||||
# And then parse DEFINE statement
|
||||
#
|
||||
if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
|
||||
ParseDefine(LineValue, StartLine, Table, FileID, Filename, CurrentSection, Model, Arch)
|
||||
continue
|
||||
|
||||
#
|
||||
# At last parse other sections
|
||||
#
|
||||
ID = Table.Insert(Model, LineValue, Third, Third, '', '', Arch, -1, FileID, StartLine, -1, StartLine, -1, 0)
|
||||
Records.append([LineValue, Arch, StartLine, ID, Third])
|
||||
|
||||
if RecordSet != {}:
|
||||
RecordSet[Model] = Records
|
||||
|
||||
## GenMetaDatSectionItem
|
||||
def GenMetaDatSectionItem(Key, Value, List):
|
||||
if Key not in List:
|
||||
List[Key] = [Value]
|
||||
else:
|
||||
List[Key].append(Value)
|
BIN
BaseTools/Source/Python/Common/PyUtility.pyd
Normal file
Binary file not shown.
703
BaseTools/Source/Python/Common/String.py
Normal file
@@ -0,0 +1,703 @@
|
||||
## @file
|
||||
# This file is used to define common string related functions used in parsing process
|
||||
#
|
||||
# Copyright (c) 2007 ~ 2008, Intel Corporation
|
||||
# All rights reserved. This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.php
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
#
|
||||
|
||||
##
|
||||
# Import Modules
|
||||
#
|
||||
import re
|
||||
import DataType
|
||||
import os.path
|
||||
import string
|
||||
import EdkLogger as EdkLogger
|
||||
|
||||
from GlobalData import *
|
||||
from BuildToolError import *
|
||||
|
||||
## GetSplitValueList
|
||||
#
|
||||
# Get a value list from a string with multiple values split by SplitTag
|
||||
# The default SplitTag is DataType.TAB_VALUE_SPLIT
|
||||
# 'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC']
|
||||
#
|
||||
# @param String: The input string to be split
|
||||
# @param SplitTag: The split key, default is DataType.TAB_VALUE_SPLIT
|
||||
# @param MaxSplit: The max number of split values, default is -1
|
||||
#
|
||||
# @retval list() A list of the split values
|
||||
#
|
||||
def GetSplitValueList(String, SplitTag = DataType.TAB_VALUE_SPLIT, MaxSplit = -1):
|
||||
return map(lambda l: l.strip(), String.split(SplitTag, MaxSplit))
|
||||
|
||||
## MergeArches
|
||||
#
|
||||
# Find a key's all arches in dict, add the new arch to the list
|
||||
# If not exist any arch, set the arch directly
|
||||
#
|
||||
# @param Dict: The input value for Dict
|
||||
# @param Key: The input value for Key
|
||||
# @param Arch: The Arch to be added or merged
|
||||
#
|
||||
def MergeArches(Dict, Key, Arch):
|
||||
if Key in Dict.keys():
|
||||
Dict[Key].append(Arch)
|
||||
else:
|
||||
Dict[Key] = Arch.split()
|
||||
|
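#
# Illustrative usage (editor's sketch):
#   Dict = {}
#   MergeArches(Dict, 'UefiLib', 'IA32')   # Dict == {'UefiLib': ['IA32']}
#   MergeArches(Dict, 'UefiLib', 'X64')    # Dict == {'UefiLib': ['IA32', 'X64']}
#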
||||
## GenDefines
|
||||
#
|
||||
# Parse a string with format "DEFINE <VarName> = <PATH>"
|
||||
# Generate a map Defines[VarName] = PATH
|
||||
# Return False if invalid format
|
||||
#
|
||||
# @param String: String with DEFINE statement
|
||||
# @param Arch: Supported Arch
|
||||
# @param Defines: DEFINE statement to be parsed
|
||||
#
|
||||
# @retval 0 DEFINE statement found, and valid
|
||||
# @retval -1 DEFINE statement found, but not valid
|
||||
# @retval 1 DEFINE statement not found
|
||||
#
|
||||
def GenDefines(String, Arch, Defines):
|
||||
if String.find(DataType.TAB_DEFINE + ' ') > -1:
|
||||
List = String.replace(DataType.TAB_DEFINE + ' ', '').split(DataType.TAB_EQUAL_SPLIT)
|
||||
if len(List) == 2:
|
||||
Defines[(CleanString(List[0]), Arch)] = CleanString(List[1])
|
||||
return 0
|
||||
else:
|
||||
return -1
|
||||
|
||||
return 1
|
||||
|
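#
# Illustrative usage (editor's sketch with a hypothetical macro name and value):
#   Defines = {}
#   GenDefines('DEFINE MDE_PKG_PATH = MdePkg', 'COMMON', Defines)
#     -> returns 0 and sets Defines[('MDE_PKG_PATH', 'COMMON')] = 'MdePkg'
#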
||||
## GenInclude
|
||||
#
|
||||
# Parse a string with format "!include <Filename>"
|
||||
# Record the include file path into IncludeFiles
|
||||
# Return False if no !include statement is found
|
||||
#
|
||||
# @param String: String with INCLUDE statement
|
||||
# @param IncludeFiles: INCLUDE statement to be parsed
|
||||
# @param Arch: Supported Arch
|
||||
#
|
||||
# @retval True
|
||||
# @retval False
|
||||
#
|
||||
def GenInclude(String, IncludeFiles, Arch):
|
||||
if String.upper().find(DataType.TAB_INCLUDE.upper() + ' ') > -1:
|
||||
IncludeFile = CleanString(String[String.upper().find(DataType.TAB_INCLUDE.upper() + ' ') + len(DataType.TAB_INCLUDE + ' ') : ])
|
||||
MergeArches(IncludeFiles, IncludeFile, Arch)
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
## GetLibraryClassesWithModuleType
|
||||
#
|
||||
# Get Library Class definitions for a section key that specifies module types
|
||||
#
|
||||
# @param Lines: The content to be parsed
|
||||
# @param Key: Reserved
|
||||
# @param KeyValues: To store data after parsing
|
||||
# @param CommentCharacter: Comment char, used to ignore comment content
|
||||
#
|
||||
# @retval True Get library classes successfully
|
||||
#
|
||||
def GetLibraryClassesWithModuleType(Lines, Key, KeyValues, CommentCharacter):
|
||||
newKey = SplitModuleType(Key)
|
||||
Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
|
||||
LineList = Lines.splitlines()
|
||||
for Line in LineList:
|
||||
Line = CleanString(Line, CommentCharacter)
|
||||
if Line != '' and Line[0] != CommentCharacter:
|
||||
KeyValues.append([CleanString(Line, CommentCharacter), newKey[1]])
|
||||
|
||||
return True
|
||||
|
||||
## GetDynamics
|
||||
#
|
||||
# Get Dynamic Pcds
|
||||
#
|
||||
# @param Lines: The content to be parsed
|
||||
# @param Key: Reserved
|
||||
# @param KeyValues: To store data after parsing
|
||||
# @param CommentCharacter: Comment char, used to ignore comment content
|
||||
#
|
||||
# @retval True Get Dynamic Pcds successfully
|
||||
#
|
||||
def GetDynamics(Lines, Key, KeyValues, CommentCharacter):
|
||||
#
|
||||
# Get SkuId Name List
|
||||
#
|
||||
SkuIdNameList = SplitModuleType(Key)
|
||||
|
||||
Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
|
||||
LineList = Lines.splitlines()
|
||||
for Line in LineList:
|
||||
Line = CleanString(Line, CommentCharacter)
|
||||
if Line != '' and Line[0] != CommentCharacter:
|
||||
KeyValues.append([CleanString(Line, CommentCharacter), SkuIdNameList[1]])
|
||||
|
||||
return True
|
||||
|
||||
## SplitModuleType
|
||||
#
|
||||
# Split ModuleType out of the section definition to get the key
|
||||
# [LibraryClass.Arch.ModuleType|ModuleType|ModuleType] -> [ 'LibraryClass.Arch', ['ModuleType', 'ModuleType', 'ModuleType'] ]
|
||||
#
|
||||
# @param Key: String to be parsed
|
||||
#
|
||||
# @retval ReturnValue A list for module types
|
||||
#
|
||||
def SplitModuleType(Key):
|
||||
KeyList = Key.split(DataType.TAB_SPLIT)
|
||||
#
|
||||
# Fill in for arch
|
||||
#
|
||||
KeyList.append('')
|
||||
#
|
||||
# Fill in for moduletype
|
||||
#
|
||||
KeyList.append('')
|
||||
ReturnValue = []
|
||||
KeyValue = KeyList[0]
|
||||
if KeyList[1] != '':
|
||||
KeyValue = KeyValue + DataType.TAB_SPLIT + KeyList[1]
|
||||
ReturnValue.append(KeyValue)
|
||||
ReturnValue.append(GetSplitValueList(KeyList[2]))
|
||||
|
||||
return ReturnValue
|
||||
|
||||
## Replace macro in strings list
|
||||
#
|
||||
# This method replaces macros used in a given string list. The macros are
|
||||
# given in a dictionary.
|
||||
#
|
||||
# @param StringList StringList to be processed
|
||||
# @param MacroDefinitions The macro definitions in the form of dictionary
|
||||
# @param SelfReplacement To decide whether replace un-defined macro to ''
|
||||
#
|
||||
# @retval NewList A new string list whose macros are replaced
|
||||
#
|
||||
def ReplaceMacros(StringList, MacroDefinitions={}, SelfReplacement = False):
|
||||
NewList = []
|
||||
for String in StringList:
|
||||
if type(String) == type(''):
|
||||
NewList.append(ReplaceMacro(String, MacroDefinitions, SelfReplacement))
|
||||
else:
|
||||
NewList.append(String)
|
||||
|
||||
return NewList
|
||||
|
||||
## Replace macro in string
|
||||
#
|
||||
# This method replaces macros used in a given string. The macros are given in a
|
||||
# dictionary.
|
||||
#
|
||||
# @param String String to be processed
|
||||
# @param MacroDefinitions The macro definitions in the form of dictionary
|
||||
# @param SelfReplacement To decide whether replace un-defined macro to ''
|
||||
#
|
||||
# @retval string The string whose macros are replaced
|
||||
#
|
||||
def ReplaceMacro(String, MacroDefinitions={}, SelfReplacement = False):
|
||||
LastString = String
|
||||
while MacroDefinitions:
|
||||
MacroUsed = gMacroPattern.findall(String)
|
||||
# no macro found in String, stop replacing
|
||||
if len(MacroUsed) == 0:
|
||||
break
|
||||
|
||||
for Macro in MacroUsed:
|
||||
if Macro not in MacroDefinitions:
|
||||
if SelfReplacement:
|
||||
String = String.replace("$(%s)" % Macro, '')
|
||||
continue
|
||||
String = String.replace("$(%s)" % Macro, MacroDefinitions[Macro])
|
||||
# in case there's macro not defined
|
||||
if String == LastString:
|
||||
break
|
||||
LastString = String
|
||||
|
||||
return String
|
||||
|
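#
# Illustrative usage (editor's sketch; the macro name and value are hypothetical):
#   ReplaceMacro('$(WORKSPACE)/MdePkg/MdePkg.dec', {'WORKSPACE': 'C:/edk2'})
#     -> 'C:/edk2/MdePkg/MdePkg.dec'
#   With SelfReplacement = True, an undefined macro reference is replaced with ''.
#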
||||
## NormPath
|
||||
#
|
||||
# Create a normal path
|
||||
# And replace DEFINE macros in the path
|
||||
#
|
||||
# @param Path: The input value for Path to be converted
|
||||
# @param Defines: A set for DEFINE statement
|
||||
#
|
||||
# @retval Path Formatted path
|
||||
#
|
||||
def NormPath(Path, Defines = {}):
|
||||
IsRelativePath = False
|
||||
if Path:
|
||||
if Path[0] == '.':
|
||||
IsRelativePath = True
|
||||
#
|
||||
# Replace with Define
|
||||
#
|
||||
if Defines:
|
||||
Path = ReplaceMacro(Path, Defines)
|
||||
#
|
||||
# To local path format
|
||||
#
|
||||
Path = os.path.normpath(Path)
|
||||
|
||||
if IsRelativePath and Path[0] != '.':
|
||||
Path = os.path.join('.', Path)
|
||||
|
||||
return Path
|
||||
|
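#
# Illustrative usage (editor's sketch; the path separator in the result depends on the host OS):
#   NormPath('./Foo//Bar')                                -> './Foo/Bar' on a POSIX host ('.\Foo\Bar' on Windows)
#   NormPath('$(EDK_SOURCE)/Foo', {'EDK_SOURCE': 'Edk'})  -> 'Edk/Foo' on a POSIX host
#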
||||
## CleanString
|
||||
#
|
||||
# Remove comments in a string
|
||||
# Remove spaces
|
||||
#
|
||||
# @param Line: The string to be cleaned
|
||||
# @param CommentCharacter: Comment char, used to ignore comment content, default is DataType.TAB_COMMENT_SPLIT
|
||||
#
|
||||
# @retval Line The cleaned string
|
||||
#
|
||||
def CleanString(Line, CommentCharacter = DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False):
|
||||
#
|
||||
# remove whitespace
|
||||
#
|
||||
Line = Line.strip();
|
||||
#
|
||||
# Replace R8's comment character
|
||||
#
|
||||
if AllowCppStyleComment:
|
||||
Line = Line.replace(DataType.TAB_COMMENT_R8_SPLIT, CommentCharacter)
|
||||
#
|
||||
# remove comments
|
||||
#
|
||||
Line = Line.split(CommentCharacter, 1)[0];
|
||||
#
|
||||
# remove whitespace again
|
||||
#
|
||||
Line = Line.strip();
|
||||
|
||||
return Line
|
||||
|
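#
# Illustrative usage (editor's sketch with a hypothetical line):
#   CleanString('  gTokenSpaceGuid.PcdSample|0x0f   # default value  ')
#     -> 'gTokenSpaceGuid.PcdSample|0x0f'
#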
||||
## GetMultipleValuesOfKeyFromLines
|
||||
#
|
||||
# Parse multiple strings to clean comment and spaces
|
||||
# The result is saved to KeyValues
|
||||
#
|
||||
# @param Lines: The content to be parsed
|
||||
# @param Key: Reserved
|
||||
# @param KeyValues: To store data after parsing
|
||||
# @param CommentCharacter: Comment char, used to ignore comment content
|
||||
#
|
||||
# @retval True Successfully executed
|
||||
#
|
||||
def GetMultipleValuesOfKeyFromLines(Lines, Key, KeyValues, CommentCharacter):
|
||||
Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
|
||||
LineList = Lines.split('\n')
|
||||
for Line in LineList:
|
||||
Line = CleanString(Line, CommentCharacter)
|
||||
if Line != '' and Line[0] != CommentCharacter:
|
||||
KeyValues += [Line]
|
||||
|
||||
return True
|
||||
|
||||
## GetDefineValue
|
||||
#
|
||||
# Parse a DEFINE statement to get defined value
|
||||
# DEFINE Key Value
|
||||
#
|
||||
# @param String: The content to be parsed
|
||||
# @param Key: The key of DEFINE statement
|
||||
# @param CommentCharacter: Comment char, used to ignore comment content
|
||||
#
|
||||
# @retval string The defined value
|
||||
#
|
||||
def GetDefineValue(String, Key, CommentCharacter):
|
||||
String = CleanString(String)
|
||||
return String[String.find(Key + ' ') + len(Key + ' ') : ]
|
||||
|
||||
## GetSingleValueOfKeyFromLines
|
||||
#
|
||||
# Parse multiple strings as below to get value of each definition line
|
||||
# Key1 = Value1
|
||||
# Key2 = Value2
|
||||
# The result is saved to Dictionary
|
||||
#
|
||||
# @param Lines: The content to be parsed
|
||||
# @param Dictionary: To store data after parsing
|
||||
# @param CommentCharacter: Comment char, be used to ignore comment content
|
||||
# @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
|
||||
# @param ValueSplitFlag: Value split flag, be used to decide if has multiple values
|
||||
# @param ValueSplitCharacter: Value split char, be used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
|
||||
#
|
||||
# @retval True Successfully executed
|
||||
#
|
||||
def GetSingleValueOfKeyFromLines(Lines, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
|
||||
Lines = Lines.split('\n')
|
||||
Keys = []
|
||||
Value = ''
|
||||
DefineValues = ['']
|
||||
SpecValues = ['']
|
||||
|
||||
for Line in Lines:
|
||||
#
|
||||
# Handle DEFINE and SPEC
|
||||
#
|
||||
if Line.find(DataType.TAB_INF_DEFINES_DEFINE + ' ') > -1:
|
||||
if '' in DefineValues:
|
||||
DefineValues.remove('')
|
||||
DefineValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_DEFINE, CommentCharacter))
|
||||
continue
|
||||
if Line.find(DataType.TAB_INF_DEFINES_SPEC + ' ') > -1:
|
||||
if '' in SpecValues:
|
||||
SpecValues.remove('')
|
||||
SpecValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_SPEC, CommentCharacter))
|
||||
continue
|
||||
|
||||
#
|
||||
# Handle Others
|
||||
#
|
||||
LineList = Line.split(KeySplitCharacter, 1)
|
||||
if len(LineList) >= 2:
|
||||
Key = LineList[0].split()
|
||||
if len(Key) == 1 and Key[0][0] != CommentCharacter:
|
||||
#
|
||||
# Remove comments and white spaces
|
||||
#
|
||||
LineList[1] = CleanString(LineList[1], CommentCharacter)
|
||||
if ValueSplitFlag:
|
||||
Value = map(string.strip, LineList[1].split(ValueSplitCharacter))
|
||||
else:
|
||||
Value = CleanString(LineList[1], CommentCharacter).splitlines()
|
||||
|
||||
if Key[0] in Dictionary:
|
||||
if Key[0] not in Keys:
|
||||
Dictionary[Key[0]] = Value
|
||||
Keys.append(Key[0])
|
||||
else:
|
||||
Dictionary[Key[0]].extend(Value)
|
||||
else:
|
||||
Dictionary[DataType.TAB_INF_DEFINES_MACRO][Key[0]] = Value[0]
|
||||
|
||||
if DefineValues == []:
|
||||
DefineValues = ['']
|
||||
if SpecValues == []:
|
||||
SpecValues = ['']
|
||||
Dictionary[DataType.TAB_INF_DEFINES_DEFINE] = DefineValues
|
||||
Dictionary[DataType.TAB_INF_DEFINES_SPEC] = SpecValues
|
||||
|
||||
return True
|
||||
|
||||
## PreCheck
|
||||
#
|
||||
# Do pre-check for a file before it is parsed
|
||||
# Check $()
|
||||
# Check []
|
||||
#
|
||||
# @param FileName: Used for error report
|
||||
# @param FileContent: File content to be parsed
|
||||
# @param SupSectionTag: Used for error report
|
||||
#
|
||||
def PreCheck(FileName, FileContent, SupSectionTag):
|
||||
LineNo = 0
|
||||
IsFailed = False
|
||||
NewFileContent = ''
|
||||
for Line in FileContent.splitlines():
|
||||
LineNo = LineNo + 1
|
||||
#
|
||||
# Clean current line
|
||||
#
|
||||
Line = CleanString(Line)
|
||||
|
||||
#
|
||||
# Remove commented line
|
||||
#
|
||||
if Line.find(DataType.TAB_COMMENT_SPLIT) == 0:
|
||||
Line = ''
|
||||
#
|
||||
# Check $()
|
||||
#
|
||||
if Line.find('$') > -1:
|
||||
if Line.find('$(') < 0 or Line.find(')') < 0:
|
||||
EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError = EdkLogger.IsRaiseError)
|
||||
|
||||
#
|
||||
# Check []
|
||||
#
|
||||
if Line.find('[') > -1 or Line.find(']') > -1:
|
||||
#
|
||||
# Only get one '[' or one ']'
|
||||
#
|
||||
if not (Line.find('[') > -1 and Line.find(']') > -1):
|
||||
EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError = EdkLogger.IsRaiseError)
|
||||
|
||||
#
|
||||
# Regenerate FileContent
|
||||
#
|
||||
NewFileContent = NewFileContent + Line + '\r\n'
|
||||
|
||||
if IsFailed:
|
||||
EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError = EdkLogger.IsRaiseError)
|
||||
|
||||
return NewFileContent
|
||||
|
||||
## CheckFileType
|
||||
#
|
||||
# Check if the Filename has the extension ExtName
|
||||
# Return True if the file type is correct
|
||||
# Raise an error if it is not
|
||||
#
|
||||
# @param CheckFilename: Name of the file to be checked
|
||||
# @param ExtName: Ext name of the file to be checked
|
||||
# @param ContainerFilename: The container file which describes the file to be checked, used for error report
|
||||
# @param SectionName: Used for error report
|
||||
# @param Line: The line in container file which defines the file to be checked
|
||||
#
|
||||
# @retval True The file type is correct
|
||||
#
|
||||
def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo = -1):
|
||||
if CheckFilename != '' and CheckFilename != None:
|
||||
(Root, Ext) = os.path.splitext(CheckFilename)
|
||||
if Ext.upper() != ExtName.upper():
|
||||
ContainerFile = open(ContainerFilename, 'r').read()
|
||||
if LineNo == -1:
|
||||
LineNo = GetLineNo(ContainerFile, Line)
|
||||
ErrorMsg = "Invalid %s. '%s' is found, but '%s' file is needed" % (SectionName, CheckFilename, ExtName)
|
||||
EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, Line=LineNo,
|
||||
File=ContainerFilename, RaiseError = EdkLogger.IsRaiseError)
|
||||
|
||||
return True
|
||||
|
||||
## CheckFileExist
|
||||
#
|
||||
# Check if the file exists
|
||||
# Return the file's full path if it exists
|
||||
# Raise an error if it does not exist
|
||||
#
|
||||
# @param CheckFilename: Name of the file to be checked
|
||||
# @param WorkspaceDir: Current workspace dir
|
||||
# @param ContainerFilename: The container file which describes the file to be checked, used for error report
|
||||
# @param SectionName: Used for error report
|
||||
# @param Line: The line in container file which defines the file to be checked
|
||||
#
|
||||
# @retval The file full path if the file exists
|
||||
#
|
||||
def CheckFileExist(WorkspaceDir, CheckFilename, ContainerFilename, SectionName, Line, LineNo = -1):
|
||||
CheckFile = ''
|
||||
if CheckFilename != '' and CheckFilename != None:
|
||||
CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)
|
||||
if not os.path.isfile(CheckFile):
|
||||
ContainerFile = open(ContainerFilename, 'r').read()
|
||||
if LineNo == -1:
|
||||
LineNo = GetLineNo(ContainerFile, Line)
|
||||
ErrorMsg = "Can't find file '%s' defined in section '%s'" % (CheckFile, SectionName)
|
||||
EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg,
|
||||
File=ContainerFilename, Line = LineNo, RaiseError = EdkLogger.IsRaiseError)
|
||||
|
||||
return CheckFile
|
||||
|
||||
## GetLineNo
|
||||
#
|
||||
# Find the index of a line in a file
|
||||
#
|
||||
# @param FileContent: Search scope
|
||||
# @param Line: Search key
|
||||
#
|
||||
# @retval int Index of the line
|
||||
# @retval -1 The line is not found
|
||||
#
|
||||
def GetLineNo(FileContent, Line, IsIgnoreComment = True):
|
||||
LineList = FileContent.splitlines()
|
||||
for Index in range(len(LineList)):
|
||||
if LineList[Index].find(Line) > -1:
|
||||
#
|
||||
# Ignore statement in comment
|
||||
#
|
||||
if IsIgnoreComment:
|
||||
if LineList[Index].strip()[0] == DataType.TAB_COMMENT_SPLIT:
|
||||
continue
|
||||
return Index + 1
|
||||
|
||||
return -1
|
||||
|
||||
## RaiseParserError
|
||||
#
|
||||
# Raise a parser error
|
||||
#
|
||||
# @param Line: String which has error
|
||||
# @param Section: Used for error report
|
||||
# @param File: File which has the string
|
||||
# @param Format: Correct format
|
||||
#
|
||||
def RaiseParserError(Line, Section, File, Format = '', LineNo = -1):
|
||||
if LineNo == -1:
|
||||
LineNo = GetLineNo(open(os.path.normpath(File), 'r').read(), Line)
|
||||
ErrorMsg = "Invalid statement '%s' is found in section '%s'" % (Line, Section)
|
||||
if Format != '':
|
||||
Format = "Correct format is " + Format
|
||||
EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=File, Line=LineNo, ExtraData=Format, RaiseError = EdkLogger.IsRaiseError)
|
||||
|
||||
## WorkspaceFile
|
||||
#
|
||||
# Return a full path with workspace dir
|
||||
#
|
||||
# @param WorkspaceDir: Workspace dir
|
||||
# @param Filename: Relative file name
|
||||
#
|
||||
# @retval string A full path
|
||||
#
|
||||
def WorkspaceFile(WorkspaceDir, Filename):
|
||||
return os.path.join(NormPath(WorkspaceDir), NormPath(Filename))
|
||||
|
||||
## Split string
|
||||
#
|
||||
# Remove a leading and trailing '"' from the string
|
||||
#
|
||||
# @param String: The string to be processed
|
||||
#
|
||||
# @retval String: The string with the surrounding quotes removed
|
||||
#
|
||||
def SplitString(String):
|
||||
if String.startswith('\"'):
|
||||
String = String[1:]
|
||||
if String.endswith('\"'):
|
||||
String = String[:-1]
|
||||
|
||||
return String
|
||||
|
||||
## Convert To Sql String
|
||||
#
|
||||
# 1. Replace "'" with "''" in each item of StringList
|
||||
#
|
||||
# @param StringList: A list for strings to be converted
|
||||
#
|
||||
def ConvertToSqlString(StringList):
|
||||
return map(lambda s: s.replace("'", "''") , StringList)
|
||||
|
||||
## Convert To Sql String
|
||||
#
|
||||
# 1. Replace "'" with "''" in the String
|
||||
#
|
||||
# @param String: A String to be converted
|
||||
#
|
||||
def ConvertToSqlString2(String):
|
||||
return String.replace("'", "''")
|
||||
|
||||
#
|
||||
# Remove comment block
|
||||
#
|
||||
def RemoveBlockComment(Lines):
|
||||
IsFindBlockComment = False
|
||||
IsFindBlockCode = False
|
||||
ReservedLine = ''
|
||||
NewLines = []
|
||||
|
||||
for Line in Lines:
|
||||
Line = Line.strip()
|
||||
#
|
||||
# Remove comment block
|
||||
#
|
||||
if Line.find(DataType.TAB_COMMENT_R8_START) > -1:
|
||||
ReservedLine = GetSplitValueList(Line, DataType.TAB_COMMENT_R8_START, 1)[0]
|
||||
IsFindBlockComment = True
|
||||
if Line.find(DataType.TAB_COMMENT_R8_END) > -1:
|
||||
Line = ReservedLine + GetSplitValueList(Line, DataType.TAB_COMMENT_R8_END, 1)[1]
|
||||
ReservedLine = ''
|
||||
IsFindBlockComment = False
|
||||
if IsFindBlockComment:
|
||||
NewLines.append('')
|
||||
continue
|
||||
|
||||
NewLines.append(Line)
|
||||
return NewLines
|
||||
|
||||
#
|
||||
# Get String of a List
|
||||
#
|
||||
def GetStringOfList(List, Split = ' '):
|
||||
if type(List) != type([]):
|
||||
return List
|
||||
Str = ''
|
||||
for Item in List:
|
||||
Str = Str + Item + Split
|
||||
|
||||
return Str.strip()

#
# Get HelpTextList from HelpTextClassList
#
def GetHelpTextList(HelpTextClassList):
    List = []
    if HelpTextClassList:
        for HelpText in HelpTextClassList:
            if HelpText.String.endswith('\n'):
                HelpText.String = HelpText.String[0: len(HelpText.String) - len('\n')]
            List.extend(HelpText.String.split('\n'))

    return List

## Convert a string to the text of a C byte-array initializer
def StringToArray(String):
    if isinstance(String, unicode):
        if len(String) == 0:
            return "{0x00, 0x00}"
        return "{%s, 0x00, 0x00}" % ", ".join(["0x%02x, 0x00" % ord(C) for C in String])
    elif String.startswith('L"'):
        if String == "L\"\"":
            return "{0x00, 0x00}"
        else:
            return "{%s, 0x00, 0x00}" % ", ".join(["0x%02x, 0x00" % ord(C) for C in String[2:-1]])
    elif String.startswith('"'):
        if String == "\"\"":
            return "{0x00}"
        else:
            return "{%s, 0x00}" % ", ".join(["0x%02x" % ord(C) for C in String[1:-1]])
    else:
        return '{%s, 0}' % ', '.join(String.split())
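
## Usage sketch (illustrative only): the C initializer text produced for a few
# input forms.
#
#   StringToArray('"AB"')     # '{0x41, 0x42, 0x00}'
#   StringToArray('L"AB"')    # '{0x41, 0x00, 0x42, 0x00, 0x00, 0x00}'
#   StringToArray('1 2 3')    # '{1, 2, 3, 0}'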

def StringArrayLength(String):
    if isinstance(String, unicode):
        return (len(String) + 1) * 2 + 1
    elif String.startswith('L"'):
        return (len(String) - 3 + 1) * 2
    elif String.startswith('"'):
        return (len(String) - 2 + 1)
    else:
        return len(String.split()) + 1

## Remove duplicated instances of an option (default "/I") from an option string;
# values listed in Against are treated as already seen
def RemoveDupOption(OptionString, Which="/I", Against=None):
    OptionList = OptionString.split()
    ValueList = []
    if Against:
        ValueList += Against
    for Index in range(len(OptionList)):
        Opt = OptionList[Index]
        if not Opt.startswith(Which):
            continue
        if len(Opt) > len(Which):
            Val = Opt[len(Which):]
        else:
            Val = ""
        if Val in ValueList:
            OptionList[Index] = ""
        else:
            ValueList.append(Val)
    return " ".join(OptionList)
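
## Usage sketch (illustrative only): collapse repeated include-path options in a
# flag string; the paths are hypothetical.
#
#   Flags = "/Ia/include /Ib/include /Ia/include /D DEBUG"
#   RemoveDupOption(Flags, "/I")
#   # "/Ia/include /Ib/include  /D DEBUG" (the removed option leaves an extra space)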

##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    pass
174
BaseTools/Source/Python/Common/TargetTxtClassObject.py
Normal file
@@ -0,0 +1,174 @@
## @file
# This file is used to define each component of Target.txt file
#
# Copyright (c) 2007, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#

##
# Import Modules
#
import os
import EdkLogger
import DataType
from BuildToolError import *
import GlobalData

gDefaultTargetTxtFile = "Conf/target.txt"

## TargetTxtClassObject
#
# This class defines the content of the target.txt file
#
# @param object:   Inherited from object class
# @param Filename: Input value for full path of target.txt
#
# @var TargetTxtDictionary: To store keys and values defined in target.txt
#
class TargetTxtClassObject(object):
    def __init__(self, Filename = None):
        self.TargetTxtDictionary = {
            DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM              : '',
            DataType.TAB_TAT_DEFINES_ACTIVE_MODULE                : '',
            DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF              : '',
            DataType.TAB_TAT_DEFINES_MULTIPLE_THREAD              : '',
            DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER : '',
            DataType.TAB_TAT_DEFINES_TARGET                       : [],
            DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG               : [],
            DataType.TAB_TAT_DEFINES_TARGET_ARCH                  : [],
            DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF              : '',
        }
        if Filename != None:
            self.LoadTargetTxtFile(Filename)

    ## LoadTargetTxtFile
    #
    # Load target.txt file and parse it, return a set structure to store keys and values
    #
    # @param Filename: Input value for full path of target.txt
    #
    # @retval set() A set structure to store keys and values
    # @retval 1     Error happened in parsing
    #
    def LoadTargetTxtFile(self, Filename):
        if os.path.exists(Filename) and os.path.isfile(Filename):
            return self.ConvertTextFileToDict(Filename, '#', '=')
        else:
            EdkLogger.error("Target.txt Parser", FILE_NOT_FOUND, ExtraData=Filename)
            return 1

    ## ConvertTextFileToDict
    #
    # Convert a text file to a dictionary of (name:value) pairs.
    # The data is saved to self.TargetTxtDictionary
    #
    # @param FileName:          Text filename
    # @param CommentCharacter:  Comment char, used to ignore comment content
    # @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
    #
    # @retval 0 Convert successfully
    # @retval 1 Open file failed
    #
    def ConvertTextFileToDict(self, FileName, CommentCharacter, KeySplitCharacter):
        F = None
        try:
            F = open(FileName, 'r')
        except:
            EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)
            if F != None:
                F.close()

        for Line in F:
            Line = Line.strip()
            if Line.startswith(CommentCharacter) or Line == '':
                continue

            LineList = Line.split(KeySplitCharacter, 1)
            Key = LineList[0].strip()
            if len(LineList) == 2:
                Value = LineList[1].strip()
            else:
                Value = ""

            if Key in [DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM, DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF, \
                       DataType.TAB_TAT_DEFINES_ACTIVE_MODULE, DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF]:
                self.TargetTxtDictionary[Key] = Value.replace('\\', '/')
            elif Key in [DataType.TAB_TAT_DEFINES_TARGET, DataType.TAB_TAT_DEFINES_TARGET_ARCH, \
                         DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]:
                self.TargetTxtDictionary[Key] = Value.split()
            elif Key == DataType.TAB_TAT_DEFINES_MULTIPLE_THREAD:
                if Value not in ["Enable", "Disable"]:
                    EdkLogger.error("build", FORMAT_INVALID, "Invalid setting of [%s]: %s." % (Key, Value),
                                    ExtraData="\tSetting must be one of [Enable, Disable]",
                                    File=FileName)
                self.TargetTxtDictionary[Key] = Value
            elif Key == DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER:
                try:
                    V = int(Value, 0)
                except:
                    EdkLogger.error("build", FORMAT_INVALID, "Invalid number of [%s]: %s." % (Key, Value),
                                    File=FileName)
                self.TargetTxtDictionary[Key] = Value
            #elif Key not in GlobalData.gGlobalDefines:
            #    GlobalData.gGlobalDefines[Key] = Value

        F.close()
        return 0
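
## Usage sketch (illustrative only): the kind of target.txt content this method
# digests; the paths and tool chain tag are hypothetical.
#
#   ACTIVE_PLATFORM  = Nt32Pkg/Nt32Pkg.dsc
#   TOOL_CHAIN_CONF  = Conf/tools_def.txt
#   TARGET           = DEBUG RELEASE
#   TOOL_CHAIN_TAG   = MYTOOLS
#
# After ConvertTextFileToDict(FileName, '#', '='), scalar keys keep their string
# value (with '\' normalized to '/'), while list keys such as TARGET are split
# on whitespace.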

## Print the dictionary
#
# Print all items of dictionary one by one
#
# @param Dict: The dictionary to be printed
#
def printDict(Dict):
    if Dict != None:
        KeyList = Dict.keys()
        for Key in KeyList:
            if Dict[Key] != '':
                print Key + ' = ' + str(Dict[Key])

## Print the list
#
# Print the items of the list which matched the input key
#
# @param Key:  The key of the item to be printed
# @param List: The list to be printed
#
def printList(Key, List):
    if type(List) == type([]):
        if len(List) > 0:
            if Key.find(TAB_SPLIT) != -1:
                print "\n" + Key
                for Item in List:
                    print Item

## TargetTxtDict
#
# Load target.txt in input workspace dir
#
# @param WorkSpace: Workspace dir
#
# @retval Target An instance of TargetTxtClassObject() with loaded target.txt
#
def TargetTxtDict(WorkSpace):
    Target = TargetTxtClassObject()
    Target.LoadTargetTxtFile(os.path.normpath(os.path.join(WorkSpace, gDefaultTargetTxtFile)))
    return Target
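
## Usage sketch (illustrative only): load Conf/target.txt under a workspace and
# read one setting; the workspace path is hypothetical.
#
#   Target = TargetTxtDict("C:/MyWorkspace")
#   print Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM]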

##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    pass
    Target = TargetTxtDict(os.getenv("WORKSPACE"))
    print Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER]
    print Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TARGET]
    print Target.TargetTxtDictionary
217
BaseTools/Source/Python/Common/ToolDefClassObject.py
Normal file
@@ -0,0 +1,217 @@
## @file
# This file is used to define each component of tools_def.txt file
#
# Copyright (c) 2007, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#

##
# Import Modules
#
import os
import re
import EdkLogger

from Dictionary import *
from BuildToolError import *
from TargetTxtClassObject import *

##
# Static variables used for patterns
#
gMacroRefPattern = re.compile('(DEF\([^\(\)]+\))')
gEnvRefPattern = re.compile('(ENV\([^\(\)]+\))')
gMacroDefPattern = re.compile("DEFINE\s+([^\s]+)")
gDefaultToolsDefFile = "Conf/tools_def.txt"

## ToolDefClassObject
#
# This class defines the content of the tools_def.txt file
#
# @param object:   Inherited from object class
# @param Filename: Input value for full path of tools_def.txt
#
# @var ToolsDefTxtDictionary: To store keys and values defined in tools_def.txt
# @var MacroDictionary:       To store keys and values defined in DEFINE statement
#
class ToolDefClassObject(object):
    def __init__(self, FileName = None):
        self.ToolsDefTxtDictionary = {}
        self.MacroDictionary = {}
        for Env in os.environ:
            self.MacroDictionary["ENV(%s)" % Env] = os.environ[Env]

        if FileName != None:
            self.LoadToolDefFile(FileName)
    ## LoadToolDefFile
    #
    # Load tools_def.txt file and parse it; the results are stored in
    # ToolsDefTxtDictionary and ToolsDefTxtDatabase
    #
    # @param Filename: Input value for full path of tools_def.txt
    #
    def LoadToolDefFile(self, FileName):
        FileContent = []
        if os.path.isfile(FileName):
            try:
                F = open(FileName, 'r')
                FileContent = F.readlines()
            except:
                EdkLogger.error("tools_def.txt parser", FILE_OPEN_FAILURE, ExtraData=FileName)
        else:
            EdkLogger.error("tools_def.txt parser", FILE_NOT_FOUND, ExtraData=FileName)

        self.ToolsDefTxtDatabase = {
            TAB_TOD_DEFINES_TARGET         : [],
            TAB_TOD_DEFINES_TOOL_CHAIN_TAG : [],
            TAB_TOD_DEFINES_TARGET_ARCH    : [],
            TAB_TOD_DEFINES_COMMAND_TYPE   : []
        }

        for Index in range(len(FileContent)):
            Line = FileContent[Index].strip()
            if Line == "" or Line[0] == '#':
                continue
            NameValuePair = Line.split("=", 1)
            if len(NameValuePair) != 2:
                EdkLogger.warn("tools_def.txt parser", "Line %d: not correct assignment statement, skipped" % (Index + 1))
                continue

            Name = NameValuePair[0].strip()
            Value = NameValuePair[1].strip()

            if Name == "IDENTIFIER":
                EdkLogger.debug(EdkLogger.DEBUG_8, "Line %d: Found identifier statement, skipped: %s" % ((Index + 1), Value))
                continue

            MacroDefinition = gMacroDefPattern.findall(Name)
            if MacroDefinition != []:
                Done, Value = self.ExpandMacros(Value)
                if not Done:
                    EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                                    "Macro or Environment has not been defined",
                                    ExtraData=Value[4:-1], File=FileName, Line=Index+1)

                MacroName = MacroDefinition[0].strip()
                self.MacroDictionary["DEF(%s)" % MacroName] = Value
                EdkLogger.debug(EdkLogger.DEBUG_8, "Line %d: Found macro: %s = %s" % ((Index + 1), MacroName, Value))
                continue

            Done, Value = self.ExpandMacros(Value)
            if not Done:
                EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                                "Macro or Environment has not been defined",
                                ExtraData=Value[4:-1], File=FileName, Line=Index+1)

            List = Name.split('_')
            if len(List) != 5:
                EdkLogger.verbose("Line %d: Not a valid name of definition: %s" % ((Index + 1), Name))
                continue
            elif List[4] == '*':
                EdkLogger.verbose("Line %d: '*' is not allowed in last field: %s" % ((Index + 1), Name))
                continue
            else:
                self.ToolsDefTxtDictionary[Name] = Value
                if List[0] != '*':
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] += [List[0]]
                if List[1] != '*':
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] += [List[1]]
                if List[2] != '*':
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] += [List[2]]
                if List[3] != '*':
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] += [List[3]]
                if List[4] == TAB_TOD_DEFINES_FAMILY and List[2] == '*' and List[3] == '*':
                    if TAB_TOD_DEFINES_FAMILY not in self.ToolsDefTxtDatabase:
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY] = {}
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY] = {}
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
                    elif List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
                    elif self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] != Value:
                        EdkLogger.verbose("Line %d: No override allowed for the family of a tool chain: %s" % ((Index + 1), Name))
                if List[4] == TAB_TOD_DEFINES_BUILDRULEFAMILY and List[2] == '*' and List[3] == '*':
                    if TAB_TOD_DEFINES_BUILDRULEFAMILY not in self.ToolsDefTxtDatabase \
                       or List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                        EdkLogger.verbose("Line %d: The family is not specified, but BuildRuleFamily is specified for the tool chain: %s" % ((Index + 1), Name))
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value

        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET]))
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG]))
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH]))
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE]))

        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET].sort()
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG].sort()
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH].sort()
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE].sort()

        KeyList = [TAB_TOD_DEFINES_TARGET, TAB_TOD_DEFINES_TOOL_CHAIN_TAG, TAB_TOD_DEFINES_TARGET_ARCH, TAB_TOD_DEFINES_COMMAND_TYPE]
        for Index in range(3, -1, -1):
            for Key in dict(self.ToolsDefTxtDictionary):
                List = Key.split('_')
                if List[Index] == '*':
                    for String in self.ToolsDefTxtDatabase[KeyList[Index]]:
                        List[Index] = String
                        NewKey = '%s_%s_%s_%s_%s' % tuple(List)
                        if NewKey not in self.ToolsDefTxtDictionary:
                            self.ToolsDefTxtDictionary[NewKey] = self.ToolsDefTxtDictionary[Key]
                            continue
                    del self.ToolsDefTxtDictionary[Key]
                elif List[Index] not in self.ToolsDefTxtDatabase[KeyList[Index]]:
                    del self.ToolsDefTxtDictionary[Key]

    ## ExpandMacros
    #
    # Replace defined macros with real value
    #
    # @param Value: The string with unreplaced macros
    #
    # @retval (Done, Value): Done is False and Value is the unresolved reference
    #                        when a macro or environment variable is undefined;
    #                        otherwise Done is True and Value is the expanded string
    #
    def ExpandMacros(self, Value):
        EnvReference = gEnvRefPattern.findall(Value)
        for Ref in EnvReference:
            if Ref not in self.MacroDictionary:
                return False, Ref
            Value = Value.replace(Ref, self.MacroDictionary[Ref])

        MacroReference = gMacroRefPattern.findall(Value)
        for Ref in MacroReference:
            if Ref not in self.MacroDictionary:
                return False, Ref
            Value = Value.replace(Ref, self.MacroDictionary[Ref])

        return True, Value
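
## Usage sketch (illustrative only): DEF(...) and ENV(...) references are replaced
# from MacroDictionary; the macro name and path below are hypothetical.
#
#   ToolDef = ToolDefClassObject()
#   ToolDef.MacroDictionary["DEF(IASL_BIN)"] = "C:/ASL"
#   Done, Value = ToolDef.ExpandMacros("DEF(IASL_BIN)/iasl.exe")
#   # Done is True and Value is "C:/ASL/iasl.exe"; an undefined reference makes
#   # Done False and returns the unresolved reference instead.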

## ToolDefDict
#
# Load tools_def.txt in input workspace dir
#
# @param WorkSpace: Workspace dir
#
# @retval ToolDef An instance of ToolDefClassObject() with loaded tools_def.txt
#
def ToolDefDict(WorkSpace):
    Target = TargetTxtDict(WorkSpace)
    ToolDef = ToolDefClassObject()
    # Use a local copy of the default path so the module-level default is not shadowed
    ToolsDefFile = gDefaultToolsDefFile
    if DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF in Target.TargetTxtDictionary:
        ToolsDefFile = Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF]
    ToolDef.LoadToolDefFile(os.path.normpath(os.path.join(WorkSpace, ToolsDefFile)))
    return ToolDef
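
## Usage sketch (illustrative only): resolve tools_def.txt through target.txt and
# look up one tool definition; the key below is hypothetical.
#
#   ToolDef = ToolDefDict(os.getenv("WORKSPACE"))
#   print ToolDef.ToolsDefTxtDictionary.get("DEBUG_MYTOOLS_IA32_CC_PATH")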

##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    ToolDef = ToolDefDict(os.getenv("WORKSPACE"))
    pass
1754
BaseTools/Source/Python/Common/XmlParser.py
Normal file
File diff suppressed because it is too large
228
BaseTools/Source/Python/Common/XmlRoutines.py
Normal file
@@ -0,0 +1,228 @@
## @file
# This is an XML API that uses a syntax similar to XPath, but it is written in
# standard python so that no extra python packages are required to use it.
#
# Copyright (c) 2007, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#

##
# Import Modules
#
import xml.dom.minidom

## Create an XML element
#
# @param Name
# @param String
# @param NodeList
# @param AttributeList
#
# @retval Element
#
def CreateXmlElement(Name, String, NodeList, AttributeList):
    Doc = xml.dom.minidom.Document()
    Element = Doc.createElement(Name)
    if String != '' and String != None:
        Element.appendChild(Doc.createTextNode(String))

    for Item in NodeList:
        if type(Item) == type([]):
            Key = Item[0]
            Value = Item[1]
            if Key != '' and Key != None and Value != '' and Value != None:
                Node = Doc.createElement(Key)
                Node.appendChild(Doc.createTextNode(Value))
                Element.appendChild(Node)
        else:
            Element.appendChild(Item)
    for Item in AttributeList:
        Key = Item[0]
        Value = Item[1]
        if Key != '' and Key != None and Value != '' and Value != None:
            Element.setAttribute(Key, Value)

    return Element
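
## Usage sketch (illustrative only): build an element with one text child, one
# sub-element and one attribute; the tag and attribute names are hypothetical.
#
#   Node = CreateXmlElement('File', 'text', [['Name', 'a.c']], [['Family', 'MSFT']])
#   print Node.toxml()    # <File Family="MSFT">text<Name>a.c</Name></File>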

## Get a list of XML nodes using XPath style syntax.
#
# Return a list of XML DOM nodes from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty list is returned.
#
# @param Dom    The root XML DOM node.
# @param String An XPath style path.
#
# @retval Nodes A list of XML nodes matching the XPath style String.
#
def XmlList(Dom, String):
    if String == None or String == "" or Dom == None or Dom == "":
        return []
    if Dom.nodeType == Dom.DOCUMENT_NODE:
        Dom = Dom.documentElement
    if String[0] == "/":
        String = String[1:]
    TagList = String.split('/')
    Nodes = [Dom]
    Index = 0
    End = len(TagList) - 1
    while Index <= End:
        ChildNodes = []
        for Node in Nodes:
            if Node.nodeType == Node.ELEMENT_NODE and Node.tagName == TagList[Index]:
                if Index < End:
                    ChildNodes.extend(Node.childNodes)
                else:
                    ChildNodes.append(Node)
        Nodes = ChildNodes
        ChildNodes = []
        Index += 1

    return Nodes

## Get a single XML node using XPath style syntax.
#
# Return a single XML DOM node from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty string is returned.
#
# @param Dom    The root XML DOM node.
# @param String An XPath style path.
#
# @retval Node A single XML node matching the XPath style String.
#
def XmlNode(Dom, String):
    if String == None or String == "" or Dom == None or Dom == "":
        return ""
    if Dom.nodeType == Dom.DOCUMENT_NODE:
        Dom = Dom.documentElement
    if String[0] == "/":
        String = String[1:]
    TagList = String.split('/')
    Index = 0
    End = len(TagList) - 1
    ChildNodes = [Dom]
    while Index <= End:
        for Node in ChildNodes:
            if Node.nodeType == Node.ELEMENT_NODE and Node.tagName == TagList[Index]:
                if Index < End:
                    ChildNodes = Node.childNodes
                else:
                    return Node
                break
        Index += 1
    return ""

## Get a single XML element using XPath style syntax.
#
# Return a single XML element from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty string is returned.
#
# @param Dom    The root XML DOM object.
# @param String An XPath style path.
#
# @retval Element An XML element matching the XPath style String.
#
def XmlElement(Dom, String):
    try:
        return XmlNode(Dom, String).firstChild.data.strip()
    except:
        return ""

## Get a single XML element of the current node.
#
# Return a single XML element specified by the current root Dom.
# If the input Dom is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
#
# @retval Element An XML element in current root Dom.
#
def XmlElementData(Dom):
    try:
        return Dom.firstChild.data.strip()
    except:
        return ""

## Get a list of XML elements using XPath style syntax.
#
# Return a list of XML elements from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty list is returned.
#
# @param Dom    The root XML DOM object.
# @param String An XPath style path.
#
# @retval Elements A list of XML elements matching the XPath style String.
#
def XmlElementList(Dom, String):
    return map(XmlElementData, XmlList(Dom, String))

## Get the XML attribute of the current node.
#
# Return a single XML attribute named Attribute from the current root Dom.
# If the input Dom or Attribute is not valid, then an empty string is returned.
#
# @param Dom       The root XML DOM object.
# @param Attribute The name of Attribute.
#
# @retval Attribute The value of the named attribute.
#
def XmlAttribute(Dom, Attribute):
    try:
        return Dom.getAttribute(Attribute).strip()
    except:
        return ''

## Get the XML node name of the current node.
#
# Return a single XML node name from the current root Dom.
# If the input Dom is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
#
# @retval Name The name of the current node.
#
def XmlNodeName(Dom):
    try:
        return Dom.nodeName.strip()
    except:
        return ''

## Parse an XML file.
#
# Parse the input XML file named FileName and return the XML DOM it represents.
# If the input File is not a valid XML file, then an empty string is returned.
#
# @param FileName The XML file name.
#
# @retval Dom The DOM object parsed from the XML file.
#
def XmlParseFile(FileName):
    try:
        XmlFile = open(FileName)
        Dom = xml.dom.minidom.parse(XmlFile)
        XmlFile.close()
        return Dom
    except Exception, X:
        print X
        return ""
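
## Usage sketch (illustrative only): parse a file and read nodes with the XPath
# style helpers; 'Example.xml' and the element paths are hypothetical.
#
#   Dom = XmlParseFile('Example.xml')
#   Name = XmlElement(Dom, '/PackageSurfaceArea/Header/Name')
#   for Node in XmlList(Dom, '/PackageSurfaceArea/LibraryClass'):
#       print XmlAttribute(Node, 'Usage')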

# This acts like the main() function for the script, unless it is 'import'ed
# into another script.
if __name__ == '__main__':
    # Nothing to do here. Could do some unit tests.
    A = CreateXmlElement('AAA', 'CCC', [['AAA', '111'], ['BBB', '222']], [['A', '1'], ['B', '2']])
    B = CreateXmlElement('ZZZ', 'CCC', [['XXX', '111'], ['YYY', '222']], [['A', '1'], ['B', '2']])
    C = CreateXmlElement('DDD', 'EEE', [A, B], [['FFF', 'GGG']])
    print C.toprettyxml(indent = " ")
    pass
0
BaseTools/Source/Python/Common/__init__.py
Normal file