Compare commits
86 Commits
master
...
system76-c
Author | SHA1 | Date | |
---|---|---|---|
f40dc199c2 | |||
3f54e8f57a | |||
55849a4a46 | |||
74f2015276 | |||
6dff36c9b2 | |||
814c0fa862 | |||
a05aa4aff0 | |||
30c84e8277 | |||
2ba9869baa | |||
826b9d30cf | |||
b0e7e3919f | |||
976842130a | |||
52d5d1b2c5 | |||
7050fc3a26 | |||
0eca9e3c1a | |||
19304f2144 | |||
b062ed0c86 | |||
d9cf95c6a2 | |||
75a5161506 | |||
d193b18023 | |||
061df3962f | |||
f6e7c15556 | |||
95fb70f34d | |||
900debdeec | |||
33006e4f4e | |||
b8fbf69c16 | |||
ba13872223 | |||
eb1e7a0284 | |||
eba6c7c6ce | |||
a5f556c37b | |||
e9023ae655 | |||
cdd43b2533 | |||
d1a6917813 | |||
81e096bda1 | |||
42f99de779 | |||
a03327088a | |||
4ba839c5c8 | |||
6bd9d60b41 | |||
964f80253a | |||
47794944e6 | |||
8f008298cc | |||
783f22ecc3 | |||
81a93fb3a9 | |||
30dc65997f | |||
93f8c4a6d5 | |||
51be9d0425 | |||
31a4e13c19 | |||
9d32ca34d2 | |||
0f9b597012 | |||
19e12d66fe | |||
9332332724 | |||
239e19e391 | |||
c8a3cdb675 | |||
40147bbf14 | |||
cf2bde63a1 | |||
f6f2ff4904 | |||
a26aad44d1 | |||
3596140a69 | |||
9b7ce3b9a1 | |||
147c02d66e | |||
768fe7365f | |||
0f6978e314 | |||
0a8abc3c4d | |||
54b26ae48f | |||
9d72e4b922 | |||
7f094cdf45 | |||
b0f8ea3c9c | |||
731869826b | |||
a49a99ef5e | |||
d06545cd3d | |||
575764deeb | |||
ed1cf61e3e | |||
091f5d689f | |||
d944c24078 | |||
bdd22050f6 | |||
0286bfae44 | |||
735fae0452 | |||
8de2a8523d | |||
e4fc0fbacb | |||
fceee0d621 | |||
d67d692ee2 | |||
927fc0e4b5 | |||
04515ff368 | |||
439e7d2556 | |||
0554ac9dd0 | |||
0a679652e8 |
@ -1,50 +0,0 @@
|
||||
# Azure DevOps Pipelines
|
||||
|
||||
These yml files are used to provide CI builds using the Azure DevOps Pipeline Service.
|
||||
Most of the CI leverages edk2-pytools to support cross platform building and execution.
|
||||
|
||||
## Core CI
|
||||
|
||||
Focused on building and testing all packages in Edk2 without an actual target platform.
|
||||
|
||||
See `.pytools/ReadMe.py` for more details
|
||||
|
||||
## Platform CI
|
||||
|
||||
Focused on building a single target platform and confirming functionality on that platform.
|
||||
|
||||
## Conventions
|
||||
|
||||
* Files extension should be *.yml. *.yaml is also supported but in Edk2 we use those for our package configuration.
|
||||
* Platform CI files should be in the `<PlatformPkg>/.azurepipelines` folder.
|
||||
* Core CI files are in the root folder.
|
||||
* Shared templates are in the `templates` folder.
|
||||
* Top level CI files should be named `<host os>-<tool_chain_tag>.yml`
|
||||
|
||||
## Links
|
||||
|
||||
* Basic Azure Landing Site - https://docs.microsoft.com/en-us/azure/devops/pipelines/?view=azure-devops
|
||||
* Pipeline jobs - https://docs.microsoft.com/en-us/azure/devops/pipelines/process/phases?view=azure-devops&tabs=yaml
|
||||
* Pipeline yml scheme - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=azure-devops&tabs=schema%2Cparameter-schema
|
||||
* Pipeline expression - https://docs.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops
|
||||
* PyTools - https://github.com/tianocore/edk2-pytool-extensions and https://github.com/tianocore/edk2-pytool-library
|
||||
|
||||
## Lessons Learned
|
||||
|
||||
### Templates and parameters
|
||||
|
||||
They are great but evil. If they are used as part of determining the steps of a build they must resolve before the build starts. They can not use variables set in a yml or determined as part of a matrix. If they are used in a step then they can be bound late.
|
||||
|
||||
### File matching patterns
|
||||
|
||||
On Linux this can hang if there are too many files in the search list.
|
||||
|
||||
### Templates and file splitting
|
||||
|
||||
Suggestion is to do one big yaml file that does what you want for one of your targets. Then do the second one and find the deltas. From that you can start to figure out the right split of files, steps, jobs.
|
||||
|
||||
### Conditional steps
|
||||
|
||||
If you want the step to show up in the log but not run, use a step conditional. This is great when a platform doesn't currently support a feature but you want the builders to know that the features exists and maybe someday it will.
|
||||
|
||||
If you want the step to not show up use a template step conditional wrapper. Beware this will be evaluated early (at build start). This can hide things not needed on a given OS for example.
|
@ -1,26 +0,0 @@
|
||||
## @file
|
||||
# Azure Pipeline build file for a build using ubuntu and GCC5
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# Copyright (c) 2020, Hewlett Packard Enterprise Development LP. All rights reserved.<BR>
|
||||
# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
trigger:
|
||||
- master
|
||||
- stable/*
|
||||
pr:
|
||||
- master
|
||||
- stable/*
|
||||
|
||||
variables:
|
||||
- template: templates/defaults.yml
|
||||
|
||||
jobs:
|
||||
- template: templates/pr-gate-build-job.yml
|
||||
parameters:
|
||||
tool_chain_tag: 'GCC5'
|
||||
vm_image: 'ubuntu-22.04'
|
||||
container: ${{ variables.default_linux_image }}
|
||||
arch_list: "IA32,X64,ARM,AARCH64,RISCV64,LOONGARCH64"
|
||||
usePythonVersion: '' # use Python from the container image
|
@ -1,36 +0,0 @@
|
||||
## @file
|
||||
# Azure Pipielines YML file that evalues the patch series in a PR using the
|
||||
# python script BaseTools/Scripts/PatchCheck.py.
|
||||
#
|
||||
# NOTE: This example monitors pull requests against the edk2-ci branch. Most
|
||||
# environments would replace 'edk2-ci' with 'master'.
|
||||
#
|
||||
# Copyright (c) 2019 - 2020, Intel Corporation. All rights reserved.<BR>
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
#
|
||||
# https://github.com/tianocore
|
||||
#
|
||||
##
|
||||
|
||||
trigger: none
|
||||
|
||||
pr:
|
||||
- master
|
||||
- stable/*
|
||||
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
|
||||
steps:
|
||||
- checkout: self
|
||||
clean: true
|
||||
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: '3.12'
|
||||
architecture: 'x64'
|
||||
|
||||
- script: |
|
||||
git fetch origin $(System.PullRequest.TargetBranch):$(System.PullRequest.TargetBranch)
|
||||
python BaseTools/Scripts/PatchCheck.py $(System.PullRequest.TargetBranch)..$(System.PullRequest.SourceCommitId)
|
||||
displayName: 'Use PatchCheck.py to verify patch series in pull request'
|
@ -1,29 +0,0 @@
|
||||
## @file
|
||||
# Azure Pipeline build file for a build using Windows and VS2019
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
trigger:
|
||||
- master
|
||||
- stable/*
|
||||
|
||||
pr:
|
||||
- master
|
||||
- stable/*
|
||||
|
||||
variables:
|
||||
- template: templates/defaults.yml
|
||||
|
||||
jobs:
|
||||
- template: templates/pr-gate-build-job.yml
|
||||
parameters:
|
||||
tool_chain_tag: 'VS2019'
|
||||
vm_image: 'windows-2019'
|
||||
arch_list: "IA32,X64"
|
||||
usePythonVersion: ${{ variables.default_python_version }}
|
||||
extra_install_step:
|
||||
- powershell: choco install opencppcoverage; Write-Host "##vso[task.prependpath]C:\Program Files\OpenCppCoverage"
|
||||
displayName: Install Code Coverage Tool
|
||||
condition: and(gt(variables.pkg_count, 0), succeeded())
|
||||
|
@ -1,59 +0,0 @@
|
||||
# CI Templates
|
||||
|
||||
This folder contains azure pipeline yml templates for "Core" and "Platform" Continuous Integration and PR validation.
|
||||
|
||||
## Common CI templates
|
||||
|
||||
### basetools-build-steps.yml
|
||||
|
||||
This template compiles the Edk2 basetools from source. The steps in this template are
|
||||
conditional and will only run if variable `pkg_count` is greater than 0.
|
||||
|
||||
It also has two conditional steps only used when the toolchain contains GCC. These two steps
|
||||
use `apt` to update the system packages and add those necessary for Edk2 builds.
|
||||
|
||||
## Core CI templates
|
||||
|
||||
### pr-gate-build-job.yml
|
||||
|
||||
This templates contains the jobs and most importantly the matrix of which packages and
|
||||
targets to run for Core CI.
|
||||
|
||||
### pr-gate-steps.yml
|
||||
|
||||
This template is the main Core CI template. It controls all the steps run and is responsible for most functionality of the Core CI process. This template sets
|
||||
the `pkg_count` variable using the `stuart_pr_eval` tool when the
|
||||
build type is "pull request"
|
||||
|
||||
### spell-check-prereq-steps.yml
|
||||
|
||||
This template installs the node based tools used by the spell checker plugin. The steps
|
||||
in this template are conditional and will only run if variable `pkg_count` is greater than 0.
|
||||
|
||||
## Platform CI templates
|
||||
|
||||
### platform-build-run-steps.yml
|
||||
|
||||
This template makes heavy use of pytools to build and run a platform in the Edk2 repo
|
||||
|
||||
Also uses basetools-build-steps.yml to compile basetools
|
||||
|
||||
#### Special Notes
|
||||
|
||||
* For a build type of pull request it will conditionally build if the patches change files that impact the platform.
|
||||
* uses `stuart_pr_eval` to determine impact
|
||||
* For manual builds or CI builds it will always build the platform
|
||||
* It compiles basetools from source
|
||||
* Will use `stuart_build --FlashOnly` to attempt to run the built image if the `Run` parameter is set.
|
||||
* See the parameters block for expected configuration options
|
||||
* Parameter `extra_install_step` allows the caller to insert extra steps. This is useful if additional dependencies, tools, or other things need to be installed. Here is an example of installing qemu on Windows.
|
||||
|
||||
``` yaml
|
||||
steps:
|
||||
- template: ../../.azurepipelines/templates/build-run-steps.yml
|
||||
parameters:
|
||||
extra_install_step:
|
||||
- powershell: choco install qemu; Write-Host "##vso[task.prependpath]c:\Program Files\qemu"
|
||||
displayName: Install QEMU and Set QEMU on path # friendly name displayed in the UI
|
||||
condition: and(gt(variables.pkg_count, 0), succeeded())
|
||||
```
|
@ -1,28 +0,0 @@
|
||||
## @file
|
||||
# File templates/basetools-build-job.yml
|
||||
#
|
||||
# template file to build basetools
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
parameters:
|
||||
tool_chain_tag: ''
|
||||
|
||||
steps:
|
||||
- task: CmdLine@1
|
||||
displayName: Build Base Tools from source
|
||||
inputs:
|
||||
filename: python
|
||||
arguments: BaseTools/Edk2ToolsBuild.py -t ${{ parameters.tool_chain_tag }}
|
||||
condition: and(gt(variables.pkg_count, 0), succeeded())
|
||||
|
||||
- task: CopyFiles@2
|
||||
displayName: "Copy base tools build log"
|
||||
inputs:
|
||||
targetFolder: '$(Build.ArtifactStagingDirectory)'
|
||||
SourceFolder: 'BaseTools/BaseToolsBuild'
|
||||
contents: |
|
||||
BASETOOLS_BUILD*.*
|
||||
flattenFolders: true
|
||||
condition: and(gt(variables.pkg_count, 0), succeededOrFailed())
|
@ -1,12 +0,0 @@
|
||||
## @file
|
||||
# File templates/default.yml
|
||||
#
|
||||
# template file containing common default values
|
||||
#
|
||||
# Copyright (c) Red Hat, Inc.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
|
||||
variables:
|
||||
default_python_version: "3.12"
|
||||
default_linux_image: "ghcr.io/tianocore/containers/fedora-37-test:a0dd931"
|
@ -1,150 +0,0 @@
|
||||
|
||||
## @file
|
||||
# File steps.yml
|
||||
#
|
||||
# template file containing the steps to build
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
|
||||
parameters:
|
||||
- name: tool_chain_tag
|
||||
type: string
|
||||
default: ''
|
||||
- name: build_pkg
|
||||
type: string
|
||||
default: ''
|
||||
- name: build_target
|
||||
type: string
|
||||
default: ''
|
||||
- name: build_arch
|
||||
type: string
|
||||
default: ''
|
||||
- name: build_file
|
||||
type: string
|
||||
default: ''
|
||||
- name: build_flags
|
||||
type: string
|
||||
default: ''
|
||||
- name: run_flags
|
||||
type: string
|
||||
default: ''
|
||||
|
||||
- name: extra_install_step
|
||||
type: stepList
|
||||
default: []
|
||||
- name: usePythonVersion
|
||||
type: string
|
||||
default: ''
|
||||
|
||||
steps:
|
||||
- bash: |
|
||||
echo "##vso[task.prependpath]${HOME}/.local/bin"
|
||||
echo "new PATH=${PATH}"
|
||||
displayName: Set PATH
|
||||
condition: eq('${{ parameters.tool_chain_tag }}', 'GCC5')
|
||||
|
||||
- checkout: self
|
||||
clean: true
|
||||
fetchDepth: 1
|
||||
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: ${{ parameters.usePythonVersion }}
|
||||
architecture: "x64"
|
||||
condition: ne('${{ parameters.usePythonVersion }}', '')
|
||||
|
||||
- script: pip install -r pip-requirements.txt --upgrade
|
||||
displayName: 'Install/Upgrade pip modules'
|
||||
|
||||
# Set default
|
||||
- bash: echo "##vso[task.setvariable variable=pkg_count]${{ 1 }}"
|
||||
|
||||
# Fetch the target branch so that pr_eval can diff them.
|
||||
# Seems like azure pipelines/github changed checkout process in nov 2020.
|
||||
- script: git fetch origin $(System.PullRequest.targetBranch)
|
||||
displayName: fetch target branch
|
||||
condition: eq(variables['Build.Reason'], 'PullRequest')
|
||||
|
||||
# trim the package list if this is a PR
|
||||
- task: CmdLine@1
|
||||
displayName: Check if ${{ parameters.build_pkg }} need testing
|
||||
inputs:
|
||||
filename: stuart_pr_eval
|
||||
arguments: -c ${{ parameters.build_file }} -t ${{ parameters.build_target}} -a ${{ parameters.build_arch}} --pr-target origin/$(System.PullRequest.targetBranch) --output-count-format-string "##vso[task.setvariable variable=pkg_count;isOutpout=true]{pkgcount}"
|
||||
condition: eq(variables['Build.Reason'], 'PullRequest')
|
||||
|
||||
# Setup repo
|
||||
- task: CmdLine@1
|
||||
displayName: Setup
|
||||
inputs:
|
||||
filename: stuart_setup
|
||||
arguments: -c ${{ parameters.build_file }} TOOL_CHAIN_TAG=${{ parameters.tool_chain_tag}} -t ${{ parameters.build_target}} -a ${{ parameters.build_arch}} ${{ parameters.build_flags}}
|
||||
condition: and(gt(variables.pkg_count, 0), succeeded())
|
||||
|
||||
# Stuart Update
|
||||
- task: CmdLine@1
|
||||
displayName: Update
|
||||
inputs:
|
||||
filename: stuart_update
|
||||
arguments: -c ${{ parameters.build_file }} TOOL_CHAIN_TAG=${{ parameters.tool_chain_tag}} -t ${{ parameters.build_target}} -a ${{ parameters.build_arch}} ${{ parameters.build_flags}}
|
||||
condition: and(gt(variables.pkg_count, 0), succeeded())
|
||||
|
||||
# build basetools
|
||||
# do this after setup and update so that code base dependencies
|
||||
# are all resolved.
|
||||
- template: basetools-build-steps.yml
|
||||
parameters:
|
||||
tool_chain_tag: ${{ parameters.tool_chain_tag }}
|
||||
|
||||
# Potential Extra steps
|
||||
- ${{ parameters.extra_install_step }}
|
||||
|
||||
# Build
|
||||
- task: CmdLine@1
|
||||
displayName: Build
|
||||
inputs:
|
||||
filename: stuart_build
|
||||
arguments: -c ${{ parameters.build_file }} TOOL_CHAIN_TAG=${{ parameters.tool_chain_tag}} TARGET=${{ parameters.build_target}} -a ${{ parameters.build_arch}} ${{ parameters.build_flags}}
|
||||
condition: and(gt(variables.pkg_count, 0), succeeded())
|
||||
|
||||
# Run
|
||||
- task: CmdLine@1
|
||||
displayName: Run to shell
|
||||
inputs:
|
||||
filename: stuart_build
|
||||
arguments: -c ${{ parameters.build_file }} TOOL_CHAIN_TAG=${{ parameters.tool_chain_tag}} TARGET=${{ parameters.build_target}} -a ${{ parameters.build_arch}} ${{ parameters.build_flags}} ${{ parameters.run_flags }} --FlashOnly
|
||||
condition: and(and(gt(variables.pkg_count, 0), succeeded()), eq(variables['Run'], true))
|
||||
timeoutInMinutes: 2
|
||||
|
||||
# Copy the build logs to the artifact staging directory
|
||||
- task: CopyFiles@2
|
||||
displayName: "Copy build logs"
|
||||
inputs:
|
||||
targetFolder: "$(Build.ArtifactStagingDirectory)"
|
||||
SourceFolder: "Build"
|
||||
contents: |
|
||||
BUILDLOG_*.txt
|
||||
BUILDLOG_*.md
|
||||
CI_*.txt
|
||||
CI_*.md
|
||||
CISETUP.txt
|
||||
SETUPLOG.txt
|
||||
UPDATE_LOG.txt
|
||||
PREVALLOG.txt
|
||||
TestSuites.xml
|
||||
**/BUILD_TOOLS_REPORT.html
|
||||
**/OVERRIDELOG.TXT
|
||||
BASETOOLS_BUILD*.*
|
||||
flattenFolders: true
|
||||
condition: succeededOrFailed()
|
||||
|
||||
# Publish build artifacts to Azure Artifacts/TFS or a file share
|
||||
- task: PublishBuildArtifacts@1
|
||||
continueOnError: true
|
||||
displayName: "Publish build logs"
|
||||
inputs:
|
||||
pathtoPublish: "$(Build.ArtifactStagingDirectory)"
|
||||
artifactName: "Build Logs $(System.JobName)"
|
||||
condition: succeededOrFailed()
|
@ -1,132 +0,0 @@
|
||||
## @file
|
||||
# File templates/pr-gate-build-job.yml
|
||||
#
|
||||
# template file used to build supported packages.
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# Copyright (c) 2020 - 2021, ARM Limited. All rights reserved.<BR>
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
|
||||
parameters:
|
||||
tool_chain_tag: ''
|
||||
vm_image: ''
|
||||
arch_list: ''
|
||||
extra_install_step: []
|
||||
usePythonVersion: ''
|
||||
container: ''
|
||||
|
||||
# Build step
|
||||
jobs:
|
||||
|
||||
- job: Build_${{ parameters.tool_chain_tag }}
|
||||
timeoutInMinutes: 120
|
||||
#Use matrix to speed up the build process
|
||||
strategy:
|
||||
matrix:
|
||||
${{ if eq(parameters.tool_chain_tag, 'GCC5') }}:
|
||||
TARGET_GCC_ONLY:
|
||||
Build.Pkgs: 'EmbeddedPkg'
|
||||
Build.Targets: 'DEBUG,RELEASE,NO-TARGET,NOOPT'
|
||||
TARGET_ARM_ARMPLATFORM:
|
||||
Build.Pkgs: 'ArmPkg,ArmPlatformPkg'
|
||||
Build.Targets: 'DEBUG,RELEASE,NO-TARGET,NOOPT'
|
||||
TARGET_MDE_CPU:
|
||||
Build.Pkgs: 'MdePkg,UefiCpuPkg'
|
||||
Build.Targets: 'DEBUG,RELEASE,NO-TARGET,NOOPT'
|
||||
TARGET_MDEMODULE_DEBUG:
|
||||
Build.Pkgs: 'MdeModulePkg'
|
||||
Build.Targets: 'DEBUG,NOOPT'
|
||||
TARGET_MDEMODULE_RELEASE:
|
||||
Build.Pkgs: 'MdeModulePkg'
|
||||
Build.Targets: 'RELEASE,NO-TARGET'
|
||||
TARGET_NETWORK:
|
||||
Build.Pkgs: 'NetworkPkg,RedfishPkg'
|
||||
Build.Targets: 'DEBUG,RELEASE,NO-TARGET,NOOPT'
|
||||
TARGET_OTHER:
|
||||
Build.Pkgs: 'PcAtChipsetPkg,PrmPkg,ShellPkg,SourceLevelDebugPkg,StandaloneMmPkg,SignedCapsulePkg'
|
||||
Build.Targets: 'DEBUG,RELEASE,NO-TARGET,NOOPT'
|
||||
TARGET_FMP_FAT_TEST:
|
||||
Build.Pkgs: 'FmpDevicePkg,FatPkg,UnitTestFrameworkPkg,DynamicTablesPkg'
|
||||
Build.Targets: 'DEBUG,RELEASE,NO-TARGET,NOOPT'
|
||||
TARGET_CRYPTO_DEBUG:
|
||||
Build.Pkgs: 'CryptoPkg'
|
||||
Build.Targets: 'DEBUG,NOOPT'
|
||||
TARGET_CRYPTO_RELEASE:
|
||||
Build.Pkgs: 'CryptoPkg'
|
||||
Build.Targets: 'RELEASE,NO-TARGET'
|
||||
TARGET_FSP:
|
||||
Build.Pkgs: 'IntelFsp2Pkg,IntelFsp2WrapperPkg'
|
||||
Build.Targets: 'DEBUG,RELEASE,NO-TARGET,NOOPT'
|
||||
TARGET_SECURITY:
|
||||
Build.Pkgs: 'SecurityPkg'
|
||||
Build.Targets: 'DEBUG,RELEASE,NO-TARGET,NOOPT'
|
||||
TARGET_UEFIPAYLOAD:
|
||||
Build.Pkgs: 'UefiPayloadPkg'
|
||||
Build.Targets: 'DEBUG,RELEASE,NO-TARGET,NOOPT'
|
||||
TARGET_PLATFORMS:
|
||||
# For Platforms only check code. Leave it to Platform CI
|
||||
# to build them.
|
||||
Build.Pkgs: 'ArmVirtPkg,EmulatorPkg,OvmfPkg'
|
||||
Build.Targets: 'NO-TARGET,NOOPT'
|
||||
|
||||
workspace:
|
||||
clean: all
|
||||
|
||||
pool:
|
||||
vmImage: ${{ parameters.vm_image }}
|
||||
|
||||
${{ if not(eq(parameters.container, '')) }}:
|
||||
container: ${{ parameters.container }}
|
||||
|
||||
steps:
|
||||
- template: pr-gate-steps.yml
|
||||
parameters:
|
||||
tool_chain_tag: ${{ parameters.tool_chain_tag }}
|
||||
build_pkgs: $(Build.Pkgs)
|
||||
build_targets: $(Build.Targets)
|
||||
build_archs: ${{ parameters.arch_list }}
|
||||
usePythonVersion: ${{ parameters.usePythonVersion }}
|
||||
extra_install_step: ${{ parameters.extra_install_step }}
|
||||
|
||||
- job: Build_${{ parameters.tool_chain_tag }}_TARGET_CODE_COVERAGE
|
||||
dependsOn: Build_${{ parameters.tool_chain_tag }}
|
||||
workspace:
|
||||
clean: all
|
||||
|
||||
pool:
|
||||
vmImage: 'windows-2019'
|
||||
|
||||
steps:
|
||||
- checkout: self
|
||||
clean: true
|
||||
fetchDepth: 1
|
||||
submodules: true
|
||||
|
||||
- task: DownloadPipelineArtifact@2
|
||||
displayName: 'Download Build Artifacts'
|
||||
inputs:
|
||||
buildType: 'current'
|
||||
targetPath: '$(Build.ArtifactStagingDirectory)'
|
||||
|
||||
- powershell: Write-Host "##vso[task.setvariable variable=is_code_coverage]0"
|
||||
displayName: Give default value for whether CodeCoverage or not
|
||||
|
||||
- powershell: if (Test-Path -Path $(Build.ArtifactStagingDirectory)/**/coverage.xml) {Write-Host "##vso[task.setvariable variable=is_code_coverage]1"}
|
||||
displayName: Check coverage.xml exist or not
|
||||
|
||||
- task: CmdLine@2
|
||||
displayName: Create code coverage report
|
||||
inputs:
|
||||
script: |
|
||||
dotnet tool install -g dotnet-reportgenerator-globaltool
|
||||
reportgenerator -reports:$(Build.ArtifactStagingDirectory)/**/coverage.xml -targetdir:$(Build.ArtifactStagingDirectory)/Coverage -reporttypes:Cobertura -filefilters:-*Build*;-*UnitTest*;-*Mock*;-*usr*
|
||||
condition: eq(variables.is_code_coverage, 1)
|
||||
|
||||
- task: PublishCodeCoverageResults@1
|
||||
displayName: 'Publish code coverage'
|
||||
inputs:
|
||||
codeCoverageTool: Cobertura
|
||||
summaryFileLocation: '$(Build.ArtifactStagingDirectory)/Coverage/Cobertura.xml'
|
||||
condition: eq(variables.is_code_coverage, 1)
|
||||
|
@ -1,150 +0,0 @@
|
||||
## @file
|
||||
# File templates/pr-gate-steps.yml
|
||||
#
|
||||
# template file containing the steps to build
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
|
||||
parameters:
|
||||
tool_chain_tag: ''
|
||||
build_pkgs: ''
|
||||
build_targets: ''
|
||||
build_archs: ''
|
||||
usePythonVersion: ''
|
||||
extra_install_step: []
|
||||
|
||||
steps:
|
||||
- bash: |
|
||||
echo "##vso[task.prependpath]${HOME}/.local/bin"
|
||||
echo "new PATH=${PATH}"
|
||||
displayName: Set PATH
|
||||
condition: eq('${{ parameters.tool_chain_tag }}', 'GCC5')
|
||||
|
||||
- checkout: self
|
||||
clean: true
|
||||
fetchDepth: 1
|
||||
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: ${{ parameters.usePythonVersion }}
|
||||
architecture: "x64"
|
||||
condition: ne('${{ parameters.usePythonVersion }}', '')
|
||||
|
||||
- script: pip install -r pip-requirements.txt --upgrade
|
||||
displayName: 'Install/Upgrade pip modules'
|
||||
|
||||
# Set default
|
||||
- bash: |
|
||||
echo "##vso[task.setvariable variable=pkgs_to_build]${{ parameters.build_pkgs }}"
|
||||
echo "##vso[task.setvariable variable=pkg_count]${{ 1 }}"
|
||||
|
||||
# Fetch the target branch so that pr_eval can diff them.
|
||||
# Seems like azure pipelines/github changed checkout process in nov 2020.
|
||||
- script: git fetch origin $(System.PullRequest.targetBranch)
|
||||
displayName: fetch target branch
|
||||
condition: eq(variables['Build.Reason'], 'PullRequest')
|
||||
|
||||
- ${{ parameters.extra_install_step }}
|
||||
|
||||
# trim the package list if this is a PR
|
||||
- task: CmdLine@1
|
||||
displayName: Check if ${{ parameters.build_pkgs }} need testing
|
||||
inputs:
|
||||
filename: stuart_pr_eval
|
||||
arguments: -c .pytool/CISettings.py -p ${{ parameters.build_pkgs }} --pr-target origin/$(System.PullRequest.targetBranch) --output-csv-format-string "##vso[task.setvariable variable=pkgs_to_build;isOutpout=true]{pkgcsv}" --output-count-format-string "##vso[task.setvariable variable=pkg_count;isOutpout=true]{pkgcount}"
|
||||
condition: eq(variables['Build.Reason'], 'PullRequest')
|
||||
|
||||
# install spell check prereqs
|
||||
- template: spell-check-prereq-steps.yml
|
||||
|
||||
# Build repo
|
||||
- task: CmdLine@1
|
||||
displayName: Setup ${{ parameters.build_pkgs }} ${{ parameters.build_archs}}
|
||||
inputs:
|
||||
filename: stuart_setup
|
||||
arguments: -c .pytool/CISettings.py -p $(pkgs_to_build) -t ${{ parameters.build_targets}} -a ${{ parameters.build_archs}} TOOL_CHAIN_TAG=${{ parameters.tool_chain_tag}}
|
||||
condition: and(gt(variables.pkg_count, 0), succeeded())
|
||||
|
||||
- task: CmdLine@1
|
||||
displayName: Update ${{ parameters.build_pkgs }} ${{ parameters.build_archs}}
|
||||
inputs:
|
||||
filename: stuart_update
|
||||
arguments: -c .pytool/CISettings.py -p $(pkgs_to_build) -t ${{ parameters.build_targets}} -a ${{ parameters.build_archs}} TOOL_CHAIN_TAG=${{ parameters.tool_chain_tag}}
|
||||
condition: and(gt(variables.pkg_count, 0), succeeded())
|
||||
|
||||
# build basetools
|
||||
# do this after setup and update so that code base dependencies
|
||||
# are all resolved.
|
||||
- template: basetools-build-steps.yml
|
||||
parameters:
|
||||
tool_chain_tag: ${{ parameters.tool_chain_tag }}
|
||||
|
||||
- task: CmdLine@1
|
||||
displayName: Build and Test ${{ parameters.build_pkgs }} ${{ parameters.build_archs}}
|
||||
inputs:
|
||||
filename: stuart_ci_build
|
||||
arguments: -c .pytool/CISettings.py -p $(pkgs_to_build) -t ${{ parameters.build_targets}} -a ${{ parameters.build_archs}} TOOL_CHAIN_TAG=${{ parameters.tool_chain_tag}}
|
||||
condition: and(gt(variables.pkg_count, 0), succeeded())
|
||||
|
||||
# Publish Test Results to Azure Pipelines/TFS
|
||||
- task: PublishTestResults@2
|
||||
displayName: 'Publish junit test results'
|
||||
continueOnError: true
|
||||
condition: and( succeededOrFailed(),gt(variables.pkg_count, 0))
|
||||
inputs:
|
||||
testResultsFormat: 'JUnit' # Options: JUnit, NUnit, VSTest, xUnit
|
||||
testResultsFiles: 'Build/TestSuites.xml'
|
||||
#searchFolder: '$(System.DefaultWorkingDirectory)' # Optional
|
||||
mergeTestResults: true # Optional
|
||||
testRunTitle: $(System.JobName) # Optional
|
||||
#buildPlatform: # Optional
|
||||
#buildConfiguration: # Optional
|
||||
publishRunAttachments: true # Optional
|
||||
|
||||
# Publish Test Results to Azure Pipelines/TFS
|
||||
- task: PublishTestResults@2
|
||||
displayName: 'Publish host based test results for $(System.JobName)'
|
||||
continueOnError: true
|
||||
condition: and( succeededOrFailed(), gt(variables.pkg_count, 0))
|
||||
inputs:
|
||||
testResultsFormat: 'JUnit' # Options: JUnit, NUnit, VSTest, xUnit
|
||||
testResultsFiles: 'Build/**/*.result.xml'
|
||||
#searchFolder: '$(System.DefaultWorkingDirectory)' # Optional
|
||||
mergeTestResults: false # Optional
|
||||
testRunTitle: ${{ parameters.build_pkgs }} # Optional
|
||||
#buildPlatform: # Optional
|
||||
#buildConfiguration: # Optional
|
||||
publishRunAttachments: true # Optional
|
||||
|
||||
# Copy the build logs to the artifact staging directory
|
||||
- task: CopyFiles@2
|
||||
displayName: "Copy build logs"
|
||||
inputs:
|
||||
targetFolder: '$(Build.ArtifactStagingDirectory)'
|
||||
SourceFolder: 'Build'
|
||||
contents: |
|
||||
BUILDLOG_*.txt
|
||||
BUILDLOG_*.md
|
||||
CI_*.txt
|
||||
CI_*.md
|
||||
CISETUP.txt
|
||||
SETUPLOG.txt
|
||||
UPDATE_LOG.txt
|
||||
PREVALLOG.txt
|
||||
TestSuites.xml
|
||||
**/BUILD_TOOLS_REPORT.html
|
||||
**/OVERRIDELOG.TXT
|
||||
coverage.xml
|
||||
flattenFolders: true
|
||||
condition: succeededOrFailed()
|
||||
|
||||
# Publish build artifacts to Azure Artifacts/TFS or a file share
|
||||
- task: PublishBuildArtifacts@1
|
||||
continueOnError: true
|
||||
displayName: "Publish build logs"
|
||||
inputs:
|
||||
pathtoPublish: '$(Build.ArtifactStagingDirectory)'
|
||||
artifactName: 'Build Logs $(System.JobName)'
|
||||
condition: succeededOrFailed()
|
@ -1,22 +0,0 @@
|
||||
## @file
|
||||
# File templates/spell-check-prereq-steps.yml
|
||||
#
|
||||
# template file used to install spell checking prerequisits
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
|
||||
parameters:
|
||||
none: ''
|
||||
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: '14.x'
|
||||
#checkLatest: false # Optional
|
||||
condition: and(gt(variables.pkg_count, 0), succeeded())
|
||||
|
||||
- script: npm install -g cspell@5.20.0
|
||||
displayName: 'Install cspell npm'
|
||||
condition: and(gt(variables.pkg_count, 0), succeeded())
|
@ -1,16 +0,0 @@
|
||||
{
|
||||
"image": "ghcr.io/tianocore/containers/fedora-35-dev:latest",
|
||||
"postCreateCommand": "git config --global --add safe.directory * && pip install --upgrade -r pip-requirements.txt",
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"extensions": [
|
||||
"DavidAnson.vscode-markdownlint",
|
||||
"ms-azuretools.vscode-docker",
|
||||
"ms-vscode-remote.remote-containers",
|
||||
"ms-vscode.cpptools",
|
||||
"walonli.edk2-vscode",
|
||||
"zachflower.uncrustify"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
@ -1,31 +0,0 @@
|
||||
# EditorConfig file: https://EditorConfig.org
|
||||
|
||||
root = true
|
||||
|
||||
[*]
|
||||
charset = latin1
|
||||
end_of_line = crlf
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
|
||||
[*.py]
|
||||
charset = utf-8
|
||||
indent_style = space
|
||||
indent_size = 4
|
||||
|
||||
[*.sh]
|
||||
end_of_line = lf
|
||||
|
||||
[.gitattributes]
|
||||
end_of_line = lf
|
||||
|
||||
[.mailmap]
|
||||
charset = utf-8
|
||||
|
||||
[Maintainers.txt]
|
||||
charset = utf-8
|
||||
|
||||
[Makefile,GNUmakefile]
|
||||
indent_style = tab
|
@ -1,56 +0,0 @@
|
||||
# PrmPkg: Apply uncrustify changes
|
||||
a298a84478053872ed9da660a75f182ce81b8ddc
|
||||
# UnitTestFrameworkPkg: Apply uncrustify changes
|
||||
7c0ad2c33810ead45b7919f8f8d0e282dae52e71
|
||||
# UefiPayloadPkg: Apply uncrustify changes
|
||||
e5efcf8be8a1bf59aa98875787475e3144ee4cef
|
||||
# UefiCpuPkg: Apply uncrustify changes
|
||||
053e878bfb5c9d5eca779789b62891add30b14ba
|
||||
# StandaloneMmPkg: Apply uncrustify changes
|
||||
91415a36ae7aaeabb2bbab3762f39544f9aed683
|
||||
# SourceLevelDebugPkg: Apply uncrustify changes
|
||||
c1e126b1196de75e0a4cda21e4551ea9bb05e059
|
||||
# SignedCapsulePkg: Apply uncrustify changes
|
||||
b87864896714cf3062a7bc6d577d8fbd62d105e5
|
||||
# ShellPkg: Apply uncrustify changes
|
||||
47d20b54f9a65b08aa602a1866c1b59a69088dfc
|
||||
# SecurityPkg: Apply uncrustify changes
|
||||
c411b485b63a671a1e276700cff025c73997233c
|
||||
# RedfishPkg: Apply uncrustify changes
|
||||
39de741e2dcb8f11e9b4438e37224797643d8451
|
||||
# PcAtChipsetPkg: Apply uncrustify changes
|
||||
5220bd211df890f2672c23c050082862cd1e82d6
|
||||
# OvmfPkg: Apply uncrustify changes
|
||||
ac0a286f4d747a4c6c603a7b225917293cbe1e9f
|
||||
# NetworkPkg: Apply uncrustify changes
|
||||
d1050b9dff1cace252aff86630bfdb59dff5f507
|
||||
# MdePkg: Apply uncrustify changes
|
||||
2f88bd3a1296c522317f1c21377876de63de5be7
|
||||
# MdeModulePkg: Apply uncrustify changes
|
||||
1436aea4d5707e672672a11bda72be2c63c936c3
|
||||
# IntelFsp2WrapperPkg: Apply uncrustify changes
|
||||
7c7184e201a90a1d2376e615e55e3f4074731468
|
||||
# IntelFsp2Pkg: Apply uncrustify changes
|
||||
111f2228ddf487b0ac3491e416bb3dcdcfa4f979
|
||||
# FmpDevicePkg: Apply uncrustify changes
|
||||
45ce0a67bb4ee80f27da93777c623f51f344f23b
|
||||
# FatPkg: Apply uncrustify changes
|
||||
bcdcc4160d7460c46c08c9395aae81be44ef23a9
|
||||
# EmulatorPkg: Apply uncrustify changes
|
||||
a550d468a6ca577d9e9c57a0eafcf2fc9fbb8c97
|
||||
# EmbeddedPkg: Apply uncrustify changes
|
||||
e7108d0e9655b1795c94ac372b0449f28dd907df
|
||||
# DynamicTablesPkg: Apply uncrustify changes
|
||||
731c67e1d77b7741a91762d17659fc9fbcb9e305
|
||||
# CryptoPkg: Apply uncrustify changes
|
||||
7c342378317039e632d9a1a5d4cf7c21aec8cb7a
|
||||
# ArmVirtPkg: Apply uncrustify changes
|
||||
2b16a4fb91b9b31c0d152588f5ac51080c6c0763
|
||||
# ArmPlatformPkg: Apply uncrustify changes
|
||||
40b0b23ed34f48c26d711d3e4613a4bb35eeadff
|
||||
# ArmPkg: Apply uncrustify changes
|
||||
429309e0c6b74792d679681a8edd0d5ae0ff850c
|
||||
# EmulatorPkg: Format with Uncrustify 73.0.8
|
||||
972e3b0b9d67ef2847c9c1c89e606e6074a7ddda
|
||||
# OvmfPkg: Format with Uncrustify 73.0.8
|
||||
0e9ce9146a6dc50a35488e3a4a7a2a4bbaf1eb1c
|
24
.github/ISSUE_TEMPLATE/config.yml
vendored
24
.github/ISSUE_TEMPLATE/config.yml
vendored
@ -1,24 +0,0 @@
|
||||
## @file
|
||||
# GitHub issue configuration file.
|
||||
#
|
||||
# This file is meant to direct contributors familiar with GitHub's issue tracker
|
||||
# to the external resources used by TianoCore.
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
|
||||
blank_issues_enabled: false
|
||||
contact_links:
|
||||
- name: Bugs and Feature Requests
|
||||
url: https://bugzilla.tianocore.org/
|
||||
about: Submit bug reports and feature requests here
|
||||
- name: Reporting Security Issues
|
||||
url: https://github.com/tianocore/tianocore.github.io/wiki/Reporting-Security-Issues
|
||||
about: Read the wiki page that describes the process here
|
||||
- name: EDK II Development Mailing List
|
||||
url: https://edk2.groups.io/g/devel
|
||||
about: Submit code patches and ask questions on the mailing list (devel@edk2.groups.io)
|
||||
- name: EDK II Discussions
|
||||
url: https://github.com/tianocore/edk2/discussions
|
||||
about: You can also reach out on the Discussion section of this repository
|
36
.github/dependabot.yml
vendored
36
.github/dependabot.yml
vendored
@ -1,36 +0,0 @@
|
||||
## @file
|
||||
# Dependabot configuration file to enable GitHub services for managing and updating
|
||||
# dependencies.
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
#
|
||||
# Please see the documentation for all configuration options:
|
||||
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
|
||||
##
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
commit-message:
|
||||
prefix: "pip"
|
||||
reviewers:
|
||||
- "makubacki"
|
||||
- "mdkinney"
|
||||
- "spbrogan"
|
||||
rebase-strategy: "disabled"
|
||||
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
day: "monday"
|
||||
commit-message:
|
||||
prefix: "GitHub Action"
|
||||
reviewers:
|
||||
- "makubacki"
|
||||
- "mdkinney"
|
||||
- "spbrogan"
|
||||
rebase-strategy: "disabled"
|
27
.github/pull_request_template.md
vendored
27
.github/pull_request_template.md
vendored
@ -1,27 +0,0 @@
|
||||
# Description
|
||||
|
||||
<_Include a description of the change and why this change was made._>
|
||||
|
||||
<_For each item, place an "x" in between `[` and `]` if true. Example: `[x]` (you can also check items in GitHub UI)_>
|
||||
|
||||
<_Create the PR as a Draft PR if it is only created to run CI checks._>
|
||||
|
||||
<_Delete lines in \<\> tags before creating the PR._>
|
||||
|
||||
- [ ] Breaking change?
|
||||
- **Breaking change** - Will this cause a break in build or boot behavior?
|
||||
- Examples: Add a new library class or move a module to a different repo.
|
||||
- [ ] Impacts security?
|
||||
- **Security** - Does the change have a direct security impact?
|
||||
- Examples: Crypto algorithm change or buffer overflow fix.
|
||||
- [ ] Includes tests?
|
||||
- **Tests** - Does the change include any explicit test code?
|
||||
- Examples: Unit tests or integration tests.
|
||||
|
||||
## How This Was Tested
|
||||
|
||||
<_Describe the test(s) that were run to verify the changes._>
|
||||
|
||||
## Integration Instructions
|
||||
|
||||
<_Describe how these changes should be integrated. Use N/A if nothing is required._>
|
361
.github/workflows/codeql.yml
vendored
361
.github/workflows/codeql.yml
vendored
@ -1,361 +0,0 @@
|
||||
# This workflow runs CodeQL against the repository.
|
||||
#
|
||||
# Results are uploaded to GitHub Code Scanning.
|
||||
#
|
||||
# Due to a known issue with the CodeQL extractor when building the edk2
|
||||
# codebase on Linux systems, only Windows agents are used for build with
|
||||
# the VS toolchain.
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
paths-ignore:
|
||||
- '!**.c'
|
||||
- '!**.h'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: windows-2019
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- Package: "ArmPkg"
|
||||
ArchList: "IA32,X64"
|
||||
- Package: "CryptoPkg"
|
||||
ArchList: "IA32"
|
||||
- Package: "CryptoPkg"
|
||||
ArchList: "X64"
|
||||
- Package: "DynamicTablesPkg"
|
||||
ArchList: "IA32,X64"
|
||||
- Package: "FatPkg"
|
||||
ArchList: "IA32,X64"
|
||||
- Package: "FmpDevicePkg"
|
||||
ArchList: "IA32,X64"
|
||||
- Package: "IntelFsp2Pkg"
|
||||
ArchList: "IA32,X64"
|
||||
- Package: "IntelFsp2WrapperPkg"
|
||||
ArchList: "IA32,X64"
|
||||
- Package: "MdeModulePkg"
|
||||
ArchList: "IA32"
|
||||
- Package: "MdeModulePkg"
|
||||
ArchList: "X64"
|
||||
- Package: "MdePkg"
|
||||
ArchList: "IA32,X64"
|
||||
- Package: "PcAtChipsetPkg"
|
||||
ArchList: "IA32,X64"
|
||||
- Package: "PrmPkg"
|
||||
ArchList: "IA32,X64"
|
||||
- Package: "SecurityPkg"
|
||||
ArchList: "IA32,X64"
|
||||
- Package: "ShellPkg"
|
||||
ArchList: "IA32,X64"
|
||||
- Package: "SourceLevelDebugPkg"
|
||||
ArchList: "IA32,X64"
|
||||
- Package: "StandaloneMmPkg"
|
||||
ArchList: "IA32,X64"
|
||||
- Package: "UefiCpuPkg"
|
||||
ArchList: "IA32,X64"
|
||||
- Package: "UnitTestFrameworkPkg"
|
||||
ArchList: "IA32,X64"
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
cache-dependency-path: 'pip-requirements.txt'
|
||||
|
||||
- name: Use Git Long Paths on Windows
|
||||
if: runner.os == 'Windows'
|
||||
shell: pwsh
|
||||
run: |
|
||||
git config --system core.longpaths true
|
||||
|
||||
- name: Install/Upgrade pip Modules
|
||||
run: pip install -r pip-requirements.txt --upgrade requests sarif-tools
|
||||
|
||||
- name: Determine CI Settings File Supported Operations
|
||||
id: get_ci_file_operations
|
||||
shell: python
|
||||
run: |
|
||||
import importlib
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from edk2toolext.invocables.edk2_ci_setup import CiSetupSettingsManager
|
||||
from edk2toolext.invocables.edk2_setup import SetupSettingsManager
|
||||
|
||||
# Find the repo CI Settings file
|
||||
ci_settings_file = list(Path(os.environ['GITHUB_WORKSPACE']).rglob('.pytool/CISettings.py'))
|
||||
|
||||
# Note: At this point, submodules have not been pulled, only one CI Settings file should exist
|
||||
if len(ci_settings_file) != 1 or not ci_settings_file[0].is_file():
|
||||
print("::error title=Workspace Error!::Failed to find CI Settings file!")
|
||||
sys.exit(1)
|
||||
|
||||
ci_settings_file = ci_settings_file[0]
|
||||
|
||||
# Try Finding the Settings class in the file
|
||||
module_name = 'ci_settings'
|
||||
|
||||
spec = importlib.util.spec_from_file_location(module_name, ci_settings_file)
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
spec.loader.exec_module(module)
|
||||
|
||||
try:
|
||||
settings = getattr(module, 'Settings')
|
||||
except AttributeError:
|
||||
print("::error title=Workspace Error!::Failed to find Settings class in CI Settings file!")
|
||||
sys.exit(1)
|
||||
|
||||
# Determine Which Operations Are Supported by the Settings Class
|
||||
ci_setup_supported = issubclass(settings, CiSetupSettingsManager)
|
||||
setup_supported = issubclass(settings, SetupSettingsManager)
|
||||
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
|
||||
print(f'ci_setup_supported={str(ci_setup_supported).lower()}', file=fh)
|
||||
print(f'setup_supported={str(setup_supported).lower()}', file=fh)
|
||||
|
||||
- name: Convert Arch to Log Format
|
||||
id: convert_arch_hyphen
|
||||
env:
|
||||
ARCH_LIST: ${{ matrix.ArchList }}
|
||||
shell: python
|
||||
run: |
|
||||
import os
|
||||
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
|
||||
print(f'arch_list={os.environ["ARCH_LIST"].replace(",", "-")}', file=fh)
|
||||
|
||||
- name: Setup
|
||||
if: steps.get_ci_file_operations.outputs.setup_supported == 'true'
|
||||
run: stuart_setup -c .pytool/CISettings.py -t DEBUG -a ${{ matrix.ArchList }} TOOL_CHAIN_TAG=VS2019
|
||||
|
||||
- name: Upload Setup Log As An Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
if: (success() || failure()) && steps.get_ci_file_operations.outputs.setup_supported == 'true'
|
||||
with:
|
||||
name: ${{ matrix.Package }}-${{ steps.convert_arch_hyphen.outputs.arch_list }}-Setup-Log
|
||||
path: |
|
||||
**/SETUPLOG.txt
|
||||
retention-days: 7
|
||||
if-no-files-found: ignore
|
||||
|
||||
- name: CI Setup
|
||||
if: steps.get_ci_file_operations.outputs.ci_setup_supported == 'true'
|
||||
run: stuart_ci_setup -c .pytool/CISettings.py -t DEBUG -a ${{ matrix.ArchList }} TOOL_CHAIN_TAG=VS2019
|
||||
|
||||
- name: Upload CI Setup Log As An Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
if: (success() || failure()) && steps.get_ci_file_operations.outputs.ci_setup_supported == 'true'
|
||||
with:
|
||||
name: ${{ matrix.Package }}-${{ steps.convert_arch_hyphen.outputs.arch_list }}-CI-Setup-Log
|
||||
path: |
|
||||
**/CISETUP.txt
|
||||
retention-days: 7
|
||||
if-no-files-found: ignore
|
||||
|
||||
- name: Update
|
||||
run: stuart_update -c .pytool/CISettings.py -t DEBUG -a ${{ matrix.ArchList }} TOOL_CHAIN_TAG=VS2019
|
||||
|
||||
- name: Upload Update Log As An Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
if: success() || failure()
|
||||
with:
|
||||
name: ${{ matrix.Package }}-${{ steps.convert_arch_hyphen.outputs.arch_list }}-Update-Log
|
||||
path: |
|
||||
**/UPDATE_LOG.txt
|
||||
retention-days: 7
|
||||
if-no-files-found: ignore
|
||||
|
||||
- name: Build Tools From Source
|
||||
run: python BaseTools/Edk2ToolsBuild.py -t VS2019
|
||||
|
||||
- name: Find CodeQL Plugin Directory
|
||||
id: find_dir
|
||||
shell: python
|
||||
run: |
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Find the plugin directory that contains the CodeQL plugin
|
||||
plugin_dir = list(Path(os.environ['GITHUB_WORKSPACE']).rglob('BaseTools/Plugin/CodeQL'))
|
||||
|
||||
# This should only be found once
|
||||
if len(plugin_dir) == 1:
|
||||
plugin_dir = str(plugin_dir[0])
|
||||
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
|
||||
print(f'codeql_plugin_dir={plugin_dir}', file=fh)
|
||||
else:
|
||||
print("::error title=Workspace Error!::Failed to find CodeQL plugin directory!")
|
||||
sys.exit(1)
|
||||
|
||||
- name: Get CodeQL CLI Cache Data
|
||||
id: cache_key_gen
|
||||
env:
|
||||
CODEQL_PLUGIN_DIR: ${{ steps.find_dir.outputs.codeql_plugin_dir }}
|
||||
shell: python
|
||||
run: |
|
||||
import os
|
||||
import yaml
|
||||
|
||||
codeql_cli_ext_dep_name = 'codeqlcli_windows_ext_dep'
|
||||
codeql_plugin_file = os.path.join(os.environ['CODEQL_PLUGIN_DIR'], codeql_cli_ext_dep_name + '.yaml')
|
||||
|
||||
with open (codeql_plugin_file) as pf:
|
||||
codeql_cli_ext_dep = yaml.safe_load(pf)
|
||||
|
||||
cache_key_name = codeql_cli_ext_dep['name']
|
||||
cache_key_version = codeql_cli_ext_dep['version']
|
||||
cache_key = f'{cache_key_name}-{cache_key_version}'
|
||||
|
||||
codeql_plugin_cli_ext_dep_dir = os.path.join(os.environ['CODEQL_PLUGIN_DIR'], codeql_cli_ext_dep['name'].strip() + '_extdep')
|
||||
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
|
||||
print(f'codeql_cli_cache_key={cache_key}', file=fh)
|
||||
print(f'codeql_cli_ext_dep_dir={codeql_plugin_cli_ext_dep_dir}', file=fh)
|
||||
|
||||
- name: Attempt to Load CodeQL CLI From Cache
|
||||
id: codeqlcli_cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ steps.cache_key_gen.outputs.codeql_cli_ext_dep_dir }}
|
||||
key: ${{ steps.cache_key_gen.outputs.codeql_cli_cache_key }}
|
||||
|
||||
- name: Download CodeQL CLI
|
||||
if: steps.codeqlcli_cache.outputs.cache-hit != 'true'
|
||||
run: stuart_update -c .pytool/CISettings.py -t DEBUG -a ${{ matrix.ArchList }} TOOL_CHAIN_TAG=VS2019 --codeql
|
||||
|
||||
- name: Remove CI Plugins Irrelevant to CodeQL
|
||||
shell: python
|
||||
env:
|
||||
CODEQL_PLUGIN_DIR: ${{ steps.find_dir.outputs.codeql_plugin_dir }}
|
||||
run: |
|
||||
import os
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
# Only these two plugins are needed for CodeQL
|
||||
plugins_to_keep = ['CompilerPlugin']
|
||||
|
||||
plugin_dir = Path('.pytool/Plugin').absolute()
|
||||
if plugin_dir.is_dir():
|
||||
for dir in plugin_dir.iterdir():
|
||||
if str(dir.stem) not in plugins_to_keep:
|
||||
shutil.rmtree(str(dir.absolute()), ignore_errors=True)
|
||||
|
||||
- name: CI Build
|
||||
env:
|
||||
STUART_CODEQL_PATH: ${{ steps.cache_key_gen.outputs.codeql_cli_ext_dep_dir }}
|
||||
run: stuart_ci_build -c .pytool/CISettings.py -t DEBUG -p ${{ matrix.Package }} -a ${{ matrix.ArchList }} TOOL_CHAIN_TAG=VS2019 --codeql
|
||||
|
||||
- name: Build Cleanup
|
||||
id: build_cleanup
|
||||
shell: python
|
||||
run: |
|
||||
import os
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
dirs_to_delete = ['ia32', 'x64', 'arm', 'aarch64']
|
||||
|
||||
def delete_dirs(path: Path):
|
||||
if path.exists() and path.is_dir():
|
||||
if path.name.lower() in dirs_to_delete:
|
||||
print(f'Removed {str(path)}')
|
||||
shutil.rmtree(path)
|
||||
return
|
||||
|
||||
for child_dir in path.iterdir():
|
||||
delete_dirs(child_dir)
|
||||
|
||||
build_path = Path(os.environ['GITHUB_WORKSPACE'], 'Build')
|
||||
delete_dirs(build_path)
|
||||
|
||||
- name: Upload Build Logs As An Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
if: success() || failure()
|
||||
with:
|
||||
name: ${{ matrix.Package }}-${{ steps.convert_arch_hyphen.outputs.arch_list }}-Build-Logs
|
||||
path: |
|
||||
**/BUILD_REPORT.TXT
|
||||
**/OVERRIDELOG.TXT
|
||||
**/BUILDLOG_*.md
|
||||
**/BUILDLOG_*.txt
|
||||
**/CI_*.md
|
||||
**/CI_*.txt
|
||||
retention-days: 7
|
||||
if-no-files-found: ignore
|
||||
|
||||
- name: Prepare Env Data for CodeQL Upload
|
||||
id: env_data
|
||||
env:
|
||||
PACKAGE_NAME: ${{ matrix.Package }}
|
||||
shell: python
|
||||
run: |
|
||||
import logging
|
||||
import os
|
||||
from edk2toollib.utility_functions import RunCmd
|
||||
from io import StringIO
|
||||
from pathlib import Path
|
||||
|
||||
package = os.environ['PACKAGE_NAME'].strip().lower()
|
||||
directory_name = 'codeql-analysis-' + package + '-debug'
|
||||
file_name = 'codeql-db-' + package + '-debug-0.sarif'
|
||||
sarif_path = Path('Build', directory_name, file_name)
|
||||
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
|
||||
if sarif_path.is_file():
|
||||
emacs_file_path = sarif_path.with_name(sarif_path.stem + "-emacs.txt")
|
||||
out_stream_buffer = StringIO()
|
||||
exit_code = RunCmd("sarif", f"emacs {sarif_path} --output {emacs_file_path} --no-autotrim",
|
||||
outstream=out_stream_buffer,
|
||||
logging_level=logging.NOTSET)
|
||||
print(f'upload_sarif_file=true', file=fh)
|
||||
print(f'emacs_file_path={emacs_file_path}', file=fh)
|
||||
print(f'sarif_file_path={sarif_path}', file=fh)
|
||||
else:
|
||||
print(f'upload_sarif_file=false', file=fh)
|
||||
|
||||
- name: Upload CodeQL Results (SARIF) As An Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
if: steps.env_data.outputs.upload_sarif_file == 'true'
|
||||
with:
|
||||
name: ${{ matrix.Package }}-${{ steps.convert_arch_hyphen.outputs.arch_list }}-CodeQL-SARIF
|
||||
path: |
|
||||
${{ steps.env_data.outputs.emacs_file_path }}
|
||||
${{ steps.env_data.outputs.sarif_file_path }}
|
||||
retention-days: 14
|
||||
if-no-files-found: warn
|
||||
|
||||
- name: Upload CodeQL Results (SARIF) To GitHub Code Scanning
|
||||
uses: github/codeql-action/upload-sarif@v3
|
||||
if: steps.env_data.outputs.upload_sarif_file == 'true'
|
||||
with:
|
||||
# Path to SARIF file relative to the root of the repository.
|
||||
sarif_file: ${{ steps.env_data.outputs.sarif_file_path }}
|
||||
# Optional category for the results. Used to differentiate multiple results for one commit.
|
||||
# Each package is a separate category.
|
||||
category: ${{ matrix.Package }}
|
36
.github/workflows/pr-labeler.yml
vendored
36
.github/workflows/pr-labeler.yml
vendored
@ -1,36 +0,0 @@
|
||||
# This workflow automatically applies labels to pull requests based on regular expression matches against the content
|
||||
# in the pull request.
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
#
|
||||
# For more information, see:
|
||||
# https://github.com/github/issue-labeler
|
||||
|
||||
name: Apply Labels Based on Message Content
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- edited
|
||||
- opened
|
||||
- reopened
|
||||
- synchronize
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
sync:
|
||||
name: Label PR from Description
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Apply Labels Based on PR Description
|
||||
uses: github/issue-labeler@v3.1
|
||||
with:
|
||||
configuration-path: .github/workflows/pr-labeler/regex.yml
|
||||
enable-versioned-regex: 0
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
16
.github/workflows/pr-labeler/regex.yml
vendored
16
.github/workflows/pr-labeler/regex.yml
vendored
@ -1,16 +0,0 @@
|
||||
# Specifies labels to apply to pull requests based on regular expressions.
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
#
|
||||
# For more information, see:
|
||||
# https://github.com/github/issue-labeler
|
||||
|
||||
impact:breaking-change:
|
||||
- '\s*-\s*\[\s*[x|X]\s*\] Breaking change\?'
|
||||
|
||||
impact:security:
|
||||
- '\s*-\s*\[\s*[x|X]\s*\] Impacts security\?'
|
||||
|
||||
impact:testing:
|
||||
- '\s*-\s*\[\s*[x|X]\s*\] Includes tests\?'
|
44
.github/workflows/stale.yml
vendored
44
.github/workflows/stale.yml
vendored
@ -1,44 +0,0 @@
|
||||
# This workflow warns and then closes issues and PRs that have had no activity
|
||||
# for a specified amount of time.
|
||||
#
|
||||
# For more information, see:
|
||||
# https://github.com/actions/stale
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
#
|
||||
|
||||
name: Stale Check
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# At 23:35 on every day-of-week from Sunday through Saturday
|
||||
# https://crontab.guru/#35_23_*_*_0-6
|
||||
- cron: '35 23 * * 0-6'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
name: Stale
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Check for Stale Items
|
||||
uses: actions/stale@v8
|
||||
with:
|
||||
days-before-issue-close: -1
|
||||
days-before-issue-stale: -1
|
||||
days-before-pr-stale: 60
|
||||
days-before-pr-close: 7
|
||||
stale-pr-message: >
|
||||
This PR has been automatically marked as stale because it has not had
|
||||
activity in 60 days. It will be closed if no further activity occurs within
|
||||
7 days. Thank you for your contributions.
|
||||
close-pr-message: >
|
||||
This pull request has been automatically been closed because it did not have any
|
||||
activity in 60 days and no follow up within 7 days after being marked stale.
|
||||
Thank you for your contributions.
|
||||
stale-pr-label: stale
|
6
.gitignore
vendored
6
.gitignore
vendored
@ -1,7 +1,3 @@
|
||||
Build/
|
||||
.DS_Store
|
||||
*_extdep/
|
||||
*.pyc
|
||||
__pycache__/
|
||||
tags/
|
||||
.vscode/
|
||||
.DS_Store
|
||||
|
37
.gitmodules
vendored
37
.gitmodules
vendored
@ -1,40 +1,3 @@
|
||||
[submodule "CryptoPkg/Library/OpensslLib/openssl"]
|
||||
path = CryptoPkg/Library/OpensslLib/openssl
|
||||
url = https://github.com/openssl/openssl
|
||||
[submodule "SoftFloat"]
|
||||
path = ArmPkg/Library/ArmSoftFloatLib/berkeley-softfloat-3
|
||||
url = https://github.com/ucb-bar/berkeley-softfloat-3.git
|
||||
[submodule "UnitTestFrameworkPkg/Library/CmockaLib/cmocka"]
|
||||
path = UnitTestFrameworkPkg/Library/CmockaLib/cmocka
|
||||
url = https://github.com/tianocore/edk2-cmocka.git
|
||||
[submodule "MdeModulePkg/Universal/RegularExpressionDxe/oniguruma"]
|
||||
path = MdeModulePkg/Universal/RegularExpressionDxe/oniguruma
|
||||
url = https://github.com/kkos/oniguruma
|
||||
[submodule "MdeModulePkg/Library/BrotliCustomDecompressLib/brotli"]
|
||||
path = MdeModulePkg/Library/BrotliCustomDecompressLib/brotli
|
||||
url = https://github.com/google/brotli
|
||||
[submodule "BaseTools/Source/C/BrotliCompress/brotli"]
|
||||
path = BaseTools/Source/C/BrotliCompress/brotli
|
||||
url = https://github.com/google/brotli
|
||||
ignore = untracked
|
||||
[submodule "RedfishPkg/Library/JsonLib/jansson"]
|
||||
path = RedfishPkg/Library/JsonLib/jansson
|
||||
url = https://github.com/akheron/jansson
|
||||
[submodule "UnitTestFrameworkPkg/Library/GoogleTestLib/googletest"]
|
||||
path = UnitTestFrameworkPkg/Library/GoogleTestLib/googletest
|
||||
url = https://github.com/google/googletest.git
|
||||
[submodule "UnitTestFrameworkPkg/Library/SubhookLib/subhook"]
|
||||
path = UnitTestFrameworkPkg/Library/SubhookLib/subhook
|
||||
url = https://github.com/Zeex/subhook.git
|
||||
[submodule "MdePkg/Library/BaseFdtLib/libfdt"]
|
||||
path = MdePkg/Library/BaseFdtLib/libfdt
|
||||
url = https://github.com/devicetree-org/pylibfdt.git
|
||||
[submodule "MdePkg/Library/MipiSysTLib/mipisyst"]
|
||||
path = MdePkg/Library/MipiSysTLib/mipisyst
|
||||
url = https://github.com/MIPI-Alliance/public-mipi-sys-t.git
|
||||
[submodule "CryptoPkg/Library/MbedTlsLib/mbedtls"]
|
||||
path = CryptoPkg/Library/MbedTlsLib/mbedtls
|
||||
url = https://github.com/ARMmbed/mbedtls
|
||||
[submodule "SecurityPkg/DeviceSecurity/SpdmLib/libspdm"]
|
||||
path = SecurityPkg/DeviceSecurity/SpdmLib/libspdm
|
||||
url = https://github.com/DMTF/libspdm.git
|
||||
|
.mailmap
@@ -1,81 +0,0 @@
#
# This list is used by git-shortlog to update a few name translations
# in the git archive to adjust for job changes or incorrect/inconsistent
# name usage.
#
# Please keep this file sorted alphabetically, and email in lowercase.
# The format used is:
#
# Firstname Lastname <email@domain.tld>
#

Aaron Li <aaron.li@intel.com> <songpeng.li@intel.com>
Antoine Cœur <coeur@gmx.fr>
Antoine Cœur <coeur@gmx.fr> <Coeur@gmx.fr>
Ard Biesheuvel <ard.biesheuvel@linaro.org> <abiesheuvel@Edk2>
Ashley DeSimone <ashley.e.desimone@intel.com> <ashdesimone@6f19259b-4bc3-4df7-8a09-765794883524>
Baraneedharan Anbazhagan <anbazhagan@hp.com>
Chasel Chiu <chasel.chiu@intel.com>
Christopher J Zurcher <christopher.j.zurcher@intel.com>
Eric Dong <eric.dong@intel.com>
Eric Dong <eric.dong@intel.com> Eric Dong <eirc.dong@intel.com>
Eric Dong <eric.dong@intel.com> <ydong10@6f19259b-4bc3-4df7-8a09-765794883524>
Eric Dong <eric.dong@intel.com> <ydong10@Edk2>
Erik Bjorge <erik.c.bjorge@intel.com> <geekboy15a@6f19259b-4bc3-4df7-8a09-765794883524>
Eugene Cohen <eugene@nuviainc.com>
Eugene Cohen <eugene@nuviainc.com> <eugene@hp.com>
Hao A Wu <hao.a.wu@intel.com>
Hao A Wu <hao.a.wu@intel.com> <hwu1225@Edk2>
Hot Tian <hot.tian@intel.com>
Hot Tian <hot.tian@intel.com> <hhtian@6f19259b-4bc3-4df7-8a09-765794883524>
Jiewen Yao <jiewen.yao@intel.com>
Jiewen Yao <jiewen.yao@intel.com> <Jiewen.yao@intel.com>
Jiewen Yao <jiewen.yao@intel.com> <Jiewen.Yao@intel.com>
Jiewen Yao <jiewen.yao@intel.com> <jyao1>
Jiewen Yao <jiewen.yao@intel.com> <jyao1@6f19259b-4bc3-4df7-8a09-765794883524>
Jiewen Yao <jiewen.yao@intel.com> <jyao1@Edk2>
Jim Dailey <Jim.Dailey@Dell.com>
Jim Dailey <Jim.Dailey@Dell.com> <Jim_Dailey@Dell.com>
Laszlo Ersek <lersek@redhat.com> <lersek@6f19259b-4bc3-4df7-8a09-765794883524>
Laszlo Ersek <lersek@redhat.com> <lersek@Edk2>
Liming Gao <gaoliming@byosoft.com.cn>
Liming Gao <liming.gao@intel.com> <Gao, Liming liming.gao@intel.com>
Liming Gao <liming.gao@intel.com> <lgao4@6f19259b-4bc3-4df7-8a09-765794883524>
Liming Gao <liming.gao@intel.com> <lgao4@Edk2>
Liming Gao <liming.gao@intel.com> <liming.gao@intel.com>
Maciej Rabeda <maciej.rabeda@intel.com>
Marc-André Lureau <marcandre.lureau@redhat.com> <marcandre.lureau@redhat.com>
Marvin Häuser <Marvin.Haeuser@outlook.com>
Marvin Häuser <Marvin.Haeuser@outlook.com> edk2-devel <edk2-devel-bounces@lists.01.org>
Marvin Häuser <mhaeuser@outlook.de>
Matt DeVillier <matt.devillier@gmail.com>
Maurice Ma <maurice.ma@intel.com>
Michael Kubacki <michael.a.kubacki@intel.com>
Michael Kubacki <michael.a.kubacki@intel.com> </o=Intel/ou=External (FYDIBOHF25SPDLT)/cn=Recipients/cn=3c8b0226e75f4ab08d20c151cb7a8a72>
Ming Tan <ming.tan@intel.com>
Nikolai Saoukh <nms@otdel-1.org>
Philippe Mathieu-Daudé <philmd@redhat.com>
Ray Ni <ray.ni@intel.com>
Ray Ni <ray.ni@intel.com> <C:/Program Files (x86)/Git/O=Intel/OU=Pacifica02/cn=Recipients/cn=rni2>
Ray Ni <ray.ni@intel.com> <niruiyu@6f19259b-4bc3-4df7-8a09-765794883524>
Ray Ni <ray.ni@intel.com> <niruiyu@Edk2>
Ray Ni <ray.ni@intel.com> <ruiyu.ni@intel.com>
Ray Ni <ray.ni@intel.com> <Ruiyu.ni@Intel.com>
Ray Ni <ray.ni@intel.com> <ruyu.ni@intel.com>
Rebecca Cran <rebecca@bluestop.org>
Rebecca Cran <rebecca@bsdio.com>
Samer El-Haj-Mahmoud <samer@elhajmahmoud.com> <elhaj@hpe.com>
Samer El-Haj-Mahmoud <samer@elhajmahmoud.com> <Samer El-Haj-Mahmoud elhaj@hp.com>
Shenglei Zhang <shenglei.zhang@intel.com>
Star Zeng <star.zeng@intel.com>
Star Zeng <star.zeng@intel.com> <lzeng14@6f19259b-4bc3-4df7-8a09-765794883524>
Star Zeng <star.zeng@intel.com> <lzeng14@Edk2>
Tom Lendacky <thomas.lendacky@amd.com>
Vitaly Cheptsov <vit9696@protonmail.com> Vitaly Cheptsov via Groups.Io <vit9696=protonmail.com@groups.io>
Vladimir Olovyannikov <vladimir.olovyannikov@broadcom.com> Vladimir Olovyannikov via edk2-devel <edk2-devel@lists.01.org>
Wei6 Xu <wei6.xu@intel.com>
Yonghong Zhu <yonghong.zhu@intel.com>
Yonghong Zhu <yonghong.zhu@intel.com> <yzhu52@Edk2>
Yu-Chen Lin <yuchenlin@synology.com>
Zhichao Gao <zhichao.gao@intel.com>
Zhiguang Liu <zhiguang.liu@intel.com>
@@ -1,49 +0,0 @@
## @file
# Mergify YML file that automatically merges a GitHub pull request against
# edk2-ci if all of the GitHub branch protections have passed. It also
# contains rules to:
# * auto close branches that are not from an EDK II Maintainer
# * post a comment on pull requests that have merge conflicts.
# * post a comment on pull requests that have PatchCheck.py errors.
#
# Configuration Notes:
# * Update the 'base=edk2-ci' statements with the name of the branch to merge
#   pull requests.
#
# * Update the 'status-failure' statement with the name of the Azure
#   Pipelines Build that performs the EDK II Maintainer check.
#
# * This file must be checked into the 'default' branch of a repo. Copies
#   of this file on other branches of a repo are ignored by Mergify.
#
# Copyright (c) 2019 - 2021, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
# https://github.com/apps/mergify
# https://doc.mergify.io/
#
##

queue_rules:
  - name: default
    conditions:
      - base~=(^main|^master|^stable/)
      - label=push

pull_request_rules:
  - name: Automatically merge a PR when all required checks pass and 'push' label is present
    conditions:
      - base~=(^main|^master|^stable/)
      - label=push
    actions:
      queue:
        method: rebase
        name: default

  - name: Post a comment on a PR that can not be merged due to a merge conflict
    conditions:
      - base~=(^main|^master|^stable/)
      - conflict
    actions:
      comment:
        message: PR can not be merged due to conflict. Please rebase and resubmit
@@ -1,276 +0,0 @@
# @file
#
# Copyright (c) Microsoft Corporation.
# Copyright (c) 2020, Hewlett Packard Enterprise Development LP. All rights reserved.<BR>
# Copyright (c) 2020 - 2021, ARM Limited. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import os
import logging
import sys
from edk2toolext.environment import shell_environment
from edk2toolext.invocables.edk2_ci_build import CiBuildSettingsManager
from edk2toolext.invocables.edk2_setup import SetupSettingsManager, RequiredSubmodule
from edk2toolext.invocables.edk2_update import UpdateSettingsManager
from edk2toolext.invocables.edk2_pr_eval import PrEvalSettingsManager
from edk2toollib.utility_functions import GetHostInfo
from pathlib import Path


try:
    # Temporarily needed until edk2 can update to the latest edk2-pytools
    # that has the CodeQL helpers.
    #
    # May not be present until submodules are populated.
    #
    root = Path(__file__).parent.parent.resolve()
    sys.path.append(str(root/'BaseTools'/'Plugin'/'CodeQL'/'integration'))
    import stuart_codeql as codeql_helpers
except ImportError:
    pass


class Settings(CiBuildSettingsManager, UpdateSettingsManager, SetupSettingsManager, PrEvalSettingsManager):

    def __init__(self):
        self.ActualPackages = []
        self.ActualTargets = []
        self.ActualArchitectures = []
        self.ActualToolChainTag = ""
        self.UseBuiltInBaseTools = None
        self.ActualScopes = None

    # ####################################################################################### #
    #                             Extra CmdLine configuration                                 #
    # ####################################################################################### #

    def AddCommandLineOptions(self, parserObj):
        group = parserObj.add_mutually_exclusive_group()
        group.add_argument("-force_piptools", "--fpt", dest="force_piptools", action="store_true", default=False, help="Force the system to use pip tools")
        group.add_argument("-no_piptools", "--npt", dest="no_piptools", action="store_true", default=False, help="Force the system to not use pip tools")

        try:
            codeql_helpers.add_command_line_option(parserObj)
        except NameError:
            pass

    def RetrieveCommandLineOptions(self, args):
        super().RetrieveCommandLineOptions(args)
        if args.force_piptools:
            self.UseBuiltInBaseTools = True
        if args.no_piptools:
            self.UseBuiltInBaseTools = False

        try:
            self.codeql = codeql_helpers.is_codeql_enabled_on_command_line(args)
        except NameError:
            pass

    # ####################################################################################### #
    #                        Default Support for this Ci Build                                #
    # ####################################################################################### #

    def GetPackagesSupported(self):
        ''' return iterable of edk2 packages supported by this build.
        These should be edk2 workspace relative paths '''

        return ("ArmPkg",
                "ArmPlatformPkg",
                "ArmVirtPkg",
                "DynamicTablesPkg",
                "EmbeddedPkg",
                "EmulatorPkg",
                "IntelFsp2Pkg",
                "IntelFsp2WrapperPkg",
                "MdePkg",
                "MdeModulePkg",
                "NetworkPkg",
                "PcAtChipsetPkg",
                "SecurityPkg",
                "UefiCpuPkg",
                "FmpDevicePkg",
                "ShellPkg",
                "SignedCapsulePkg",
                "StandaloneMmPkg",
                "FatPkg",
                "CryptoPkg",
                "PrmPkg",
                "UnitTestFrameworkPkg",
                "OvmfPkg",
                "RedfishPkg",
                "SourceLevelDebugPkg",
                "UefiPayloadPkg"
                )

    def GetArchitecturesSupported(self):
        ''' return iterable of edk2 architectures supported by this build '''
        return (
            "IA32",
            "X64",
            "ARM",
            "AARCH64",
            "RISCV64",
            "LOONGARCH64")

    def GetTargetsSupported(self):
        ''' return iterable of edk2 target tags supported by this build '''
        return ("DEBUG", "RELEASE", "NO-TARGET", "NOOPT")

    # ####################################################################################### #
    #                     Verify and Save requested Ci Build Config                           #
    # ####################################################################################### #

    def SetPackages(self, list_of_requested_packages):
        ''' Confirm the requested package list is valid and configure SettingsManager
        to build the requested packages.

        Raise UnsupportedException if a requested_package is not supported
        '''
        unsupported = set(list_of_requested_packages) - \
            set(self.GetPackagesSupported())
        if(len(unsupported) > 0):
            logging.critical(
                "Unsupported Package Requested: " + " ".join(unsupported))
            raise Exception("Unsupported Package Requested: " +
                            " ".join(unsupported))
        self.ActualPackages = list_of_requested_packages

    def SetArchitectures(self, list_of_requested_architectures):
        ''' Confirm the requested architecture list is valid and configure SettingsManager
        to run only the requested architectures.

        Raise Exception if a list_of_requested_architectures is not supported
        '''
        unsupported = set(list_of_requested_architectures) - \
            set(self.GetArchitecturesSupported())
        if(len(unsupported) > 0):
            logging.critical(
                "Unsupported Architecture Requested: " + " ".join(unsupported))
            raise Exception(
                "Unsupported Architecture Requested: " + " ".join(unsupported))
        self.ActualArchitectures = list_of_requested_architectures

    def SetTargets(self, list_of_requested_target):
        ''' Confirm the requested target list is valid and configure SettingsManager
        to run only the requested targets.

        Raise UnsupportedException if a requested_target is not supported
        '''
        unsupported = set(list_of_requested_target) - \
            set(self.GetTargetsSupported())
        if(len(unsupported) > 0):
            logging.critical(
                "Unsupported Targets Requested: " + " ".join(unsupported))
            raise Exception("Unsupported Targets Requested: " +
                            " ".join(unsupported))
        self.ActualTargets = list_of_requested_target

    # ####################################################################################### #
    #                         Actual Configuration for Ci Build                               #
    # ####################################################################################### #

    def GetActiveScopes(self):
        ''' return tuple containing scopes that should be active for this process '''
        if self.ActualScopes is None:
            scopes = ("cibuild", "edk2-build", "host-based-test")

            self.ActualToolChainTag = shell_environment.GetBuildVars().GetValue("TOOL_CHAIN_TAG", "")

            is_linux = GetHostInfo().os.upper() == "LINUX"

            if self.UseBuiltInBaseTools is None:
                is_linux = GetHostInfo().os.upper() == "LINUX"
                # try and import the pip module for basetools
                try:
                    import edk2basetools
                    self.UseBuiltInBaseTools = True
                except ImportError:
                    self.UseBuiltInBaseTools = False
                    pass

            if self.UseBuiltInBaseTools == True:
                scopes += ('pipbuild-unix',) if is_linux else ('pipbuild-win',)
                logging.warning("Using Pip Tools based BaseTools")
            else:
                logging.warning("Falling back to using in-tree BaseTools")

            try:
                scopes += codeql_helpers.get_scopes(self.codeql)

                if self.codeql:
                    shell_environment.GetBuildVars().SetValue(
                        "STUART_CODEQL_AUDIT_ONLY",
                        "TRUE",
                        "Set in CISettings.py")
            except NameError:
                pass

            self.ActualScopes = scopes
        return self.ActualScopes

    def GetRequiredSubmodules(self):
        ''' return iterable containing RequiredSubmodule objects.
        If no RequiredSubmodules return an empty iterable
        '''
        rs = []
        rs.append(RequiredSubmodule(
            "ArmPkg/Library/ArmSoftFloatLib/berkeley-softfloat-3", False))
        rs.append(RequiredSubmodule(
            "CryptoPkg/Library/OpensslLib/openssl", False))
        rs.append(RequiredSubmodule(
            "UnitTestFrameworkPkg/Library/CmockaLib/cmocka", False))
        rs.append(RequiredSubmodule(
            "UnitTestFrameworkPkg/Library/GoogleTestLib/googletest", False))
        rs.append(RequiredSubmodule(
            "MdeModulePkg/Universal/RegularExpressionDxe/oniguruma", False))
        rs.append(RequiredSubmodule(
            "MdeModulePkg/Library/BrotliCustomDecompressLib/brotli", False))
        rs.append(RequiredSubmodule(
            "BaseTools/Source/C/BrotliCompress/brotli", False))
        rs.append(RequiredSubmodule(
            "RedfishPkg/Library/JsonLib/jansson", False))
        rs.append(RequiredSubmodule(
            "UnitTestFrameworkPkg/Library/SubhookLib/subhook", False))
        rs.append(RequiredSubmodule(
            "MdePkg/Library/BaseFdtLib/libfdt", False))
        rs.append(RequiredSubmodule(
            "MdePkg/Library/MipiSysTLib/mipisyst", False))
        rs.append(RequiredSubmodule(
            "CryptoPkg/Library/MbedTlsLib/mbedtls", False))
        rs.append(RequiredSubmodule(
            "SecurityPkg/DeviceSecurity/SpdmLib/libspdm", False))
        return rs

    def GetName(self):
        return "Edk2"

    def GetDependencies(self):
        return [
        ]

    def GetPackagesPath(self):
        return ()

    def GetWorkspaceRoot(self):
        ''' get WorkspacePath '''
        return os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

    def FilterPackagesToTest(self, changedFilesList: list, potentialPackagesList: list) -> list:
        ''' Filter potential packages to test based on changed files. '''
        build_these_packages = []
        possible_packages = potentialPackagesList.copy()
        for f in changedFilesList:
            # split each part of path for comparison later
            nodes = f.split("/")

            # python file change in .pytool folder causes building all
            if f.endswith(".py") and ".pytool" in nodes:
                build_these_packages = possible_packages
                break

            # BaseTools files that might change the build
            if "BaseTools" in nodes:
                if os.path.splitext(f)[1] not in [".txt", ".md"]:
                    build_these_packages = possible_packages
                    break
        return build_these_packages
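For clarity, here is a minimal, hypothetical illustration (not part of the file above) of how `FilterPackagesToTest` reacts to changed-file lists; the file names and package list are invented for the example.

```python
# Hypothetical example of the FilterPackagesToTest policy shown above:
# any .py change under .pytool, or any non-doc BaseTools change, selects
# every candidate package; otherwise this filter selects nothing.
settings = Settings()
candidates = ["MdePkg", "MdeModulePkg"]

# A documentation-only BaseTools change selects no packages.
print(settings.FilterPackagesToTest(["BaseTools/ReadMe.md"], candidates))    # []

# A Python change under .pytool selects every candidate package.
print(settings.FilterPackagesToTest([".pytool/CISettings.py"], candidates))  # ['MdePkg', 'MdeModulePkg']
```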
@@ -1,116 +0,0 @@
# @file CharEncodingCheck.py
#
# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: BSD-2-Clause-Patent
##


import os
import logging
from edk2toolext.environment.plugintypes.ci_build_plugin import ICiBuildPlugin
from edk2toolext.environment.var_dict import VarDict

##
# map of file extensions to the encoding they are expected to use
##
EncodingMap = {
    ".md": 'utf-8',
    ".dsc": 'utf-8',
    ".dec": 'utf-8',
    ".c": 'utf-8',
    ".h": 'utf-8',
    ".asm": 'utf-8',
    ".masm": 'utf-8',
    ".nasm": 'utf-8',
    ".s": 'utf-8',
    ".inf": 'utf-8',
    ".asl": 'utf-8',
    ".uni": 'utf-8',
    ".py": 'utf-8'
}


class CharEncodingCheck(ICiBuildPlugin):
    """
    A CiBuildPlugin that scans each file in the code tree and confirms the encoding is correct.

    Configuration options:
    "CharEncodingCheck": {
        "IgnoreFiles": []
    }
    """

    def GetTestName(self, packagename: str, environment: VarDict) -> tuple:
        """ Provide the testcase name and classname for use in reporting
            testclassname: a descriptive string for the testcase can include whitespace
            classname: should be patterned <packagename>.<plugin>.<optionally any unique condition>

            Args:
              packagename: string containing name of package to build
              environment: The VarDict for the test to run in
            Returns:
                a tuple containing the testcase name and the classname
                (testcasename, classname)
        """
        return ("Check for valid file encoding for " + packagename, packagename + ".CharEncodingCheck")

    ##
    # External function of plugin. This function is used to perform the task of the ci_build_plugin Plugin
    #
    #   - package is the edk2 path to package. This means workspace/packagepath relative.
    #   - edk2path object configured with workspace and packages path
    #   - PkgConfig Object (dict) for the pkg
    #   - EnvConfig Object
    #   - Plugin Manager Instance
    #   - Plugin Helper Obj Instance
    #   - Junit Logger
    #   - output_stream the StringIO output stream from this plugin via logging
    def RunBuildPlugin(self, packagename, Edk2pathObj, pkgconfig, environment, PLM, PLMHelper, tc, output_stream=None):
        overall_status = 0
        files_tested = 0

        abs_pkg_path = Edk2pathObj.GetAbsolutePathOnThisSystemFromEdk2RelativePath(packagename)

        if abs_pkg_path is None:
            tc.SetSkipped()
            tc.LogStdError("No Package folder {0}".format(abs_pkg_path))
            return 0

        for (ext, enc) in EncodingMap.items():
            files = self.WalkDirectoryForExtension([ext], abs_pkg_path)
            files = [Edk2pathObj.GetEdk2RelativePathFromAbsolutePath(x) for x in files]  # make edk2relative path so can process ignores

            if "IgnoreFiles" in pkgconfig:
                for a in pkgconfig["IgnoreFiles"]:
                    a = a.replace(os.sep, "/")
                    try:
                        tc.LogStdOut("Ignoring File {0}".format(a))
                        files.remove(a)
                    except ValueError:
                        tc.LogStdError("CharEncodingCheck.IgnoreFiles -> {0} not found in filesystem. Invalid ignore file".format(a))
                        logging.info("CharEncodingCheck.IgnoreFiles -> {0} not found in filesystem. Invalid ignore file".format(a))

            files = [Edk2pathObj.GetAbsolutePathOnThisSystemFromEdk2RelativePath(x) for x in files]
            for a in files:
                files_tested += 1
                if not self.TestEncodingOk(a, enc):
                    tc.LogStdError("Encoding Failure in {0}. Not {1}".format(a, enc))
                    overall_status += 1

        tc.LogStdOut("Tested Encoding on {0} files".format(files_tested))
        if overall_status != 0:
            tc.SetFailed("CharEncoding {0} Failed. Errors {1}".format(packagename, overall_status), "CHAR_ENCODING_CHECK_FAILED")
        else:
            tc.SetSuccess()
        return overall_status

    def TestEncodingOk(self, apath, encodingValue):
        try:
            with open(apath, "rb") as fobj:
                fobj.read().decode(encodingValue)
        except Exception as exp:
            logging.error("Encoding failure: file: {0} type: {1}".format(apath, encodingValue))
            logging.debug("EXCEPTION: while processing {1} - {0}".format(exp, apath))
            return False

        return True
@@ -1,11 +0,0 @@
## @file
# CiBuildPlugin used to check char encoding
#
# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
{
    "scope": "cibuild",
    "name": "Char Encoding Check Test",
    "module": "CharEncodingCheck"
}
@@ -1,18 +0,0 @@
# Character Encoding Check Plugin

This CiBuildPlugin scans all the files in a package to make sure each file is
correctly encoded and all characters can be read. Improper encoding causes
tools to fail in some situations, especially in different locales.

## Configuration

The plugin can be configured to ignore certain files.

``` yaml
"CharEncodingCheck": {
    "IgnoreFiles": []
}
```

### IgnoreFiles

OPTIONAL List of files to ignore.
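As a standalone sketch of the check this plugin performs (not the plugin code itself), a file passes when its raw bytes decode with the expected codec:

```python
# Minimal, self-contained version of the probe used by TestEncodingOk above:
# read the file as bytes and confirm it decodes with the expected encoding.
def file_encoding_ok(path: str, encoding: str = "utf-8") -> bool:
    try:
        with open(path, "rb") as fobj:
            fobj.read().decode(encoding)
        return True
    except (UnicodeDecodeError, OSError):
        return False
```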
@@ -1,102 +0,0 @@
# @file CompilerPlugin.py
##
# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: BSD-2-Clause-Patent
##

import logging
import os
import re
from edk2toollib.uefi.edk2.parsers.dsc_parser import DscParser
from edk2toolext.environment.plugintypes.ci_build_plugin import ICiBuildPlugin
from edk2toolext.environment.uefi_build import UefiBuilder
from edk2toolext import edk2_logging
from edk2toolext.environment.var_dict import VarDict


class CompilerPlugin(ICiBuildPlugin):
    """
    A CiBuildPlugin that compiles the package dsc
    from the package being tested.

    Configuration options:
    "CompilerPlugin": {
        "DscPath": "<path to dsc from root of pkg>"
    }
    """

    def GetTestName(self, packagename: str, environment: VarDict) -> tuple:
        """ Provide the testcase name and classname for use in reporting

            Args:
              packagename: string containing name of package to build
              environment: The VarDict for the test to run in
            Returns:
                a tuple containing the testcase name and the classname
                (testcasename, classname)
        """
        target = environment.GetValue("TARGET")
        return ("Compile " + packagename + " " + target, packagename + ".Compiler." + target)

    def RunsOnTargetList(self):
        return ["DEBUG", "RELEASE"]

    ##
    # External function of plugin. This function is used to perform the task of the ICiBuildPlugin Plugin
    #
    #   - package is the edk2 path to package. This means workspace/packagepath relative.
    #   - edk2path object configured with workspace and packages path
    #   - PkgConfig Object (dict) for the pkg
    #   - EnvConfig Object
    #   - Plugin Manager Instance
    #   - Plugin Helper Obj Instance
    #   - Junit Logger
    #   - output_stream the StringIO output stream from this plugin via logging
    def RunBuildPlugin(self, packagename, Edk2pathObj, pkgconfig, environment, PLM, PLMHelper, tc, output_stream=None):
        self._env = environment

        # Parse the config for required DscPath element
        if "DscPath" not in pkgconfig:
            tc.SetSkipped()
            tc.LogStdError("DscPath not found in config file. Nothing to compile.")
            return -1

        AP = Edk2pathObj.GetAbsolutePathOnThisSystemFromEdk2RelativePath(packagename)

        APDSC = os.path.join(AP, pkgconfig["DscPath"].strip())
        AP_Path = Edk2pathObj.GetEdk2RelativePathFromAbsolutePath(APDSC)
        if AP is None or AP_Path is None or not os.path.isfile(APDSC):
            tc.SetSkipped()
            tc.LogStdError("Package Dsc not found.")
            return -1

        logging.info("Building {0}".format(AP_Path))
        self._env.SetValue("ACTIVE_PLATFORM", AP_Path, "Set in Compiler Plugin")

        # Parse DSC to check for SUPPORTED_ARCHITECTURES
        dp = DscParser()
        dp.SetBaseAbsPath(Edk2pathObj.WorkspacePath)
        dp.SetPackagePaths(Edk2pathObj.PackagePathList)
        dp.ParseFile(AP_Path)
        if "SUPPORTED_ARCHITECTURES" in dp.LocalVars:
            SUPPORTED_ARCHITECTURES = dp.LocalVars["SUPPORTED_ARCHITECTURES"].split('|')
            TARGET_ARCHITECTURES = environment.GetValue("TARGET_ARCH").split(' ')

            # Skip if there is no intersection between SUPPORTED_ARCHITECTURES and TARGET_ARCHITECTURES
            if len(set(SUPPORTED_ARCHITECTURES) & set(TARGET_ARCHITECTURES)) == 0:
                tc.SetSkipped()
                tc.LogStdError("No supported architectures to build")
                return -1

        uefiBuilder = UefiBuilder()
        # do all the steps
        # WorkSpace, PackagesPath, PInHelper, PInManager
        ret = uefiBuilder.Go(Edk2pathObj.WorkspacePath, os.pathsep.join(Edk2pathObj.PackagePathList), PLMHelper, PLM)
        if ret != 0:  # failure:
            tc.SetFailed("Compile failed for {0}".format(packagename), "Compile_FAILED")
            tc.LogStdError("{0} Compile failed with error code {1} ".format(AP_Path, ret))
            return 1

        else:
            tc.SetSuccess()
            return 0
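A tiny self-contained illustration of the SUPPORTED_ARCHITECTURES skip logic used above; the architecture strings are sample values only:

```python
# The plugin skips the build when the DSC's SUPPORTED_ARCHITECTURES and the
# requested TARGET_ARCH share no common element; sample values shown.
supported = "IA32|X64".split('|')        # as read from the DSC define
requested = "AARCH64 ARM".split(' ')     # as read from the TARGET_ARCH build var

if not set(supported) & set(requested):
    print("Skip: no supported architectures to build")
```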
@@ -1,11 +0,0 @@
## @file
# CiBuildPlugin used to compile each package
#
# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
{
    "scope": "cibuild",
    "name": "Compiler Plugin",
    "module": "CompilerPlugin"
}
@@ -1,17 +0,0 @@
# Compiler Plugin

This CiBuildPlugin compiles the package DSC from the package being tested.

## Configuration

The plugin must be configured with the package-relative path of the DSC file to build.

``` yaml
"CompilerPlugin": {
    "DscPath": "<path to dsc from root of pkg>"
}
```

### DscPath

Package relative path to the DSC file to build.
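For illustration, here is a hypothetical package config for this plugin shown as the Python dict that `RunBuildPlugin` receives; the DSC file name is invented for the example:

```python
# Hypothetical CompilerPlugin package config, as consumed by RunBuildPlugin.
# "DscPath" is resolved relative to the package root and must point at an
# existing DSC file, otherwise the testcase is skipped.
pkgconfig = {
    "DscPath": "MyPkg.dsc"   # example name only
}
```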
@@ -1,120 +0,0 @@
# @file dependency_check.py
#
# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: BSD-2-Clause-Patent
##

import logging
import os
from edk2toolext.environment.plugintypes.ci_build_plugin import ICiBuildPlugin
from edk2toollib.uefi.edk2.parsers.inf_parser import InfParser
from edk2toolext.environment.var_dict import VarDict


class DependencyCheck(ICiBuildPlugin):
    """
    A CiBuildPlugin that finds all modules (inf files) in a package and reviews the packages used
    to confirm they are acceptable. This is to help enforce layering and identify improper
    dependencies between packages.

    Configuration options:
    "DependencyCheck": {
        "AcceptableDependencies": [], # Package dec files that are allowed in all INFs. Example: MdePkg/MdePkg.dec
        "AcceptableDependencies-<MODULE_TYPE>": [], # OPTIONAL Package dependencies for INFs of the given MODULE_TYPE
        "AcceptableDependencies-HOST_APPLICATION": [], # EXAMPLE Package dependencies for INFs that are HOST_APPLICATION
        "IgnoreInf": []  # Ignore INF if found in filesystem
    }
    """

    def GetTestName(self, packagename: str, environment: VarDict) -> tuple:
        """ Provide the testcase name and classname for use in reporting

            Args:
              packagename: string containing name of package to build
              environment: The VarDict for the test to run in
            Returns:
                a tuple containing the testcase name and the classname
                (testcasename, classname)
                testclassname: a descriptive string for the testcase can include whitespace
                classname: should be patterned <packagename>.<plugin>.<optionally any unique condition>
        """
        return ("Test Package Dependencies for modules in " + packagename, packagename + ".DependencyCheck")

    ##
    # External function of plugin. This function is used to perform the task of the MuBuild Plugin
    #
    #   - package is the edk2 path to package. This means workspace/packagepath relative.
    #   - edk2path object configured with workspace and packages path
    #   - PkgConfig Object (dict) for the pkg
    #   - EnvConfig Object
    #   - Plugin Manager Instance
    #   - Plugin Helper Obj Instance
    #   - Junit Logger
    #   - output_stream the StringIO output stream from this plugin via logging
    def RunBuildPlugin(self, packagename, Edk2pathObj, pkgconfig, environment, PLM, PLMHelper, tc, output_stream=None):
        overall_status = 0

        # Get current platform
        abs_pkg_path = Edk2pathObj.GetAbsolutePathOnThisSystemFromEdk2RelativePath(packagename)

        # Get INF Files
        INFFiles = self.WalkDirectoryForExtension([".inf"], abs_pkg_path)
        INFFiles = [Edk2pathObj.GetEdk2RelativePathFromAbsolutePath(x) for x in INFFiles]  # make edk2relative path so can compare with Ignore List

        # Remove ignored INFs
        if "IgnoreInf" in pkgconfig:
            for a in pkgconfig["IgnoreInf"]:
                a = a.replace(os.sep, "/")  # convert path sep in case ignore list is bad. Can't change case
                try:
                    INFFiles.remove(a)
                    tc.LogStdOut("IgnoreInf {0}".format(a))
                except ValueError:
                    logging.info("DependencyConfig.IgnoreInf -> {0} not found in filesystem. Invalid ignore file".format(a))
                    tc.LogStdError("DependencyConfig.IgnoreInf -> {0} not found in filesystem. Invalid ignore file".format(a))

        # Get the AcceptableDependencies list
        if "AcceptableDependencies" not in pkgconfig:
            logging.info("DependencyCheck Skipped. No Acceptable Dependencies defined.")
            tc.LogStdOut("DependencyCheck Skipped. No Acceptable Dependencies defined.")
            tc.SetSkipped()
            return -1

        # Log dependencies
        for k in pkgconfig.keys():
            if k.startswith("AcceptableDependencies"):
                pkgstring = "\n".join(pkgconfig[k])
                if ("-" in k):
                    _, _, mod_type = k.partition("-")
                    tc.LogStdOut(f"Additional dependencies for MODULE_TYPE {mod_type}:\n {pkgstring}")
                else:
                    tc.LogStdOut(f"Acceptable Dependencies:\n {pkgstring}")

        # For each INF file
        for file in INFFiles:
            ip = InfParser()
            logging.debug("Parsing " + file)
            ip.SetBaseAbsPath(Edk2pathObj.WorkspacePath).SetPackagePaths(Edk2pathObj.PackagePathList).ParseFile(file)

            if("MODULE_TYPE" not in ip.Dict):
                tc.LogStdOut("Ignoring INF. Missing key for MODULE_TYPE {0}".format(file))
                continue

            mod_type = ip.Dict["MODULE_TYPE"].upper()
            for p in ip.PackagesUsed:
                if p not in pkgconfig["AcceptableDependencies"]:
                    # If not in the main acceptable dependencies list then check module specific
                    mod_specific_key = "AcceptableDependencies-" + mod_type
                    if mod_specific_key in pkgconfig and p in pkgconfig[mod_specific_key]:
                        continue

                    logging.error("Dependency Check: Invalid Dependency INF: {0} depends on pkg {1}".format(file, p))
                    tc.LogStdError("Dependency Check: Invalid Dependency INF: {0} depends on pkg {1}".format(file, p))
                    overall_status += 1

        # If XML object exists, add results
        if overall_status != 0:
            tc.SetFailed("Failed with {0} errors".format(overall_status), "DEPENDENCYCHECK_FAILED")
        else:
            tc.SetSuccess()
        return overall_status
@@ -1,13 +0,0 @@
## @file
# CiBuildPlugin used to check all INFs within a package
# to confirm the package dependencies are on the configured list of
# acceptable dependencies.
#
# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
{
    "scope": "cibuild",
    "name": "Dependency Check Test",
    "module": "DependencyCheck"
}
@@ -1,31 +0,0 @@
# Dependency Check Plugin

A CiBuildPlugin that finds all modules (inf files) in a package and reviews the
packages used to confirm they are acceptable. This is to help enforce layering
and identify improper dependencies between packages.

## Configuration

The plugin must be configured with the acceptable package dependencies for the
package.

``` yaml
"DependencyCheck": {
    "AcceptableDependencies": [],
    "AcceptableDependencies-<MODULE_TYPE>": [],
    "IgnoreInf": []
}
```

### AcceptableDependencies

Package dec files that are allowed in all INFs. Example: MdePkg/MdePkg.dec

### AcceptableDependencies-<MODULE_TYPE>

OPTIONAL Package dependencies for INFs that have module type <MODULE_TYPE>.
Example: AcceptableDependencies-HOST_APPLICATION.

### IgnoreInf

OPTIONAL list of INFs to ignore for this dependency check.
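An illustrative config, shown as the Python dict the plugin receives in `RunBuildPlugin`; the entries below are examples, not a recommended policy for any particular package:

```python
# Illustrative DependencyCheck package config (a Python dict, as consumed
# by RunBuildPlugin above).  Entries are examples only.
pkgconfig = {
    # DEC files any INF in the package may depend on.
    "AcceptableDependencies": [
        "MdePkg/MdePkg.dec",
        "MdeModulePkg/MdeModulePkg.dec",
    ],
    # Extra DEC files allowed only for HOST_APPLICATION (host-based test) modules.
    "AcceptableDependencies-HOST_APPLICATION": [
        "UnitTestFrameworkPkg/UnitTestFrameworkPkg.dec",
    ],
    # Package-relative INF paths to skip entirely.
    "IgnoreInf": [],
}
```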
@@ -1,133 +0,0 @@
# @file DscCompleteCheck.py
#
# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import logging
import os
from edk2toolext.environment.plugintypes.ci_build_plugin import ICiBuildPlugin
from edk2toollib.uefi.edk2.parsers.dsc_parser import DscParser
from edk2toollib.uefi.edk2.parsers.inf_parser import InfParser
from edk2toolext.environment.var_dict import VarDict


class DscCompleteCheck(ICiBuildPlugin):
    """
    A CiBuildPlugin that scans the package dsc file and confirms all modules (inf files) are
    listed in the components sections.

    Configuration options:
    "DscCompleteCheck": {
        "DscPath": "<path to dsc from root of pkg>",
        "IgnoreInf": []  # Ignore INF if found in filesystem but not dsc
    }
    """

    def GetTestName(self, packagename: str, environment: VarDict) -> tuple:
        """ Provide the testcase name and classname for use in reporting

            Args:
              packagename: string containing name of package to build
              environment: The VarDict for the test to run in
            Returns:
                a tuple containing the testcase name and the classname
                (testcasename, classname)
                testclassname: a descriptive string for the testcase can include whitespace
                classname: should be patterned <packagename>.<plugin>.<optionally any unique condition>
        """
        return ("Check the " + packagename + " DSC for being complete", packagename + ".DscCompleteCheck")

    ##
    # External function of plugin. This function is used to perform the task of the MuBuild Plugin
    #
    #   - package is the edk2 path to package. This means workspace/packagepath relative.
    #   - edk2path object configured with workspace and packages path
    #   - PkgConfig Object (dict) for the pkg
    #   - VarDict containing the shell environment Build Vars
    #   - Plugin Manager Instance
    #   - Plugin Helper Obj Instance
    #   - Junit Logger
    #   - output_stream the StringIO output stream from this plugin via logging
    def RunBuildPlugin(self, packagename, Edk2pathObj, pkgconfig, environment, PLM, PLMHelper, tc, output_stream=None):
        overall_status = 0

        # Parse the config for required DscPath element
        if "DscPath" not in pkgconfig:
            tc.SetSkipped()
            tc.LogStdError(
                "DscPath not found in config file. Nothing to check.")
            return -1

        abs_pkg_path = Edk2pathObj.GetAbsolutePathOnThisSystemFromEdk2RelativePath(
            packagename)
        abs_dsc_path = os.path.join(abs_pkg_path, pkgconfig["DscPath"].strip())
        wsr_dsc_path = Edk2pathObj.GetEdk2RelativePathFromAbsolutePath(
            abs_dsc_path)

        if abs_dsc_path is None or wsr_dsc_path == "" or not os.path.isfile(abs_dsc_path):
            tc.SetSkipped()
            tc.LogStdError("Package Dsc not found")
            return 0

        # Get INF Files
        INFFiles = self.WalkDirectoryForExtension([".inf"], abs_pkg_path)
        INFFiles = [Edk2pathObj.GetEdk2RelativePathFromAbsolutePath(
            x) for x in INFFiles]  # make edk2relative path so can compare with DSC

        # remove ignores

        if "IgnoreInf" in pkgconfig:
            for a in pkgconfig["IgnoreInf"]:
                a = a.replace(os.sep, "/")
                try:
                    tc.LogStdOut("Ignoring INF {0}".format(a))
                    INFFiles.remove(a)
                except ValueError:
                    tc.LogStdError(
                        "DscCompleteCheck.IgnoreInf -> {0} not found in filesystem. Invalid ignore file".format(a))
                    logging.info(
                        "DscCompleteCheck.IgnoreInf -> {0} not found in filesystem. Invalid ignore file".format(a))

        # DSC Parser
        dp = DscParser()
        dp.SetBaseAbsPath(Edk2pathObj.WorkspacePath)
        dp.SetPackagePaths(Edk2pathObj.PackagePathList)
        dp.SetInputVars(environment.GetAllBuildKeyValues())
        dp.ParseFile(wsr_dsc_path)

        # Check if INF in component section
        for INF in INFFiles:
            if not any(INF.strip() in x for x in dp.ThreeMods) and \
               not any(INF.strip() in x for x in dp.SixMods) and \
               not any(INF.strip() in x for x in dp.OtherMods):

                infp = InfParser().SetBaseAbsPath(Edk2pathObj.WorkspacePath)
                infp.SetPackagePaths(Edk2pathObj.PackagePathList)
                infp.ParseFile(INF)
                if("MODULE_TYPE" not in infp.Dict):
                    tc.LogStdOut(
                        "Ignoring INF. Missing key for MODULE_TYPE {0}".format(INF))
                    continue

                if(infp.Dict["MODULE_TYPE"] == "HOST_APPLICATION"):
                    tc.LogStdOut(
                        "Ignoring INF. Module type is HOST_APPLICATION {0}".format(INF))
                    continue

                if len(infp.SupportedPhases) == 1 and \
                   "HOST_APPLICATION" in infp.SupportedPhases:
                    tc.LogStdOut(
                        "Ignoring Library INF due to only supporting type HOST_APPLICATION {0}".format(INF))
                    continue

                logging.critical(INF + " not in " + wsr_dsc_path)
                tc.LogStdError("{0} not in {1}".format(INF, wsr_dsc_path))
                overall_status = overall_status + 1

        # If XML object exists, add result
        if overall_status != 0:
            tc.SetFailed("DscCompleteCheck {0} Failed. Errors {1}".format(
                wsr_dsc_path, overall_status), "CHECK_FAILED")
        else:
            tc.SetSuccess()
        return overall_status
@@ -1,12 +0,0 @@
## @file
# CiBuildPlugin used to confirm all INFs are listed in
# the components section of the package dsc
#
# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
{
    "scope": "cibuild",
    "name": "Dsc Complete Check Test",
    "module": "DscCompleteCheck"
}
@@ -1,32 +0,0 @@
# Dsc Complete Check Plugin

This CiBuildPlugin scans all INF files from a package and confirms they are
listed in the package level DSC file. The test considers it an error if any INF
does not appear in the `Components` section of the package-level DSC (indicating
that it would not be built if the package were built). This is critical because
much of the CI infrastructure assumes that all modules will be listed in the DSC
and compiled.

This test will ignore INFs in the following cases:

1. When MODULE_TYPE = HOST_APPLICATION
2. When a Library instance **only** supports the HOST_APPLICATION environment

## Configuration

The plugin has a few configuration options to support the UEFI codebase.

``` yaml
"DscCompleteCheck": {
    "DscPath": "",      # Path to dsc from root of package
    "IgnoreInf": []     # Ignore INF if found in filesystem but not dsc
}
```

### DscPath

Package-relative path to the DSC file that is treated as the platform DSC.

### IgnoreInf

Ignore the error if an INF file is not listed in the DSC file.
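A simplified, self-contained sketch of the completeness idea described above (the real plugin uses `DscParser`/`InfParser` and skips HOST_APPLICATION modules, as shown in the code):

```python
from pathlib import Path

def find_unlisted_infs(pkg_root: str, dsc_file: str) -> list:
    """Return package INF paths that the DSC text never references.

    Simplified illustration only: it does a plain text containment check
    rather than parsing the DSC Components sections.
    """
    pkg = Path(pkg_root)
    dsc_text = (pkg / dsc_file).read_text(errors="ignore")
    infs = [p.relative_to(pkg).as_posix() for p in pkg.rglob("*.inf")]
    return [inf for inf in infs if inf not in dsc_text]
```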
@@ -1,414 +0,0 @@
# @file EccCheck.py
#
# Copyright (c) 2021, Arm Limited. All rights reserved.<BR>
# Copyright (c) 2020, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##

import os
import shutil
import re
import csv
import xml.dom.minidom
from typing import List, Dict, Tuple
import logging
from io import StringIO
from edk2toolext.environment import shell_environment
from edk2toolext.environment.plugintypes.ci_build_plugin import ICiBuildPlugin
from edk2toolext.environment.var_dict import VarDict
from edk2toollib.utility_functions import RunCmd


class EccCheck(ICiBuildPlugin):
    """
    A CiBuildPlugin that finds the Ecc issues of newly added code in pull request.

    Configuration options:
    "EccCheck": {
        "ExceptionList": [],
        "IgnoreFiles": []
    },
    """

    FindModifyFile = re.compile(r'\+\+\+ b\/(.*)')
    LineScopePattern = (r'@@ -\d*\,*\d* \+\d*\,*\d* @@.*')
    LineNumRange = re.compile(r'@@ -\d*\,*\d* \+(\d*)\,*(\d*) @@.*')

    def GetTestName(self, packagename: str, environment: VarDict) -> tuple:
        """ Provide the testcase name and classname for use in reporting
            testclassname: a descriptive string for the testcase can include whitespace
            classname: should be patterned <packagename>.<plugin>.<optionally any unique condition>

            Args:
              packagename: string containing name of package to build
              environment: The VarDict for the test to run in
            Returns:
                a tuple containing the testcase name and the classname
                (testcasename, classname)
        """
        return ("Check for efi coding style for " + packagename, packagename + ".EccCheck")

    ##
    # External function of plugin. This function is used to perform the task of the ci_build_plugin Plugin
    #
    #   - package is the edk2 path to package. This means workspace/packagepath relative.
    #   - edk2path object configured with workspace and packages path
    #   - PkgConfig Object (dict) for the pkg
    #   - EnvConfig Object
    #   - Plugin Manager Instance
    #   - Plugin Helper Obj Instance
    #   - Junit Logger
    #   - output_stream the StringIO output stream from this plugin via logging
    def RunBuildPlugin(self, packagename, Edk2pathObj, pkgconfig, environment, PLM, PLMHelper, tc, output_stream=None):
        workspace_path = Edk2pathObj.WorkspacePath
        basetools_path = environment.GetValue("EDK_TOOLS_PATH")
        python_path = os.path.join(basetools_path, "Source", "Python")
        env = shell_environment.GetEnvironment()
        env.set_shell_var('PYTHONPATH', python_path)
        env.set_shell_var('WORKSPACE', workspace_path)
        env.set_shell_var('PACKAGES_PATH', os.pathsep.join(Edk2pathObj.PackagePathList))
        self.ECC_PASS = True

        abs_pkg_path = Edk2pathObj.GetAbsolutePathOnThisSystemFromEdk2RelativePath(packagename)

        if abs_pkg_path is None:
            tc.SetSkipped()
            tc.LogStdError("No Package folder {0}".format(abs_pkg_path))
            return 0

        # Create temp directory
        temp_path = os.path.join(workspace_path, 'Build', '.pytool', 'Plugin', 'EccCheck')
        try:
            # Delete temp directory
            if os.path.exists(temp_path):
                shutil.rmtree(temp_path)
            # Copy package being scanned to temp_path
            shutil.copytree(
                abs_pkg_path,
                os.path.join(temp_path, packagename),
                symlinks=True
                )
            # Copy exception.xml to temp_path
            shutil.copyfile(
                os.path.join(basetools_path, "Source", "Python", "Ecc", "exception.xml"),
                os.path.join(temp_path, "exception.xml")
                )
            # Output file to use for git diff operations
            temp_diff_output = os.path.join(temp_path, 'diff.txt')

            self.ApplyConfig(pkgconfig, temp_path, packagename)
            modify_dir_list = self.GetModifyDir(packagename, temp_diff_output)
            patch = self.GetDiff(packagename, temp_diff_output)
            ecc_diff_range = self.GetDiffRange(patch, packagename, temp_path)
            #
            # Use temp_path as working directory when running ECC tool
            #
            self.GenerateEccReport(modify_dir_list, ecc_diff_range, temp_path, basetools_path)
            ecc_log = os.path.join(temp_path, "Ecc.log")
            if self.ECC_PASS:
                # Delete temp directory
                if os.path.exists(temp_path):
                    shutil.rmtree(temp_path)
                tc.SetSuccess()
                return 0
            else:
                with open(ecc_log, encoding='utf8') as output:
                    ecc_output = output.readlines()
                    for line in ecc_output:
                        logging.error(line.strip())
                # Delete temp directory
                if os.path.exists(temp_path):
                    shutil.rmtree(temp_path)
                tc.SetFailed("EccCheck failed for {0}".format(packagename), "CHECK FAILED")
                return 1
        except KeyboardInterrupt:
            # If EccCheck is interrupted by keyboard interrupt, then return failure
            # Delete temp directory
            if os.path.exists(temp_path):
                shutil.rmtree(temp_path)
            tc.SetFailed("EccCheck interrupted for {0}".format(packagename), "CHECK FAILED")
            return 1
        except Exception:
            # If EccCheck fails for any other exception type, raise the exception
            # Delete temp directory
            if os.path.exists(temp_path):
                shutil.rmtree(temp_path)
            tc.SetFailed("EccCheck exception for {0}".format(packagename), "CHECK FAILED")
            raise

    def GetDiff(self, pkg: str, temp_diff_output: str) -> List[str]:
        patch = []
        #
        # Generate unified diff between origin/master and HEAD.
        #
        params = "diff --output={} --unified=0 origin/master HEAD".format(temp_diff_output)
        RunCmd("git", params)
        with open(temp_diff_output) as file:
            patch = file.read().strip().split('\n')
        return patch

    def GetModifyDir(self, pkg: str, temp_diff_output: str) -> List[str]:
        #
        # Generate diff between origin/master and HEAD using --diff-filter to
        # exclude deleted and renamed files that do not need to be scanned by
        # ECC. Also use --name-status to only generate the names of the files
        # with differences. The output format of this git diff command is a
        # list of files with the change status and the filename. The filename
        # is always at the end of the line. Examples:
        #
        #   M       MdeModulePkg/Application/CapsuleApp/CapsuleApp.h
        #   M       MdeModulePkg/Application/UiApp/FrontPage.h
        #
        params = "diff --output={} --diff-filter=dr --name-status origin/master HEAD".format(temp_diff_output)
        RunCmd("git", params)
        dir_list = []
        with open(temp_diff_output) as file:
            dir_list = file.read().strip().split('\n')

        modify_dir_list = []
        for modify_dir in dir_list:
            #
            # Parse file name from the end of the line
            #
            file_path = modify_dir.strip().split()
            #
            # Skip lines that do not have at least 2 elements (status and file name)
            #
            if len(file_path) < 2:
                continue
            #
            # Parse the directory name from the file name
            #
            file_dir = os.path.dirname(file_path[-1])
            #
            # Skip directory names that do not start with the package being scanned.
            #
            if file_dir.split('/')[0] != pkg:
                continue
            #
            # Skip directory names that are identical to the package being scanned.
            # The assumption here is that there are no source files at the package
            # root. Instead, the only expected files in the package root are
            # EDK II meta data files (DEC, DSC, FDF).
            #
            if file_dir == pkg:
                continue
            #
            # Skip directory names that are already in the modified dir list
            #
            if file_dir in modify_dir_list:
                continue
            #
            # Add the candidate directory to scan to the modified dir list
            #
            modify_dir_list.append(file_dir)

        #
        # Remove duplicates from modify_dir_list
        # Given a folder path, ECC performs a recursive scan of that folder.
        # If a parent and child folder are both present in modify_dir_list,
        # then ECC will perform redundant scans of source files. In order
        # to prevent redundant scans, if a parent and child folder are both
        # present, then remove all the child folders.
        #
        # For example, if modified_dir_list contains the following elements:
        #   MdeModulePkg/Core/Dxe
        #   MdeModulePkg/Core/Dxe/Hand
        #   MdeModulePkg/Core/Dxe/Mem
        #
        # Then MdeModulePkg/Core/Dxe/Hand and MdeModulePkg/Core/Dxe/Mem should
        # be removed because the files in those folders are covered by a scan
        # of MdeModulePkg/Core/Dxe.
        #
        filtered_list = []
        for dir1 in modify_dir_list:
            Append = True
            for dir2 in modify_dir_list:
                if dir1 == dir2:
                    continue
                common = os.path.commonpath([dir1, dir2])
                if os.path.normpath(common) == os.path.normpath(dir2):
                    Append = False
                    break
            if Append and dir1 not in filtered_list:
                filtered_list.append(dir1)
        return filtered_list

    def GetDiffRange(self, patch_diff: List[str], pkg: str, temp_path: str) -> Dict[str, List[Tuple[int, int]]]:
        IsDelete = True
        StartCheck = False
        range_directory: Dict[str, List[Tuple[int, int]]] = {}
        for line in patch_diff:
            modify_file = self.FindModifyFile.findall(line)
            if modify_file and pkg in modify_file[0] and not StartCheck and os.path.isfile(modify_file[0]):
                modify_file_comment_dic = self.GetCommentRange(modify_file[0], temp_path)
                IsDelete = False
                StartCheck = True
                modify_file_dic = modify_file[0]
                modify_file_dic = modify_file_dic.replace("/", os.sep)
                range_directory[modify_file_dic] = []
            elif line.startswith('--- '):
                StartCheck = False
            elif re.match(self.LineScopePattern, line, re.I) and not IsDelete and StartCheck:
                start_line = self.LineNumRange.search(line).group(1)
                line_range = self.LineNumRange.search(line).group(2)
                if not line_range:
                    line_range = '1'
                range_directory[modify_file_dic].append((int(start_line), int(start_line) + int(line_range) - 1))
                for i in modify_file_comment_dic:
                    if int(i[0]) <= int(start_line) <= int(i[1]):
                        range_directory[modify_file_dic].append(i)
        return range_directory

    def GetCommentRange(self, modify_file: str, temp_path: str) -> List[Tuple[int, int]]:
        comment_range: List[Tuple[int, int]] = []
        modify_file_path = os.path.join(temp_path, modify_file)
        if not os.path.exists(modify_file_path):
            return comment_range
        with open(modify_file_path) as f:
            line_no = 1
            Start = False
            for line in f:
                if line.startswith('/**'):
                    start_no = line_no
                    Start = True
                if line.startswith('**/') and Start:
                    end_no = line_no
                    Start = False
                    comment_range.append((int(start_no), int(end_no)))
                line_no += 1

        if comment_range and comment_range[0][0] == 1:
            del comment_range[0]
        return comment_range

    def GenerateEccReport(self, modify_dir_list: List[str], ecc_diff_range: Dict[str, List[Tuple[int, int]]],
                          temp_path: str, basetools_path: str) -> None:
        ecc_need = False
        ecc_run = True
        config = os.path.normpath(os.path.join(basetools_path, "Source", "Python", "Ecc", "config.ini"))
        exception = os.path.normpath(os.path.join(temp_path, "exception.xml"))
        report = os.path.normpath(os.path.join(temp_path, "Ecc.csv"))
        for modify_dir in modify_dir_list:
            target = os.path.normpath(os.path.join(temp_path, modify_dir))
            logging.info('Run ECC tool for the commit in %s' % modify_dir)
            ecc_need = True
            ecc_params = "-c {0} -e {1} -t {2} -r {3}".format(config, exception, target, report)
            return_code = RunCmd("Ecc", ecc_params, workingdir=temp_path)
            if return_code != 0:
                ecc_run = False
                break
            if not ecc_run:
                logging.error('Fail to run ECC tool')
            self.ParseEccReport(ecc_diff_range, temp_path)

        if not ecc_need:
            logging.info("Doesn't need run ECC check")

        return

    def ParseEccReport(self, ecc_diff_range: Dict[str, List[Tuple[int, int]]], temp_path: str) -> None:
        ecc_log = os.path.join(temp_path, "Ecc.log")
        ecc_csv = os.path.join(temp_path, "Ecc.csv")
        row_lines = []
        ignore_error_code = self.GetIgnoreErrorCode()
        if os.path.exists(ecc_csv):
            with open(ecc_csv) as csv_file:
                reader = csv.reader(csv_file)
                for row in reader:
                    for modify_file in ecc_diff_range:
                        if modify_file in row[3]:
                            for i in ecc_diff_range[modify_file]:
                                line_no = int(row[4])
                                if i[0] <= line_no <= i[1] and row[1] not in ignore_error_code:
                                    row[0] = '\nEFI coding style error'
                                    row[1] = 'Error code: ' + row[1]
                                    row[3] = 'file: ' + row[3]
                                    row[4] = 'Line number: ' + row[4]
                                    row_line = '\n *'.join(row)
                                    row_lines.append(row_line)
                                    break
                            break
        if row_lines:
            self.ECC_PASS = False

        with open(ecc_log, 'a') as log:
            all_line = '\n'.join(row_lines)
            all_line = all_line + '\n'
            log.writelines(all_line)
        return

    def ApplyConfig(self, pkgconfig: Dict[str, List[str]], temp_path: str, pkg: str) -> None:
        if "IgnoreFiles" in pkgconfig:
            for a in pkgconfig["IgnoreFiles"]:
                a = os.path.join(temp_path, pkg, a)
                a = a.replace(os.sep, "/")

                logging.info("Ignoring Files {0}".format(a))
                if os.path.exists(a):
                    if os.path.isfile(a):
                        os.remove(a)
                    elif os.path.isdir(a):
                        shutil.rmtree(a)
                else:
                    logging.error("EccCheck.IgnoreFiles -> {0} not found in filesystem. Invalid ignore files".format(a))

        if "ExceptionList" in pkgconfig:
            exception_list = pkgconfig["ExceptionList"]
            exception_xml = os.path.join(temp_path, "exception.xml")
            try:
                logging.info("Appending exceptions")
                self.AppendException(exception_list, exception_xml)
            except Exception as e:
                logging.error("Fail to apply exceptions")
                raise e
        return

    def AppendException(self, exception_list: List[str], exception_xml: str) -> None:
        error_code_list = exception_list[::2]
        keyword_list = exception_list[1::2]
        dom_tree = xml.dom.minidom.parse(exception_xml)
        root_node = dom_tree.documentElement
        for error_code, keyword in zip(error_code_list, keyword_list):
            customer_node = dom_tree.createElement("Exception")
            keyword_node = dom_tree.createElement("KeyWord")
            keyword_node_text_value = dom_tree.createTextNode(keyword)
            keyword_node.appendChild(keyword_node_text_value)
            customer_node.appendChild(keyword_node)
            error_code_node = dom_tree.createElement("ErrorID")
            error_code_text_value = dom_tree.createTextNode(error_code)
            error_code_node.appendChild(error_code_text_value)
            customer_node.appendChild(error_code_node)
            root_node.appendChild(customer_node)
        with open(exception_xml, 'w') as f:
            dom_tree.writexml(f, indent='', addindent='', newl='\n', encoding='UTF-8')
        return

    def GetIgnoreErrorCode(self) -> set:
        """
        Below are kinds of error code that are accurate in ecc scanning of edk2 level.
        But EccCheck plugin is partial scanning so they are always false positive issues.
        The mapping relationship of error code and error message is listed in
        BaseTools/Source/Python/Ecc/EccToolError.py
        """
        ignore_error_code = {
            "10000",
            "10001",
            "10002",
            "10003",
            "10004",
            "10005",
            "10006",
            "10007",
            "10008",
            "10009",
            "10010",
            "10011",
            "10012",
            "10013",
            "10015",
            "10016",
            "10017",
            "10022",
            }
        return ignore_error_code
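To make the diff-range bookkeeping above easier to follow, here is a small self-contained illustration of the `LineNumRange` pattern applied to a sample hunk header (the header string is invented):

```python
import re

# Same pattern as EccCheck.LineNumRange above: capture the new-file start
# line and (optional) line count from a unified-diff hunk header.
LINE_NUM_RANGE = re.compile(r'@@ -\d*\,*\d* \+(\d*)\,*(\d*) @@.*')

header = "@@ -10,3 +42,5 @@ SomeFunction ()"   # example header only
match = LINE_NUM_RANGE.search(header)
start = int(match.group(1))                    # 42
count = int(match.group(2) or '1')             # 5 (defaults to 1 when omitted)
print((start, start + count - 1))              # (42, 46) -> lines ECC may flag
```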
@@ -1,11 +0,0 @@
## @file
# CiBuildPlugin used to check Ecc issues
#
# Copyright (c) 2020, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
{
    "scope": "cibuild",
    "name": "EccCheck Test",
    "module": "EccCheck"
}
@@ -1,15 +0,0 @@
# EFI Coding Style Check Plugin

This CiBuildPlugin finds the ECC issues of newly added code in a pull request.

## Configuration

The plugin can be configured to ignore certain files and issues.

"EccCheck": {
    "ExceptionList": [],
    "IgnoreFiles": []
}

### ExceptionList

OPTIONAL List of ECC exceptions to append, given as alternating error code and
keyword entries (see `AppendException` above).

### IgnoreFiles

OPTIONAL List of files to ignore.
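A hypothetical config for this plugin, shown as the Python dict `RunBuildPlugin` receives; the error code/keyword pair below is an example only:

```python
# Hypothetical EccCheck package config.  ExceptionList alternates ECC error
# codes and the keyword each exception applies to, which is how
# AppendException() above slices it ([::2] codes, [1::2] keywords).
pkgconfig = {
    "ExceptionList": [
        "8005", "UefiMain",   # example pair: error code, keyword
    ],
    "IgnoreFiles": [],
}
```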
@@ -1,251 +0,0 @@
# @file GuidCheck.py
#
# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import logging
from edk2toolext.environment.plugintypes.ci_build_plugin import ICiBuildPlugin
from edk2toollib.uefi.edk2.guid_list import GuidList
from edk2toolext.environment.var_dict import VarDict


class GuidCheck(ICiBuildPlugin):
    """
    A CiBuildPlugin that scans the code tree and looks for duplicate guids
    from the package being tested.

    Configuration options:
    "GuidCheck": {
        "IgnoreGuidName": [],  # provide in format guidname=guidvalue or just guidname
        "IgnoreGuidValue": [],
        "IgnoreFoldersAndFiles": [],
        "IgnoreDuplicates": []  # Provide in format guidname=guidname=guidname...
    }
    """

    def GetTestName(self, packagename: str, environment: VarDict) -> tuple:
        """ Provide the testcase name and classname for use in reporting

            Args:
              packagename: string containing name of package to build
              environment: The VarDict for the test to run in
            Returns:
                a tuple containing the testcase name and the classname
                (testcasename, classname)
                testclassname: a descriptive string for the testcase can include whitespace
                classname: should be patterned <packagename>.<plugin>.<optionally any unique condition>
        """
        return ("Confirm GUIDs are unique in " + packagename, packagename + ".GuidCheck")

    def _FindConflictingGuidValues(self, guidlist: list) -> list:
        """ Find all duplicate guids by guid value and report them as errors
        """
        # Sort the list by guid value
        guidsorted = sorted(
            guidlist, key=lambda x: x.guid.upper(), reverse=True)

        previous = None  # Store previous entry for comparison
        error = None
        errors = []
        for index in range(len(guidsorted)):
            i = guidsorted[index]
            if(previous is not None):
                if i.guid == previous.guid:  # Error
                    if(error is None):
                        # Catch errors with more than 1 conflict
                        error = ErrorEntry("guid")
                        error.entries.append(previous)
                        errors.append(error)
                    error.entries.append(i)
                else:
                    # no match. clear error
                    error = None
            previous = i
        return errors

    def _FindConflictingGuidNames(self, guidlist: list) -> list:
        """ Find all duplicate guids by name and if they are not all
            from inf files report them as errors. It is ok to have
            BASE_NAME duplication.

            Is this useful? It would catch two same named guids in dec file
            that resolve to different values.
        """
        # Sort the list by name
        namesorted = sorted(guidlist, key=lambda x: x.name.upper())

        previous = None  # Store previous entry for comparison
        error = None
        errors = []
        for index in range(len(namesorted)):
            i = namesorted[index]
            if(previous is not None):
                # If name matches
                if i.name == previous.name:
                    if(error is None):
                        # Catch errors with more than 1 conflict
                        error = ErrorEntry("name")
                        error.entries.append(previous)
                        errors.append(error)
                    error.entries.append(i)
                else:
                    # no match. clear error
                    error = None
            previous = i

        # Loop thru and remove any errors where all files are infs as it is ok if
        # they have the same inf base name.
        for e in errors[:]:
            if len([en for en in e.entries if not en.absfilepath.lower().endswith(".inf")]) == 0:
                errors.remove(e)

        return errors

    ##
    # External function of plugin. This function is used to perform the task of the MuBuild Plugin
    #
    #   - package is the edk2 path to package. This means workspace/packagepath relative.
|
||||
# - edk2path object configured with workspace and packages path
|
||||
# - PkgConfig Object (dict) for the pkg
|
||||
# - EnvConfig Object
|
||||
# - Plugin Manager Instance
|
||||
# - Plugin Helper Obj Instance
|
||||
# - Junit Logger
|
||||
# - output_stream the StringIO output stream from this plugin via logging
|
||||
|
||||
def RunBuildPlugin(self, packagename, Edk2pathObj, pkgconfig, environment, PLM, PLMHelper, tc, output_stream=None):
|
||||
Errors = []
|
||||
|
||||
abs_pkg_path = Edk2pathObj.GetAbsolutePathOnThisSystemFromEdk2RelativePath(
|
||||
packagename)
|
||||
|
||||
if abs_pkg_path is None:
|
||||
tc.SetSkipped()
|
||||
tc.LogStdError("No package {0}".format(packagename))
|
||||
return -1
|
||||
|
||||
All_Ignores = ["/Build", "/Conf"]
|
||||
# Parse the config for other ignores
|
||||
if "IgnoreFoldersAndFiles" in pkgconfig:
|
||||
All_Ignores.extend(pkgconfig["IgnoreFoldersAndFiles"])
|
||||
|
||||
# Parse the workspace for all GUIDs
|
||||
gs = GuidList.guidlist_from_filesystem(
|
||||
Edk2pathObj.WorkspacePath, ignore_lines=All_Ignores)
|
||||
|
||||
# Remove ignored guidvalue
|
||||
if "IgnoreGuidValue" in pkgconfig:
|
||||
for a in pkgconfig["IgnoreGuidValue"]:
|
||||
try:
|
||||
tc.LogStdOut("Ignoring Guid {0}".format(a.upper()))
|
||||
for b in gs[:]:
|
||||
if b.guid == a.upper():
|
||||
gs.remove(b)
|
||||
except:
|
||||
tc.LogStdError("GuidCheck.IgnoreGuid -> {0} not found. Invalid ignore guid".format(a.upper()))
|
||||
logging.info("GuidCheck.IgnoreGuid -> {0} not found. Invalid ignore guid".format(a.upper()))
|
||||
|
||||
# Remove ignored guidname
|
||||
if "IgnoreGuidName" in pkgconfig:
|
||||
for a in pkgconfig["IgnoreGuidName"]:
|
||||
entry = a.split("=")
|
||||
if(len(entry) > 2):
|
||||
tc.LogStdError("GuidCheck.IgnoreGuidName -> {0} Invalid Format.".format(a))
|
||||
logging.info("GuidCheck.IgnoreGuidName -> {0} Invalid Format.".format(a))
|
||||
continue
|
||||
try:
|
||||
tc.LogStdOut("Ignoring Guid {0}".format(a))
|
||||
for b in gs[:]:
|
||||
if b.name == entry[0]:
|
||||
if(len(entry) == 1):
|
||||
gs.remove(b)
|
||||
elif(len(entry) == 2 and b.guid.upper() == entry[1].upper()):
|
||||
gs.remove(b)
|
||||
else:
|
||||
c.LogStdError("GuidCheck.IgnoreGuidName -> {0} incomplete match. Invalid ignore guid".format(a))
|
||||
|
||||
except:
|
||||
tc.LogStdError("GuidCheck.IgnoreGuidName -> {0} not found. Invalid ignore name".format(a))
|
||||
logging.info("GuidCheck.IgnoreGuidName -> {0} not found. Invalid ignore name".format(a))
|
||||
|
||||
# Find conflicting Guid Values
|
||||
Errors.extend(self._FindConflictingGuidValues(gs))
|
||||
|
||||
# Check if there are expected duplicates and remove it from the error list
|
||||
if "IgnoreDuplicates" in pkgconfig:
|
||||
for a in pkgconfig["IgnoreDuplicates"]:
|
||||
names = a.split("=")
|
||||
if len(names) < 2:
|
||||
tc.LogStdError("GuidCheck.IgnoreDuplicates -> {0} invalid format".format(a))
|
||||
logging.info("GuidCheck.IgnoreDuplicates -> {0} invalid format".format(a))
|
||||
continue
|
||||
|
||||
for b in Errors[:]:
|
||||
if b.type != "guid":
|
||||
continue
|
||||
## Make a list of the names that are not in the names list. If there
|
||||
## are any in the list then this error should not be ignored.
|
||||
t = [x for x in b.entries if x.name not in names]
|
||||
if(len(t) == len(b.entries)):
|
||||
## did not apply to any entry
|
||||
continue
|
||||
elif(len(t) == 0):
|
||||
## full match - ignore duplicate
|
||||
tc.LogStdOut("GuidCheck.IgnoreDuplicates -> {0}".format(a))
|
||||
Errors.remove(b)
|
||||
elif(len(t) < len(b.entries)):
|
||||
## partial match
|
||||
tc.LogStdOut("GuidCheck.IgnoreDuplicates -> {0} incomplete match".format(a))
|
||||
logging.info("GuidCheck.IgnoreDuplicates -> {0} incomplete match".format(a))
|
||||
else:
|
||||
tc.LogStdOut("GuidCheck.IgnoreDuplicates -> {0} unknown error.".format(a))
|
||||
logging.info("GuidCheck.IgnoreDuplicates -> {0} unknown error".format(a))
|
||||
|
||||
|
||||
|
||||
# Find conflicting Guid Names
|
||||
Errors.extend(self._FindConflictingGuidNames(gs))
|
||||
|
||||
# Log errors for anything within the package under test
|
||||
for er in Errors[:]:
|
||||
InMyPackage = False
|
||||
for a in er.entries:
|
||||
if abs_pkg_path in a.absfilepath:
|
||||
InMyPackage = True
|
||||
break
|
||||
if(not InMyPackage):
|
||||
Errors.remove(er)
|
||||
else:
|
||||
logging.error(str(er))
|
||||
tc.LogStdError(str(er))
|
||||
|
||||
# add result to test case
|
||||
overall_status = len(Errors)
|
||||
if overall_status != 0:
|
||||
tc.SetFailed("GuidCheck {0} Failed. Errors {1}".format(
|
||||
packagename, overall_status), "CHECK_FAILED")
|
||||
else:
|
||||
tc.SetSuccess()
|
||||
return overall_status
|
||||
|
||||
|
||||
class ErrorEntry():
|
||||
""" Custom/private class for reporting errors in the GuidList
|
||||
"""
|
||||
|
||||
def __init__(self, errortype):
|
||||
self.type = errortype # 'guid' or 'name' depending on error type
|
||||
self.entries = [] # GuidListEntry that are in error condition
|
||||
|
||||
def __str__(self):
|
||||
a = f"Error Duplicate {self.type}: "
|
||||
if(self.type == "guid"):
|
||||
a += f" {self.entries[0].guid}"
|
||||
elif(self.type == "name"):
|
||||
a += f" {self.entries[0].name}"
|
||||
|
||||
a += f" ({len(self.entries)})\n"
|
||||
|
||||
for e in self.entries:
|
||||
a += "\t" + str(e) + "\n"
|
||||
return a
|
@ -1,11 +0,0 @@
|
||||
## @file
|
||||
# CiBuildPlugin used to check guid uniqueness
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
{
|
||||
"scope": "cibuild",
|
||||
"name": "Guid Check Test",
|
||||
"module": "GuidCheck"
|
||||
}
|
@ -1,80 +0,0 @@
|
||||
# Guid Check Plugin
|
||||
|
||||
This CiBuildPlugin scans all the files in a code tree to find all the GUID
|
||||
definitions. After collection it will then look for duplication in the package
|
||||
under test. Uniqueness of all GUIDs is critical within the UEFI environment.
|
||||
Duplication can cause numerous issues including locating the wrong data
|
||||
structure, calling the wrong function, or decoding the wrong data members.
|
||||
|
||||
Currently Scanned:
|
||||
|
||||
* INF files are scanned for their Module GUID
|
||||
* DEC files are scanned for all of their Protocols, PPIs, and Guids as well as
|
||||
the one package GUID.
|
||||
|
||||
Any GUID value being equal to two names or even just defined in two files is
|
||||
considered an error unless in the ignore list.
|
||||
|
||||
Any GUID name that is found more than once is an error unless all occurrences
|
||||
are Module GUIDs. Since the Module GUID is assigned to the Module name it is
|
||||
common to have numerous versions of the same module that share the same name.
|
||||
|
||||
## Configuration
|
||||
|
||||
The plugin has numerous configuration options to support the UEFI codebase.
|
||||
|
||||
``` yaml
|
||||
"GuidCheck": {
|
||||
"IgnoreGuidName": [],
|
||||
"IgnoreGuidValue": [],
|
||||
"IgnoreFoldersAndFiles": [],
|
||||
"IgnoreDuplicates": []
|
||||
}
|
||||
```
|
||||
|
||||
### IgnoreGuidName
|
||||
|
||||
This list allows strings in two formats.
|
||||
|
||||
* _GuidName_
|
||||
* This will remove any entry with this GuidName from the list of GUIDs
|
||||
therefore ignoring any error associated with this name.
|
||||
* _GuidName=GuidValue_
|
||||
* This will also ignore the GUID by name but only if the value equals the
|
||||
GuidValue.
|
||||
* GuidValue should be in registry format.
|
||||
* This is the suggested format to use as it will limit the ignore to only the
|
||||
defined case.
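As a rough sketch of how the two formats above are interpreted (following the ignore handling in GuidCheck.py in this diff), an entry is split on `=` and matched by name, and optionally also by value. The GUID name and value are invented examples.

```python
# Sketch: interpret an IgnoreGuidName entry; the name and value are examples.
entry = "gExampleIgnoredGuid=11111111-2222-3333-4444-555555555555"

parts = entry.split("=")
if len(parts) > 2:
    print("Invalid format")                       # more than one '=' is rejected
elif len(parts) == 1:
    print(f"Ignore every GUID named {parts[0]}")  # name-only form
else:
    name, value = parts
    # the registry-format value must also match (comparison is case-insensitive)
    print(f"Ignore {name} only when its value is {value.upper()}")
```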
|
||||
|
||||
### IgnoreGuidValue
|
||||
|
||||
This list allows strings in guid registry format _GuidValue_.
|
||||
|
||||
* This will remove any entry with this GuidValue from the list of GUIDs
|
||||
therefore ignoring any error associated with this value.
|
||||
* GuidValue must be in registry format xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
|
||||
|
||||
### IgnoreFoldersAndFiles
|
||||
|
||||
This supports .gitignore file and folder matching strings including wildcards
|
||||
|
||||
* Any folder or file ignored will not be parsed and therefore any GUID defined
|
||||
will be ignored.
|
||||
* The plugin always ignores the following: ["/Build", "/Conf"]
|
||||
|
||||
### IgnoreDuplicates
|
||||
|
||||
This supports strings in the format of _GuidName_=_GuidName_=_GuidName_
|
||||
|
||||
* For the error with the GuidNames to be ignored the list must match completely
|
||||
with what is found during the code scan.
|
||||
* For example if there are two GUIDs that are by design equal within the code
|
||||
tree then it should be _GuidName_=_GuidName_
|
||||
* If instead there are three GUIDs then it must be
|
||||
_GuidName_=_GuidName_=_GuidName_
|
||||
* This is the best ignore list to use because it is the most strict and will
|
||||
catch new problems when new conflicts are introduced.
|
||||
* There are numerous places in the UEFI specification in which two GUID names
|
||||
are assigned the same value. These names should be set in this ignore list so
|
||||
that they don't cause an error but any additional duplication would still be
|
||||
caught.
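The short sketch below restates the matching rule applied by RunBuildPlugin above: an `IgnoreDuplicates` entry only suppresses a duplicate-GUID error when its names cover every entry in that duplicate group. The GUID names are invented.

```python
# Sketch: decide whether an IgnoreDuplicates entry suppresses a reported
# duplicate group; names are invented examples.
ignore_entry = "gExampleGuidA=gExampleGuidB"
names = ignore_entry.split("=")

duplicate_group = ["gExampleGuidA", "gExampleGuidB"]   # names sharing one GUID value

unmatched = [n for n in duplicate_group if n not in names]
if not unmatched:
    print("full match - duplicate ignored")
elif len(unmatched) == len(duplicate_group):
    print("entry does not apply to this duplicate")
else:
    print("incomplete match - duplicate still reported")
```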
|
@ -1,149 +0,0 @@
|
||||
# @file HostUnitTestCompilerPlugin.py
|
||||
##
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from edk2toollib.uefi.edk2.parsers.dsc_parser import DscParser
|
||||
from edk2toolext.environment.plugintypes.ci_build_plugin import ICiBuildPlugin
|
||||
from edk2toolext.environment.uefi_build import UefiBuilder
|
||||
from edk2toolext import edk2_logging
|
||||
from edk2toolext.environment.var_dict import VarDict
|
||||
from edk2toollib.utility_functions import GetHostInfo
|
||||
|
||||
|
||||
class HostUnitTestCompilerPlugin(ICiBuildPlugin):
|
||||
"""
|
||||
A CiBuildPlugin that compiles the dsc for host based unit test apps.
|
||||
An IUefiBuildPlugin may be attached to this plugin that will run the
|
||||
unit tests and collect the results after successful compilation.
|
||||
|
||||
Configuration options:
|
||||
"HostUnitTestCompilerPlugin": {
|
||||
"DscPath": "<path to dsc from root of pkg>"
|
||||
}
|
||||
"""
|
||||
|
||||
def GetTestName(self, packagename: str, environment: VarDict) -> tuple:
|
||||
""" Provide the testcase name and classname for use in reporting
|
||||
testclassname: a descriptive string for the testcase can include whitespace
|
||||
classname: should be patterned <packagename>.<plugin>.<optionally any unique condition>
|
||||
|
||||
Args:
|
||||
packagename: string containing name of package to build
|
||||
environment: The VarDict for the test to run in
|
||||
Returns:
|
||||
a tuple containing the testcase name and the classname
|
||||
(testcasename, classname)
|
||||
"""
|
||||
num,types = self.__GetHostUnitTestArch(environment)
|
||||
types = types.replace(" ", "_")
|
||||
|
||||
return ("Compile and Run Host-Based UnitTests for " + packagename + " on arch " + types,
|
||||
packagename + ".HostUnitTestCompiler." + types)
|
||||
|
||||
def RunsOnTargetList(self):
|
||||
return ["NOOPT"]
|
||||
|
||||
#
|
||||
# Find the intersection of application types that can run on this host
|
||||
# and the TARGET_ARCH being build in this request.
|
||||
#
|
||||
# return tuple with (number of UEFI arch types, space separated string)
|
||||
def __GetHostUnitTestArch(self, environment):
|
||||
requested = environment.GetValue("TARGET_ARCH").split(' ')
|
||||
host = []
|
||||
if GetHostInfo().arch == 'x86':
|
||||
#assume 64bit can handle 64 and 32
|
||||
#assume 32bit can only handle 32
|
||||
## change once IA32 issues resolved host.append("IA32")
|
||||
if GetHostInfo().bit == '64':
|
||||
host.append("X64")
|
||||
elif GetHostInfo().arch == 'ARM':
|
||||
if GetHostInfo().bit == '64':
|
||||
host.append("AARCH64")
|
||||
elif GetHostInfo().bit == '32':
|
||||
host.append("ARM")
|
||||
|
||||
willrun = set(requested) & set(host)
|
||||
return (len(willrun), " ".join(willrun))
|
||||
|
||||
|
||||
##
|
||||
# External function of plugin. This function is used to perform the task of the ICiBuildPlugin Plugin
|
||||
#
|
||||
# - package is the edk2 path to package. This means workspace/packagepath relative.
|
||||
# - edk2path object configured with workspace and packages path
|
||||
# - PkgConfig Object (dict) for the pkg
|
||||
# - EnvConfig Object
|
||||
# - Plugin Manager Instance
|
||||
# - Plugin Helper Obj Instance
|
||||
# - Junit Logger
|
||||
# - output_stream the StringIO output stream from this plugin via logging
|
||||
def RunBuildPlugin(self, packagename, Edk2pathObj, pkgconfig, environment, PLM, PLMHelper, tc, output_stream=None):
|
||||
self._env = environment
|
||||
environment.SetValue("CI_BUILD_TYPE", "host_unit_test", "Set in HostUnitTestCompilerPlugin")
|
||||
|
||||
# Parse the config for required DscPath element
|
||||
if "DscPath" not in pkgconfig:
|
||||
tc.SetSkipped()
|
||||
tc.LogStdError("DscPath not found in config file. Nothing to compile for HostBasedUnitTests.")
|
||||
return -1
|
||||
|
||||
AP = Edk2pathObj.GetAbsolutePathOnThisSystemFromEdk2RelativePath(packagename)
|
||||
|
||||
APDSC = os.path.join(AP, pkgconfig["DscPath"].strip())
|
||||
AP_Path = Edk2pathObj.GetEdk2RelativePathFromAbsolutePath(APDSC)
|
||||
if AP is None or AP_Path is None or not os.path.isfile(APDSC):
|
||||
tc.SetSkipped()
|
||||
tc.LogStdError("Package HostBasedUnitTest Dsc not found.")
|
||||
return -1
|
||||
|
||||
logging.info("Building {0}".format(AP_Path))
|
||||
self._env.SetValue("ACTIVE_PLATFORM", AP_Path, "Set in Compiler Plugin")
|
||||
num, RUNNABLE_ARCHITECTURES = self.__GetHostUnitTestArch(environment)
|
||||
if(num == 0):
|
||||
tc.SetSkipped()
|
||||
tc.LogStdError("No host architecture compatibility")
|
||||
return -1
|
||||
|
||||
if not environment.SetValue("TARGET_ARCH",
|
||||
RUNNABLE_ARCHITECTURES,
|
||||
"Update Target Arch based on Host Support"):
|
||||
#use AllowOverride function since this is a controlled attempt to change
|
||||
environment.AllowOverride("TARGET_ARCH")
|
||||
if not environment.SetValue("TARGET_ARCH",
|
||||
RUNNABLE_ARCHITECTURES,
|
||||
"Update Target Arch based on Host Support"):
|
||||
raise RuntimeError("Can't Change TARGET_ARCH as required")
|
||||
|
||||
# Parse DSC to check for SUPPORTED_ARCHITECTURES
|
||||
dp = DscParser()
|
||||
dp.SetBaseAbsPath(Edk2pathObj.WorkspacePath)
|
||||
dp.SetPackagePaths(Edk2pathObj.PackagePathList)
|
||||
dp.ParseFile(AP_Path)
|
||||
if "SUPPORTED_ARCHITECTURES" in dp.LocalVars:
|
||||
SUPPORTED_ARCHITECTURES = dp.LocalVars["SUPPORTED_ARCHITECTURES"].split('|')
|
||||
TARGET_ARCHITECTURES = environment.GetValue("TARGET_ARCH").split(' ')
|
||||
|
||||
# Skip if there is no intersection between SUPPORTED_ARCHITECTURES and TARGET_ARCHITECTURES
|
||||
if len(set(SUPPORTED_ARCHITECTURES) & set(TARGET_ARCHITECTURES)) == 0:
|
||||
tc.SetSkipped()
|
||||
tc.LogStdError("No supported architecutres to build for host unit tests")
|
||||
return -1
|
||||
|
||||
uefiBuilder = UefiBuilder()
|
||||
# do all the steps
|
||||
# WorkSpace, PackagesPath, PInHelper, PInManager
|
||||
ret = uefiBuilder.Go(Edk2pathObj.WorkspacePath, os.pathsep.join(Edk2pathObj.PackagePathList), PLMHelper, PLM)
|
||||
if ret != 0: # failure:
|
||||
tc.SetFailed("Compile failed for {0}".format(packagename), "Compile_FAILED")
|
||||
tc.LogStdError("{0} Compile failed with error code {1} ".format(AP_Path, ret))
|
||||
return 1
|
||||
|
||||
else:
|
||||
tc.SetSuccess()
|
||||
return 0
|
@ -1,12 +0,0 @@
|
||||
##
|
||||
# CiBuildPlugin used to build anything that identifies
|
||||
# as a unit test.
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
{
|
||||
"scope": "host-based-test",
|
||||
"name": "Host Unit Test Compiler Plugin",
|
||||
"module": "HostUnitTestCompilerPlugin"
|
||||
}
|
@ -1,24 +0,0 @@
|
||||
# Host UnitTest Compiler Plugin
|
||||
|
||||
A CiBuildPlugin that compiles the dsc for host based unit test apps.
|
||||
An IUefiBuildPlugin may be attached to this plugin that will run the unit tests and collect the results after successful compilation.
|
||||
|
||||
## Configuration
|
||||
|
||||
The package relative path of the DSC file to build.
|
||||
|
||||
``` yaml
|
||||
"HostUnitTestCompilerPlugin": {
|
||||
"DscPath": "<path to dsc from root of pkg>"
|
||||
}
|
||||
```
|
||||
|
||||
### DscPath
|
||||
|
||||
Package relative path to the DSC file to build.
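As a small sketch (paths are placeholders), the plugin joins `DscPath` onto the absolute package path and then maps the result back to an edk2-relative path before building, as in RunBuildPlugin above.

```python
import os

# Sketch of DscPath resolution; the package and DSC names are placeholders.
abs_pkg_path = os.path.abspath("ExamplePkg")
dsc_path_config = "Test/ExamplePkgHostTest.dsc"

abs_dsc_path = os.path.join(abs_pkg_path, dsc_path_config.strip())
print(abs_dsc_path)   # then converted back to an edk2-relative path for the build
```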
|
||||
|
||||
## Copyright
|
||||
|
||||
Copyright (c) Microsoft Corporation.
|
||||
SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
|
@ -1,141 +0,0 @@
|
||||
# @file HostUnitTestDscCompleteCheck.py
|
||||
#
|
||||
# This is a copy of DscCompleteCheck with different filtering logic.
|
||||
# It should be discussed whether this should be a single plugin
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
import logging
|
||||
import os
|
||||
from edk2toolext.environment.plugintypes.ci_build_plugin import ICiBuildPlugin
|
||||
from edk2toollib.uefi.edk2.parsers.dsc_parser import DscParser
|
||||
from edk2toollib.uefi.edk2.parsers.inf_parser import InfParser, AllPhases
|
||||
from edk2toolext.environment.var_dict import VarDict
|
||||
|
||||
|
||||
class HostUnitTestDscCompleteCheck(ICiBuildPlugin):
|
||||
"""
|
||||
A CiBuildPlugin that scans the package Host Unit Test dsc file and confirms all Host application modules (inf files) are
|
||||
listed in the components sections.
|
||||
|
||||
Configuration options:
|
||||
"HostUnitTestDscCompleteCheck": {
|
||||
"DscPath": "", # Path to Host based unit test DSC file
|
||||
"IgnoreInf": [] # Ignore INF if found in filesystem but not dsc
|
||||
}
|
||||
"""
|
||||
|
||||
def GetTestName(self, packagename: str, environment: VarDict) -> tuple:
|
||||
""" Provide the testcase name and classname for use in reporting
|
||||
|
||||
Args:
|
||||
packagename: string containing name of package to build
|
||||
environment: The VarDict for the test to run in
|
||||
Returns:
|
||||
a tuple containing the testcase name and the classname
|
||||
(testcasename, classname)
|
||||
testclassname: a descriptive string for the testcase can include whitespace
|
||||
classname: should be patterned <packagename>.<plugin>.<optionally any unique condition>
|
||||
"""
|
||||
return ("Check the " + packagename + " Host Unit Test DSC for a being complete", packagename + ".HostUnitTestDscCompleteCheck")
|
||||
|
||||
##
|
||||
# External function of plugin. This function is used to perform the task of the MuBuild Plugin
|
||||
#
|
||||
# - package is the edk2 path to package. This means workspace/packagepath relative.
|
||||
# - edk2path object configured with workspace and packages path
|
||||
# - PkgConfig Object (dict) for the pkg
|
||||
# - VarDict containing the shell environment Build Vars
|
||||
# - Plugin Manager Instance
|
||||
# - Plugin Helper Obj Instance
|
||||
# - Junit Logger
|
||||
# - output_stream the StringIO output stream from this plugin via logging
|
||||
def RunBuildPlugin(self, packagename, Edk2pathObj, pkgconfig, environment, PLM, PLMHelper, tc, output_stream=None):
|
||||
overall_status = 0
|
||||
|
||||
# Parse the config for required DscPath element
|
||||
if "DscPath" not in pkgconfig:
|
||||
tc.SetSkipped()
|
||||
tc.LogStdError(
|
||||
"DscPath not found in config file. Nothing to check.")
|
||||
return -1
|
||||
|
||||
abs_pkg_path = Edk2pathObj.GetAbsolutePathOnThisSystemFromEdk2RelativePath(
|
||||
packagename)
|
||||
abs_dsc_path = os.path.join(abs_pkg_path, pkgconfig["DscPath"].strip())
|
||||
wsr_dsc_path = Edk2pathObj.GetEdk2RelativePathFromAbsolutePath(
|
||||
abs_dsc_path)
|
||||
|
||||
if abs_dsc_path is None or wsr_dsc_path == "" or not os.path.isfile(abs_dsc_path):
|
||||
tc.SetSkipped()
|
||||
tc.LogStdError("Package Host Unit Test Dsc not found")
|
||||
return 0
|
||||
|
||||
# Get INF Files
|
||||
INFFiles = self.WalkDirectoryForExtension([".inf"], abs_pkg_path)
|
||||
INFFiles = [Edk2pathObj.GetEdk2RelativePathFromAbsolutePath(
|
||||
x) for x in INFFiles] # make edk2relative path so can compare with DSC
|
||||
|
||||
# remove ignores
|
||||
|
||||
if "IgnoreInf" in pkgconfig:
|
||||
for a in pkgconfig["IgnoreInf"]:
|
||||
a = a.replace(os.sep, "/")
|
||||
try:
|
||||
tc.LogStdOut("Ignoring INF {0}".format(a))
|
||||
INFFiles.remove(a)
|
||||
except:
|
||||
tc.LogStdError(
|
||||
"HostUnitTestDscCompleteCheck.IgnoreInf -> {0} not found in filesystem. Invalid ignore file".format(a))
|
||||
logging.info(
|
||||
"HostUnitTestDscCompleteCheck.IgnoreInf -> {0} not found in filesystem. Invalid ignore file".format(a))
|
||||
|
||||
# DSC Parser
|
||||
dp = DscParser()
|
||||
dp.SetBaseAbsPath(Edk2pathObj.WorkspacePath)
|
||||
dp.SetPackagePaths(Edk2pathObj.PackagePathList)
|
||||
dp.SetInputVars(environment.GetAllBuildKeyValues())
|
||||
dp.ParseFile(wsr_dsc_path)
|
||||
|
||||
# Check if INF in component section
|
||||
for INF in INFFiles:
|
||||
if not any(INF.strip() in x for x in dp.ThreeMods) and \
|
||||
not any(INF.strip() in x for x in dp.SixMods) and \
|
||||
not any(INF.strip() in x for x in dp.OtherMods):
|
||||
|
||||
infp = InfParser().SetBaseAbsPath(Edk2pathObj.WorkspacePath)
|
||||
infp.SetPackagePaths(Edk2pathObj.PackagePathList)
|
||||
infp.ParseFile(INF)
|
||||
if("MODULE_TYPE" not in infp.Dict):
|
||||
tc.LogStdOut(
|
||||
"Ignoring INF. Missing key for MODULE_TYPE {0}".format(INF))
|
||||
continue
|
||||
|
||||
if(infp.Dict["MODULE_TYPE"] == "HOST_APPLICATION"):
|
||||
# should compile test a library that is declared type HOST_APPLICATION
|
||||
pass
|
||||
|
||||
elif (len(infp.SupportedPhases) > 0 and
|
||||
"HOST_APPLICATION" in infp.SupportedPhases and
|
||||
infp.SupportedPhases != AllPhases):
|
||||
# should compile test a library that supports HOST_APPLICATION but
|
||||
# require it to be an explicit opt-in
|
||||
pass
|
||||
|
||||
else:
|
||||
tc.LogStdOut(
|
||||
"Ignoring INF. MODULE_TYPE or suppored phases not HOST_APPLICATION {0}".format(INF))
|
||||
continue
|
||||
|
||||
logging.critical(INF + " not in " + wsr_dsc_path)
|
||||
tc.LogStdError("{0} not in {1}".format(INF, wsr_dsc_path))
|
||||
overall_status = overall_status + 1
|
||||
|
||||
# If XML object exists, add result
|
||||
if overall_status != 0:
|
||||
tc.SetFailed("HostUnitTestDscCompleteCheck {0} Failed. Errors {1}".format(
|
||||
wsr_dsc_path, overall_status), "CHECK_FAILED")
|
||||
else:
|
||||
tc.SetSuccess()
|
||||
return overall_status
|
@ -1,12 +0,0 @@
|
||||
##
|
||||
# CiBuildPlugin used to confirm all INFs are listed in
|
||||
# the components section of package dsc
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
{
|
||||
"scope": "host-based-test",
|
||||
"name": "Host Unit Test Dsc Complete Check Test",
|
||||
"module": "HostUnitTestDscCompleteCheck"
|
||||
}
|
@ -1,32 +0,0 @@
|
||||
# Host Unit Test Dsc Complete Check Plugin
|
||||
|
||||
This CiBuildPlugin scans all INF files from a package for those related to host
|
||||
based unit tests and confirms they are listed in the unit test DSC file for the package.
|
||||
The test considers it an error if any INF meeting the requirements does not appear
|
||||
in the `Components` section of the unit test DSC. This is critical because
|
||||
much of the CI infrastructure assumes that modules will be listed in the DSC
|
||||
and compiled.
|
||||
|
||||
This test will only require INFs in the following cases:
|
||||
|
||||
1. When MODULE_TYPE = HOST_APPLICATION
|
||||
2. When a Library instance supports the HOST_APPLICATION environment
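A condensed sketch of that filtering decision, following the INF checks in HostUnitTestDscCompleteCheck.py above; `ALL_PHASES` is a stand-in for the parser's marker meaning a library supports every phase.

```python
# Sketch of the rule above: an INF is required in the DSC if it is a
# HOST_APPLICATION, or a library that explicitly opts in to HOST_APPLICATION.
ALL_PHASES = ["SEC", "PEI_CORE", "DXE_DRIVER", "UEFI_APPLICATION", "HOST_APPLICATION"]  # stand-in

def requires_dsc_listing(module_type: str, supported_phases: list) -> bool:
    if module_type == "HOST_APPLICATION":
        return True
    # Library opt-in: supports HOST_APPLICATION, but not simply "all phases"
    if supported_phases and "HOST_APPLICATION" in supported_phases \
            and supported_phases != ALL_PHASES:
        return True
    return False

print(requires_dsc_listing("HOST_APPLICATION", []))          # True
print(requires_dsc_listing("BASE", ["HOST_APPLICATION"]))    # True (explicit opt-in)
```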
|
||||
|
||||
## Configuration
|
||||
|
||||
The plugin has a few configuration options to support the UEFI codebase.
|
||||
|
||||
``` yaml
|
||||
"HostUnitTestDscCompleteCheck": {
|
||||
"DscPath": "", # Path to Host based unit test DSC file
|
||||
"IgnoreInf": [] # Ignore INF if found in filesystem but not dsc
|
||||
}
|
||||
```
|
||||
|
||||
### DscPath
|
||||
|
||||
Path to DSC to consider platform dsc
|
||||
|
||||
### IgnoreInf
|
||||
|
||||
Ignore error if Inf file is not listed in DSC file
|
@ -1,153 +0,0 @@
|
||||
# @file LibraryClassCheck.py
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
import logging
|
||||
import os
|
||||
from edk2toolext.environment.plugintypes.ci_build_plugin import ICiBuildPlugin
|
||||
from edk2toollib.uefi.edk2.parsers.dec_parser import DecParser
|
||||
from edk2toollib.uefi.edk2.parsers.inf_parser import InfParser
|
||||
from edk2toolext.environment.var_dict import VarDict
|
||||
|
||||
|
||||
class LibraryClassCheck(ICiBuildPlugin):
|
||||
"""
|
||||
A CiBuildPlugin that scans the code tree and library classes for undeclared
|
||||
files
|
||||
|
||||
Configuration options:
|
||||
"LibraryClassCheck": {
|
||||
IgnoreHeaderFile: [], # Ignore a file found on disk
|
||||
IgnoreLibraryClass: [] # Ignore a declaration found in dec file
|
||||
}
|
||||
"""
|
||||
|
||||
def GetTestName(self, packagename: str, environment: VarDict) -> tuple:
|
||||
""" Provide the testcase name and classname for use in reporting
|
||||
testclassname: a descriptive string for the testcase can include whitespace
|
||||
classname: should be patterned <packagename>.<plugin>.<optionally any unique condition>
|
||||
|
||||
Args:
|
||||
packagename: string containing name of package to build
|
||||
environment: The VarDict for the test to run in
|
||||
Returns:
|
||||
a tuple containing the testcase name and the classname
|
||||
(testcasename, classname)
|
||||
"""
|
||||
return ("Check library class declarations in " + packagename, packagename + ".LibraryClassCheck")
|
||||
|
||||
def __GetPkgDec(self, rootpath):
|
||||
try:
|
||||
allEntries = os.listdir(rootpath)
|
||||
for entry in allEntries:
|
||||
if entry.lower().endswith(".dec"):
|
||||
return(os.path.join(rootpath, entry))
|
||||
except Exception:
|
||||
logging.error("Unable to find DEC for package:{0}".format(rootpath))
|
||||
|
||||
return None
|
||||
|
||||
##
|
||||
# External function of plugin. This function is used to perform the task of the MuBuild Plugin
|
||||
#
|
||||
# - package is the edk2 path to package. This means workspace/packagepath relative.
|
||||
# - edk2path object configured with workspace and packages path
|
||||
# - PkgConfig Object (dict) for the pkg
|
||||
# - EnvConfig Object
|
||||
# - Plugin Manager Instance
|
||||
# - Plugin Helper Obj Instance
|
||||
# - Junit Logger
|
||||
# - output_stream the StringIO output stream from this plugin via logging
|
||||
def RunBuildPlugin(self, packagename, Edk2pathObj, pkgconfig, environment, PLM, PLMHelper, tc, output_stream=None):
|
||||
overall_status = 0
|
||||
LibraryClassIgnore = []
|
||||
|
||||
abs_pkg_path = Edk2pathObj.GetAbsolutePathOnThisSystemFromEdk2RelativePath(packagename)
|
||||
abs_dec_path = self.__GetPkgDec(abs_pkg_path)
|
||||
wsr_dec_path = Edk2pathObj.GetEdk2RelativePathFromAbsolutePath(abs_dec_path)
|
||||
|
||||
if abs_dec_path is None or wsr_dec_path == "" or not os.path.isfile(abs_dec_path):
|
||||
tc.SetSkipped()
|
||||
tc.LogStdError("No DEC file {0} in package {1}".format(abs_dec_path, abs_pkg_path))
|
||||
return -1
|
||||
|
||||
# Get all include folders
|
||||
dec = DecParser()
|
||||
dec.SetBaseAbsPath(Edk2pathObj.WorkspacePath).SetPackagePaths(Edk2pathObj.PackagePathList)
|
||||
dec.ParseFile(wsr_dec_path)
|
||||
|
||||
AllHeaderFiles = []
|
||||
|
||||
for includepath in dec.IncludePaths:
|
||||
## Get all header files in the library folder
|
||||
AbsLibraryIncludePath = os.path.join(abs_pkg_path, includepath, "Library")
|
||||
if(not os.path.isdir(AbsLibraryIncludePath)):
|
||||
continue
|
||||
|
||||
hfiles = self.WalkDirectoryForExtension([".h"], AbsLibraryIncludePath)
|
||||
hfiles = [os.path.relpath(x,abs_pkg_path) for x in hfiles] # make package root relative path
|
||||
hfiles = [x.replace("\\", "/") for x in hfiles] # make package relative path
|
||||
|
||||
AllHeaderFiles.extend(hfiles)
|
||||
|
||||
if len(AllHeaderFiles) == 0:
|
||||
tc.SetSkipped()
|
||||
tc.LogStdError(f"No Library include folder in any Include path")
|
||||
return -1
|
||||
|
||||
# Remove ignored paths
|
||||
if "IgnoreHeaderFile" in pkgconfig:
|
||||
for a in pkgconfig["IgnoreHeaderFile"]:
|
||||
try:
|
||||
tc.LogStdOut("Ignoring Library Header File {0}".format(a))
|
||||
AllHeaderFiles.remove(a)
|
||||
except:
|
||||
tc.LogStdError("LibraryClassCheck.IgnoreHeaderFile -> {0} not found. Invalid Header File".format(a))
|
||||
logging.info("LibraryClassCheck.IgnoreHeaderFile -> {0} not found. Invalid Header File".format(a))
|
||||
|
||||
if "IgnoreLibraryClass" in pkgconfig:
|
||||
LibraryClassIgnore = pkgconfig["IgnoreLibraryClass"]
|
||||
|
||||
|
||||
## Attempt to find library classes
|
||||
for lcd in dec.LibraryClasses:
|
||||
## Check for correct file path separator
|
||||
if "\\" in lcd.path:
|
||||
tc.LogStdError("LibraryClassCheck.DecFilePathSeparator -> {0} invalid.".format(lcd.path))
|
||||
logging.error("LibraryClassCheck.DecFilePathSeparator -> {0} invalid.".format(lcd.path))
|
||||
overall_status += 1
|
||||
continue
|
||||
|
||||
if lcd.name in LibraryClassIgnore:
|
||||
tc.LogStdOut("Ignoring Library Class Name {0}".format(lcd.name))
|
||||
LibraryClassIgnore.remove(lcd.name)
|
||||
continue
|
||||
|
||||
logging.debug(f"Looking for Library Class {lcd.path}")
|
||||
try:
|
||||
AllHeaderFiles.remove(lcd.path)
|
||||
|
||||
except ValueError:
|
||||
tc.LogStdError(f"Library {lcd.name} with path {lcd.path} not found in package filesystem")
|
||||
logging.error(f"Library {lcd.name} with path {lcd.path} not found in package filesystem")
|
||||
overall_status += 1
|
||||
|
||||
## any remaining AllHeaderFiles are not described in DEC
|
||||
for h in AllHeaderFiles:
|
||||
tc.LogStdError(f"Library Header File {h} not declared in package DEC {wsr_dec_path}")
|
||||
logging.error(f"Library Header File {h} not declared in package DEC {wsr_dec_path}")
|
||||
overall_status += 1
|
||||
|
||||
## Warn about any invalid library class names in the ignore list
|
||||
for r in LibraryClassIgnore:
|
||||
tc.LogStdError("LibraryClassCheck.IgnoreLibraryClass -> {0} not found. Library Class not found".format(r))
|
||||
logging.info("LibraryClassCheck.IgnoreLibraryClass -> {0} not found. Library Class not found".format(r))
|
||||
|
||||
|
||||
# If XML object exists, add result
|
||||
if overall_status != 0:
|
||||
tc.SetFailed("LibraryClassCheck {0} Failed. Errors {1}".format(wsr_dec_path, overall_status), "CHECK_FAILED")
|
||||
else:
|
||||
tc.SetSuccess()
|
||||
return overall_status
|
@ -1,11 +0,0 @@
|
||||
## @file
|
||||
# CiBuildPlugin used to check that all library classes are declared correctly in dec file
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
{
|
||||
"scope": "cibuild",
|
||||
"name": "Library Class Check Test",
|
||||
"module": "LibraryClassCheck"
|
||||
}
|
@ -1,25 +0,0 @@
|
||||
# Library Class Check Plugin
|
||||
|
||||
This CiBuildPlugin scans all library header files found in the `Library`
|
||||
folders in all of the package's declared include directories and ensures that
|
||||
all files have a matching LibraryClass declaration in the DEC file for the
|
||||
package. Any missing declarations will cause a failure.
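A rough sketch of that comparison, following the plugin code in this diff: headers found on disk and LibraryClass declarations in the DEC are matched path-for-path, and anything left over on either side counts as an error. File and class names here are invented.

```python
# Sketch: match Library headers on disk against DEC LibraryClass paths.
# All names and paths are invented examples.
headers_on_disk = {
    "Include/Library/ExampleLib.h",
    "Include/Library/UndeclaredLib.h",
}
declared_in_dec = {
    "ExampleLib": "Include/Library/ExampleLib.h",
    "MissingOnDiskLib": "Include/Library/MissingOnDiskLib.h",
}

errors = 0
remaining = set(headers_on_disk)
for name, path in declared_in_dec.items():
    if path in remaining:
        remaining.remove(path)
    else:
        errors += 1          # declared in the DEC but no header found on disk
errors += len(remaining)     # headers on disk never declared in the DEC
print(f"{errors} library class errors")
```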
|
||||
|
||||
## Configuration
|
||||
|
||||
The plugin has a few configuration options to support the UEFI codebase.
|
||||
|
||||
``` yaml
|
||||
"LibraryClassCheck": {
|
||||
IgnoreHeaderFile: [], # Ignore a file found on disk
|
||||
IgnoreLibraryClass: [] # Ignore a declaration found in dec file
|
||||
}
|
||||
```
|
||||
|
||||
### IgnoreHeaderFile
|
||||
|
||||
Ignore a file found on disk
|
||||
|
||||
### IgnoreLibraryClass
|
||||
|
||||
Ignore a declaration found in dec file
|
@ -1,123 +0,0 @@
|
||||
# @file LicenseCheck.py
|
||||
#
|
||||
# Copyright (c) 2020, Intel Corporation. All rights reserved.<BR>
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import logging
|
||||
import re
|
||||
from io import StringIO
|
||||
from typing import List, Tuple
|
||||
from edk2toolext.environment.plugintypes.ci_build_plugin import ICiBuildPlugin
|
||||
from edk2toolext.environment.var_dict import VarDict
|
||||
from edk2toollib.utility_functions import RunCmd
|
||||
|
||||
|
||||
class LicenseCheck(ICiBuildPlugin):
|
||||
|
||||
"""
|
||||
A CiBuildPlugin to check the license for newly added files.
|
||||
|
||||
Configuration options:
|
||||
"LicenseCheck": {
|
||||
"IgnoreFiles": []
|
||||
},
|
||||
"""
|
||||
|
||||
license_format_prefix = 'SPDX-License-Identifier'
|
||||
|
||||
bsd2_patent = 'BSD-2-Clause-Patent'
|
||||
|
||||
Readdedfileformat = re.compile(r'\+\+\+ b\/(.*)')
|
||||
|
||||
file_extension_list = [".c", ".h", ".inf", ".dsc", ".dec", ".py", ".bat", ".sh", ".uni", ".yaml",
|
||||
".fdf", ".inc", "yml", ".asm", ".asm16", ".asl", ".vfr", ".s", ".S", ".aslc",
|
||||
".nasm", ".nasmb", ".idf", ".Vfr", ".H"]
|
||||
|
||||
def GetTestName(self, packagename: str, environment: VarDict) -> tuple:
|
||||
""" Provide the testcase name and classname for use in reporting
|
||||
testclassname: a descriptive string for the testcase can include whitespace
|
||||
classname: should be patterned <packagename>.<plugin>.<optionally any unique condition>
|
||||
|
||||
Args:
|
||||
packagename: string containing name of package to build
|
||||
environment: The VarDict for the test to run in
|
||||
Returns:
|
||||
a tuple containing the testcase name and the classname
|
||||
(testcasename, classname)
|
||||
"""
|
||||
return ("Check for license for " + packagename, packagename + ".LicenseCheck")
|
||||
|
||||
##
|
||||
# External function of plugin. This function is used to perform the task of the ci_build_plugin Plugin
|
||||
#
|
||||
# - package is the edk2 path to package. This means workspace/packagepath relative.
|
||||
# - edk2path object configured with workspace and packages path
|
||||
# - PkgConfig Object (dict) for the pkg
|
||||
# - EnvConfig Object
|
||||
# - Plugin Manager Instance
|
||||
# - Plugin Helper Obj Instance
|
||||
# - Junit Logger
|
||||
# - output_stream the StringIO output stream from this plugin via logging
|
||||
def RunBuildPlugin(self, packagename, Edk2pathObj, pkgconfig, environment, PLM, PLMHelper, tc, output_stream=None):
|
||||
# Create temp directory
|
||||
temp_path = os.path.join(Edk2pathObj.WorkspacePath, 'Build', '.pytool', 'Plugin', 'LicenseCheck')
|
||||
if not os.path.exists(temp_path):
|
||||
os.makedirs(temp_path)
|
||||
# Output file to use for git diff operations
|
||||
temp_diff_output = os.path.join (temp_path, 'diff.txt')
|
||||
params = "diff --output={} --unified=0 origin/master HEAD".format(temp_diff_output)
|
||||
RunCmd("git", params)
|
||||
with open(temp_diff_output) as file:
|
||||
patch = file.read().strip().split("\n")
|
||||
# Delete temp directory
|
||||
if os.path.exists(temp_path):
|
||||
shutil.rmtree(temp_path)
|
||||
|
||||
ignore_files = []
|
||||
if "IgnoreFiles" in pkgconfig:
|
||||
ignore_files = pkgconfig["IgnoreFiles"]
|
||||
|
||||
self.ok = True
|
||||
self.startcheck = False
|
||||
self.license = True
|
||||
self.all_file_pass = True
|
||||
count = len(patch)
|
||||
line_index = 0
|
||||
for line in patch:
|
||||
if line.startswith('--- /dev/null'):
|
||||
nextline = patch[line_index + 1]
|
||||
added_file = self.Readdedfileformat.search(nextline).group(1)
|
||||
added_file_extension = os.path.splitext(added_file)[1]
|
||||
if added_file_extension in self.file_extension_list and packagename in added_file:
|
||||
if (self.IsIgnoreFile(added_file, ignore_files)):
|
||||
line_index = line_index + 1
|
||||
continue
|
||||
self.startcheck = True
|
||||
self.license = False
|
||||
if self.startcheck and self.license_format_prefix in line:
|
||||
if self.bsd2_patent in line:
|
||||
self.license = True
|
||||
if line_index + 1 == count or patch[line_index + 1].startswith('diff --') and self.startcheck:
|
||||
if not self.license:
|
||||
self.all_file_pass = False
|
||||
error_message = "Invalid license in: " + added_file + " Hint: Only BSD-2-Clause-Patent is accepted."
|
||||
logging.error(error_message)
|
||||
self.startcheck = False
|
||||
self.license = True
|
||||
line_index = line_index + 1
|
||||
|
||||
if self.all_file_pass:
|
||||
tc.SetSuccess()
|
||||
return 0
|
||||
else:
|
||||
tc.SetFailed("License Check {0} Failed. ".format(packagename), "LICENSE_CHECK_FAILED")
|
||||
return 1
|
||||
|
||||
def IsIgnoreFile(self, file: str, ignore_files: List[str]) -> bool:
|
||||
for f in ignore_files:
|
||||
if f in file:
|
||||
return True
|
||||
return False
|
@ -1,11 +0,0 @@
|
||||
## @file
|
||||
# CiBuildPlugin used to check license issues for newly added files
|
||||
#
|
||||
# Copyright (c) 2020, Intel Corporation. All rights reserved.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
{
|
||||
"scope": "cibuild",
|
||||
"name": "License Check Test",
|
||||
"module": "LicenseCheck"
|
||||
}
|
@ -1,17 +0,0 @@
|
||||
# License Check Plugin
|
||||
|
||||
This CiBuildPlugin scans all newly added files in a package to make sure code
|
||||
is contributed under BSD-2-Clause-Patent.
|
||||
|
||||
## Configuration
|
||||
|
||||
The plugin can be configured to ignore certain files.
|
||||
|
||||
``` yaml
|
||||
"LicenseCheck": {
|
||||
"IgnoreFiles": []
|
||||
}
|
||||
```
|
||||
### IgnoreFiles
|
||||
|
||||
OPTIONAL List of files to ignore.
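A minimal sketch of the check itself, mirroring the SPDX prefix and identifier test in LicenseCheck.py above; the sample file header is made up.

```python
# Sketch: a newly added file passes only if its header carries the accepted
# SPDX identifier. The sample lines are illustrative.
SPDX_PREFIX = "SPDX-License-Identifier"
ACCEPTED_LICENSE = "BSD-2-Clause-Patent"

new_file_lines = [
    "## @file Example.c",
    "# Copyright (c) 2024, Example Corp.",
    "# SPDX-License-Identifier: BSD-2-Clause-Patent",
]

has_valid_license = any(
    SPDX_PREFIX in line and ACCEPTED_LICENSE in line for line in new_file_lines
)
print("license ok" if has_valid_license else "invalid or missing license")
```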
|
@ -1,127 +0,0 @@
|
||||
# Spell Check Plugin
|
||||
|
||||
This CiBuildPlugin scans all the files in a given package and checks for
|
||||
spelling errors.
|
||||
|
||||
This plugin requires NodeJs and cspell. If the plugin doesn't find its required
|
||||
tools then it will mark the test as skipped.
|
||||
|
||||
* NodeJS: https://nodejs.org/en/
|
||||
* cspell: https://www.npmjs.com/package/cspell
|
||||
* Src and doc available: https://github.com/streetsidesoftware/cspell
|
||||
|
||||
## Configuration
|
||||
|
||||
The plugin has a few configuration options to support the UEFI codebase.
|
||||
|
||||
``` yaml
|
||||
"SpellCheck": {
|
||||
"AuditOnly": False, # If True, log all errors and then mark as skipped
|
||||
"IgnoreFiles": [], # use gitignore syntax to ignore errors in matching files
|
||||
"ExtendWords": [], # words to extend to the dictionary for this package
|
||||
"IgnoreStandardPaths": [], # Standard Plugin defined paths that should be ignore
|
||||
"AdditionalIncludePaths": [] # Additional paths to spell check (wildcards supported)
|
||||
}
|
||||
```
|
||||
|
||||
### AuditOnly
|
||||
|
||||
Boolean - Default is False.
|
||||
If True, run the test in an Audit only mode which will log all errors but instead
|
||||
of failing the build it will set the test as skipped. This allows visibility
|
||||
into the failures without breaking the build.
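In other words (see the result handling at the end of SpellCheck.py in this diff), AuditOnly turns a failing result into a skip. A minimal sketch, where `tc` stands in for the JUnit test case object the plugin receives:

```python
# Sketch of AuditOnly result handling; `tc` is a stand-in for the JUnit
# test case object passed to the plugin.
def report(tc, error_count: int, audit_only: bool) -> int:
    if error_count == 0:
        tc.SetSuccess()
        return 0
    if audit_only:
        tc.SetSkipped()      # errors were logged, but the build is not failed
        return -1
    tc.SetFailed(f"SpellCheck failed with {error_count} errors", "CHECK_FAILED")
    return error_count
```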
|
||||
|
||||
### IgnoreFiles
|
||||
|
||||
This supports .gitignore file and folder matching strings including wildcards
|
||||
|
||||
* All files will be parsed regardless but then any spelling errors found within
|
||||
ignored files will not be reported as an error.
|
||||
* Errors in ignored files will still be output to the test results as
|
||||
informational comments.
|
||||
|
||||
### ExtendWords
|
||||
|
||||
This list allows words to be added to the dictionary for the spell checker when
|
||||
this package is tested. These follow the rules of the cspell config words field.
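Per the config handling in SpellCheck.py above, `ExtendWords` is appended to the `words` list of the base cspell config, which is then written out as JSON for cspell to read. A short sketch with placeholder values:

```python
import json

# Sketch: merge a package's ExtendWords into the base config and emit JSON.
# The words and output file name are placeholders.
config = {"version": "0.1", "language": "en", "words": ["tianocore", "edkii"]}
pkg_extend_words = ["examplepkg", "exampleacronym"]

config["words"].extend(pkg_extend_words)

with open("cspell_actual_config.json", "w") as out:
    json.dump(config, out)   # cspell consumes JSON, so the YAML base is re-emitted as JSON
```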
|
||||
|
||||
### IgnoreStandardPaths
|
||||
|
||||
This plugin by default will check the below standard paths. If the package
|
||||
would like to ignore any of them list that here.
|
||||
|
||||
```python
|
||||
[
|
||||
# C source
|
||||
"*.c",
|
||||
"*.h",
|
||||
|
||||
# Assembly files
|
||||
"*.nasm",
|
||||
"*.asm",
|
||||
"*.masm",
|
||||
"*.s",
|
||||
|
||||
# ACPI source language
|
||||
"*.asl",
|
||||
|
||||
# Edk2 build files
|
||||
"*.dsc", "*.dec", "*.fdf", "*.inf",
|
||||
|
||||
# Documentation files
|
||||
"*.md", "*.txt"
|
||||
]
|
||||
```
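Per the plugin code above, each `IgnoreStandardPaths` entry is removed from this default list before the quoted glob patterns are handed to cspell; a small sketch with placeholder values:

```python
# Sketch: drop ignored standard paths, then build the quoted glob patterns
# passed to cspell. The package path and ignore list are placeholders.
standard_paths = ["*.c", "*.h", "*.nasm", "*.asm", "*.md", "*.txt"]
ignore_standard_paths = ["*.md", "*.txt"]     # example ci.yaml setting

paths_to_check = [p for p in standard_paths if p not in ignore_standard_paths]
globs = " ".join(f'"ExamplePkg/**/{p}"' for p in paths_to_check)
print(globs)
```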
|
||||
|
||||
### AdditionalIncludePaths
|
||||
|
||||
If the package would like to add additional path patterns to be included in
|
||||
spellchecking they can be defined here.
|
||||
|
||||
## Other configuration
|
||||
|
||||
In the cspell.base.json there are numerous other settings configured. There is
|
||||
no support to override these on a per package basis but future features could
|
||||
make this available. One interesting configuration option is `minWordLength`.
|
||||
Currently it is set to _5_, which means all 2, 3, and 4 letter words will be
|
||||
ignored. This helps minimize the number of technical acronyms, register names,
|
||||
and other UEFI specific values that must be ignored.
|
||||
|
||||
## False positives
|
||||
|
||||
The cspell dictionary is not perfect and there are cases where technical words
|
||||
or acronyms are not found in the dictionary. There are three ways to resolve
|
||||
false positives, and the choice of method should be based on how broadly
|
||||
the word should be accepted.
|
||||
|
||||
### CSpell Base Config file
|
||||
|
||||
If the change should apply to all UEFI code and documentation then it should be
|
||||
added to the base config file `words` section. The base config file is adjacent
|
||||
to this file and titled `cspell.base.json`. This is a list of accepted words
|
||||
for all spell checking operations on all packages.
|
||||
|
||||
### Package Config
|
||||
|
||||
In the package `*.ci.yaml` file there is a `SpellCheck` config section. This
|
||||
section allows files to be ignored as well as words that should be considered
|
||||
valid for all files within this package. Add the desired words to the
|
||||
"ExtendedWords" member.
|
||||
|
||||
### In-line File
|
||||
|
||||
CSpell supports numerous methods to annotate your files to ignore words,
|
||||
sections, etc. This can be found in the CSpell documentation. The suggestion here is
|
||||
to use a c-style comment at the top of the file to add words that should be
|
||||
ignored just for this file. Obviously this has the highest maintenance cost so
|
||||
it should only be used for file unique words.
|
||||
|
||||
``` c
|
||||
// spell-checker:ignore unenroll, word2, word3
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```ini
|
||||
# spell-checker:ignore unenroll, word2, word3
|
||||
```
|
@ -1,218 +0,0 @@
|
||||
# @file SpellCheck.py
|
||||
#
|
||||
# An edk2-pytool based plugin wrapper for cspell
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
import logging
|
||||
import json
|
||||
import yaml
|
||||
from io import StringIO
|
||||
import os
|
||||
from edk2toolext.environment.plugintypes.ci_build_plugin import ICiBuildPlugin
|
||||
from edk2toollib.utility_functions import RunCmd
|
||||
from edk2toolext.environment.var_dict import VarDict
|
||||
from edk2toollib.gitignore_parser import parse_gitignore_lines
|
||||
from edk2toolext.environment import version_aggregator
|
||||
|
||||
|
||||
class SpellCheck(ICiBuildPlugin):
|
||||
"""
|
||||
A CiBuildPlugin that uses the cspell node module to scan the files
|
||||
from the package being tested for spelling errors. The plugin contains
|
||||
the base cspell.json file then thru the configuration options other settings
|
||||
can be changed or extended.
|
||||
|
||||
Configuration options:
|
||||
"SpellCheck": {
|
||||
"AuditOnly": False, # Don't fail the build if there are errors. Just log them
|
||||
"IgnoreFiles": [], # use gitignore syntax to ignore errors in matching files
|
||||
"ExtendWords": [], # words to extend to the dictionary for this package
|
||||
"IgnoreStandardPaths": [], # Standard Plugin defined paths that should be ignore
|
||||
"AdditionalIncludePaths": [] # Additional paths to spell check (wildcards supported)
|
||||
}
|
||||
"""
|
||||
|
||||
#
|
||||
# A package can remove any of these using IgnoreStandardPaths
|
||||
#
|
||||
STANDARD_PLUGIN_DEFINED_PATHS = ("*.c", "*.h",
|
||||
"*.nasm", "*.asm", "*.masm", "*.s",
|
||||
"*.asl",
|
||||
"*.dsc", "*.dec", "*.fdf", "*.inf",
|
||||
"*.md", "*.txt"
|
||||
)
|
||||
|
||||
def GetTestName(self, packagename: str, environment: VarDict) -> tuple:
|
||||
""" Provide the testcase name and classname for use in reporting
|
||||
|
||||
Args:
|
||||
packagename: string containing name of package to build
|
||||
environment: The VarDict for the test to run in
|
||||
Returns:
|
||||
a tuple containing the testcase name and the classname
|
||||
(testcasename, classname)
|
||||
testclassname: a descriptive string for the testcase can include whitespace
|
||||
classname: should be patterned <packagename>.<plugin>.<optionally any unique condition>
|
||||
"""
|
||||
return ("Spell check files in " + packagename, packagename + ".SpellCheck")
|
||||
|
||||
##
|
||||
# External function of plugin. This function is used to perform the task of the CiBuild Plugin
|
||||
#
|
||||
# - package is the edk2 path to package. This means workspace/packagepath relative.
|
||||
# - edk2path object configured with workspace and packages path
|
||||
# - PkgConfig Object (dict) for the pkg
|
||||
# - EnvConfig Object
|
||||
# - Plugin Manager Instance
|
||||
# - Plugin Helper Obj Instance
|
||||
# - Junit Logger
|
||||
# - output_stream the StringIO output stream from this plugin via logging
|
||||
|
||||
def RunBuildPlugin(self, packagename, Edk2pathObj, pkgconfig, environment, PLM, PLMHelper, tc, output_stream=None):
|
||||
Errors = []
|
||||
|
||||
abs_pkg_path = Edk2pathObj.GetAbsolutePathOnThisSystemFromEdk2RelativePath(
|
||||
packagename)
|
||||
|
||||
if abs_pkg_path is None:
|
||||
tc.SetSkipped()
|
||||
tc.LogStdError("No package {0}".format(packagename))
|
||||
return -1
|
||||
|
||||
# check for node
|
||||
return_buffer = StringIO()
|
||||
ret = RunCmd("node", "--version", outstream=return_buffer)
|
||||
if (ret != 0):
|
||||
tc.SetSkipped()
|
||||
tc.LogStdError("NodeJs not installed. Test can't run")
|
||||
logging.warning("NodeJs not installed. Test can't run")
|
||||
return -1
|
||||
node_version = return_buffer.getvalue().strip() # format vXX.XX.XX
|
||||
tc.LogStdOut(f"Node version: {node_version}")
|
||||
version_aggregator.GetVersionAggregator().ReportVersion(
|
||||
"NodeJs", node_version, version_aggregator.VersionTypes.INFO)
|
||||
|
||||
# Check for cspell
|
||||
return_buffer = StringIO()
|
||||
ret = RunCmd("cspell", "--version", outstream=return_buffer)
|
||||
if (ret != 0):
|
||||
tc.SetSkipped()
|
||||
tc.LogStdError("cspell not installed. Test can't run")
|
||||
logging.warning("cspell not installed. Test can't run")
|
||||
return -1
|
||||
cspell_version = return_buffer.getvalue().strip() # format XX.XX.XX
|
||||
tc.LogStdOut(f"CSpell version: {cspell_version}")
|
||||
version_aggregator.GetVersionAggregator().ReportVersion(
|
||||
"CSpell", cspell_version, version_aggregator.VersionTypes.INFO)
|
||||
|
||||
# copy the default as a list
|
||||
package_relative_paths_to_spell_check = list(SpellCheck.STANDARD_PLUGIN_DEFINED_PATHS)
|
||||
|
||||
#
|
||||
# Allow the ci.yaml to remove any of the above standard paths
|
||||
#
|
||||
if("IgnoreStandardPaths" in pkgconfig):
|
||||
for a in pkgconfig["IgnoreStandardPaths"]:
|
||||
if(a in package_relative_paths_to_spell_check):
|
||||
tc.LogStdOut(
|
||||
f"ignoring standard path due to ci.yaml ignore: {a}")
|
||||
package_relative_paths_to_spell_check.remove(a)
|
||||
else:
|
||||
tc.LogStdOut(f"Invalid IgnoreStandardPaths value: {a}")
|
||||
|
||||
#
|
||||
# check for any additional include paths defined by package config
|
||||
#
|
||||
if("AdditionalIncludePaths" in pkgconfig):
|
||||
package_relative_paths_to_spell_check.extend(
|
||||
pkgconfig["AdditionalIncludePaths"])
|
||||
|
||||
#
|
||||
# Make the path string for cspell to check
|
||||
#
|
||||
relpath = os.path.relpath(abs_pkg_path)
|
||||
cpsell_paths = " ".join(
|
||||
# Double quote each path to defer expansion to cspell parameters
|
||||
[f'"{relpath}/**/{x}"' for x in package_relative_paths_to_spell_check])
|
||||
|
||||
# Make the config file
|
||||
config_file_path = os.path.join(
|
||||
Edk2pathObj.WorkspacePath, "Build", packagename, "cspell_actual_config.json")
|
||||
mydir = os.path.dirname(os.path.abspath(__file__))
|
||||
# load as yaml so it can have comments
|
||||
base = os.path.join(mydir, "cspell.base.yaml")
|
||||
with open(base, "r") as i:
|
||||
config = yaml.safe_load(i)
|
||||
|
||||
if("ExtendWords" in pkgconfig):
|
||||
config["words"].extend(pkgconfig["ExtendWords"])
|
||||
with open(config_file_path, "w") as o:
|
||||
json.dump(config, o) # output as json so compat with cspell
|
||||
|
||||
All_Ignores = []
|
||||
# Parse the config for other ignores
|
||||
if "IgnoreFiles" in pkgconfig:
|
||||
All_Ignores.extend(pkgconfig["IgnoreFiles"])
|
||||
|
||||
# spell check all the files
|
||||
ignore = parse_gitignore_lines(All_Ignores, os.path.join(
|
||||
abs_pkg_path, "nofile.txt"), abs_pkg_path)
|
||||
|
||||
# result is a list of strings like this
|
||||
# C:\src\sp-edk2\edk2\FmpDevicePkg\FmpDevicePkg.dec:53:9 - Unknown word (Capule)
|
||||
EasyFix = []
|
||||
results = self._check_spelling(cspell_paths, config_file_path)
|
||||
for r in results:
|
||||
path, _, word = r.partition(" - Unknown word ")
|
||||
if len(word) == 0:
|
||||
# didn't find pattern
|
||||
continue
|
||||
|
||||
pathinfo = path.rsplit(":", 2) # remove the line no info
|
||||
if(ignore(pathinfo[0])): # check against ignore list
|
||||
tc.LogStdOut(f"ignoring error due to ci.yaml ignore: {r}")
|
||||
continue
|
||||
|
||||
# real error
|
||||
EasyFix.append(word.strip().strip("()"))
|
||||
Errors.append(r)
|
||||
|
||||
# Log all errors tc StdError
|
||||
for l in Errors:
|
||||
tc.LogStdError(l.strip())
|
||||
|
||||
# Helper - Log the syntax needed to add these words to dictionary
|
||||
if len(EasyFix) > 0:
|
||||
EasyFix = sorted(set(a.lower() for a in EasyFix))
|
||||
tc.LogStdOut("\n Easy fix:")
|
||||
OneString = "If these are not errors add this to your ci.yaml file.\n"
|
||||
OneString += '"SpellCheck": {\n "ExtendWords": ['
|
||||
for a in EasyFix:
|
||||
tc.LogStdOut(f'\n"{a}",')
|
||||
OneString += f'\n "{a}",'
|
||||
logging.info(OneString.rstrip(",") + '\n ]\n}')
|
||||
|
||||
# add result to test case
|
||||
overall_status = len(Errors)
|
||||
if overall_status != 0:
|
||||
if "AuditOnly" in pkgconfig and pkgconfig["AuditOnly"]:
|
||||
# set as skipped if AuditOnly
|
||||
tc.SetSkipped()
|
||||
return -1
|
||||
else:
|
||||
tc.SetFailed("SpellCheck {0} Failed. Errors {1}".format(
|
||||
packagename, overall_status), "CHECK_FAILED")
|
||||
else:
|
||||
tc.SetSuccess()
|
||||
return overall_status
|
||||
|
||||
def _check_spelling(self, abs_file_to_check: str, abs_config_file_to_use: str) -> list:
|
||||
output = StringIO()
|
||||
ret = RunCmd(
|
||||
"cspell", f"--config {abs_config_file_to_use} {abs_file_to_check}", outstream=output)
|
||||
if ret == 0:
|
||||
return []
|
||||
else:
|
||||
return output.getvalue().strip().splitlines()
|
@ -1,11 +0,0 @@
|
||||
## @file
|
||||
# CiBuildPlugin used to check spelling
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
{
|
||||
"scope": "cibuild",
|
||||
"name": "Spell Check Test",
|
||||
"module": "SpellCheck"
|
||||
}
|
@ -1,296 +0,0 @@
|
||||
## @file
|
||||
# CSpell configuration
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
{
|
||||
"version": "0.1",
|
||||
"language": "en",
|
||||
"dictionaries": [
|
||||
"companies ",
|
||||
"softwareTerms",
|
||||
"python",
|
||||
"cpp"
|
||||
],
|
||||
"ignorePaths": [
|
||||
"*.pdb",
|
||||
"**/*_extdep/**",
|
||||
"*.pdf",
|
||||
"*.exe",
|
||||
"*.jpg"
|
||||
],
|
||||
"minWordLength": 5,
|
||||
"allowCompoundWords": true,
|
||||
"maxNumberOfProblems": 200,
|
||||
"maxDuplicateProblems": 200,
|
||||
"ignoreWords": [
|
||||
"muchange"
|
||||
],
|
||||
"words": [
|
||||
"MTRRs",
|
||||
"Microarchitecture",
|
||||
"Goldmont",
|
||||
"cpuid",
|
||||
"mwait",
|
||||
"cstate",
|
||||
"smram",
|
||||
"scrtm",
|
||||
"smbus",
|
||||
"selftest",
|
||||
"socket",
|
||||
"MMRAM",
|
||||
"qword",
|
||||
"ENDBR",
|
||||
"SMBASE",
|
||||
"FXSAVE",
|
||||
"FXRSTOR",
|
||||
"RDRAND",
|
||||
"IOAPIC",
|
||||
"ATAPI",
|
||||
"movsb",
|
||||
"iretw",
|
||||
"XENSTORE",
|
||||
"cdrom",
|
||||
"oprom",
|
||||
"oproms",
|
||||
"varstore",
|
||||
"EKU",
|
||||
"ascii",
|
||||
"nmake",
|
||||
"NVDIMM",
|
||||
"nasmb",
|
||||
"Mtftp",
|
||||
"Hypercall",
|
||||
"hypercalls",
|
||||
"IOMMU",
|
||||
"QEMU",
|
||||
"qemus",
|
||||
"OVMF",
|
||||
"tiano",
|
||||
"tianocore",
|
||||
"edkii",
|
||||
"coreboot",
|
||||
"uefipayload",
|
||||
"bootloader",
|
||||
"bootloaders",
|
||||
"mdepkg",
|
||||
"skuid",
|
||||
"dxefv",
|
||||
"toolchain",
|
||||
"libraryclass",
|
||||
"preboot",
|
||||
"pythonpath",
|
||||
"cygpath",
|
||||
"nuget",
|
||||
"basetools",
|
||||
"prepi",
|
||||
"OPTEE",
|
||||
"stringid",
|
||||
"peims",
|
||||
"memmap",
|
||||
"guids",
|
||||
"uuids",
|
||||
"smbios",
|
||||
"certdb",
|
||||
"certdbv",
|
||||
"EfiSigList",
|
||||
"depex",
|
||||
"IHANDLE",
|
||||
"Virtio",
|
||||
"Mbytes",
|
||||
"Citrix",
|
||||
"initrd",
|
||||
"semihost",
|
||||
"Semihosting",
|
||||
"Trustzone",
|
||||
"Fastboot",
|
||||
"framebuffer",
|
||||
"genfw",
|
||||
"TTYTERM",
|
||||
"miniport",
|
||||
"LFENCE",
|
||||
"PCANSI",
|
||||
"submodule",
|
||||
"submodules",
|
||||
"brotli",
|
||||
"PCCTS",
|
||||
"softfloat",
|
||||
"whitepaper",
|
||||
"ACPICA",
|
||||
"plugfest",
|
||||
"bringup",
|
||||
"formset", #VFR
|
||||
"ideqvallist",
|
||||
"numberof",
|
||||
"oneof",
|
||||
"endformset",
|
||||
"endnumeric",
|
||||
"endoneof",
|
||||
"disableif",
|
||||
"guidid",
|
||||
"classguid",
|
||||
"efivarstore",
|
||||
"formsetguid",
|
||||
"formid",
|
||||
"suppressif",
|
||||
"grayoutif",
|
||||
"ideqval",
|
||||
"endform",
|
||||
"endcheckbox",
|
||||
"questionid",
|
||||
"questionref",
|
||||
"enddate",
|
||||
"endstring",
|
||||
"guidop",
|
||||
"endguidop",
|
||||
"langdef",
|
||||
"dynamicex",
|
||||
"tokenspace",
|
||||
"tokenguid",
|
||||
"pcd's", #seems like cspell bug
|
||||
"peim's",
|
||||
"autogen",
|
||||
"Disasm",
|
||||
"Torito",
|
||||
"SRIOV",
|
||||
"MRIOV",
|
||||
"UARTs",
|
||||
"Consplitter", # common module in UEFI
|
||||
"FIFOs",
|
||||
"ACPINVS",
|
||||
"Endof", # due to of not being uppercase
|
||||
"bootability",
|
||||
"Sdhci",
|
||||
"inmodule",
|
||||
"RISCV",
|
||||
"edksetup",
|
||||
"iscsi",
|
||||
"nvdata",
|
||||
"pytools",
|
||||
"NTDDI",
|
||||
"Wnonportable",
|
||||
"CLANGPDB",
|
||||
"nologo",
|
||||
"lldmap",
|
||||
"ASMLINK",
|
||||
"NODEFAULTLIB",
|
||||
"vcruntimed",
|
||||
"ucrtd",
|
||||
"msvcrtd",
|
||||
"XIPFLAGS",
|
||||
"bootflow",
|
||||
"bootup",
|
||||
"cacheability",
|
||||
"cachetype",
|
||||
"conout",
|
||||
"deadloop",
|
||||
"devicepath",
|
||||
"hisilicon",
|
||||
"littleendian",
|
||||
"nonsecure",
|
||||
"pagetable",
|
||||
"postmem",
|
||||
"premem",
|
||||
"reglist",
|
||||
"semihalf",
|
||||
"subvendor",
|
||||
"subhierarchy",
|
||||
"targetlist",
|
||||
"tmpname",
|
||||
"watchdogtimer",
|
||||
"writeback",
|
||||
"langcode",
|
||||
"langcodes",
|
||||
"autoreload",
|
||||
"bootable",
|
||||
"endiannness",
|
||||
"fvmain",
|
||||
"prefetchable",
|
||||
"multiboot",
|
||||
"ramdisk",
|
||||
"unbootable",
|
||||
"setjump",
|
||||
"bytecodes",
|
||||
"bytelist",
|
||||
"bytestream",
|
||||
"countof",
|
||||
"deregistering",
|
||||
"devicetree",
|
||||
"mainpage",
|
||||
"mismanipulation",
|
||||
"pytool",
|
||||
"wbinvd",
|
||||
"armltd",
|
||||
"datacache",
|
||||
"lastattemptstatus",
|
||||
"lastattemptversion",
|
||||
"lowestsupportedversion",
|
||||
"updateable",
|
||||
"pecoff",
|
||||
"autodetect",
|
||||
"harddisk",
|
||||
"toctou",
|
||||
"bugbug",
|
||||
"depexes",
|
||||
"fwvol",
|
||||
"hoblist",
|
||||
"imagehandle",
|
||||
"schedulable",
|
||||
"StandaloneMMCore",
|
||||
"systemtable",
|
||||
"uncacheable",
|
||||
"devpath",
|
||||
"testsuites",
|
||||
"testcase",
|
||||
"pxmldoc",
|
||||
"pcxml",
|
||||
"pclutf",
|
||||
"pcunicode",
|
||||
"ntxmltransformcharacter",
|
||||
"ntxmlcomparestrings",
|
||||
"pcxmldoc",
|
||||
"ntxmlfetchcharacterdecoder",
|
||||
"ntxml",
|
||||
"ntxmlspecialstringcompare",
|
||||
"rtlxmlcallback",
|
||||
"xmlef",
|
||||
"osruntime",
|
||||
"readytoboot",
|
||||
"hwerrrec",
|
||||
"xformed",
|
||||
"xform",
|
||||
"undock",
|
||||
"qrencoder",
|
||||
"selawik",
|
||||
"ntxmlrawnextcharacter",
|
||||
"undocked",
|
||||
"reprompt",
|
||||
"yesno",
|
||||
"okcancel",
|
||||
"qrencoding",
|
||||
"qrlevel",
|
||||
"shiftn",
|
||||
"unenroll",
|
||||
"pcxmlstructure",
|
||||
"pxmlstructure",
|
||||
"pcencoder",
|
||||
"pcvoid",
|
||||
"nofailure",
|
||||
"blockio",
|
||||
"lockv",
|
||||
"uefishelldebug",
|
||||
"mtrrcap",
|
||||
"drhds",
|
||||
"rmrrs",
|
||||
"creatorid",
|
||||
"dxeipl",
|
||||
"swmdialogs",
|
||||
"unrecovered",
|
||||
"cmocka",
|
||||
"unenrolling",
|
||||
"unconfigure",
|
||||
"Loongson",
|
||||
"LOONGARCH"
|
||||
]
|
||||
}
|
@ -1,127 +0,0 @@
# UncrustifyCheck Plugin

This CiBuildPlugin scans all the files in a given package and checks for coding standard compliance issues.

This plugin is enabled by default. If a package would like to prevent the plugin from reporting errors, it can do
so by enabling [`AuditOnly`](#auditonly) mode.

This plugin requires the directory containing the Uncrustify executable to be specified in an environment variable
named `UNCRUSTIFY_CI_PATH`. This unique variable name is used to avoid confusion with other Uncrustify installations
that might not be the build expected by this plugin.
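
For reference, a minimal sketch (not part of the plugin) of how the executable can be located from `UNCRUSTIFY_CI_PATH`; the plugin performs an equivalent lookup before it runs:

```python
import os
import shutil

# Resolve the Uncrustify binary from the directory named in UNCRUSTIFY_CI_PATH.
uncrustify_dir = os.environ.get("UNCRUSTIFY_CI_PATH")
if uncrustify_dir is None:
    raise RuntimeError("UNCRUSTIFY_CI_PATH is not set")

uncrustify_exe = shutil.which("uncrustify", path=uncrustify_dir)
if uncrustify_exe is None:
    raise FileNotFoundError(f"uncrustify was not found in {uncrustify_dir}")

print(uncrustify_exe)
```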

By default, an Uncrustify configuration file named "uncrustify.cfg" located in the same directory as the plugin is
used. The value can be overridden to a package-specific path with the `ConfigFilePath` configuration file option.

* Uncrustify source code and documentation: https://github.com/uncrustify/uncrustify
* Project Mu Uncrustify fork source code and documentation: https://dev.azure.com/projectmu/Uncrustify

## Files Checked in a Package

By default, this plugin will discover all files in the package with the following default paths:

```python
[
    # C source
    "*.c",
    "*.h"
]
```

From this list of files, any files ignored by Git or residing in a Git submodule will be removed. If Git is not
found, no submodules are found, or no ignored files are found, no changes are made to the list of discovered files.

To control the paths checked in a given package, review the configuration options described in this file.
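
A minimal sketch of that discovery step, assuming a placeholder package directory and an already-computed set of excluded paths:

```python
import pathlib

package_root = pathlib.Path("MdeModulePkg")  # placeholder package path
patterns = ["*.c", "*.h"]                    # the default paths listed above
excluded = set()                             # e.g. git-ignored files and submodule contents

files = sorted(
    str(path.resolve())
    for pattern in patterns
    for path in package_root.rglob(pattern)
    if str(path.resolve()) not in excluded
)
print(f"{len(files)} files discovered")
```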

## Configuration

The plugin can be configured with a few optional configuration options.

``` yaml
"UncrustifyCheck": {
    "AdditionalIncludePaths": [], # Additional paths to check formatting (wildcards supported).
    "AuditOnly": False,           # Don't fail the build if there are errors. Just log them.
    "ConfigFilePath": "",         # Custom path to an Uncrustify config file.
    "IgnoreFiles": [],            # A list of file patterns to ignore.
    "IgnoreStandardPaths": [],    # Standard Plugin defined paths that should be ignored.
    "OutputFileDiffs": True,      # Output chunks of formatting diffs in the test case log.
                                  # This can significantly slow down the plugin on very large packages.
    "SkipGitExclusions": False    # Don't exclude git ignored files and files in git submodules.
}
```

### `AdditionalIncludePaths`

A package configuration file can specify any additional paths to be included with this option.

At this time, it is recommended that all files run against the plugin be written in the C or C++ language.

### `AuditOnly`

`Boolean` - Default is `False`.

If `True`, run the test in an "audit only" mode that logs all errors but, instead of failing the build, sets the
test as skipped. This allows visibility into the failures without breaking the build.
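
As an illustration, a minimal sketch of the audit-only decision, mirroring the plugin's result handling (`tc` is the JUnit test case object the framework passes to the plugin):

```python
def report_result(tc, error_count: int, audit_only: bool) -> int:
    # Pass, skip (audit only), or fail, matching the plugin's return convention.
    if error_count == 0:
        tc.SetSuccess()
        return 0
    if audit_only:
        tc.SetSkipped()  # errors are logged, but the build is not failed
        return -1
    tc.SetFailed(f"{error_count} files with formatting errors.", "CHECK_FAILED")
    return error_count
```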

### `ConfigFilePath`

`String` - Default is `"uncrustify.cfg"`.

When specified in the config file, this is a package-relative path to the Uncrustify configuration file.

### `IgnoreFiles`

This option supports .gitignore-style file and folder matching strings, including wildcards.

The files specified by this configuration option will not be processed by Uncrustify.
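
A minimal sketch of how such patterns can be evaluated, using the same `parse_gitignore_lines` helper the plugin relies on; the patterns and paths are examples:

```python
from edk2toollib.gitignore_parser import parse_gitignore_lines

# Build a matcher from gitignore-style lines; it returns True for ignored paths.
ignore_lines = ["Test/**", "*.inf"]  # example "IgnoreFiles" entries
matcher = parse_gitignore_lines(
    ignore_lines,
    "Package configuration file",  # reported as the source of the rules
    "/workspace/MyPkg")            # base directory the patterns are relative to

print(matcher("/workspace/MyPkg/Test/Unit.c"))    # expected: True (excluded)
print(matcher("/workspace/MyPkg/Core/Driver.c"))  # expected: False (checked)
```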

### `IgnoreStandardPaths`

By default, this plugin checks the standard paths below. A package configuration file can specify any of these paths
to be ignored.

```python
[
    # C source
    "*.c",
    "*.h"
]
```

### `OutputFileDiffs`

`Boolean` - Default is `True`.

If `True`, output diffs of formatting changes into the test case log. This is helpful for understanding exactly what
changes need to be made to the source code in order to fix a coding standard compliance issue.

Note that calculating the file diffs on a very large set of results (e.g. >100 files) can significantly slow down
plugin execution.

### `SkipGitExclusions`

`Boolean` - Default is `False`.

By default, files in paths matched in a .gitignore file or a recognized git submodule are excluded. If this option
is `True`, the plugin will not attempt to recognize these files and exclude them.

## High-Level Plugin Operation

This plugin generates two main sets of temporary files:

1. A working directory in the directory `Build/.pytool/Plugin/Uncrustify`
2. For each source file with formatting errors, a sibling file with the `.uncrustify_plugin` extension

The working directory contains temporary files unique to each execution instance of the plugin. All of these files
are removed when the plugin exits, whether execution succeeds or fails (for example, if a Python exception occurs).
If, for any reason, files with the `.uncrustify_plugin` extension already exist in the package before the plugin
runs, the plugin will inform the user to remove these files and exit before running Uncrustify. This ensures the
accuracy of the results reported from each execution instance of the plugin.
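
A minimal sketch of how such stale `.uncrustify_plugin` files can be detected before a run; the package path is a placeholder:

```python
import pathlib

package_root = pathlib.Path("MdeModulePkg")  # placeholder package path
stale = [str(p.resolve()) for p in package_root.rglob("*.uncrustify_plugin")]

if stale:
    raise RuntimeError(
        f"{len(stale)} leftover .uncrustify_plugin files found; "
        "remove them before running the plugin.")
```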

The plugin determines the list of relevant files to check with Uncrustify and then invokes Uncrustify with that file
list. For any file that is not compliant with the provided configuration file, Uncrustify will generate a corresponding
file with the `.uncrustify_plugin` extension. The plugin discovers all of these files. If any such files are present,
this indicates a formatting issue was found and the test is marked failed (unless `AuditOnly` mode is enabled).
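
A minimal sketch of that invocation, using `subprocess` rather than the plugin's `RunCmd` wrapper; the file names are placeholders:

```python
import subprocess

cmd = [
    "uncrustify",
    "-c", "uncrustify.cfg",            # configuration file
    "-F", "uncrustify_file_list.txt",  # file listing one source path per line
    "--if-changed",                    # only write output when formatting differs
    "--suffix", ".uncrustify_plugin",  # extension for the generated sibling files
]
result = subprocess.run(cmd, capture_output=True, text=True)
print(result.returncode)
```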

The test case log will contain a report of which files failed to format properly, allowing the user to run Uncrustify
against the file locally to fix the issue. If the `OutputFileDiffs` configuration option is set to `True`, the plugin
will output diff chunks for all code formatting issues in the test case log.
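
For reference, a minimal sketch of how a diff chunk can be produced from a source file and its `.uncrustify_plugin` sibling; the paths are placeholders:

```python
import difflib

formatted = "MyPkg/Core/Driver.c.uncrustify_plugin"  # written by Uncrustify
original = formatted[: -len(".uncrustify_plugin")]   # the source file it shadows

with open(original) as src, open(formatted) as fmt:
    diff = difflib.unified_diff(
        src.read().split("\n"), fmt.read().split("\n"),
        fromfile=original, tofile=formatted, n=3)

print("\n".join(diff))
```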
@ -1,661 +0,0 @@
|
||||
# @file UncrustifyCheck.py
|
||||
#
|
||||
# An edk2-pytool based plugin wrapper for Uncrustify
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
import configparser
|
||||
import difflib
|
||||
import errno
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
import stat
|
||||
import timeit
|
||||
from edk2toolext.environment import version_aggregator
|
||||
from edk2toolext.environment.plugin_manager import PluginManager
|
||||
from edk2toolext.environment.plugintypes.ci_build_plugin import ICiBuildPlugin
|
||||
from edk2toolext.environment.plugintypes.uefi_helper_plugin import HelperFunctions
|
||||
from edk2toolext.environment.var_dict import VarDict
|
||||
from edk2toollib.gitignore_parser import parse_gitignore_lines
|
||||
from edk2toollib.log.junit_report_format import JunitReportTestCase
|
||||
from edk2toollib.uefi.edk2.path_utilities import Edk2Path
|
||||
from edk2toollib.utility_functions import RunCmd
|
||||
from io import StringIO
|
||||
from typing import Any, Dict, List, Tuple
|
||||
|
||||
#
|
||||
# Provide more user friendly messages for certain scenarios
|
||||
#
|
||||
class UncrustifyException(Exception):
|
||||
def __init__(self, message, exit_code):
|
||||
super().__init__(message)
|
||||
self.exit_code = exit_code
|
||||
|
||||
|
||||
class UncrustifyAppEnvVarNotFoundException(UncrustifyException):
|
||||
def __init__(self, message):
|
||||
super().__init__(message, -101)
|
||||
|
||||
|
||||
class UncrustifyAppVersionErrorException(UncrustifyException):
|
||||
def __init__(self, message):
|
||||
super().__init__(message, -102)
|
||||
|
||||
|
||||
class UncrustifyAppExecutionException(UncrustifyException):
|
||||
def __init__(self, message):
|
||||
super().__init__(message, -103)
|
||||
|
||||
|
||||
class UncrustifyStalePluginFormattedFilesException(UncrustifyException):
|
||||
def __init__(self, message):
|
||||
super().__init__(message, -120)
|
||||
|
||||
|
||||
class UncrustifyInputFileCreationErrorException(UncrustifyException):
|
||||
def __init__(self, message):
|
||||
super().__init__(message, -121)
|
||||
|
||||
class UncrustifyInvalidIgnoreStandardPathsException(UncrustifyException):
|
||||
def __init__(self, message):
|
||||
super().__init__(message, -122)
|
||||
|
||||
class UncrustifyGitIgnoreFileException(UncrustifyException):
|
||||
def __init__(self, message):
|
||||
super().__init__(message, -140)
|
||||
|
||||
|
||||
class UncrustifyGitSubmoduleException(UncrustifyException):
|
||||
def __init__(self, message):
|
||||
super().__init__(message, -141)
|
||||
|
||||
|
||||
class UncrustifyCheck(ICiBuildPlugin):
|
||||
"""
|
||||
A CiBuildPlugin that uses Uncrustify to check the source files in the
|
||||
package being tested for coding standard issues.
|
||||
|
||||
By default, the plugin runs against standard C source file extensions but
|
||||
its configuration can be modified through its configuration file.
|
||||
|
||||
Configuration options:
|
||||
"UncrustifyCheck": {
|
||||
"AdditionalIncludePaths": [], # Additional paths to check formatting (wildcards supported).
|
||||
"AuditOnly": False, # Don't fail the build if there are errors. Just log them.
|
||||
"ConfigFilePath": "", # Custom path to an Uncrustify config file.
|
||||
"IgnoreStandardPaths": [], # Standard Plugin defined paths that should be ignored.
|
||||
"OutputFileDiffs": False, # Output chunks of formatting diffs in the test case log.
|
||||
# This can significantly slow down the plugin on very large packages.
|
||||
"SkipGitExclusions": False # Don't exclude git ignored files and files in git submodules.
|
||||
}
|
||||
"""
|
||||
|
||||
#
|
||||
# By default, use an "uncrustify.cfg" config file in the plugin directory
|
||||
# A package can override this path via "ConfigFilePath"
|
||||
#
|
||||
# Note: Values specified via "ConfigFilePath" are relative to the package
|
||||
#
|
||||
DEFAULT_CONFIG_FILE_PATH = os.path.join(
|
||||
pathlib.Path(__file__).parent.resolve(), "uncrustify.cfg")
|
||||
|
||||
#
|
||||
# The extension used for formatted files produced by this plugin
|
||||
#
|
||||
FORMATTED_FILE_EXTENSION = ".uncrustify_plugin"
|
||||
|
||||
#
|
||||
# A package can add any additional paths with "AdditionalIncludePaths"
|
||||
# A package can remove any of these paths with "IgnoreStandardPaths"
|
||||
#
|
||||
STANDARD_PLUGIN_DEFINED_PATHS = ("*.c", "*.h", "*.cpp")
|
||||
|
||||
#
|
||||
# The Uncrustify application path should be set in this environment variable
|
||||
#
|
||||
UNCRUSTIFY_PATH_ENV_KEY = "UNCRUSTIFY_CI_PATH"
|
||||
|
||||
def GetTestName(self, packagename: str, environment: VarDict) -> Tuple:
|
||||
""" Provide the testcase name and classname for use in reporting
|
||||
|
||||
Args:
|
||||
packagename: string containing name of package to build
|
||||
environment: The VarDict for the test to run in
|
||||
Returns:
|
||||
A tuple containing the testcase name and the classname
|
||||
(testcasename, classname)
|
||||
testcasename: a descriptive string for the testcase; can include whitespace
|
||||
classname: should be patterned <packagename>.<plugin>.<optionally any unique condition>
|
||||
"""
|
||||
return ("Check file coding standard compliance in " + packagename, packagename + ".UncrustifyCheck")
|
||||
|
||||
def RunBuildPlugin(self, package_rel_path: str, edk2_path: Edk2Path, package_config: Dict[str, List[str]], environment_config: Any, plugin_manager: PluginManager, plugin_manager_helper: HelperFunctions, tc: JunitReportTestCase, output_stream=None) -> int:
|
||||
"""
|
||||
External function of plugin. This function is used to perform the task of the CiBuild Plugin.
|
||||
|
||||
Args:
|
||||
- package_rel_path: edk2 workspace relative path to the package
|
||||
- edk2_path: Edk2Path object with workspace and packages paths
|
||||
- package_config: Dictionary with the package configuration
|
||||
- environment_config: Environment configuration
|
||||
- plugin_manager: Plugin Manager Instance
|
||||
- plugin_manager_helper: Plugin Manager Helper Instance
|
||||
- tc: JUnit test case
|
||||
- output_stream: The StringIO output stream from this plugin (logging)
|
||||
|
||||
Returns
|
||||
>0 : Number of errors found
|
||||
0 : Passed successfully
|
||||
-1 : Skipped for missing prereq
|
||||
"""
|
||||
try:
|
||||
# Initialize plugin and check pre-requisites.
|
||||
self._initialize_environment_info(
|
||||
package_rel_path, edk2_path, package_config, tc)
|
||||
self._initialize_configuration()
|
||||
self._check_for_preexisting_formatted_files()
|
||||
|
||||
# Log important context information.
|
||||
self._log_uncrustify_app_info()
|
||||
|
||||
# Get template file contents if specified
|
||||
self._get_template_file_contents()
|
||||
|
||||
# Create meta input files & directories
|
||||
self._create_temp_working_directory()
|
||||
self._create_uncrustify_file_list_file()
|
||||
|
||||
self._run_uncrustify()
|
||||
|
||||
# Post-execution actions.
|
||||
self._process_uncrustify_results()
|
||||
|
||||
except UncrustifyException as e:
|
||||
self._tc.LogStdError(
|
||||
f"Uncrustify error {e.exit_code}. Details:\n\n{str(e)}")
|
||||
logging.warning(
|
||||
f"Uncrustify error {e.exit_code}. Details:\n\n{str(e)}")
|
||||
return -1
|
||||
else:
|
||||
if self._formatted_file_error_count > 0:
|
||||
if self._audit_only_mode:
|
||||
logging.info(
|
||||
"Setting test as skipped since AuditOnly is enabled")
|
||||
self._tc.SetSkipped()
|
||||
return -1
|
||||
else:
|
||||
self._tc.SetFailed(
|
||||
f"{self._plugin_name} failed due to {self._formatted_file_error_count} incorrectly formatted files.", "CHECK_FAILED")
|
||||
else:
|
||||
self._tc.SetSuccess()
|
||||
return self._formatted_file_error_count
|
||||
finally:
|
||||
self._cleanup_temporary_formatted_files()
|
||||
self._cleanup_temporary_directory()
|
||||
|
||||
def _initialize_configuration(self) -> None:
|
||||
"""
|
||||
Initializes plugin configuration.
|
||||
"""
|
||||
self._initialize_app_info()
|
||||
self._initialize_config_file_info()
|
||||
self._initialize_file_to_format_info()
|
||||
self._initialize_test_case_output_options()
|
||||
|
||||
def _check_for_preexisting_formatted_files(self) -> None:
|
||||
"""
|
||||
Checks if any formatted files from prior execution are present.
|
||||
|
||||
Existence of such files is an unexpected condition. This might result
|
||||
from an error that occurred during a previous run or a premature exit from a debug scenario. In any case, the package should be clean before starting a new run.
|
||||
"""
|
||||
pre_existing_formatted_file_count = len(
|
||||
[str(path.resolve()) for path in pathlib.Path(self._abs_package_path).rglob(f'*{UncrustifyCheck.FORMATTED_FILE_EXTENSION}')])
|
||||
|
||||
if pre_existing_formatted_file_count > 0:
|
||||
raise UncrustifyStalePluginFormattedFilesException(
|
||||
f"{pre_existing_formatted_file_count} formatted files already exist. To prevent overwriting these files, please remove them before running this plugin.")
|
||||
|
||||
def _cleanup_temporary_directory(self) -> None:
|
||||
"""
|
||||
Cleans up the temporary directory used for this execution instance.
|
||||
|
||||
This removes the directory and all files created during this instance.
|
||||
"""
|
||||
if hasattr(self, '_working_dir'):
|
||||
self._remove_tree(self._working_dir)
|
||||
|
||||
def _cleanup_temporary_formatted_files(self) -> None:
|
||||
"""
|
||||
Cleans up the temporary formatted files produced by Uncrustify.
|
||||
|
||||
This will recursively remove all formatted files generated by Uncrustify
|
||||
during this execution instance.
|
||||
"""
|
||||
if hasattr(self, '_abs_package_path'):
|
||||
formatted_files = [str(path.resolve()) for path in pathlib.Path(
|
||||
self._abs_package_path).rglob(f'*{UncrustifyCheck.FORMATTED_FILE_EXTENSION}')]
|
||||
|
||||
for formatted_file in formatted_files:
|
||||
os.remove(formatted_file)
|
||||
|
||||
def _create_temp_working_directory(self) -> None:
|
||||
"""
|
||||
Creates the temporary directory used for this execution instance.
|
||||
"""
|
||||
self._working_dir = os.path.join(
|
||||
self._abs_workspace_path, "Build", ".pytool", "Plugin", f"{self._plugin_name}")
|
||||
|
||||
try:
|
||||
pathlib.Path(self._working_dir).mkdir(parents=True, exist_ok=True)
|
||||
except OSError as e:
|
||||
raise UncrustifyInputFileCreationErrorException(
|
||||
f"Error creating plugin directory {self._working_dir}.\n\n{repr(e)}.")
|
||||
|
||||
def _create_uncrustify_file_list_file(self) -> None:
|
||||
"""
|
||||
Creates the file with the list of source files for Uncrustify to process.
|
||||
"""
|
||||
self._app_input_file_path = os.path.join(
|
||||
self._working_dir, "uncrustify_file_list.txt")
|
||||
|
||||
with open(self._app_input_file_path, 'w', encoding='utf8') as f:
|
||||
f.writelines(f"\n".join(self._abs_file_paths_to_format))
|
||||
|
||||
def _execute_uncrustify(self) -> None:
|
||||
"""
|
||||
Executes Uncrustify with the initialized configuration.
|
||||
"""
|
||||
output = StringIO()
|
||||
self._app_exit_code = RunCmd(
|
||||
self._app_path,
|
||||
f"-c {self._app_config_file} -F {self._app_input_file_path} --if-changed --suffix {UncrustifyCheck.FORMATTED_FILE_EXTENSION}", outstream=output)
|
||||
self._app_output = output.getvalue().strip().splitlines()
|
||||
|
||||
def _get_files_ignored_in_config(self):
|
||||
""""
|
||||
Returns a function that returns true if a given file string path is ignored in the plugin configuration file and false otherwise.
|
||||
"""
|
||||
ignored_files = []
|
||||
if "IgnoreFiles" in self._package_config:
|
||||
ignored_files = self._package_config["IgnoreFiles"]
|
||||
|
||||
# Pass "Package configuration file" as the source file path since
|
||||
# the actual configuration file name is unknown to this plugin and
|
||||
# this provides a generic description of the file that provided
|
||||
# the ignore file content.
|
||||
#
|
||||
# This information is only used for reporting (not used here) and
|
||||
# the ignore lines are being passed directly as they are given to
|
||||
# this plugin.
|
||||
return parse_gitignore_lines(ignored_files, "Package configuration file", self._abs_package_path)
|
||||
|
||||
def _get_git_ignored_paths(self) -> List[str]:
|
||||
""""
|
||||
Returns a list of file absolute path strings to all files ignored in this git repository.
|
||||
|
||||
If git is not found, an empty list will be returned.
|
||||
"""
|
||||
if not shutil.which("git"):
|
||||
logging.warning(
|
||||
"Git is not found on this system. Git submodule paths will not be considered.")
|
||||
return []
|
||||
|
||||
outstream_buffer = StringIO()
|
||||
exit_code = RunCmd("git", "ls-files --other",
|
||||
workingdir=self._abs_workspace_path, outstream=outstream_buffer, logging_level=logging.NOTSET)
|
||||
if (exit_code != 0):
|
||||
raise UncrustifyGitIgnoreFileException(
|
||||
f"An error occurred reading git ignore settings. This will prevent Uncrustify from running against the expected set of files.")
|
||||
|
||||
# Note: This will potentially be a large list, but at least sorted
|
||||
rel_paths = outstream_buffer.getvalue().strip().splitlines()
|
||||
abs_paths = []
|
||||
for path in rel_paths:
|
||||
abs_paths.append(
|
||||
os.path.normpath(os.path.join(self._abs_workspace_path, path)))
|
||||
return abs_paths
|
||||
|
||||
def _get_git_submodule_paths(self) -> List[str]:
|
||||
"""
|
||||
Returns a list of directory absolute path strings to the root of each submodule in the workspace repository.
|
||||
|
||||
If git is not found, an empty list will be returned.
|
||||
"""
|
||||
if not shutil.which("git"):
|
||||
logging.warning(
|
||||
"Git is not found on this system. Git submodule paths will not be considered.")
|
||||
return []
|
||||
|
||||
if os.path.isfile(os.path.join(self._abs_workspace_path, ".gitmodules")):
|
||||
logging.info(
|
||||
f".gitmodules file found. Excluding submodules in {self._package_name}.")
|
||||
|
||||
outstream_buffer = StringIO()
|
||||
exit_code = RunCmd("git", "config --file .gitmodules --get-regexp path", workingdir=self._abs_workspace_path, outstream=outstream_buffer, logging_level=logging.NOTSET)
|
||||
if (exit_code != 0):
|
||||
raise UncrustifyGitSubmoduleException(
|
||||
f".gitmodule file detected but an error occurred reading the file. Cannot proceed with unknown submodule paths.")
|
||||
|
||||
submodule_paths = []
|
||||
for line in outstream_buffer.getvalue().strip().splitlines():
|
||||
submodule_paths.append(
|
||||
os.path.normpath(os.path.join(self._abs_workspace_path, line.split()[1])))
|
||||
|
||||
return submodule_paths
|
||||
else:
|
||||
return []
|
||||
|
||||
def _get_template_file_contents(self) -> None:
|
||||
"""
|
||||
Gets the contents of Uncrustify template files if they are specified
|
||||
in the Uncrustify configuration file.
|
||||
"""
|
||||
|
||||
self._file_template_contents = None
|
||||
self._func_template_contents = None
|
||||
|
||||
# Allow no value to allow "set" statements in the config file which do
|
||||
# not specify value assignment
|
||||
parser = configparser.ConfigParser(allow_no_value=True)
|
||||
with open(self._app_config_file, 'r') as cf:
|
||||
parser.read_string("[dummy_section]\n" + cf.read())
|
||||
|
||||
try:
|
||||
file_template_name = parser["dummy_section"]["cmt_insert_file_header"]
|
||||
|
||||
file_template_path = pathlib.Path(file_template_name)
|
||||
|
||||
if not file_template_path.is_file():
|
||||
file_template_path = pathlib.Path(os.path.join(self._plugin_path, file_template_name))
|
||||
self._file_template_contents = file_template_path.read_text()
|
||||
except KeyError:
|
||||
logging.warning("A file header template is not specified in the config file.")
|
||||
except FileNotFoundError:
|
||||
logging.warning("The specified file header template file was not found.")
|
||||
try:
|
||||
func_template_name = parser["dummy_section"]["cmt_insert_func_header"]
|
||||
|
||||
func_template_path = pathlib.Path(func_template_name)
|
||||
|
||||
if not func_template_path.is_file():
|
||||
func_template_path = pathlib.Path(os.path.join(self._plugin_path, func_template_name))
|
||||
self._func_template_contents = func_template_path.read_text()
|
||||
except KeyError:
|
||||
logging.warning("A function header template is not specified in the config file.")
|
||||
except FileNotFoundError:
|
||||
logging.warning("The specified function header template file was not found.")
|
||||
|
||||
def _initialize_app_info(self) -> None:
|
||||
"""
|
||||
Initialize Uncrustify application information.
|
||||
|
||||
This function will determine the application path and version.
|
||||
"""
|
||||
# Verify Uncrustify is specified in the environment.
|
||||
if UncrustifyCheck.UNCRUSTIFY_PATH_ENV_KEY not in os.environ:
|
||||
raise UncrustifyAppEnvVarNotFoundException(
|
||||
f"Uncrustify environment variable {UncrustifyCheck.UNCRUSTIFY_PATH_ENV_KEY} is not present.")
|
||||
|
||||
self._app_path = shutil.which('uncrustify', path=os.environ[UncrustifyCheck.UNCRUSTIFY_PATH_ENV_KEY])
|
||||
|
||||
if self._app_path is None:
|
||||
raise FileNotFoundError(
|
||||
errno.ENOENT, os.strerror(errno.ENOENT), self._app_path)
|
||||
|
||||
self._app_path = os.path.normcase(os.path.normpath(self._app_path))
|
||||
|
||||
if not os.path.isfile(self._app_path):
|
||||
raise FileNotFoundError(
|
||||
errno.ENOENT, os.strerror(errno.ENOENT), self._app_path)
|
||||
|
||||
# Verify Uncrustify is present at the expected path.
|
||||
return_buffer = StringIO()
|
||||
ret = RunCmd(self._app_path, "--version", outstream=return_buffer)
|
||||
if (ret != 0):
|
||||
raise UncrustifyAppVersionErrorException(
|
||||
f"Error occurred executing --version: {ret}.")
|
||||
|
||||
# Log Uncrustify version information.
|
||||
self._app_version = return_buffer.getvalue().strip()
|
||||
self._tc.LogStdOut(f"Uncrustify version: {self._app_version}")
|
||||
version_aggregator.GetVersionAggregator().ReportVersion(
|
||||
"Uncrustify", self._app_version, version_aggregator.VersionTypes.INFO)
|
||||
|
||||
def _initialize_config_file_info(self) -> None:
|
||||
"""
|
||||
Initialize Uncrustify configuration file info.
|
||||
|
||||
The config file path is relative to the package root.
|
||||
"""
|
||||
self._app_config_file = UncrustifyCheck.DEFAULT_CONFIG_FILE_PATH
|
||||
if "ConfigFilePath" in self._package_config:
|
||||
self._app_config_file = self._package_config["ConfigFilePath"].strip()
|
||||
|
||||
self._app_config_file = os.path.normpath(
|
||||
os.path.join(self._abs_package_path, self._app_config_file))
|
||||
|
||||
if not os.path.isfile(self._app_config_file):
|
||||
raise FileNotFoundError(
|
||||
errno.ENOENT, os.strerror(errno.ENOENT), self._app_config_file)
|
||||
|
||||
def _initialize_environment_info(self, package_rel_path: str, edk2_path: Edk2Path, package_config: Dict[str, List[str]], tc: JunitReportTestCase) -> None:
|
||||
"""
|
||||
Initializes plugin environment information.
|
||||
"""
|
||||
self._abs_package_path = edk2_path.GetAbsolutePathOnThisSystemFromEdk2RelativePath(
|
||||
package_rel_path)
|
||||
self._abs_workspace_path = edk2_path.WorkspacePath
|
||||
self._package_config = package_config
|
||||
self._package_name = os.path.basename(
|
||||
os.path.normpath(package_rel_path))
|
||||
self._plugin_name = self.__class__.__name__
|
||||
self._plugin_path = os.path.dirname(os.path.realpath(__file__))
|
||||
self._rel_package_path = package_rel_path
|
||||
self._tc = tc
|
||||
|
||||
def _initialize_file_to_format_info(self) -> None:
|
||||
"""
|
||||
Forms the list of source files for Uncrustify to process.
|
||||
"""
|
||||
# Create a list of all the package relative file paths in the package to run against Uncrustify.
|
||||
rel_file_paths_to_format = list(
|
||||
UncrustifyCheck.STANDARD_PLUGIN_DEFINED_PATHS)
|
||||
|
||||
# Allow the ci.yaml to remove any of the pre-defined standard paths
|
||||
if "IgnoreStandardPaths" in self._package_config:
|
||||
for a in self._package_config["IgnoreStandardPaths"]:
|
||||
if a.strip() in rel_file_paths_to_format:
|
||||
self._tc.LogStdOut(
|
||||
f"Ignoring standard path due to ci.yaml ignore: {a}")
|
||||
rel_file_paths_to_format.remove(a.strip())
|
||||
else:
|
||||
raise UncrustifyInvalidIgnoreStandardPathsException(f"Invalid IgnoreStandardPaths value: {a}")
|
||||
|
||||
# Allow the ci.yaml to specify additional include paths for this package
|
||||
if "AdditionalIncludePaths" in self._package_config:
|
||||
rel_file_paths_to_format.extend(
|
||||
self._package_config["AdditionalIncludePaths"])
|
||||
|
||||
self._abs_file_paths_to_format = []
|
||||
for path in rel_file_paths_to_format:
|
||||
self._abs_file_paths_to_format.extend(
|
||||
[str(path.resolve()) for path in pathlib.Path(self._abs_package_path).rglob(path)])
|
||||
|
||||
# Remove files ignored in the plugin configuration file
|
||||
plugin_ignored_files = list(filter(self._get_files_ignored_in_config(), self._abs_file_paths_to_format))
|
||||
|
||||
if plugin_ignored_files:
|
||||
logging.info(
|
||||
f"{self._package_name} file count before plugin ignore file exclusion: {len(self._abs_file_paths_to_format)}")
|
||||
for path in plugin_ignored_files:
|
||||
if path in self._abs_file_paths_to_format:
|
||||
logging.info(f" File ignored in plugin config file: {path}")
|
||||
self._abs_file_paths_to_format.remove(path)
|
||||
logging.info(
|
||||
f"{self._package_name} file count after plugin ignore file exclusion: {len(self._abs_file_paths_to_format)}")
|
||||
|
||||
if not "SkipGitExclusions" in self._package_config or not self._package_config["SkipGitExclusions"]:
|
||||
# Remove files ignored by git
|
||||
logging.info(
|
||||
f"{self._package_name} file count before git ignore file exclusion: {len(self._abs_file_paths_to_format)}")
|
||||
|
||||
ignored_paths = self._get_git_ignored_paths()
|
||||
self._abs_file_paths_to_format = list(
|
||||
set(self._abs_file_paths_to_format).difference(ignored_paths))
|
||||
|
||||
logging.info(
|
||||
f"{self._package_name} file count after git ignore file exclusion: {len(self._abs_file_paths_to_format)}")
|
||||
|
||||
# Remove files in submodules
|
||||
logging.info(
|
||||
f"{self._package_name} file count before submodule exclusion: {len(self._abs_file_paths_to_format)}")
|
||||
|
||||
submodule_paths = tuple(self._get_git_submodule_paths())
|
||||
for path in submodule_paths:
|
||||
logging.info(f" submodule path: {path}")
|
||||
|
||||
self._abs_file_paths_to_format = [
|
||||
f for f in self._abs_file_paths_to_format if not f.startswith(submodule_paths)]
|
||||
|
||||
logging.info(
|
||||
f"{self._package_name} file count after submodule exclusion: {len(self._abs_file_paths_to_format)}")
|
||||
|
||||
# Sort the files for more consistent results
|
||||
self._abs_file_paths_to_format.sort()
|
||||
|
||||
def _initialize_test_case_output_options(self) -> None:
|
||||
"""
|
||||
Initializes options that influence test case output.
|
||||
"""
|
||||
self._audit_only_mode = False
|
||||
self._output_file_diffs = True
|
||||
|
||||
if "AuditOnly" in self._package_config and self._package_config["AuditOnly"]:
|
||||
self._audit_only_mode = True
|
||||
|
||||
if "OutputFileDiffs" in self._package_config and not self._package_config["OutputFileDiffs"]:
|
||||
self._output_file_diffs = False
|
||||
|
||||
def _log_uncrustify_app_info(self) -> None:
|
||||
"""
|
||||
Logs Uncrustify application information.
|
||||
"""
|
||||
self._tc.LogStdOut(f"Found Uncrustify at {self._app_path}")
|
||||
self._tc.LogStdOut(f"Uncrustify version: {self._app_version}")
|
||||
self._tc.LogStdOut('\n')
|
||||
logging.info(f"Found Uncrustify at {self._app_path}")
|
||||
logging.info(f"Uncrustify version: {self._app_version}")
|
||||
logging.info('\n')
|
||||
|
||||
def _process_uncrustify_results(self) -> None:
|
||||
"""
|
||||
Process the results from Uncrustify.
|
||||
|
||||
Determines whether formatting errors are present and logs failures.
|
||||
"""
|
||||
formatted_files = [str(path.resolve()) for path in pathlib.Path(
|
||||
self._abs_package_path).rglob(f'*{UncrustifyCheck.FORMATTED_FILE_EXTENSION}')]
|
||||
|
||||
self._formatted_file_error_count = len(formatted_files)
|
||||
|
||||
if self._formatted_file_error_count > 0:
|
||||
logging.error(
|
||||
"Visit the following instructions to learn "
|
||||
"how to find the detailed formatting errors in Azure "
|
||||
"DevOps CI: "
|
||||
"https://github.com/tianocore/tianocore.github.io/wiki/EDK-II-Code-Formatting#how-to-find-uncrustify-formatting-errors-in-continuous-integration-ci")
|
||||
self._tc.LogStdError("Files with formatting errors:\n")
|
||||
|
||||
if self._output_file_diffs:
|
||||
logging.info("Calculating file diffs. This might take a while...")
|
||||
|
||||
for formatted_file in formatted_files:
|
||||
pre_formatted_file = formatted_file[:-
|
||||
len(UncrustifyCheck.FORMATTED_FILE_EXTENSION)]
|
||||
logging.error(pre_formatted_file)
|
||||
|
||||
if (self._output_file_diffs or
|
||||
self._file_template_contents is not None or
|
||||
self._func_template_contents is not None):
|
||||
self._tc.LogStdError(
|
||||
f"Formatting errors in {os.path.relpath(pre_formatted_file, self._abs_package_path)}\n")
|
||||
|
||||
with open(formatted_file) as ff:
|
||||
formatted_file_text = ff.read()
|
||||
|
||||
if (self._file_template_contents is not None and
|
||||
self._file_template_contents in formatted_file_text):
|
||||
self._tc.LogStdError(f"File header is missing in {os.path.relpath(pre_formatted_file, self._abs_package_path)}\n")
|
||||
|
||||
if (self._func_template_contents is not None and
|
||||
self._func_template_contents in formatted_file_text):
|
||||
self._tc.LogStdError(f"A function header is missing in {os.path.relpath(pre_formatted_file, self._abs_package_path)}\n")
|
||||
|
||||
if self._output_file_diffs:
|
||||
with open(pre_formatted_file) as pf:
|
||||
pre_formatted_file_text = pf.read()
|
||||
|
||||
for line in difflib.unified_diff(pre_formatted_file_text.split('\n'), formatted_file_text.split('\n'), fromfile=pre_formatted_file, tofile=formatted_file, n=3):
|
||||
self._tc.LogStdError(line)
|
||||
|
||||
self._tc.LogStdError('\n')
|
||||
else:
|
||||
self._tc.LogStdError(pre_formatted_file)
|
||||
|
||||
def _remove_tree(self, dir_path: str, ignore_errors: bool = False) -> None:
|
||||
"""
|
||||
Helper for removing a directory. Over time there have been
|
||||
many private implementations of this due to reliability issues in the
|
||||
shutil implementations. To consolidate on a single function this helper is added.
|
||||
|
||||
On error try to change file attributes. Also add retry logic.
|
||||
|
||||
This function is temporarily borrowed from edk2toollib.utility_functions
|
||||
since the version used in edk2 is not recent enough to include the
|
||||
function.
|
||||
|
||||
This function should be replaced by "RemoveTree" when it is available.
|
||||
|
||||
Args:
|
||||
- dir_path: Path to directory to remove.
|
||||
- ignore_errors: Whether to ignore errors during removal
|
||||
"""
|
||||
|
||||
def _remove_readonly(func, path, _):
|
||||
"""
|
||||
Private function to attempt to change permissions on file/folder being deleted.
|
||||
"""
|
||||
os.chmod(path, stat.S_IWRITE)
|
||||
func(path)
|
||||
|
||||
for _ in range(3): # retry up to 3 times
|
||||
try:
|
||||
shutil.rmtree(dir_path, ignore_errors=ignore_errors, onerror=_remove_readonly)
|
||||
except OSError as err:
|
||||
logging.warning(f"Failed to fully remove {dir_path}: {err}")
|
||||
else:
|
||||
break
|
||||
else:
|
||||
raise RuntimeError(f"Failed to remove {dir_path}")
|
||||
|
||||
def _run_uncrustify(self) -> None:
|
||||
"""
|
||||
Runs Uncrustify for this instance of plugin execution.
|
||||
"""
|
||||
logging.info("Executing Uncrustify. This might take a while...")
|
||||
start_time = timeit.default_timer()
|
||||
self._execute_uncrustify()
|
||||
end_time = timeit.default_timer() - start_time
|
||||
|
||||
execution_summary = f"Uncrustify executed against {len(self._abs_file_paths_to_format)} files in {self._package_name} in {end_time:.2f} seconds.\n"
|
||||
|
||||
self._tc.LogStdOut(execution_summary)
|
||||
logging.info(execution_summary)
|
||||
|
||||
if self._app_exit_code != 0 and self._app_exit_code != 1:
|
||||
raise UncrustifyAppExecutionException(
|
||||
f"Error {str(self._app_exit_code)} returned from Uncrustify:\n\n{str(self._app_output)}")
|
@ -1,9 +0,0 @@
/** @file
  Brief description of the file's purpose.

  Detailed description of the file's contents and other useful
  information for a person viewing the file for the first time.

  <<Copyright>>
  SPDX-License-Identifier: BSD-2-Clause-Patent
**/
@ -1,15 +0,0 @@
/**
  Brief description of this function's purpose.

  Follow it immediately with the detailed description.

  @param[in]       Arg1  Description of Arg1.
  @param[in]       Arg2  Description of Arg2. This is complicated and requires
                         multiple lines to describe.
  @param[out]      Arg3  Description of Arg3.
  @param[in, out]  Arg4  Description of Arg4.

  @retval VAL_ONE  Description of what VAL_ONE signifies.
  @retval OTHER    This is the only other return value. If there were other
                   return values, they would be listed.
**/
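
The two templates above are referenced from the Uncrustify configuration below via the `cmt_insert_file_header` and `cmt_insert_func_header` options (commented out by default). A minimal sketch of reading such options with `configparser`, mirroring the plugin's dummy-section approach; the file name is a placeholder:

```python
import configparser

# Uncrustify config files have no section headers, so prepend a dummy one.
parser = configparser.ConfigParser(allow_no_value=True, inline_comment_prefixes=("#",))
with open("uncrustify.cfg") as cf:
    parser.read_string("[dummy_section]\n" + cf.read())

file_header = parser["dummy_section"].get("cmt_insert_file_header")  # None if commented out
func_header = parser["dummy_section"].get("cmt_insert_func_header")
print(file_header, func_header)
```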
@ -1,462 +0,0 @@
|
||||
## @file
|
||||
# Uncrustify Configuration File for EDK II C Code
|
||||
#
|
||||
# Coding Standard: https://edk2-docs.gitbook.io/edk-ii-c-coding-standards-specification/
|
||||
#
|
||||
# This configuration file is meant to be a "best attempt" to align with the
|
||||
# definitions in the EDK II C Coding Standards Specification.
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
|
||||
# Force UTF-8 encoding (no UTF-16)
|
||||
enable_digraphs = false
|
||||
utf8_byte = false
|
||||
utf8_force = true
|
||||
|
||||
# Code width / line splitting
|
||||
#code_width =120 # TODO: This causes non-deterministic behaviour in some cases when code wraps
|
||||
ls_code_width =false
|
||||
ls_for_split_full =true
|
||||
ls_func_split_full =true
|
||||
pos_comma =trail
|
||||
|
||||
# 5.1.7 All files must end with CRLF
|
||||
newlines = crlf
|
||||
|
||||
# 5.1.2 Do not use tab characters
|
||||
|
||||
cmt_convert_tab_to_spaces = true # Whether to convert all tabs to spaces in comments. If false, tabs in
|
||||
# comments are left alone, unless used for indenting.
|
||||
indent_columns = 2 # Number of spaces for indentation
|
||||
indent_with_tabs = 0 # Do not use TAB characters
|
||||
string_replace_tab_chars = true # Replace TAB with SPACE
|
||||
# Note: This will break .robot files but is needed for edk2 style
|
||||
|
||||
# 5.2.1.1 There shall be only one statement on a line (statement ends with ;)
|
||||
nl_multi_line_cond = true # Add a newline between ')' and '{' if the ')' is on a different line than
|
||||
# the if/for/etc.
|
||||
nl_after_semicolon = true # Whether to add a newline after semicolons, except in 'for' statements.
|
||||
|
||||
# 5.2.1.3 An open brace '{' goes on the same line as the closing parenthesis ')' of simple predicate expressions
|
||||
mod_full_brace_do = add # Add or remove braces on a single-line 'do' statement.
|
||||
mod_full_brace_for = add
|
||||
mod_full_brace_function = add # Add or remove braces on a single-line function definition.
|
||||
mod_full_brace_if = add # Add or remove braces on a single-line 'if' statement. Braces will not be
|
||||
# removed if the braced statement contains an 'else'.
|
||||
mod_full_brace_if_chain = false
|
||||
mod_full_brace_while = add
|
||||
|
||||
# 5.2.1.4 A close brace '}' always goes at the beginning of the last line of the body
|
||||
eat_blanks_after_open_brace = true
|
||||
eat_blanks_before_close_brace = true # Whether to remove blank lines before '}'.
|
||||
|
||||
# 5.2.2.2 Always put space before and after binary operators.
|
||||
sp_assign = add # Add or remove space around assignment operator '=', '+=', etc.
|
||||
sp_assign_default = add
|
||||
sp_bool = add # Add or remove space around boolean operators '&&' and '||'.
|
||||
sp_compare = add # Add or remove space around compare operator '<', '>', '==', etc.
|
||||
|
||||
# 5.2.2.3 Do not put space between unary operators and their object
|
||||
sp_addr = remove # Add or remove space after the '&' (address-of) unary operator.
|
||||
sp_incdec = remove # Add or remove space between '++' and '--' the word to which it is being
|
||||
# applied, as in '(--x)' or 'y++;'.
|
||||
sp_inv = remove # Add or remove space after the '~' (invert) unary operator.
|
||||
sp_not = remove # Add or remove space after the '!' (not) unary operator.
|
||||
sp_sign = remove # Add or remove space after '+' or '-', as in 'x = -5' or 'y = +7'.
|
||||
|
||||
# 5.2.2.4 Subsequent lines of multi-line function calls should line up two spaces from the beginning of the function
|
||||
# name
|
||||
nl_func_call_args_multi_line = true # Whether to add a newline after each ',' in a function call if '(' and ')'
|
||||
# are in different lines.
|
||||
nl_func_call_args_multi_line_ignore_closures = false
|
||||
|
||||
# - Indent each argument 2 spaces from the start of the function name. If a
|
||||
# function is called through a structure or union member, of type
|
||||
# pointer-to-function, then indent each argument 2 spaces from the start of the
|
||||
# member name.
|
||||
indent_func_call_edk2_style = true # Use EDK2 indentation style for function calls (**CUSTOM SETTING**)
|
||||
indent_paren_after_func_call = true # Whether to indent the open parenthesis of a function call, if the
|
||||
# parenthesis is on its own line.
|
||||
|
||||
# - Align the close parenthesis with the start of the last argument
|
||||
indent_paren_close = 0 # How to indent a close parenthesis after a newline.
|
||||
# (0: Body, 1: Open parenthesis, 2: Brace level)
|
||||
|
||||
|
||||
# 5.2.2.5 Always put space after commas or semicolons that separate items
|
||||
sp_after_comma = force # Add or remove space after ',', i.e. 'a,b' vs. 'a, b'.
|
||||
sp_before_comma = remove # Add or remove space before ','.
|
||||
|
||||
# 5.2.2.6 Always put space before an open parenthesis
|
||||
sp_after_sparen = add # Add or remove space after ')' of control statements.
|
||||
sp_attribute_paren = add # Add or remove space between '__attribute__' and '('.
|
||||
sp_before_sparen = force # Add or remove space before '(' of control statements
|
||||
# ('if', 'for', 'switch', 'while', etc.).
|
||||
sp_defined_paren = force # Add or remove space between 'defined' and '(' in '#if defined (FOO)'.
|
||||
sp_func_call_paren = force # Add or remove space between function name and '(' on function calls.
|
||||
sp_func_call_paren_empty = force # Add or remove space between function name and '()' on function calls
|
||||
# without parameters. If set to ignore (the default), sp_func_call_paren is
|
||||
# used.
|
||||
sp_func_def_paren = add # Add or remove space between alias name and '(' of a non-pointer function
|
||||
# type typedef.
|
||||
sp_func_proto_paren = add # Add or remove space between function name and '()' on function declaration
|
||||
sp_sizeof_paren = force # Add or remove space between 'sizeof' and '('.
|
||||
sp_type_func = add # Add or remove space between return type and function name. A minimum of 1
|
||||
# is forced except for pointer return types.
|
||||
|
||||
# Not specified, but also good style to remove spaces inside parentheses (Optional)
|
||||
sp_cparen_oparen = remove # Add or remove space between back-to-back parentheses, i.e. ')(' vs. ') ('.
|
||||
sp_inside_fparen = remove # Add or remove space inside function '(' and ')'.
|
||||
sp_inside_fparens = remove # Add or remove space inside empty function '()'.
|
||||
sp_inside_paren = remove # Add or remove space inside '(' and ')'.
|
||||
sp_inside_paren_cast = remove # Add or remove spaces inside cast parentheses. '(int)x'
|
||||
sp_inside_square = remove # Add or remove space inside a non-empty '[' and ']'.
|
||||
sp_paren_paren = remove # Add or remove space between nested parentheses, i.e. '((' vs. ') )'.
|
||||
sp_square_fparen = remove # Add or remove space between ']' and '(' when part of a function call.
|
||||
|
||||
# 5.2.2.7 Put a space before an open brace if it is not on its own line
|
||||
sp_do_brace_open = force # Add or remove space between 'do' and '{'.
|
||||
sp_paren_brace = force # Add or remove space between ')' and '{'.
|
||||
sp_sparen_brace = force # Add or remove space between ')' and '{' of control statements.
|
||||
|
||||
# 5.2.2.8 Do not put spaces around structure member and pointer operators
|
||||
sp_after_byref = remove # Add or remove space after reference sign '&', if followed by a word.
|
||||
sp_before_byref = add # Add or remove space before a reference sign '&'.
|
||||
sp_deref = remove # Add or remove space after the '*' (dereference) unary operator. This does
|
||||
# not affect the spacing after a '*' that is part of a type.
|
||||
sp_member = remove # Add or remove space around the '.' or '->' operators.
|
||||
|
||||
# 5.2.2.9 Do not put spaces before open brackets of array subscripts
|
||||
sp_before_square = remove # Add or remove space before '[' (except '[]').
|
||||
sp_before_squares = remove # Add or remove space before '[]'.
|
||||
sp_before_vardef_square = remove # Add or remove space before '[' for a variable definition.
|
||||
|
||||
# 5.2.2.10 Use extra parentheses rather than depending on in-depth knowledge of the order of precedence of C
|
||||
mod_full_paren_if_bool = true # Whether to fully parenthesize Boolean expressions in 'while' and 'if'
|
||||
# statement, as in 'if (a && b > c)' => 'if (a && (b > c))'.
|
||||
|
||||
# 5.2.2.11 Align a continuation line with the part of the line that it continues.
|
||||
use_indent_continue_only_once = true
|
||||
|
||||
# Additional '{}' bracing rules (Optional)
|
||||
# NOTE - The style guide specifies two different styles for braces,
|
||||
# so these are ignored for now to allow developers some flexibility.
|
||||
nl_after_brace_close = true # Whether to add a newline after '}'. Does not apply if followed by a
|
||||
# necessary ';'.
|
||||
nl_brace_else = remove # Add or remove newline between '}' and 'else'.
|
||||
nl_brace_while = remove # Add or remove newline between '}' and 'while' of 'do' statement.
|
||||
nl_do_brace = remove # Add or remove newline between 'do' and '{'.
|
||||
nl_else_brace = remove # Add or remove newline between 'else' and '{'.
|
||||
nl_else_if = remove # Add or remove newline between 'else' and 'if'.
|
||||
nl_elseif_brace = remove # Add or remove newline between 'else if' and '{'.
|
||||
nl_enum_brace = remove # Add or remove newline between 'enum' and '{'.
|
||||
nl_fcall_brace = remove # Add or remove newline between a function call's ')' and '{',
|
||||
# as in 'list_for_each(item, &list) { }'.
|
||||
nl_for_brace = remove # Add or remove newline between 'for' and '{'.
|
||||
nl_if_brace = remove # Add or remove newline between 'if' and '{'.
|
||||
nl_struct_brace = remove # Add or remove newline between 'struct' and '{'.
|
||||
nl_switch_brace = remove # Add or remove newline between 'switch' and '{'.
|
||||
nl_union_brace = remove # Add or remove newline between 'union' and '{'.
|
||||
nl_while_brace = remove # Add or remove newline between 'while' and '{'.
|
||||
|
||||
# Additional whitespace rules (Optional)
|
||||
sp_after_ptr_star = remove # Add or remove space after pointer star '*', if followed by a word.
|
||||
# Useful when paired with align_var_def_star_style==2
|
||||
sp_after_ptr_star_func = remove # Add or remove space after a pointer star '*', if followed by a function
|
||||
# prototype or function definition.
|
||||
sp_after_semi = remove # Add or remove space after ';', except when followed by a comment.
|
||||
sp_before_case_colon = remove # Add or remove space before case ':'.
|
||||
sp_before_ptr_star = add # Add or remove space before pointer star '*'.
|
||||
sp_before_ptr_star_func = add # Add or remove space before a pointer star '*', if followed by a function
|
||||
# prototype or function definition.
|
||||
sp_before_semi = remove # Add or remove space before ';'
|
||||
sp_before_semi_for = remove # Add or remove space before ';' in non-empty 'for' statements.
|
||||
sp_before_semi_for_empty = add # Add or remove space before a semicolon of an empty part of a for statement
|
||||
sp_between_ptr_star = remove # Add or remove space between pointer stars '*'. (ie, 'VOID **')
|
||||
sp_brace_close_while = force # Add or remove space between '}' and 'while'.
|
||||
|
||||
sp_after_cast = remove
|
||||
sp_after_type = add
|
||||
sp_balance_nested_parens = false
|
||||
sp_before_nl_cont = add
|
||||
sp_before_square_asm_block = ignore
|
||||
sp_before_unnamed_byref = add
|
||||
sp_brace_brace = ignore
|
||||
sp_brace_else = force
|
||||
sp_brace_typedef = add
|
||||
sp_case_label = force
|
||||
sp_cmt_cpp_doxygen = true
|
||||
sp_cond_colon = add
|
||||
sp_cond_question = add
|
||||
sp_cpp_cast_paren = force
|
||||
sp_else_brace = force
|
||||
sp_endif_cmt = force
|
||||
sp_enum_assign = add
|
||||
sp_inside_braces = force
|
||||
sp_inside_braces_empty = force
|
||||
sp_inside_braces_enum = force
|
||||
sp_inside_braces_struct = force
|
||||
sp_pp_concat = add
|
||||
sp_pp_stringify = add
|
||||
sp_return_paren = add
|
||||
sp_special_semi = force
|
||||
sp_while_paren_open = force
|
||||
|
||||
# Additional Indentation Rules
|
||||
indent_access_spec = 1
|
||||
indent_access_spec_body = false
|
||||
indent_align_assign = true
|
||||
indent_align_string = true
|
||||
indent_bool_paren = true
|
||||
indent_brace_parent = false
|
||||
indent_braces = false
|
||||
indent_braces_no_class = false
|
||||
indent_braces_no_func = true
|
||||
indent_braces_no_struct = false
|
||||
indent_class = true
|
||||
indent_class_colon = false
|
||||
indent_cmt_with_tabs = false # Whether to indent comments that are not at a brace level with tabs on
|
||||
# a tabstop. Requires indent_with_tabs=2. If false, will use spaces.
|
||||
indent_col1_comment = true
|
||||
indent_col1_multi_string_literal= true
|
||||
indent_comma_paren = true
|
||||
indent_else_if = true
|
||||
indent_extern = true
|
||||
indent_first_bool_expr = true
|
||||
|
||||
indent_func_def_param_paren_pos_threshold = 0
|
||||
indent_func_param_double = false
|
||||
indent_func_proto_param = true
|
||||
indent_ignore_asm_block = true
|
||||
indent_label = 1
|
||||
indent_member = 2
|
||||
indent_namespace = false
|
||||
indent_param = 2
|
||||
indent_paren_nl = false
|
||||
indent_paren_open_brace = false
|
||||
indent_preserve_sql = false
|
||||
indent_relative_single_line_comments = false
|
||||
indent_sing_line_comments = 0
|
||||
indent_single_newlines = false
|
||||
indent_square_nl = false
|
||||
indent_switch_case = 2
|
||||
indent_template_param = true
|
||||
indent_var_def_blk = 0
|
||||
indent_var_def_cont = false
|
||||
|
||||
# Tidy-up rules (Optional)
|
||||
mod_move_case_break = true # Whether to move a 'break' that appears after a fully braced 'case'
|
||||
# before the close brace, as in 'case X: { ... } break;' =>
|
||||
# 'case X: { ... break; }'.
|
||||
mod_pawn_semicolon = false
|
||||
mod_remove_empty_return = false # Whether to remove a void 'return;' that appears as the last statement
|
||||
# in a function.
|
||||
mod_remove_extra_semicolon = true
|
||||
mod_sort_import = false
|
||||
mod_sort_include = false
|
||||
mod_sort_using = false
|
||||
nl_after_case = false # Whether to add a newline after a 'case' statement.
|
||||
nl_end_of_file = force # Add or remove newline at the end of the file.
|
||||
nl_end_of_file_min = 1 # The minimum number of newlines at the end of the file
|
||||
nl_max = 2 # The maximum number of consecutive newlines (3 = 2 blank lines).
|
||||
nl_start_of_file = remove # Add or remove newlines at the start of the file.
|
||||
|
||||
# Code alignment rules (Optional)
|
||||
align_asm_colon = false
|
||||
align_assign_span = 1 # The span for aligning on '=' in assignments.
|
||||
align_assign_thresh = 0
|
||||
align_edk2_style = true # Whether to apply edk2-specific alignment formatting
|
||||
align_enum_equ_span = 1 # The span for aligning on '=' in enums.
|
||||
align_func_params = true # Whether to align variable definitions in prototypes and functions.
|
||||
align_func_params_gap = 2
|
||||
align_func_params_span = 2 # The span for aligning parameter definitions in function on parameter name.
|
||||
align_func_params_thresh = 0
|
||||
align_func_proto_span = 0
|
||||
align_keep_tabs = false
|
||||
align_left_shift = false
|
||||
align_mix_var_proto = false
|
||||
align_nl_cont = false
|
||||
align_oc_decl_colon = false
|
||||
align_on_operator = false
|
||||
align_on_tabstop = false
|
||||
align_pp_define_gap = 2
|
||||
align_pp_define_span = 1
|
||||
align_right_cmt_at_col = 0 # Align trailing comment at or beyond column N; 'pulls in' comments as
|
||||
# a bonus side effect (0=ignore)
|
||||
align_right_cmt_gap = 0 # If a trailing comment is more than this number of columns away from the
|
||||
# text it follows,
|
||||
# it will qualify for being aligned. This has to be > 0 to do anything.
|
||||
align_right_cmt_mix = false # If aligning comments, mix with comments after '}' and #endif with less
|
||||
# than 3 spaces before the comment
|
||||
align_right_cmt_same_level = true # Whether to only align trailing comments that are at the same brace level.
|
||||
align_right_cmt_span = 2 # The span for aligning comments that end lines.
|
||||
align_same_func_call_params = false
|
||||
align_single_line_brace = true
|
||||
align_single_line_func = true
|
||||
align_struct_init_span = 1 # The span for aligning struct initializer values.
|
||||
align_typedef_amp_style = 1
|
||||
align_typedef_func = 1 # How to align typedef'd functions with other typedefs.
|
||||
# (0: No align, 1: Align open parenthesis, 2: Align function type name)
|
||||
align_typedef_gap = 2
|
||||
align_typedef_span = 1 # The span for aligning single-line typedefs.
|
||||
align_typedef_star_style = 1
|
||||
align_var_def_amp_style = 1
|
||||
align_var_def_attribute = true
|
||||
align_var_def_colon = true # Whether to align the colon in struct bit fields.
|
||||
align_var_def_gap = 2 # The gap (minimum spacing for aligned items) for variable definitions.
|
||||
align_var_def_inline = false
|
||||
align_var_def_span = 1 # The span (lines needed to align) for aligning variable definitions.
|
||||
align_var_def_star_style = 1 # How to consider (or treat) the '*' in the alignment of variable
|
||||
# definitions.
|
||||
# 0: Part of the type 'void * foo;' (default)
|
||||
# 1: Part of the variable 'void *foo;'
|
||||
# 2: Dangling 'void *foo;'
|
||||
# (Note - should also set sp_after_ptr_star=remove)
|
||||
align_var_struct_gap = 4
|
||||
align_var_struct_span = 8 # The span for aligning struct/union member definitions.
|
||||
align_var_struct_thresh = 0
|
||||
align_with_tabs = false
|
||||
|
||||
# Comment formatting
|
||||
cmt_align_doxygen_javadoc_tags = true # Whether to align doxygen javadoc-style tags ('@param', '@return', etc.)
|
||||
# TODO: Eats '[' in '[in]'
|
||||
cmt_c_group = false
|
||||
cmt_c_nl_end = true # Whether to add a newline before the closing '*/' of the combined c-comment.
|
||||
cmt_c_nl_start = true
|
||||
cmt_cpp_group = false
|
||||
cmt_cpp_nl_end = true
|
||||
cmt_cpp_nl_start = true
|
||||
cmt_cpp_to_c = false
|
||||
cmt_indent_multi = false # Whether to apply changes to multi-line comments, including cmt_width,
|
||||
# keyword substitution and leading chars.
|
||||
cmt_insert_before_preproc = false
|
||||
#cmt_insert_file_header = default_file_header.txt
|
||||
#cmt_insert_func_header = default_function_header.txt
|
||||
cmt_multi_check_last = false
|
||||
cmt_multi_first_len_minimum = 2
|
||||
cmt_reflow_mode = 1 # How to reflow comments.
|
||||
# (0:No reflow, 1:No touching at all, 2: Full reflow)
|
||||
cmt_sp_after_star_cont = 0 # The number of spaces to insert after the star on subsequent comment lines.
|
||||
cmt_sp_before_star_cont = 0 # The number of spaces to insert at the start of subsequent comment lines.
|
||||
cmt_star_cont = false # Whether to put a star on subsequent comment lines.
|
||||
cmt_width = 120 # Try to wrap comments at N columns.
|
||||
sp_cmt_cpp_start = add # Add or remove space after the opening of a C++ comment, as in
|
||||
# '// <here> A'. NOTE: Breaks indentation within comments.
|
||||
|
||||
# Function definitions / declarations
|
||||
indent_func_call_param = false # Whether to indent continued function call parameters one indent level,
|
||||
# rather than aligning parameters under the open parenthesis.
|
||||
indent_func_class_param = false # Whether to indent continued function call declaration one indent level,
|
||||
# rather than aligning parameters under the open parenthesis.
|
||||
indent_func_ctor_var_param = false # Whether to indent continued class variable constructors one indent level,
|
||||
# rather than aligning parameters under the open parenthesis.
|
||||
indent_func_def_param = true # Whether to indent continued function definition parameters one indent
|
||||
# level, rather than aligning parameters under the open parenthesis.
|
||||
nl_fdef_brace = add # Add or remove newline between function signature and '{'.
|
||||
nl_func_call_end_multi_line = true # Whether to add a newline before ')' in a function call if '(' and ')' are
|
||||
# in different lines.
|
||||
nl_func_call_paren = remove # Add or remove newline between a function name and the opening '(' in the
|
||||
# call.
|
||||
nl_func_call_start_multi_line = true # Whether to add a newline after '(' in a function call if '(' and ')' are
|
||||
# in different lines.
|
||||
nl_func_decl_args = force # Add or remove newline after each ',' in a function declaration.
|
||||
nl_func_decl_empty = add # Add or remove newline between '()' in a function declaration.
|
||||
nl_func_def_args = force # Add or remove newline after each ',' in a function definition.
|
||||
nl_func_def_empty = add # Add or remove newline between '()' in a function definition.
|
||||
nl_func_def_paren = remove # Add or remove newline between a function name and the opening '('
|
||||
# in the definition.
|
||||
nl_func_paren = remove # Add or remove newline between a function name and the opening '(' in
|
||||
# the declaration.
|
||||
nl_func_type_name = add # Add or remove newline between return type and function name in a function
|
||||
# definition.
|
||||
sp_fparen_brace = force # Add or remove space between ')' and '{' of function.
|
||||
use_indent_func_call_param = true # indent_func_call_param will be used
|
||||
|
||||
# Additional Newline Rules
|
||||
nl_after_brace_open = true # Whether to add a newline after '{'. This also adds a newline
|
||||
# before the matching '}'.
|
||||
nl_after_brace_open_cmt = true # Whether to add a newline between the open brace and a
|
||||
# trailing single-line comment.
|
||||
# Requires nl_after_brace_open = true.
|
||||
nl_after_do = add # Add or remove blank line after 'do/while' statement.
|
||||
nl_after_for = add # Add or remove blank line after 'for' statement.
|
||||
nl_after_func_body = 2 # The number of newlines after '}' of a multi-line function body
|
||||
nl_after_func_body_one_liner = 2
|
||||
nl_after_func_proto = 2
|
||||
nl_after_func_proto_group = 2
|
||||
nl_after_if = add
|
||||
nl_after_multiline_comment = false
|
||||
nl_after_return = false
|
||||
nl_after_struct = 2
|
||||
nl_after_switch = add
|
||||
nl_after_vbrace_close = true
|
||||
nl_after_vbrace_open = true
|
||||
nl_after_vbrace_open_empty = true
|
||||
nl_after_while = add
|
||||
nl_assign_leave_one_liners = true
|
||||
nl_before_block_comment = 2
|
||||
nl_before_case = false
|
||||
nl_before_do = ignore
|
||||
nl_before_for = ignore
|
||||
nl_before_if = ignore
|
||||
nl_before_switch = ignore
|
||||
nl_before_while = ignore
|
||||
nl_before_whole_file_ifdef = 2
|
||||
nl_brace_brace = force
|
||||
nl_brace_struct_var = remove
|
||||
nl_case_colon_brace = add
|
||||
nl_class_leave_one_liners = false
|
||||
nl_collapse_empty_body = false
|
||||
nl_comment_func_def = 1
|
||||
nl_create_for_one_liner = false
|
||||
nl_create_if_one_liner = false
|
||||
nl_create_while_one_liner = false
|
||||
nl_define_macro = false
|
||||
nl_ds_struct_enum_close_brace = true
|
||||
nl_ds_struct_enum_cmt = false
|
||||
nl_enum_leave_one_liners = false
|
||||
nl_func_decl_end = add
|
||||
nl_func_decl_start = add
|
||||
nl_func_def_end = add
|
||||
nl_func_def_start = add
|
||||
nl_func_leave_one_liners = false
|
||||
nl_func_proto_type_name = add
|
||||
nl_func_var_def_blk = 1
|
||||
nl_getset_leave_one_liners = false
|
||||
nl_if_leave_one_liners = false
|
||||
nl_multi_line_define = false
|
||||
nl_squeeze_ifdef = false
|
||||
nl_var_def_blk_end = 0
|
||||
nl_var_def_blk_start = 0
|
||||
|
||||
# Preprocessor Rules
|
||||
pp_define_at_level = true
|
||||
pp_if_indent_code = false
|
||||
pp_indent_func_def = false
|
||||
pp_indent_extern = false
|
||||
pp_ignore_define_body = true # Workaround: Turn off processing for #define body
|
||||
# (current rules do not work for some defines)
|
||||
pp_indent = add
|
||||
pp_indent_at_level = true
|
||||
pp_indent_count = 2
|
||||
pp_indent_if = 2
|
||||
pp_indent_region = 2
|
||||
pp_region_indent_code = false
|
||||
pp_space = remove
|
||||
|
||||
#
|
||||
# The tokens below are assigned specific types so they are always recognized properly.
|
||||
#
|
||||
|
||||
# Explicitly define EDK II qualifiers
|
||||
set QUALIFIER CONST
|
||||
set QUALIFIER EFIAPI
|
||||
set QUALIFIER IN
|
||||
set QUALIFIER OPTIONAL
|
||||
set QUALIFIER OUT
|
||||
|
||||
# Explicitly define EDK II types
|
||||
set TYPE EFI_STATUS
|
||||
set TYPE VOID
|
@ -1,16 +0,0 @@
|
||||
## @file
|
||||
# Downloads the Uncrustify application from a Project Mu NuGet package.
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
{
|
||||
"id": "uncrustify-ci-1",
|
||||
"scope": "cibuild",
|
||||
"type": "nuget",
|
||||
"name": "mu-uncrustify-release",
|
||||
"source": "https://pkgs.dev.azure.com/projectmu/Uncrustify/_packaging/mu_uncrustify/nuget/v3/index.json",
|
||||
"version": "73.0.8",
|
||||
"flags": ["set_shell_var", "host_specific"],
|
||||
"var_name": "UNCRUSTIFY_CI_PATH"
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
## @file
|
||||
# CiBuildPlugin used to check coding standard compliance of EDK II style C source code
|
||||
#
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# SPDX-License-Identifier: BSD-2-Clause-Patent
|
||||
##
|
||||
{
|
||||
"scope": "cibuild",
|
||||
"name": "Uncrustify Coding Standard Test",
|
||||
"module": "UncrustifyCheck"
|
||||
}
|
@ -1,280 +0,0 @@
|
||||
# Edk2 Continuous Integration
|
||||
|
||||
This file focuses on information for those working with the `.pytools` directory
|
||||
directly or interested in lower-level details about how CI works.
|
||||
|
||||
If you just want to get started building code, visit
|
||||
[Build Instructions](https://github.com/tianocore/tianocore.github.io/wiki/Build-Instruction)
|
||||
on the TianoCore wiki.
|
||||
|
||||
## Basic Status
|
||||
|
||||
| Package | Windows VS2019 (IA32/X64) | Ubuntu GCC (IA32/X64/ARM/AARCH64) | Known Issues |
|
||||
| :---- | :----- | :---- | :--- |
|
||||
| ArmPkg | | :heavy_check_mark: |
|
||||
| ArmPlatformPkg | | :heavy_check_mark: |
|
||||
| ArmVirtPkg | SEE PACKAGE README | SEE PACKAGE README |
|
||||
| CryptoPkg | :heavy_check_mark: | :heavy_check_mark: | Spell checking in audit mode
|
||||
| DynamicTablesPkg | :heavy_check_mark: | :heavy_check_mark: |
|
||||
| EmbeddedPkg |
|
||||
| EmulatorPkg | SEE PACKAGE README | SEE PACKAGE README | Spell checking in audit mode
|
||||
| FatPkg | :heavy_check_mark: | :heavy_check_mark: |
|
||||
| FmpDevicePkg | :heavy_check_mark: | :heavy_check_mark: |
|
||||
| IntelFsp2Pkg |
|
||||
| IntelFsp2WrapperPkg |
|
||||
| MdeModulePkg | :heavy_check_mark: | :heavy_check_mark: | DxeIpl dependency on ArmPkg, Depends on StandaloneMmPkg, Spell checking in audit mode
|
||||
| MdePkg | :heavy_check_mark: | :heavy_check_mark: | Spell checking in audit mode
|
||||
| NetworkPkg | :heavy_check_mark: | :heavy_check_mark: | Spell checking in audit mode
|
||||
| OvmfPkg | SEE PACKAGE README | SEE PACKAGE README | Spell checking in audit mode
|
||||
| PcAtChipsetPkg | :heavy_check_mark: | :heavy_check_mark: |
|
||||
| SecurityPkg | :heavy_check_mark: | :heavy_check_mark: | Spell checking in audit mode
|
||||
| ShellPkg | :heavy_check_mark: | :heavy_check_mark: | Spell checking in audit mode, 3 modules are not being built by DSC
|
||||
| SignedCapsulePkg |
|
||||
| SourceLevelDebugPkg |
|
||||
| StandaloneMmPkg | :heavy_check_mark: | :heavy_check_mark: |
|
||||
| UefiCpuPkg | :heavy_check_mark: | :heavy_check_mark: | Spell checking in audit mode, 2 binary modules not being built by DSC
|
||||
| UefiPayloadPkg |
|
||||
| UnitTestFrameworkPkg | :heavy_check_mark: | :heavy_check_mark: |
|
||||
|
||||
For more detailed status, see the test results of the latest CI run on the
repo readme.
|
||||
|
||||
## Background
|
||||
|
||||
This continuous integration and testing infrastructure leverages the TianoCore EDKII Tools PIP modules:
|
||||
[library](https://pypi.org/project/edk2-pytool-library/) and
|
||||
[extensions](https://pypi.org/project/edk2-pytool-extensions/) (with repos
|
||||
located [here](https://github.com/tianocore/edk2-pytool-library) and
|
||||
[here](https://github.com/tianocore/edk2-pytool-extensions)).
|
||||
|
||||
The primary execution flows can be found in the
|
||||
`.azurepipelines/Windows-VS2019.yml` and `.azurepipelines/Ubuntu-GCC5.yml`
|
||||
files. These YAML files are consumed by the Azure Dev Ops Build Pipeline and
|
||||
dictate what server resources should be used, how they should be configured, and
|
||||
what processes should be run on them. An overview of this schema can be found
|
||||
[here](https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=azure-devops&tabs=schema).
|
||||
|
||||
Inspection of these files reveals the EDKII Tools commands that make up the
|
||||
primary processes for the CI build: 'stuart_setup', 'stuart_update', and
|
||||
'stuart_ci_build'. These commands come from the EDKII Tools PIP modules and are
|
||||
configured as described below. More documentation on the tools can be
|
||||
found [here](https://github.com/tianocore/edk2-pytool-extensions/blob/master/docs/using.md)
|
||||
and [here](https://github.com/tianocore/edk2-pytool-extensions/blob/master/docs/features/feature_invocables.md).
|
||||
|
||||
## Configuration
|
||||
|
||||
Configuration of the CI process consists of (in order of precedence):
|
||||
|
||||
* command-line arguments passed in via the Pipeline YAML
|
||||
* a per-package configuration file (e.g. `<package-name>.ci.yaml`) that is
|
||||
detected by the CI system in EDKII Tools.
|
||||
* a global configuration Python module (e.g. `CISetting.py`) passed in via the
|
||||
command-line
|
||||
|
||||
The global configuration file is described in
|
||||
[this readme](https://github.com/tianocore/edk2-pytool-extensions/blob/master/docs/usability/using_settings_manager.md)
|
||||
from the EDKII Tools documentation. This configuration is written as a Python
|
||||
module so that decisions can be made dynamically based on command line
|
||||
parameters and codebase state.
|
||||
|
||||
The per-package configuration file can override most settings in the global
|
||||
configuration file, but is not dynamic. This file can be used to skip or
|
||||
customize tests that may be incompatible with a specific package. Each test generally requires
per-package configuration, which comes from this file.
|
||||
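For illustration only, a minimal per-package configuration fragment might look like the
sketch below. The plugin names match the tests described later in this document, but the
option keys and values (for example `DscPath` and `AuditOnly`) are assumptions here and
should be checked against each plugin's readme under `.pytool/Plugin` and against an
existing file such as `MdeModulePkg/MdeModulePkg.ci.yaml`.

```json
## Hypothetical <package-name>.ci.yaml fragment -- option names are illustrative, not authoritative
"CompilerPlugin": {
    "DscPath": "MyPkg.dsc"        ## the DSC the build test should compile (placeholder name)
},
"SpellCheck": {
    "AuditOnly": true             ## assumed option: report spelling issues without failing the build
},
```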
|
||||
## Running CI locally
|
||||
|
||||
The EDKII Tools environment (and by extension the CI) is designed to run easily
and consistently both locally and in a cloud CI environment. To do that, a few
steps should be followed. Details of EDKII Tools can be found in the
|
||||
[docs folder here](https://github.com/tianocore/edk2-pytool-extensions/tree/master/docs)
|
||||
|
||||
### Running CI
|
||||
|
||||
Quick notes:
|
||||
|
||||
* By default, all CI plugins are opted in.
* Setting a plugin to `skip` as a command-line argument will skip running that
  plugin (see the example invocation after this list). Examples:
  * `CompilerPlugin=skip` skips the build test
  * `GuidCheck=skip` skips the GUID check
  * `SpellCheck=skip` skips the spell checker
  * etc.
|
||||
* Detailed reports and logs per package are captured in the `Build` directory.
|
||||
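As an illustration, a typical local run (after installing the pip modules from the
repository's pip requirements file) might look like the following sketch. The tool chain
tag, package list, architectures, and target shown here are example values, and the
trailing plugin-skip argument follows the convention described above; consult the EDKII
Tools documentation linked earlier for the authoritative option list.

```
stuart_setup    -c .pytool/CISettings.py TOOL_CHAIN_TAG=GCC5
stuart_update   -c .pytool/CISettings.py TOOL_CHAIN_TAG=GCC5
stuart_ci_build -c .pytool/CISettings.py TOOL_CHAIN_TAG=GCC5 -p MdeModulePkg -a IA32,X64 -t DEBUG SpellCheck=skip
```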
|
||||
## Current PyTool Test Capabilities
|
||||
|
||||
All CI tests are instances of EDKII Tools plugins. Documentation on the plugin
|
||||
system can be found [here](https://github.com/tianocore/edk2-pytool-extensions/blob/master/docs/usability/using_plugin_manager.md)
|
||||
and [here](https://github.com/tianocore/edk2-pytool-extensions/blob/master/docs/features/feature_plugin_manager.md).
|
||||
Upon invocation, each plugin will be passed the path to the current package
|
||||
under test and a dictionary containing its targeted configuration, as assembled
|
||||
from the command line, per-package configuration, and global configuration.
|
||||
|
||||
Note: CI plugins are considered distinct from build plugins and helper plugins,
|
||||
even though some CI plugins may execute steps of a build.
|
||||
|
||||
In the example, these plugins live alongside the code under test (in the
|
||||
`.pytool/Plugin` directory), but may be moved to the 'edk2-test' repo if that
|
||||
location makes more sense for the community.
|
||||
|
||||
### Module Inclusion Test - DscCompleteCheck
|
||||
|
||||
This scans all INF files from a package and confirms they are
|
||||
listed in the package-level DSC file. The test considers it an error if any INF
|
||||
does not appear in the `Components` section of the package-level DSC (indicating
|
||||
that it would not be built if the package were built). This is critical because
|
||||
much of the CI infrastructure assumes that all modules will be listed in the DSC
|
||||
and compiled.
|
||||
|
||||
This test will ignore INFs in the following cases:
|
||||
|
||||
1. When `MODULE_TYPE` = `HOST_APPLICATION`
|
||||
2. When a Library instance **only** supports the `HOST_APPLICATION` environment
|
||||
|
||||
### Host Module Inclusion Test - HostUnitTestDscCompleteCheck
|
||||
|
||||
This test scans all INF files from a package for those related to host-based
unit tests and confirms they are listed in the unit test DSC file for the package.
|
||||
The test considers it an error if any INF meeting the requirements does not appear
|
||||
in the `Components` section of the unit test DSC. This is critical because
|
||||
much of the CI infrastructure assumes that modules will be listed in the DSC
|
||||
and compiled.
|
||||
|
||||
This test will only require INFs in the following cases:
|
||||
|
||||
1. When `MODULE_TYPE` = `HOST_APPLICATION`
|
||||
2. When a Library instance explicitly supports the `HOST_APPLICATION` environment
|
||||
|
||||
### Code Compilation Test - CompilerPlugin
|
||||
|
||||
Once the Module Inclusion Test has verified that all modules would be built if
|
||||
all package-level DSCs were built, the Code Compilation Test simply runs through
|
||||
and builds every package-level DSC on every toolchain and for every architecture
|
||||
that is supported. Any module that fails to build is considered an error.
|
||||
|
||||
### Host Unit Test Compilation and Run Test - HostUnitTestCompilerPlugin
|
||||
|
||||
A test that compiles the DSC for host-based unit test apps.
On Windows, this will also enable a build plugin that runs the unit tests and verifies the results.

These tools will be invoked on any CI pass that includes the NOOPT target. In
order for these tools to do their job, the package and tests must be configured
in a particular way, as described below.
|
||||
|
||||
#### Including Host-Based Tests in the Package YAML
|
||||
|
||||
For example, looking at the `MdeModulePkg.ci.yaml` config file, there are two
|
||||
config options that control HostBased test behavior:
|
||||
|
||||
```json
|
||||
## options defined in .pytool/Plugin/HostUnitTestCompilerPlugin
|
||||
"HostUnitTestCompilerPlugin": {
|
||||
"DscPath": "Test/MdeModulePkgHostTest.dsc"
|
||||
},
|
||||
```
|
||||
|
||||
This option tells the test builder to run. The test builder needs to know which
modules in this package are host-based tests, so that DSC path is provided.
|
||||
|
||||
#### Configuring the HostBased DSC
|
||||
|
||||
The HostBased DSC for `MdeModulePkg` is located at
|
||||
`MdeModulePkg/Test/MdeModulePkgHostTest.dsc`.
|
||||
|
||||
To add automated host-based unit test building to a new package, create a
|
||||
similar DSC. The new DSC should include `NOOPT` among its `BUILD_TARGETS`
and should include the line:
|
||||
|
||||
```
|
||||
!include UnitTestFrameworkPkg/UnitTestFrameworkPkgHost.dsc.inc
|
||||
```
|
||||
|
||||
All of the modules that are included in the `Components` section of this
|
||||
DSC should be of type HOST_APPLICATION.
|
||||
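For illustration, an entry in that `Components` section might look like the following
sketch; the module path and INF name are placeholders rather than real modules in this tree.

```
[Components]
  MyPkg/Test/Library/MyLibUnitTestHost/MyLibUnitTestHost.inf
```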
|
||||
### GUID Uniqueness Test - GuidCheck
|
||||
|
||||
This test works on the collection of all packages rather than an individual
|
||||
package. It looks at all FILE_GUIDs and GUIDs declared in DEC files and ensures
|
||||
that they are unique for the codebase. This prevents, for example, accidental
|
||||
duplication of GUIDs when using an existing INF as a template for a new module.
|
||||
|
||||
### Cross-Package Dependency Test - DependencyCheck
|
||||
|
||||
This test compares the list of all packages used in INF files for a given
|
||||
package against a list of "allowed dependencies" in plugin configuration for
|
||||
that package. Any module that depends on a disallowed package will cause a test
|
||||
failure.
|
||||
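For illustration, the allowed-dependency list for a package is expressed in its
`.ci.yaml` file; a sketch is shown below. The key name `AcceptableDependencies` is an
assumption here and should be verified against `.pytool/Plugin/DependencyCheck`.

```json
## Hypothetical fragment of <package-name>.ci.yaml -- key name assumed
"DependencyCheck": {
    "AcceptableDependencies": [
        "MdePkg/MdePkg.dec",
        "MdeModulePkg/MdeModulePkg.dec"
    ]
},
```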
|
||||
### Library Declaration Test - LibraryClassCheck
|
||||
|
||||
This test scans all library header files found in the `Library` folders in
|
||||
all of the package's declared include directories and ensures that all files
|
||||
have a matching LibraryClass declaration in the DEC file for the package. Any
|
||||
missing declarations will cause a failure.
|
||||
|
||||
### Invalid Character Test - CharEncodingCheck
|
||||
|
||||
This test scans all files in a package to make sure that there are no invalid
|
||||
Unicode characters that may cause build errors in some character
|
||||
sets/localizations.
|
||||
|
||||
### Spell Checking - cspell
|
||||
|
||||
This test runs a spell checker on all files within the package. This is done
|
||||
using the Node.js cspell tool. For details, check `.pytool/Plugin/SpellCheck`.
For this plugin to run during CI, you must install Node.js and cspell and have
both available on the command line when running your CI.
|
||||
|
||||
Install:

* Install Node.js from https://nodejs.org/en/
* Install cspell:
  1. Open a command prompt with access to node and npm
  2. Run `npm install -g cspell`
|
||||
|
||||
More cspell info: https://github.com/streetsidesoftware/cspell
|
||||
|
||||
### License Checking - LicenseCheck
|
||||
|
||||
Scans all newly added files in a package to make sure code is contributed under
|
||||
BSD-2-Clause-Patent.
|
||||
|
||||
### Ecc tool - EccCheck
|
||||
|
||||
Run the Ecc tool on the package. The Ecc tool is available in the BaseTools
|
||||
package. It checks that the code complies with the EDKII coding standard.
|
||||
|
||||
### Coding Standard Compliance - UncrustifyCheck
|
||||
|
||||
Runs the Uncrustify application to check for coding standard compliance issues.
|
||||
|
||||
## PyTool Scopes
|
||||
|
||||
Scopes are how the PyTool ext_deps, path_envs, and plugins are activated: if an
invocable process has a scope active, then the ext_deps and path_envs with that
scope will be active. To allow easy integration of PyTools capabilities, there
are a few standard scopes.
|
||||
|
||||
| Scope | Invocable | Description |
|
||||
| :---- | :----- | :---- |
|
||||
| global | edk2_invocable++ - should be base_abstract_invocable | Running any invocable |
|
||||
| global-win | edk2_invocable++ | Running on Microsoft Windows |
|
||||
| global-nix | edk2_invocable++ | Running on a Linux-based OS |
|
||||
| edk2-build | | This indicates that an invocable is building EDK2-based UEFI code |
|
||||
| cibuild | set in .pytool/CISettings.py | Suggested target for edk2 continuous integration builds. Tools used for CiBuilds can use this scope. Example: asl compiler |
|
||||
| host-based-test | set in .pytool/CISettings.py | Turns on the host based tests and plugin |
|
||||
| host-test-win | set in .pytool/CISettings.py | Enables the host based test runner for Windows |
|
||||
|
||||
## Future investments
|
||||
|
||||
* PatchCheck tests as plugins
|
||||
* MacOS/xcode support
|
||||
* Clang/LLVM support
|
||||
* Visual Studio AARCH64 and ARM support
|
||||
* BaseTools C tools CI/PR and binary release process
|
||||
* BaseTools Python tools CI/PR process
|
||||
* Extensible private/closed source platform reporting
|
||||
* UEFI SCTs
|
||||
* Other automation
|
36
AppPkg/AppPkg.dec
Normal file
@ -0,0 +1,36 @@
|
||||
## @file
|
||||
# Declarations for the UDK Standard Libraries.
|
||||
#
|
||||
# Copyright (c) 2010 - 2012, Intel Corporation. All rights reserved.<BR>
|
||||
#
|
||||
# This program and the accompanying materials are licensed and made available under
|
||||
# the terms and conditions of the BSD License which accompanies this distribution.
|
||||
# The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
##
|
||||
|
||||
|
||||
[Defines]
|
||||
DEC_SPECIFICATION = 0x00010005
|
||||
PACKAGE_NAME = AppPkg
|
||||
PACKAGE_GUID = B3E3D3D5-D62B-4497-A175-264F489D127E
|
||||
PACKAGE_VERSION = 0.01
|
||||
|
||||
|
||||
[Guids]
|
||||
gAppPkgTokenSpaceGuid = { 0xe7e1efa6, 0x7607, 0x4a78, { 0xa7, 0xdd, 0x43, 0xe4, 0xbd, 0x72, 0xc0, 0x99 }}
|
||||
|
||||
|
||||
[PcdsFixedAtBuild]
|
||||
gAppPkgTokenSpaceGuid.DataSource_Port|1234|UINT16|0
|
||||
gAppPkgTokenSpaceGuid.Tftp_AckLogBase|4|UINT32|1
|
||||
gAppPkgTokenSpaceGuid.Tftp_AckMultiplier|4|UINT32|2
|
||||
gAppPkgTokenSpaceGuid.Tftp_Bandwidth|0|BOOLEAN|3
|
||||
gAppPkgTokenSpaceGuid.Tftp_HighSpeed|0|BOOLEAN|4
|
||||
gAppPkgTokenSpaceGuid.Tftp_MaxRetry|10|UINT32|5
|
||||
gAppPkgTokenSpaceGuid.Tftp_MaxTimeoutInSec|3|UINT32|6
|
||||
gAppPkgTokenSpaceGuid.WebServer_HttpPort|80|UINT16|7
|
||||
|
150
AppPkg/AppPkg.dsc
Normal file
@ -0,0 +1,150 @@
|
||||
## @file
|
||||
# Intel(r) UEFI Application Development Kit for EDK II.
|
||||
# This package contains applications which depend upon Standard Libraries
|
||||
# from the StdLib package.
|
||||
#
|
||||
# See the comments in the [LibraryClasses.IA32] and [BuildOptions] sections
|
||||
# for important information about configuring this package for your
|
||||
# environment.
|
||||
#
|
||||
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
|
||||
# This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
##
|
||||
|
||||
[Defines]
|
||||
PLATFORM_NAME = AppPkg
|
||||
PLATFORM_GUID = 0458dade-8b6e-4e45-b773-1b27cbda3e06
|
||||
PLATFORM_VERSION = 0.01
|
||||
DSC_SPECIFICATION = 0x00010006
|
||||
OUTPUT_DIRECTORY = Build/AppPkg
|
||||
SUPPORTED_ARCHITECTURES = IA32|X64|ARM|AARCH64
|
||||
BUILD_TARGETS = DEBUG|RELEASE|NOOPT
|
||||
SKUID_IDENTIFIER = DEFAULT
|
||||
|
||||
#
|
||||
# Debug output control
|
||||
#
|
||||
DEFINE DEBUG_ENABLE_OUTPUT = FALSE # Set to TRUE to enable debug output
|
||||
DEFINE DEBUG_PRINT_ERROR_LEVEL = 0x80000040 # Flags to control amount of debug output
|
||||
DEFINE DEBUG_PROPERTY_MASK = 0
|
||||
|
||||
[PcdsFeatureFlag]
|
||||
|
||||
[PcdsFixedAtBuild]
|
||||
gEfiMdePkgTokenSpaceGuid.PcdDebugPropertyMask|$(DEBUG_PROPERTY_MASK)
|
||||
gEfiMdePkgTokenSpaceGuid.PcdDebugPrintErrorLevel|$(DEBUG_PRINT_ERROR_LEVEL)
|
||||
|
||||
[LibraryClasses]
|
||||
#
|
||||
# Entry Point Libraries
|
||||
#
|
||||
UefiApplicationEntryPoint|MdePkg/Library/UefiApplicationEntryPoint/UefiApplicationEntryPoint.inf
|
||||
ShellCEntryLib|ShellPkg/Library/UefiShellCEntryLib/UefiShellCEntryLib.inf
|
||||
UefiDriverEntryPoint|MdePkg/Library/UefiDriverEntryPoint/UefiDriverEntryPoint.inf
|
||||
#
|
||||
# Common Libraries
|
||||
#
|
||||
BaseLib|MdePkg/Library/BaseLib/BaseLib.inf
|
||||
BaseMemoryLib|MdePkg/Library/BaseMemoryLib/BaseMemoryLib.inf
|
||||
UefiLib|MdePkg/Library/UefiLib/UefiLib.inf
|
||||
PrintLib|MdePkg/Library/BasePrintLib/BasePrintLib.inf
|
||||
PcdLib|MdePkg/Library/BasePcdLibNull/BasePcdLibNull.inf
|
||||
MemoryAllocationLib|MdePkg/Library/UefiMemoryAllocationLib/UefiMemoryAllocationLib.inf
|
||||
UefiBootServicesTableLib|MdePkg/Library/UefiBootServicesTableLib/UefiBootServicesTableLib.inf
|
||||
UefiRuntimeServicesTableLib|MdePkg/Library/UefiRuntimeServicesTableLib/UefiRuntimeServicesTableLib.inf
|
||||
!if $(DEBUG_ENABLE_OUTPUT)
|
||||
DebugLib|MdePkg/Library/UefiDebugLibConOut/UefiDebugLibConOut.inf
|
||||
DebugPrintErrorLevelLib|MdePkg/Library/BaseDebugPrintErrorLevelLib/BaseDebugPrintErrorLevelLib.inf
|
||||
!else ## DEBUG_ENABLE_OUTPUT
|
||||
DebugLib|MdePkg/Library/BaseDebugLibNull/BaseDebugLibNull.inf
|
||||
!endif ## DEBUG_ENABLE_OUTPUT
|
||||
|
||||
DevicePathLib|MdePkg/Library/UefiDevicePathLib/UefiDevicePathLib.inf
|
||||
PeCoffGetEntryPointLib|MdePkg/Library/BasePeCoffGetEntryPointLib/BasePeCoffGetEntryPointLib.inf
|
||||
IoLib|MdePkg/Library/BaseIoLibIntrinsic/BaseIoLibIntrinsic.inf
|
||||
PciLib|MdePkg/Library/BasePciLibCf8/BasePciLibCf8.inf
|
||||
PciCf8Lib|MdePkg/Library/BasePciCf8Lib/BasePciCf8Lib.inf
|
||||
SynchronizationLib|MdePkg/Library/BaseSynchronizationLib/BaseSynchronizationLib.inf
|
||||
UefiRuntimeLib|MdePkg/Library/UefiRuntimeLib/UefiRuntimeLib.inf
|
||||
HiiLib|MdeModulePkg/Library/UefiHiiLib/UefiHiiLib.inf
|
||||
UefiHiiServicesLib|MdeModulePkg/Library/UefiHiiServicesLib/UefiHiiServicesLib.inf
|
||||
PerformanceLib|MdeModulePkg/Library/DxePerformanceLib/DxePerformanceLib.inf
|
||||
HobLib|MdePkg/Library/DxeHobLib/DxeHobLib.inf
|
||||
FileHandleLib|MdePkg/Library/UefiFileHandleLib/UefiFileHandleLib.inf
|
||||
SortLib|MdeModulePkg/Library/UefiSortLib/UefiSortLib.inf
|
||||
|
||||
ShellLib|ShellPkg/Library/UefiShellLib/UefiShellLib.inf
|
||||
|
||||
CacheMaintenanceLib|MdePkg/Library/BaseCacheMaintenanceLib/BaseCacheMaintenanceLib.inf
|
||||
|
||||
###################################################################################################
|
||||
#
|
||||
# Components Section - list of the modules and components that will be processed by compilation
|
||||
# tools and the EDK II tools to generate PE32/PE32+/Coff image files.
|
||||
#
|
||||
# Note: The EDK II DSC file is not used to specify how compiled binary images get placed
|
||||
# into firmware volume images. This section is just a list of modules to compile from
|
||||
# source into UEFI-compliant binaries.
|
||||
# It is the FDF file that contains information on combining binary files into firmware
|
||||
# volume images, whose concept is beyond UEFI and is described in PI specification.
|
||||
# Binary modules do not need to be listed in this section, as they should be
|
||||
# specified in the FDF file. For example: Shell binary (Shell_Full.efi), FAT binary (Fat.efi),
|
||||
# Logo (Logo.bmp), and etc.
|
||||
# There may also be modules listed in this section that are not required in the FDF file,
|
||||
# When a module listed here is excluded from FDF file, then UEFI-compliant binary will be
|
||||
# generated for it, but the binary will not be put into any firmware volume.
|
||||
#
|
||||
###################################################################################################
|
||||
|
||||
[Components]
|
||||
|
||||
#### Sample Applications.
|
||||
AppPkg/Applications/Hello/Hello.inf # No LibC includes or functions.
|
||||
AppPkg/Applications/Main/Main.inf # Simple invocation. No other LibC functions.
|
||||
AppPkg/Applications/Enquire/Enquire.inf #
|
||||
AppPkg/Applications/ArithChk/ArithChk.inf #
|
||||
|
||||
#### A simple fuzzer for OrderedCollectionLib, in particular for
|
||||
#### BaseOrderedCollectionRedBlackTreeLib.
|
||||
AppPkg/Applications/OrderedCollectionTest/OrderedCollectionTest.inf {
|
||||
<LibraryClasses>
|
||||
OrderedCollectionLib|MdePkg/Library/BaseOrderedCollectionRedBlackTreeLib/BaseOrderedCollectionRedBlackTreeLib.inf
|
||||
DebugLib|MdePkg/Library/UefiDebugLibConOut/UefiDebugLibConOut.inf
|
||||
DebugPrintErrorLevelLib|MdePkg/Library/BaseDebugPrintErrorLevelLib/BaseDebugPrintErrorLevelLib.inf
|
||||
<PcdsFeatureFlag>
|
||||
gEfiMdePkgTokenSpaceGuid.PcdValidateOrderedCollection|TRUE
|
||||
<PcdsFixedAtBuild>
|
||||
gEfiMdePkgTokenSpaceGuid.PcdDebugPropertyMask|0x2F
|
||||
gEfiMdePkgTokenSpaceGuid.PcdDebugPrintErrorLevel|0x80400040
|
||||
}
|
||||
|
||||
#### Un-comment the following line to build Python 2.7.2.
|
||||
# AppPkg/Applications/Python/PythonCore.inf
|
||||
|
||||
#### Un-comment the following line to build Python 2.7.10.
|
||||
# AppPkg/Applications/Python/Python-2.7.10/Python2710.inf
|
||||
|
||||
#### Un-comment the following line to build Lua.
|
||||
# AppPkg/Applications/Lua/Lua.inf
|
||||
|
||||
|
||||
##############################################################################
|
||||
#
|
||||
# Specify whether we are running in an emulation environment, or not.
|
||||
# Define EMULATE if we are, else keep the DEFINE commented out.
|
||||
#
|
||||
# DEFINE EMULATE = 1
|
||||
|
||||
##############################################################################
|
||||
#
|
||||
# Include Boilerplate text required for building with the Standard Libraries.
|
||||
#
|
||||
##############################################################################
|
||||
!include StdLib/StdLib.inc
|
||||
!include AppPkg/Applications/Sockets/Sockets.inc
|
41
AppPkg/Applications/ArithChk/ArithChk.inf
Normal file
@ -0,0 +1,41 @@
|
||||
## @file
|
||||
# Program to generate an arith.h for use with the gdtoa binary to decimal and decimal to binary
|
||||
# conversion library.
|
||||
#
|
||||
# Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>
|
||||
# This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
##
|
||||
|
||||
[Defines]
|
||||
INF_VERSION = 0x00010006
|
||||
BASE_NAME = ArithChk
|
||||
FILE_GUID = B6C0DCB6-434E-4BEC-BDAC-8EE7ED8A4EC8
|
||||
MODULE_TYPE = UEFI_APPLICATION
|
||||
VERSION_STRING = 0.1
|
||||
ENTRY_POINT = ShellCEntryLib
|
||||
|
||||
#
|
||||
# VALID_ARCHITECTURES = IA32 X64
|
||||
#
|
||||
|
||||
[Sources]
|
||||
arithchk.c
|
||||
|
||||
[Packages]
|
||||
StdLib/StdLib.dec
|
||||
MdePkg/MdePkg.dec
|
||||
|
||||
[LibraryClasses]
|
||||
UefiLib
|
||||
LibC
|
||||
LibStdio
|
||||
LibGdtoa
|
||||
|
||||
[BuildOptions]
|
||||
GCC:*_*_*_CC_FLAGS = -Wno-format-security
|
197
AppPkg/Applications/ArithChk/arithchk.c
Normal file
@ -0,0 +1,197 @@
|
||||
/** @file
|
||||
Program to generate an arith.h for use with the gdtoa binary to decimal and decimal to binary
|
||||
conversion library.
|
||||
|
||||
Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>
|
||||
This program and the accompanying materials
|
||||
are licensed and made available under the terms and conditions of the BSD License
|
||||
which accompanies this distribution. The full text of the license may be found at
|
||||
http://opensource.org/licenses/bsd-license.
|
||||
|
||||
THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
|
||||
Copyright (C) 1997, 1998 Lucent Technologies
|
||||
All Rights Reserved
|
||||
|
||||
Permission to use, copy, modify, and distribute this software and
|
||||
its documentation for any purpose and without fee is hereby
|
||||
granted, provided that the above copyright notice appear in all
|
||||
copies and that both that the copyright notice and this
|
||||
permission notice and warranty disclaimer appear in supporting
|
||||
documentation, and that the name of Lucent or any of its entities
|
||||
not be used in advertising or publicity pertaining to
|
||||
distribution of the software without specific, written prior
|
||||
permission.
|
||||
|
||||
LUCENT DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
|
||||
INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS.
|
||||
IN NO EVENT SHALL LUCENT OR ANY OF ITS ENTITIES BE LIABLE FOR ANY
|
||||
SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
|
||||
IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
|
||||
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
|
||||
THIS SOFTWARE.
|
||||
|
||||
NetBSD: arithchk.c,v 1.2 2006/01/25 15:27:42 kleink Exp
|
||||
****************************************************************/
|
||||
#include <sys/EfiCdefs.h>
|
||||
|
||||
/* Try to deduce arith.h from arithmetic properties. */
|
||||
|
||||
#include <stdio.h>
|
||||
|
||||
static int dalign;
|
||||
|
||||
typedef struct Akind {
|
||||
char *name;
|
||||
int kind;
|
||||
} Akind;
|
||||
|
||||
static Akind IEEE_LITTLE_ENDIAN = { "IEEE_LITTLE_ENDIAN", 1 };
|
||||
static Akind IEEE_BIG_ENDIAN = { "IEEE_BIG_ENDIAN", 2 };
|
||||
static Akind IBM = { "IBM", 3 };
|
||||
static Akind VAX = { "VAX", 4 };
|
||||
static Akind CRAY = { "CRAY", 5};
|
||||
|
||||
static Akind *
|
||||
Lcheck()
|
||||
{
|
||||
union {
|
||||
double d;
|
||||
long L[2];
|
||||
} u;
|
||||
struct {
|
||||
double d;
|
||||
long L;
|
||||
} x[2];
|
||||
|
||||
if (sizeof(x) > 2*(sizeof(double) + sizeof(long)))
|
||||
dalign = 1;
|
||||
u.L[0] = u.L[1] = 0;
|
||||
u.d = 1e13;
|
||||
if (u.L[0] == 1117925532 && u.L[1] == -448790528)
|
||||
return &IEEE_BIG_ENDIAN;
|
||||
if (u.L[1] == 1117925532 && u.L[0] == -448790528)
|
||||
return &IEEE_LITTLE_ENDIAN;
|
||||
if (u.L[0] == -2065213935 && u.L[1] == 10752)
|
||||
return &VAX;
|
||||
if (u.L[0] == 1267827943 && u.L[1] == 704643072)
|
||||
return &IBM;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static Akind *
|
||||
icheck()
|
||||
{
|
||||
union {
|
||||
double d;
|
||||
int L[2];
|
||||
} u;
|
||||
struct {
|
||||
double d;
|
||||
int L;
|
||||
} x[2];
|
||||
|
||||
if (sizeof(x) > 2*(sizeof(double) + sizeof(int)))
|
||||
dalign = 1;
|
||||
u.L[0] = u.L[1] = 0;
|
||||
u.d = 1e13;
|
||||
if (u.L[0] == 1117925532 && u.L[1] == -448790528)
|
||||
return &IEEE_BIG_ENDIAN;
|
||||
if (u.L[1] == 1117925532 && u.L[0] == -448790528)
|
||||
return &IEEE_LITTLE_ENDIAN;
|
||||
if (u.L[0] == -2065213935 && u.L[1] == 10752)
|
||||
return &VAX;
|
||||
if (u.L[0] == 1267827943 && u.L[1] == 704643072)
|
||||
return &IBM;
|
||||
return 0;
|
||||
}
|
||||
|
||||
char *emptyfmt = ""; /* avoid possible warning message with printf("") */
|
||||
|
||||
static Akind *
|
||||
ccheck()
|
||||
{
|
||||
union {
|
||||
double d;
|
||||
long L;
|
||||
} u;
|
||||
long Cray1;
|
||||
|
||||
/* Cray1 = 4617762693716115456 -- without overflow on non-Crays */
|
||||
Cray1 = printf(emptyfmt) < 0 ? 0 : 4617762;
|
||||
if (printf(emptyfmt, Cray1) >= 0)
|
||||
Cray1 = 1000000*Cray1 + 693716;
|
||||
if (printf(emptyfmt, Cray1) >= 0)
|
||||
Cray1 = 1000000*Cray1 + 115456;
|
||||
u.d = 1e13;
|
||||
if (u.L == Cray1)
|
||||
return &CRAY;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
fzcheck()
|
||||
{
|
||||
double a, b;
|
||||
int i;
|
||||
|
||||
a = 1.;
|
||||
b = .1;
|
||||
for(i = 155;; b *= b, i >>= 1) {
|
||||
if (i & 1) {
|
||||
a *= b;
|
||||
if (i == 1)
|
||||
break;
|
||||
}
|
||||
}
|
||||
b = a * a;
|
||||
return b == 0.;
|
||||
}
|
||||
|
||||
int
|
||||
main()
|
||||
{
|
||||
Akind *a = 0;
|
||||
int Ldef = 0;
|
||||
FILE *f;
|
||||
|
||||
#ifdef WRITE_ARITH_H /* for Symantec's buggy "make" */
|
||||
f = fopen("arith.h", "w");
|
||||
if (!f) {
|
||||
printf("Cannot open arith.h\n");
|
||||
return 1;
|
||||
}
|
||||
#else
|
||||
f = stdout;
|
||||
#endif
|
||||
|
||||
if (sizeof(double) == 2*sizeof(long))
|
||||
a = Lcheck();
|
||||
else if (sizeof(double) == 2*sizeof(int)) {
|
||||
Ldef = 1;
|
||||
a = icheck();
|
||||
}
|
||||
else if (sizeof(double) == sizeof(long))
|
||||
a = ccheck();
|
||||
if (a) {
|
||||
fprintf(f, "#define %s\n#define Arith_Kind_ASL %d\n",
|
||||
a->name, a->kind);
|
||||
if (Ldef)
|
||||
fprintf(f, "#define Long int\n#define Intcast (int)(long)\n");
|
||||
if (dalign)
|
||||
fprintf(f, "#define Double_Align\n");
|
||||
if (sizeof(char*) == 8)
|
||||
fprintf(f, "#define X64_bit_pointers\n");
|
||||
#ifndef NO_LONG_LONG
|
||||
if (sizeof(long long) < 8)
|
||||
#endif
|
||||
fprintf(f, "#define NO_LONG_LONG\n");
|
||||
if (a->kind <= 2 && fzcheck())
|
||||
fprintf(f, "#define Sudden_Underflow\n");
|
||||
return 0;
|
||||
}
|
||||
fprintf(f, "/* Unknown arithmetic */\n");
|
||||
return 1;
|
||||
}
|
3377
AppPkg/Applications/Enquire/Enquire.c
Normal file
File diff suppressed because it is too large
53
AppPkg/Applications/Enquire/Enquire.inf
Normal file
@ -0,0 +1,53 @@
|
||||
## @file
|
||||
# Enquire application for system integer and floating point characteristics
|
||||
# enquiry.
|
||||
#
|
||||
# Due to the level of hardware introspection, this application MUST be built
|
||||
# with optimizations disabled.
|
||||
#
|
||||
# COPYRIGHT(c) 1993-9 Steven Pemberton, CWI. All rights reserved.
|
||||
# NOTE: Improvements gratefully received. Please mention the version.
|
||||
# "http://www.cwi.nl/~steven/enquire.html"
|
||||
#
|
||||
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
|
||||
# This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
#
|
||||
##
|
||||
|
||||
[Defines]
|
||||
INF_VERSION = 0x00010006
|
||||
BASE_NAME = Enquire
|
||||
FILE_GUID = 42f58b27-5dc3-4fa7-844d-5a7dbff06432
|
||||
MODULE_TYPE = UEFI_APPLICATION
|
||||
VERSION_STRING = 0.1
|
||||
ENTRY_POINT = ShellCEntryLib
|
||||
|
||||
#
|
||||
# VALID_ARCHITECTURES = IA32 X64
|
||||
#
|
||||
|
||||
[Sources]
|
||||
Enquire.c
|
||||
|
||||
[Packages]
|
||||
StdLib/StdLib.dec
|
||||
MdePkg/MdePkg.dec
|
||||
|
||||
[LibraryClasses]
|
||||
UefiLib
|
||||
LibC
|
||||
LibString
|
||||
LibStdio
|
||||
LibGdtoa
|
||||
LibWchar
|
||||
|
||||
[BuildOptions]
|
||||
INTEL:*_*_*_CC_FLAGS = /Qdiag-disable:181,186
|
||||
MSFT:*_*_*_CC_FLAGS = /Od
|
||||
GCC:*_*_*_CC_FLAGS = -O0 -Wno-unused-variable
|
37
AppPkg/Applications/Hello/Hello.c
Normal file
@ -0,0 +1,37 @@
|
||||
/** @file
|
||||
A simple, basic, EDK II native, "hello" application to verify that
|
||||
we can build applications without LibC.
|
||||
|
||||
Copyright (c) 2010 - 2011, Intel Corporation. All rights reserved.<BR>
|
||||
This program and the accompanying materials
|
||||
are licensed and made available under the terms and conditions of the BSD License
|
||||
which accompanies this distribution. The full text of the license may be found at
|
||||
http://opensource.org/licenses/bsd-license.
|
||||
|
||||
THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
**/
|
||||
#include <Uefi.h>
|
||||
#include <Library/UefiLib.h>
|
||||
#include <Library/ShellCEntryLib.h>
|
||||
|
||||
/***
|
||||
Print a welcoming message.
|
||||
|
||||
Establishes the main structure of the application.
|
||||
|
||||
@retval 0 The application exited normally.
|
||||
@retval Other An error occurred.
|
||||
***/
|
||||
INTN
|
||||
EFIAPI
|
||||
ShellAppMain (
|
||||
IN UINTN Argc,
|
||||
IN CHAR16 **Argv
|
||||
)
|
||||
{
|
||||
Print(L"Hello there fellow Programmer.\n");
|
||||
Print(L"Welcome to the world of EDK II.\n");
|
||||
|
||||
return(0);
|
||||
}
|
36
AppPkg/Applications/Hello/Hello.inf
Normal file
@ -0,0 +1,36 @@
|
||||
## @file
|
||||
# A simple, basic, EDK II native, "hello" application.
|
||||
#
|
||||
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
|
||||
# This program and the accompanying materials
|
||||
# are licensed and made available under the terms and conditions of the BSD License
|
||||
# which accompanies this distribution. The full text of the license may be found at
|
||||
# http://opensource.org/licenses/bsd-license.
|
||||
#
|
||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
#
|
||||
##
|
||||
|
||||
[Defines]
|
||||
INF_VERSION = 0x00010006
|
||||
BASE_NAME = Hello
|
||||
FILE_GUID = a912f198-7f0e-4803-b908-b757b806ec83
|
||||
MODULE_TYPE = UEFI_APPLICATION
|
||||
VERSION_STRING = 0.1
|
||||
ENTRY_POINT = ShellCEntryLib
|
||||
|
||||
#
|
||||
# VALID_ARCHITECTURES = IA32 X64
|
||||
#
|
||||
|
||||
[Sources]
|
||||
Hello.c
|
||||
|
||||
[Packages]
|
||||
MdePkg/MdePkg.dec
|
||||
ShellPkg/ShellPkg.dec
|
||||
|
||||
[LibraryClasses]
|
||||
UefiLib
|
||||
ShellCEntryLib
|
69
AppPkg/Applications/Lua/Copyright.txt
Normal file
@ -0,0 +1,69 @@
|
||||
Lua is designed, implemented, and maintained by a team at PUC-Rio, the
|
||||
Pontifical Catholic University of Rio de Janeiro in Brazil. Lua was born and
|
||||
raised in Tecgraf, formerly the Computer Graphics Technology Group of PUC-Rio.
|
||||
Lua is now housed at LabLua, a laboratory of the Department of Computer Science
|
||||
of PUC-Rio.
|
||||
|
||||
The initial work to provide the UEFI implementation of Lua was done at
|
||||
Emulex Corporation.
|
||||
|
||||
Some final packaging and build file "beautification" was done at
|
||||
Intel Corporation.
|
||||
|
||||
Copyright notices, applying to this package and its contents, follow below.
|
||||
These notices will be updated as necessitated by ongoing maintenance and
|
||||
enhancement.
|
||||
|
||||
--------
|
||||
Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>
|
||||
This program and the accompanying materials are licensed and made available under
|
||||
the terms and conditions of the BSD License that accompanies this distribution.
|
||||
The full text of the license may be found at
|
||||
http://opensource.org/licenses/bsd-license.
|
||||
|
||||
THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||
|
||||
--------
|
||||
Copyright (c) 2014, Emulex Corporation 3333 Susan Street, Costa Mesa, CA 92626
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
--------
|
||||
Copyright (C) 1994-2013 Lua.org, PUC-Rio.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
------------------------------------------------------------------------------
|
54
AppPkg/Applications/Lua/Lua.inf
Normal file
@ -0,0 +1,54 @@
|
||||
## @file
|
||||
# Lua.inf
|
||||
#
|
||||
# UEFI port of the Lua scripting language, Lua 5.2.3, released on 11 Nov 2013
|
||||
#
|
||||
# Copyright (c) 2014, Emulex Corporation 3333 Susan Street, Costa Mesa, CA 92626
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
#
|
||||
##
|
||||
|
||||
[Defines]
|
||||
INF_VERSION = 0x00010006
|
||||
BASE_NAME = Lua
|
||||
FILE_GUID = d6a9a1b9-4bfd-d61e-f037-3fa4ca06e046
|
||||
MODULE_TYPE = UEFI_APPLICATION
|
||||
VERSION_STRING = 0.1
|
||||
ENTRY_POINT = ShellCEntryLib
|
||||
|
||||
[Sources]
|
||||
./src/lua.c
|
||||
|
||||
[Packages]
|
||||
StdLib/StdLib.dec
|
||||
MdePkg/MdePkg.dec
|
||||
ShellPkg/ShellPkg.dec
|
||||
|
||||
[LibraryClasses]
|
||||
LibC
|
||||
LibStdio
|
||||
LibStdLib
|
||||
LibSignal
|
||||
LibString
|
||||
LibMath
|
||||
LibTime
|
||||
DevShell
|
||||
UefiLib
|
||||
ShellCEntryLib
|
||||
LuaLib
|
72
AppPkg/Applications/Lua/LuaLib.inf
Normal file
@ -0,0 +1,72 @@
|
||||
## @file
|
||||
# LuaLib.inf
|
||||
#
|
||||
# Copyright (c) 2014, Emulex Corporation 3333 Susan Street, Costa Mesa, CA 92626
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
#
|
||||
##
|
||||
|
||||
[Defines]
|
||||
INF_VERSION = 0x00010006
|
||||
BASE_NAME = LuaLib
|
||||
FILE_GUID = 4e38555c-4f92-20c5-a563-fe907585662d
|
||||
MODULE_TYPE = UEFI_APPLICATION
|
||||
VERSION_STRING = 0.1
|
||||
LIBRARY_CLASS = LuaLib
|
||||
|
||||
[Sources]
|
||||
src/lapi.c
|
||||
src/lauxlib.c
|
||||
src/lbaselib.c
|
||||
src/lbitlib.c
|
||||
src/lcode.c
|
||||
src/lcorolib.c
|
||||
src/lctype.c
|
||||
src/ldblib.c
|
||||
src/ldebug.c
|
||||
src/ldo.c
|
||||
src/ldump.c
|
||||
src/lfunc.c
|
||||
src/lgc.c
|
||||
src/linit.c
|
||||
src/liolib.c
|
||||
src/llex.c
|
||||
src/lmathlib.c
|
||||
src/lmem.c
|
||||
src/loadlib.c
|
||||
src/lobject.c
|
||||
src/lopcodes.c
|
||||
src/loslib.c
|
||||
src/lparser.c
|
||||
src/lstate.c
|
||||
src/lstring.c
|
||||
src/lstrlib.c
|
||||
src/ltable.c
|
||||
src/ltablib.c
|
||||
src/ltm.c
|
||||
src/lundump.c
|
||||
src/lvm.c
|
||||
src/lzio.c
|
||||
|
||||
[Packages]
|
||||
StdLib/StdLib.dec
|
||||
MdePkg/MdePkg.dec
|
||||
|
||||
[BuildOptions]
|
||||
MSFT:*_*_*_CC_FLAGS = /Oi- /wd4702
|
35
AppPkg/Applications/Lua/ReadMe.txt
Normal file
@ -0,0 +1,35 @@
|
||||
This is Lua 5.2.3, released on 11 Nov 2013.
|
||||
|
||||
For installation instructions, license details, and
|
||||
further information about Lua, see doc/readme.html.
|
||||
=================================================
|
||||
|
||||
Embedding Lua
|
||||
-------------
|
||||
The Lua library instance, LuaLib, is defined by StdLib.inc. Since, currently, all applications which
|
||||
embed Lua are also StdLib applications, StdLib.inc will be included by your package's .DSC file.
|
||||
|
||||
The header files required to use LuaLib are in the standard include path at StdLib\Include\Lua.
|
||||
They may be referenced as:
|
||||
#include <Lua/lua.h>
|
||||
#include <Lua/lualib.h>
|
||||
#include <Lua/lauxlib.h>
|
||||
#include <Lua/luaconf.h>
|
||||
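As a sketch (not part of the Lua distribution), a minimal host program that embeds Lua
through these headers could look like the following; it assumes the StdLib C environment
described above and uses only the standard Lua 5.2 C API:

    #include <stdio.h>
    #include <Lua/lua.h>
    #include <Lua/lualib.h>
    #include <Lua/lauxlib.h>

    int
    main(void)
    {
      lua_State *L;

      L = luaL_newstate();                  /* create a new Lua state */
      if (L == NULL) {
        fprintf(stderr, "Unable to create a Lua state.\n");
        return 1;
      }
      luaL_openlibs(L);                     /* open the standard Lua libraries */
      if (luaL_dostring(L, "print('Hello from embedded Lua')") != LUA_OK) {
        fprintf(stderr, "Lua error: %s\n", lua_tostring(L, -1));
      }
      lua_close(L);                         /* release the state and its resources */
      return 0;
    }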
|
||||
Lua/luaconf.h is the Lua configuration file. If you wish to build Lua with custom characteristics,
|
||||
this is the file to modify. Modify the file in StdLib\Include\Lua since the file in the Lua
|
||||
source tree is just a stub which references the file in StdLib.
|
||||
|
||||
|
||||
Installation on UEFI
|
||||
--------------------
|
||||
Install the Lua.efi file into \Efi\Tools. This is the standalone Lua interpreter.
|
||||
Create a directory, \Efi\StdLib\lib\Lua. This is the default location for Lua scripts.
|
||||
|
||||
If desired, copy the files from AppPkg\Applications\Lua\scripts, in the source tree, into
|
||||
\Efi\StdLib\lib\Lua.
|
||||
|
||||
Bugs and Other Issues
|
||||
---------------------
|
||||
EOF characters, ^D or ^Z, are not properly recognized by the console and can't be used to
|
||||
terminate an application. Use os.exit() to exit Lua.
|
533
AppPkg/Applications/Lua/doc/contents.html
Normal file
@ -0,0 +1,533 @@
|
||||
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
|
||||
<HTML>
|
||||
<HEAD>
|
||||
<TITLE>Lua 5.2 Reference Manual - contents</TITLE>
|
||||
<LINK REL="stylesheet" TYPE="text/css" HREF="lua.css">
|
||||
<META HTTP-EQUIV="content-type" CONTENT="text/html; charset=iso-8859-1">
|
||||
<STYLE TYPE="text/css">
|
||||
ul {
|
||||
list-style-type: none ;
|
||||
list-style-position: outside ;
|
||||
}
|
||||
</STYLE>
|
||||
</HEAD>
|
||||
|
||||
<BODY>
|
||||
|
||||
<HR>
|
||||
<H1>
|
||||
<A HREF="http://www.lua.org/"><IMG SRC="logo.gif" ALT="" BORDER=0></A>
|
||||
Lua 5.2 Reference Manual
|
||||
</H1>
|
||||
|
||||
<P>
|
||||
The reference manual is the official definition of the Lua language.
|
||||
For a complete introduction to Lua programming, see the book
|
||||
<A HREF="http://www.lua.org/pil/">Programming in Lua</A>.
|
||||
|
||||
<P>
|
||||
<A HREF="manual.html">start</A>
|
||||
·
|
||||
<A HREF="#contents">contents</A>
|
||||
·
|
||||
<A HREF="#index">index</A>
|
||||
<HR>
|
||||
<SMALL>
|
||||
Copyright © 2011–2013 Lua.org, PUC-Rio.
|
||||
Freely available under the terms of the
|
||||
<A HREF="http://www.lua.org/license.html">Lua license</A>.
|
||||
</SMALL>
|
||||
|
||||
<H2><A NAME="contents">Contents</A></H2>
|
||||
<UL style="padding: 0">
|
||||
<LI><A HREF="manual.html">1 – Introduction</A>
|
||||
<P>
|
||||
<LI><A HREF="manual.html#2">2 – Basic Concepts</A>
|
||||
<UL>
|
||||
<LI><A HREF="manual.html#2.1">2.1 – Values and Types</A>
|
||||
<LI><A HREF="manual.html#2.2">2.2 – Environments and the Global Environment</A>
|
||||
<LI><A HREF="manual.html#2.3">2.3 – Error Handling</A>
|
||||
<LI><A HREF="manual.html#2.4">2.4 – Metatables and Metamethods</A>
|
||||
<LI><A HREF="manual.html#2.5">2.5 – Garbage Collection</A>
|
||||
<UL>
|
||||
<LI><A HREF="manual.html#2.5.1">2.5.1 – Garbage-Collection Metamethods</A>
|
||||
<LI><A HREF="manual.html#2.5.2">2.5.2 – Weak Tables</A>
|
||||
</UL>
|
||||
<LI><A HREF="manual.html#2.6">2.6 – Coroutines</A>
|
||||
</UL>
|
||||
<P>
|
||||
<LI><A HREF="manual.html#3">3 – The Language</A>
|
||||
<UL>
|
||||
<LI><A HREF="manual.html#3.1">3.1 – Lexical Conventions</A>
|
||||
<LI><A HREF="manual.html#3.2">3.2 – Variables</A>
|
||||
<LI><A HREF="manual.html#3.3">3.3 – Statements</A>
|
||||
<UL>
|
||||
<LI><A HREF="manual.html#3.3.1">3.3.1 – Blocks</A>
|
||||
<LI><A HREF="manual.html#3.3.2">3.3.2 – Chunks</A>
|
||||
<LI><A HREF="manual.html#3.3.3">3.3.3 – Assignment</A>
|
||||
<LI><A HREF="manual.html#3.3.4">3.3.4 – Control Structures</A>
|
||||
<LI><A HREF="manual.html#3.3.5">3.3.5 – For Statement</A>
|
||||
<LI><A HREF="manual.html#3.3.6">3.3.6 – Function Calls as Statements</A>
|
||||
<LI><A HREF="manual.html#3.3.7">3.3.7 – Local Declarations</A>
|
||||
</UL>
|
||||
<LI><A HREF="manual.html#3.4">3.4 – Expressions</A>
|
||||
<UL>
|
||||
<LI><A HREF="manual.html#3.4.1">3.4.1 – Arithmetic Operators</A>
|
||||
<LI><A HREF="manual.html#3.4.2">3.4.2 – Coercion</A>
|
||||
<LI><A HREF="manual.html#3.4.3">3.4.3 – Relational Operators</A>
|
||||
<LI><A HREF="manual.html#3.4.4">3.4.4 – Logical Operators</A>
|
||||
<LI><A HREF="manual.html#3.4.5">3.4.5 – Concatenation</A>
|
||||
<LI><A HREF="manual.html#3.4.6">3.4.6 – The Length Operator</A>
|
||||
<LI><A HREF="manual.html#3.4.7">3.4.7 – Precedence</A>
|
||||
<LI><A HREF="manual.html#3.4.8">3.4.8 – Table Constructors</A>
|
||||
<LI><A HREF="manual.html#3.4.9">3.4.9 – Function Calls</A>
|
||||
<LI><A HREF="manual.html#3.4.10">3.4.10 – Function Definitions</A>
|
||||
</UL>
|
||||
<LI><A HREF="manual.html#3.5">3.5 – Visibility Rules</A>
|
||||
</UL>
|
||||
<P>
|
||||
<LI><A HREF="manual.html#4">4 – The Application Program Interface</A>
|
||||
<UL>
|
||||
<LI><A HREF="manual.html#4.1">4.1 – The Stack</A>
|
||||
<LI><A HREF="manual.html#4.2">4.2 – Stack Size</A>
|
||||
<LI><A HREF="manual.html#4.3">4.3 – Valid and Acceptable Indices</A>
|
||||
<LI><A HREF="manual.html#4.4">4.4 – C Closures</A>
|
||||
<LI><A HREF="manual.html#4.5">4.5 – Registry</A>
|
||||
<LI><A HREF="manual.html#4.6">4.6 – Error Handling in C</A>
|
||||
<LI><A HREF="manual.html#4.7">4.7 – Handling Yields in C</A>
|
||||
<LI><A HREF="manual.html#4.8">4.8 – Functions and Types</A>
|
||||
<LI><A HREF="manual.html#4.9">4.9 – The Debug Interface</A>
|
||||
</UL>
|
||||
<P>
|
||||
<LI><A HREF="manual.html#5">5 – The Auxiliary Library</A>
|
||||
<UL>
|
||||
<LI><A HREF="manual.html#5.1">5.1 – Functions and Types</A>
|
||||
</UL>
|
||||
<P>
|
||||
<LI><A HREF="manual.html#6">6 – Standard Libraries</A>
|
||||
<UL>
|
||||
<LI><A HREF="manual.html#6.1">6.1 – Basic Functions</A>
|
||||
<LI><A HREF="manual.html#6.2">6.2 – Coroutine Manipulation</A>
|
||||
<LI><A HREF="manual.html#6.3">6.3 – Modules</A>
|
||||
<LI><A HREF="manual.html#6.4">6.4 – String Manipulation</A>
|
||||
<UL>
|
||||
<LI><A HREF="manual.html#6.4.1">6.4.1 – Patterns</A>
|
||||
</UL>
|
||||
<LI><A HREF="manual.html#6.5">6.5 – Table Manipulation</A>
|
||||
<LI><A HREF="manual.html#6.6">6.6 – Mathematical Functions</A>
|
||||
<LI><A HREF="manual.html#6.7">6.7 – Bitwise Operations</A>
|
||||
<LI><A HREF="manual.html#6.8">6.8 – Input and Output Facilities</A>
|
||||
<LI><A HREF="manual.html#6.9">6.9 – Operating System Facilities</A>
|
||||
<LI><A HREF="manual.html#6.10">6.10 – The Debug Library</A>
|
||||
</UL>
|
||||
<P>
|
||||
<LI><A HREF="manual.html#7">7 – Lua Standalone</A>
|
||||
<P>
|
||||
<LI><A HREF="manual.html#8">8 – Incompatibilities with the Previous Version</A>
|
||||
<UL>
|
||||
<LI><A HREF="manual.html#8.1">8.1 – Changes in the Language</A>
|
||||
<LI><A HREF="manual.html#8.2">8.2 – Changes in the Libraries</A>
|
||||
<LI><A HREF="manual.html#8.3">8.3 – Changes in the API</A>
|
||||
</UL>
|
||||
<P>
|
||||
<LI><A HREF="manual.html#9">9 – The Complete Syntax of Lua</A>
|
||||
</UL>
|
||||
|
||||
<H2><A NAME="index">Index</A></H2>
|
||||
<TABLE WIDTH="100%">
|
||||
<TR VALIGN="top">
|
||||
<TD>
|
||||
<H3><A NAME="functions">Lua functions</A></H3>
|
||||
<P>
|
||||
<A HREF="manual.html#pdf-_G">_G</A><BR>
|
||||
<A HREF="manual.html#pdf-_VERSION">_VERSION</A><BR>
|
||||
|
||||
<P>
|
||||
<A HREF="manual.html#pdf-assert">assert</A><BR>
|
||||
<A HREF="manual.html#pdf-collectgarbage">collectgarbage</A><BR>
|
||||
<A HREF="manual.html#pdf-dofile">dofile</A><BR>
|
||||
<A HREF="manual.html#pdf-error">error</A><BR>
|
||||
<A HREF="manual.html#pdf-getmetatable">getmetatable</A><BR>
|
||||
<A HREF="manual.html#pdf-ipairs">ipairs</A><BR>
|
||||
<A HREF="manual.html#pdf-load">load</A><BR>
|
||||
<A HREF="manual.html#pdf-loadfile">loadfile</A><BR>
|
||||
<A HREF="manual.html#pdf-next">next</A><BR>
|
||||
<A HREF="manual.html#pdf-pairs">pairs</A><BR>
|
||||
<A HREF="manual.html#pdf-pcall">pcall</A><BR>
|
||||
<A HREF="manual.html#pdf-print">print</A><BR>
|
||||
<A HREF="manual.html#pdf-rawequal">rawequal</A><BR>
|
||||
<A HREF="manual.html#pdf-rawget">rawget</A><BR>
|
||||
<A HREF="manual.html#pdf-rawlen">rawlen</A><BR>
|
||||
<A HREF="manual.html#pdf-rawset">rawset</A><BR>
|
||||
<A HREF="manual.html#pdf-require">require</A><BR>
|
||||
<A HREF="manual.html#pdf-select">select</A><BR>
|
||||
<A HREF="manual.html#pdf-setmetatable">setmetatable</A><BR>
|
||||
<A HREF="manual.html#pdf-tonumber">tonumber</A><BR>
|
||||
<A HREF="manual.html#pdf-tostring">tostring</A><BR>
|
||||
<A HREF="manual.html#pdf-type">type</A><BR>
|
||||
<A HREF="manual.html#pdf-xpcall">xpcall</A><BR>
|
||||
|
||||
<P>
|
||||
<A HREF="manual.html#pdf-bit32.arshift">bit32.arshift</A><BR>
|
||||
<A HREF="manual.html#pdf-bit32.band">bit32.band</A><BR>
|
||||
<A HREF="manual.html#pdf-bit32.bnot">bit32.bnot</A><BR>
|
||||
<A HREF="manual.html#pdf-bit32.bor">bit32.bor</A><BR>
|
||||
<A HREF="manual.html#pdf-bit32.btest">bit32.btest</A><BR>
|
||||
<A HREF="manual.html#pdf-bit32.bxor">bit32.bxor</A><BR>
|
||||
<A HREF="manual.html#pdf-bit32.extract">bit32.extract</A><BR>
|
||||
<A HREF="manual.html#pdf-bit32.lrotate">bit32.lrotate</A><BR>
|
||||
<A HREF="manual.html#pdf-bit32.lshift">bit32.lshift</A><BR>
|
||||
<A HREF="manual.html#pdf-bit32.replace">bit32.replace</A><BR>
|
||||
<A HREF="manual.html#pdf-bit32.rrotate">bit32.rrotate</A><BR>
|
||||
<A HREF="manual.html#pdf-bit32.rshift">bit32.rshift</A><BR>
|
||||
|
||||
<P>
|
||||
<A HREF="manual.html#pdf-coroutine.create">coroutine.create</A><BR>
|
||||
<A HREF="manual.html#pdf-coroutine.resume">coroutine.resume</A><BR>
|
||||
<A HREF="manual.html#pdf-coroutine.running">coroutine.running</A><BR>
|
||||
<A HREF="manual.html#pdf-coroutine.status">coroutine.status</A><BR>
|
||||
<A HREF="manual.html#pdf-coroutine.wrap">coroutine.wrap</A><BR>
|
||||
<A HREF="manual.html#pdf-coroutine.yield">coroutine.yield</A><BR>
|
||||
|
||||
<P>
|
||||
<A HREF="manual.html#pdf-debug.debug">debug.debug</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.getuservalue">debug.getuservalue</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.gethook">debug.gethook</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.getinfo">debug.getinfo</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.getlocal">debug.getlocal</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.getmetatable">debug.getmetatable</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.getregistry">debug.getregistry</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.getupvalue">debug.getupvalue</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.setuservalue">debug.setuservalue</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.sethook">debug.sethook</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.setlocal">debug.setlocal</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.setmetatable">debug.setmetatable</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.setupvalue">debug.setupvalue</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.traceback">debug.traceback</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.upvalueid">debug.upvalueid</A><BR>
|
||||
<A HREF="manual.html#pdf-debug.upvaluejoin">debug.upvaluejoin</A><BR>
|
||||
|
||||
<P>
|
||||
<A HREF="manual.html#pdf-file:close">file:close</A><BR>
|
||||
<A HREF="manual.html#pdf-file:flush">file:flush</A><BR>
|
||||
<A HREF="manual.html#pdf-file:lines">file:lines</A><BR>
|
||||
<A HREF="manual.html#pdf-file:read">file:read</A><BR>
|
||||
<A HREF="manual.html#pdf-file:seek">file:seek</A><BR>
|
||||
<A HREF="manual.html#pdf-file:setvbuf">file:setvbuf</A><BR>
|
||||
<A HREF="manual.html#pdf-file:write">file:write</A><BR>
|
||||
|
||||
<P>
|
||||
<A HREF="manual.html#pdf-io.close">io.close</A><BR>
|
||||
<A HREF="manual.html#pdf-io.flush">io.flush</A><BR>
|
||||
<A HREF="manual.html#pdf-io.input">io.input</A><BR>
|
||||
<A HREF="manual.html#pdf-io.lines">io.lines</A><BR>
|
||||
<A HREF="manual.html#pdf-io.open">io.open</A><BR>
|
||||
<A HREF="manual.html#pdf-io.output">io.output</A><BR>
|
||||
<A HREF="manual.html#pdf-io.popen">io.popen</A><BR>
|
||||
<A HREF="manual.html#pdf-io.read">io.read</A><BR>
|
||||
<A HREF="manual.html#pdf-io.stderr">io.stderr</A><BR>
|
||||
<A HREF="manual.html#pdf-io.stdin">io.stdin</A><BR>
|
||||
<A HREF="manual.html#pdf-io.stdout">io.stdout</A><BR>
|
||||
<A HREF="manual.html#pdf-io.tmpfile">io.tmpfile</A><BR>
|
||||
<A HREF="manual.html#pdf-io.type">io.type</A><BR>
|
||||
<A HREF="manual.html#pdf-io.write">io.write</A><BR>
|
||||
|
||||
</TD>
|
||||
<TD>
|
||||
<H3> </H3>
|
||||
<P>
|
||||
<A HREF="manual.html#pdf-math.abs">math.abs</A><BR>
|
||||
<A HREF="manual.html#pdf-math.acos">math.acos</A><BR>
|
||||
<A HREF="manual.html#pdf-math.asin">math.asin</A><BR>
|
||||
<A HREF="manual.html#pdf-math.atan">math.atan</A><BR>
|
||||
<A HREF="manual.html#pdf-math.atan2">math.atan2</A><BR>
|
||||
<A HREF="manual.html#pdf-math.ceil">math.ceil</A><BR>
|
||||
<A HREF="manual.html#pdf-math.cos">math.cos</A><BR>
|
||||
<A HREF="manual.html#pdf-math.cosh">math.cosh</A><BR>
|
||||
<A HREF="manual.html#pdf-math.deg">math.deg</A><BR>
|
||||
<A HREF="manual.html#pdf-math.exp">math.exp</A><BR>
|
||||
<A HREF="manual.html#pdf-math.floor">math.floor</A><BR>
|
||||
<A HREF="manual.html#pdf-math.fmod">math.fmod</A><BR>
|
||||
<A HREF="manual.html#pdf-math.frexp">math.frexp</A><BR>
|
||||
<A HREF="manual.html#pdf-math.huge">math.huge</A><BR>
|
||||
<A HREF="manual.html#pdf-math.ldexp">math.ldexp</A><BR>
|
||||
<A HREF="manual.html#pdf-math.log">math.log</A><BR>
|
||||
<A HREF="manual.html#pdf-math.max">math.max</A><BR>
|
||||
<A HREF="manual.html#pdf-math.min">math.min</A><BR>
|
||||
<A HREF="manual.html#pdf-math.modf">math.modf</A><BR>
|
||||
<A HREF="manual.html#pdf-math.pi">math.pi</A><BR>
|
||||
<A HREF="manual.html#pdf-math.pow">math.pow</A><BR>
|
||||
<A HREF="manual.html#pdf-math.rad">math.rad</A><BR>
|
||||
<A HREF="manual.html#pdf-math.random">math.random</A><BR>
|
||||
<A HREF="manual.html#pdf-math.randomseed">math.randomseed</A><BR>
|
||||
<A HREF="manual.html#pdf-math.sin">math.sin</A><BR>
|
||||
<A HREF="manual.html#pdf-math.sinh">math.sinh</A><BR>
|
||||
<A HREF="manual.html#pdf-math.sqrt">math.sqrt</A><BR>
|
||||
<A HREF="manual.html#pdf-math.tan">math.tan</A><BR>
|
||||
<A HREF="manual.html#pdf-math.tanh">math.tanh</A><BR>
|
||||
|
||||
<P>
|
||||
<A HREF="manual.html#pdf-os.clock">os.clock</A><BR>
|
||||
<A HREF="manual.html#pdf-os.date">os.date</A><BR>
|
||||
<A HREF="manual.html#pdf-os.difftime">os.difftime</A><BR>
|
||||
<A HREF="manual.html#pdf-os.execute">os.execute</A><BR>
|
||||
<A HREF="manual.html#pdf-os.exit">os.exit</A><BR>
|
||||
<A HREF="manual.html#pdf-os.getenv">os.getenv</A><BR>
|
||||
<A HREF="manual.html#pdf-os.remove">os.remove</A><BR>
|
||||
<A HREF="manual.html#pdf-os.rename">os.rename</A><BR>
|
||||
<A HREF="manual.html#pdf-os.setlocale">os.setlocale</A><BR>
|
||||
<A HREF="manual.html#pdf-os.time">os.time</A><BR>
|
||||
<A HREF="manual.html#pdf-os.tmpname">os.tmpname</A><BR>
|
||||
|
||||
<P>
|
||||
<A HREF="manual.html#pdf-package.config">package.config</A><BR>
|
||||
<A HREF="manual.html#pdf-package.cpath">package.cpath</A><BR>
|
||||
<A HREF="manual.html#pdf-package.loaded">package.loaded</A><BR>
|
||||
<A HREF="manual.html#pdf-package.loadlib">package.loadlib</A><BR>
|
||||
<A HREF="manual.html#pdf-package.path">package.path</A><BR>
|
||||
<A HREF="manual.html#pdf-package.preload">package.preload</A><BR>
|
||||
<A HREF="manual.html#pdf-package.searchers">package.searchers</A><BR>
|
||||
<A HREF="manual.html#pdf-package.searchpath">package.searchpath</A><BR>
|
||||
|
||||
<P>
|
||||
<A HREF="manual.html#pdf-string.byte">string.byte</A><BR>
|
||||
<A HREF="manual.html#pdf-string.char">string.char</A><BR>
|
||||
<A HREF="manual.html#pdf-string.dump">string.dump</A><BR>
|
||||
<A HREF="manual.html#pdf-string.find">string.find</A><BR>
|
||||
<A HREF="manual.html#pdf-string.format">string.format</A><BR>
|
||||
<A HREF="manual.html#pdf-string.gmatch">string.gmatch</A><BR>
|
||||
<A HREF="manual.html#pdf-string.gsub">string.gsub</A><BR>
|
||||
<A HREF="manual.html#pdf-string.len">string.len</A><BR>
|
||||
<A HREF="manual.html#pdf-string.lower">string.lower</A><BR>
|
||||
<A HREF="manual.html#pdf-string.match">string.match</A><BR>
|
||||
<A HREF="manual.html#pdf-string.rep">string.rep</A><BR>
|
||||
<A HREF="manual.html#pdf-string.reverse">string.reverse</A><BR>
|
||||
<A HREF="manual.html#pdf-string.sub">string.sub</A><BR>
|
||||
<A HREF="manual.html#pdf-string.upper">string.upper</A><BR>
|
||||
|
||||
<P>
|
||||
<A HREF="manual.html#pdf-table.concat">table.concat</A><BR>
|
||||
<A HREF="manual.html#pdf-table.insert">table.insert</A><BR>
|
||||
<A HREF="manual.html#pdf-table.pack">table.pack</A><BR>
|
||||
<A HREF="manual.html#pdf-table.remove">table.remove</A><BR>
|
||||
<A HREF="manual.html#pdf-table.sort">table.sort</A><BR>
|
||||
<A HREF="manual.html#pdf-table.unpack">table.unpack</A><BR>
|
||||
|
||||
</TD>
|
||||
<TD>
|
||||
<H3>C API</H3>
|
||||
<P>
|
||||
<A HREF="manual.html#lua_Alloc">lua_Alloc</A><BR>
|
||||
<A HREF="manual.html#lua_CFunction">lua_CFunction</A><BR>
|
||||
<A HREF="manual.html#lua_Debug">lua_Debug</A><BR>
|
||||
<A HREF="manual.html#lua_Hook">lua_Hook</A><BR>
|
||||
<A HREF="manual.html#lua_Integer">lua_Integer</A><BR>
|
||||
<A HREF="manual.html#lua_Number">lua_Number</A><BR>
|
||||
<A HREF="manual.html#lua_Reader">lua_Reader</A><BR>
|
||||
<A HREF="manual.html#lua_State">lua_State</A><BR>
|
||||
<A HREF="manual.html#lua_Unsigned">lua_Unsigned</A><BR>
|
||||
<A HREF="manual.html#lua_Writer">lua_Writer</A><BR>
|
||||
|
||||
<P>
|
||||
<A HREF="manual.html#lua_absindex">lua_absindex</A><BR>
|
||||
<A HREF="manual.html#lua_arith">lua_arith</A><BR>
|
||||
<A HREF="manual.html#lua_atpanic">lua_atpanic</A><BR>
|
||||
<A HREF="manual.html#lua_call">lua_call</A><BR>
|
||||
<A HREF="manual.html#lua_callk">lua_callk</A><BR>
|
||||
<A HREF="manual.html#lua_checkstack">lua_checkstack</A><BR>
|
||||
<A HREF="manual.html#lua_close">lua_close</A><BR>
|
||||
<A HREF="manual.html#lua_compare">lua_compare</A><BR>
|
||||
<A HREF="manual.html#lua_concat">lua_concat</A><BR>
|
||||
<A HREF="manual.html#lua_copy">lua_copy</A><BR>
|
||||
<A HREF="manual.html#lua_createtable">lua_createtable</A><BR>
|
||||
<A HREF="manual.html#lua_dump">lua_dump</A><BR>
|
||||
<A HREF="manual.html#lua_error">lua_error</A><BR>
|
||||
<A HREF="manual.html#lua_gc">lua_gc</A><BR>
|
||||
<A HREF="manual.html#lua_getallocf">lua_getallocf</A><BR>
|
||||
<A HREF="manual.html#lua_getctx">lua_getctx</A><BR>
|
||||
<A HREF="manual.html#lua_getfield">lua_getfield</A><BR>
|
||||
<A HREF="manual.html#lua_getglobal">lua_getglobal</A><BR>
|
||||
<A HREF="manual.html#lua_gethook">lua_gethook</A><BR>
|
||||
<A HREF="manual.html#lua_gethookcount">lua_gethookcount</A><BR>
|
||||
<A HREF="manual.html#lua_gethookmask">lua_gethookmask</A><BR>
|
||||
<A HREF="manual.html#lua_getinfo">lua_getinfo</A><BR>
|
||||
<A HREF="manual.html#lua_getlocal">lua_getlocal</A><BR>
|
||||
<A HREF="manual.html#lua_getmetatable">lua_getmetatable</A><BR>
|
||||
<A HREF="manual.html#lua_getstack">lua_getstack</A><BR>
|
||||
<A HREF="manual.html#lua_gettable">lua_gettable</A><BR>
|
||||
<A HREF="manual.html#lua_gettop">lua_gettop</A><BR>
|
||||
<A HREF="manual.html#lua_getupvalue">lua_getupvalue</A><BR>
|
||||
<A HREF="manual.html#lua_getuservalue">lua_getuservalue</A><BR>
|
||||
<A HREF="manual.html#lua_insert">lua_insert</A><BR>
|
||||
<A HREF="manual.html#lua_isboolean">lua_isboolean</A><BR>
|
||||
<A HREF="manual.html#lua_iscfunction">lua_iscfunction</A><BR>
|
||||
<A HREF="manual.html#lua_isfunction">lua_isfunction</A><BR>
|
||||
<A HREF="manual.html#lua_islightuserdata">lua_islightuserdata</A><BR>
|
||||
<A HREF="manual.html#lua_isnil">lua_isnil</A><BR>
|
||||
<A HREF="manual.html#lua_isnone">lua_isnone</A><BR>
|
||||
<A HREF="manual.html#lua_isnoneornil">lua_isnoneornil</A><BR>
|
||||
<A HREF="manual.html#lua_isnumber">lua_isnumber</A><BR>
|
||||
<A HREF="manual.html#lua_isstring">lua_isstring</A><BR>
|
||||
<A HREF="manual.html#lua_istable">lua_istable</A><BR>
|
||||
<A HREF="manual.html#lua_isthread">lua_isthread</A><BR>
|
||||
<A HREF="manual.html#lua_isuserdata">lua_isuserdata</A><BR>
|
||||
<A HREF="manual.html#lua_len">lua_len</A><BR>
|
||||
<A HREF="manual.html#lua_load">lua_load</A><BR>
|
||||
<A HREF="manual.html#lua_newstate">lua_newstate</A><BR>
|
||||
<A HREF="manual.html#lua_newtable">lua_newtable</A><BR>
|
||||
<A HREF="manual.html#lua_newthread">lua_newthread</A><BR>
|
||||
<A HREF="manual.html#lua_newuserdata">lua_newuserdata</A><BR>
|
||||
<A HREF="manual.html#lua_next">lua_next</A><BR>
|
||||
<A HREF="manual.html#lua_pcall">lua_pcall</A><BR>
|
||||
<A HREF="manual.html#lua_pcallk">lua_pcallk</A><BR>
|
||||
<A HREF="manual.html#lua_pop">lua_pop</A><BR>
|
||||
<A HREF="manual.html#lua_pushboolean">lua_pushboolean</A><BR>
|
||||
<A HREF="manual.html#lua_pushcclosure">lua_pushcclosure</A><BR>
|
||||
<A HREF="manual.html#lua_pushcfunction">lua_pushcfunction</A><BR>
|
||||
<A HREF="manual.html#lua_pushfstring">lua_pushfstring</A><BR>
|
||||
<A HREF="manual.html#lua_pushglobaltable">lua_pushglobaltable</A><BR>
|
||||
<A HREF="manual.html#lua_pushinteger">lua_pushinteger</A><BR>
|
||||
<A HREF="manual.html#lua_pushlightuserdata">lua_pushlightuserdata</A><BR>
|
||||
<A HREF="manual.html#lua_pushliteral">lua_pushliteral</A><BR>
|
||||
<A HREF="manual.html#lua_pushlstring">lua_pushlstring</A><BR>
|
||||
<A HREF="manual.html#lua_pushnil">lua_pushnil</A><BR>
|
||||
<A HREF="manual.html#lua_pushnumber">lua_pushnumber</A><BR>
|
||||
<A HREF="manual.html#lua_pushstring">lua_pushstring</A><BR>
|
||||
<A HREF="manual.html#lua_pushthread">lua_pushthread</A><BR>
|
||||
<A HREF="manual.html#lua_pushunsigned">lua_pushunsigned</A><BR>
|
||||
<A HREF="manual.html#lua_pushvalue">lua_pushvalue</A><BR>
|
||||
<A HREF="manual.html#lua_pushvfstring">lua_pushvfstring</A><BR>
|
||||
<A HREF="manual.html#lua_rawequal">lua_rawequal</A><BR>
|
||||
<A HREF="manual.html#lua_rawget">lua_rawget</A><BR>
|
||||
<A HREF="manual.html#lua_rawgeti">lua_rawgeti</A><BR>
|
||||
<A HREF="manual.html#lua_rawgetp">lua_rawgetp</A><BR>
|
||||
<A HREF="manual.html#lua_rawlen">lua_rawlen</A><BR>
|
||||
<A HREF="manual.html#lua_rawset">lua_rawset</A><BR>
|
||||
<A HREF="manual.html#lua_rawseti">lua_rawseti</A><BR>
|
||||
<A HREF="manual.html#lua_rawsetp">lua_rawsetp</A><BR>
|
||||
<A HREF="manual.html#lua_register">lua_register</A><BR>
|
||||
<A HREF="manual.html#lua_remove">lua_remove</A><BR>
|
||||
<A HREF="manual.html#lua_replace">lua_replace</A><BR>
|
||||
<A HREF="manual.html#lua_resume">lua_resume</A><BR>
|
||||
<A HREF="manual.html#lua_setallocf">lua_setallocf</A><BR>
|
||||
<A HREF="manual.html#lua_setfield">lua_setfield</A><BR>
|
||||
<A HREF="manual.html#lua_setglobal">lua_setglobal</A><BR>
|
||||
<A HREF="manual.html#lua_sethook">lua_sethook</A><BR>
|
||||
<A HREF="manual.html#lua_setlocal">lua_setlocal</A><BR>
|
||||
<A HREF="manual.html#lua_setmetatable">lua_setmetatable</A><BR>
|
||||
<A HREF="manual.html#lua_settable">lua_settable</A><BR>
|
||||
<A HREF="manual.html#lua_settop">lua_settop</A><BR>
|
||||
<A HREF="manual.html#lua_setupvalue">lua_setupvalue</A><BR>
|
||||
<A HREF="manual.html#lua_setuservalue">lua_setuservalue</A><BR>
|
||||
<A HREF="manual.html#lua_status">lua_status</A><BR>
|
||||
<A HREF="manual.html#lua_toboolean">lua_toboolean</A><BR>
|
||||
<A HREF="manual.html#lua_tocfunction">lua_tocfunction</A><BR>
|
||||
<A HREF="manual.html#lua_tointeger">lua_tointeger</A><BR>
|
||||
<A HREF="manual.html#lua_tointegerx">lua_tointegerx</A><BR>
|
||||
<A HREF="manual.html#lua_tolstring">lua_tolstring</A><BR>
|
||||
<A HREF="manual.html#lua_tonumber">lua_tonumber</A><BR>
|
||||
<A HREF="manual.html#lua_tonumberx">lua_tonumberx</A><BR>
|
||||
<A HREF="manual.html#lua_topointer">lua_topointer</A><BR>
|
||||
<A HREF="manual.html#lua_tostring">lua_tostring</A><BR>
|
||||
<A HREF="manual.html#lua_tothread">lua_tothread</A><BR>
|
||||
<A HREF="manual.html#lua_tounsigned">lua_tounsigned</A><BR>
|
||||
<A HREF="manual.html#lua_tounsignedx">lua_tounsignedx</A><BR>
|
||||
<A HREF="manual.html#lua_touserdata">lua_touserdata</A><BR>
|
||||
<A HREF="manual.html#lua_type">lua_type</A><BR>
|
||||
<A HREF="manual.html#lua_typename">lua_typename</A><BR>
|
||||
<A HREF="manual.html#lua_upvalueid">lua_upvalueid</A><BR>
|
||||
<A HREF="manual.html#lua_upvalueindex">lua_upvalueindex</A><BR>
|
||||
<A HREF="manual.html#lua_upvaluejoin">lua_upvaluejoin</A><BR>
|
||||
<A HREF="manual.html#lua_version">lua_version</A><BR>
|
||||
<A HREF="manual.html#lua_xmove">lua_xmove</A><BR>
|
||||
<A HREF="manual.html#lua_yield">lua_yield</A><BR>
|
||||
<A HREF="manual.html#lua_yieldk">lua_yieldk</A><BR>
|
||||
|
||||
</TD>
|
||||
<TD>
|
||||
<H3>auxiliary library</H3>
|
||||
<P>
|
||||
<A HREF="manual.html#luaL_Buffer">luaL_Buffer</A><BR>
|
||||
<A HREF="manual.html#luaL_Reg">luaL_Reg</A><BR>
|
||||
|
||||
<P>
|
||||
<A HREF="manual.html#luaL_addchar">luaL_addchar</A><BR>
|
||||
<A HREF="manual.html#luaL_addlstring">luaL_addlstring</A><BR>
|
||||
<A HREF="manual.html#luaL_addsize">luaL_addsize</A><BR>
|
||||
<A HREF="manual.html#luaL_addstring">luaL_addstring</A><BR>
|
||||
<A HREF="manual.html#luaL_addvalue">luaL_addvalue</A><BR>
|
||||
<A HREF="manual.html#luaL_argcheck">luaL_argcheck</A><BR>
|
||||
<A HREF="manual.html#luaL_argerror">luaL_argerror</A><BR>
|
||||
<A HREF="manual.html#luaL_buffinit">luaL_buffinit</A><BR>
|
||||
<A HREF="manual.html#luaL_buffinitsize">luaL_buffinitsize</A><BR>
|
||||
<A HREF="manual.html#luaL_callmeta">luaL_callmeta</A><BR>
|
||||
<A HREF="manual.html#luaL_checkany">luaL_checkany</A><BR>
|
||||
<A HREF="manual.html#luaL_checkint">luaL_checkint</A><BR>
|
||||
<A HREF="manual.html#luaL_checkinteger">luaL_checkinteger</A><BR>
|
||||
<A HREF="manual.html#luaL_checklong">luaL_checklong</A><BR>
|
||||
<A HREF="manual.html#luaL_checklstring">luaL_checklstring</A><BR>
|
||||
<A HREF="manual.html#luaL_checknumber">luaL_checknumber</A><BR>
|
||||
<A HREF="manual.html#luaL_checkoption">luaL_checkoption</A><BR>
|
||||
<A HREF="manual.html#luaL_checkstack">luaL_checkstack</A><BR>
|
||||
<A HREF="manual.html#luaL_checkstring">luaL_checkstring</A><BR>
|
||||
<A HREF="manual.html#luaL_checktype">luaL_checktype</A><BR>
|
||||
<A HREF="manual.html#luaL_checkudata">luaL_checkudata</A><BR>
|
||||
<A HREF="manual.html#luaL_checkunsigned">luaL_checkunsigned</A><BR>
|
||||
<A HREF="manual.html#luaL_checkversion">luaL_checkversion</A><BR>
|
||||
<A HREF="manual.html#luaL_dofile">luaL_dofile</A><BR>
|
||||
<A HREF="manual.html#luaL_dostring">luaL_dostring</A><BR>
|
||||
<A HREF="manual.html#luaL_error">luaL_error</A><BR>
|
||||
<A HREF="manual.html#luaL_execresult">luaL_execresult</A><BR>
|
||||
<A HREF="manual.html#luaL_fileresult">luaL_fileresult</A><BR>
|
||||
<A HREF="manual.html#luaL_getmetafield">luaL_getmetafield</A><BR>
|
||||
<A HREF="manual.html#luaL_getmetatable">luaL_getmetatable</A><BR>
|
||||
<A HREF="manual.html#luaL_getsubtable">luaL_getsubtable</A><BR>
|
||||
<A HREF="manual.html#luaL_gsub">luaL_gsub</A><BR>
|
||||
<A HREF="manual.html#luaL_len">luaL_len</A><BR>
|
||||
<A HREF="manual.html#luaL_loadbuffer">luaL_loadbuffer</A><BR>
|
||||
<A HREF="manual.html#luaL_loadbufferx">luaL_loadbufferx</A><BR>
|
||||
<A HREF="manual.html#luaL_loadfile">luaL_loadfile</A><BR>
|
||||
<A HREF="manual.html#luaL_loadfilex">luaL_loadfilex</A><BR>
|
||||
<A HREF="manual.html#luaL_loadstring">luaL_loadstring</A><BR>
|
||||
<A HREF="manual.html#luaL_newlib">luaL_newlib</A><BR>
|
||||
<A HREF="manual.html#luaL_newlibtable">luaL_newlibtable</A><BR>
|
||||
<A HREF="manual.html#luaL_newmetatable">luaL_newmetatable</A><BR>
|
||||
<A HREF="manual.html#luaL_newstate">luaL_newstate</A><BR>
|
||||
<A HREF="manual.html#luaL_openlibs">luaL_openlibs</A><BR>
|
||||
<A HREF="manual.html#luaL_optint">luaL_optint</A><BR>
|
||||
<A HREF="manual.html#luaL_optinteger">luaL_optinteger</A><BR>
|
||||
<A HREF="manual.html#luaL_optlong">luaL_optlong</A><BR>
|
||||
<A HREF="manual.html#luaL_optlstring">luaL_optlstring</A><BR>
|
||||
<A HREF="manual.html#luaL_optnumber">luaL_optnumber</A><BR>
|
||||
<A HREF="manual.html#luaL_optstring">luaL_optstring</A><BR>
|
||||
<A HREF="manual.html#luaL_optunsigned">luaL_optunsigned</A><BR>
|
||||
<A HREF="manual.html#luaL_prepbuffer">luaL_prepbuffer</A><BR>
|
||||
<A HREF="manual.html#luaL_prepbuffsize">luaL_prepbuffsize</A><BR>
|
||||
<A HREF="manual.html#luaL_pushresult">luaL_pushresult</A><BR>
|
||||
<A HREF="manual.html#luaL_pushresultsize">luaL_pushresultsize</A><BR>
|
||||
<A HREF="manual.html#luaL_ref">luaL_ref</A><BR>
|
||||
<A HREF="manual.html#luaL_requiref">luaL_requiref</A><BR>
|
||||
<A HREF="manual.html#luaL_setfuncs">luaL_setfuncs</A><BR>
|
||||
<A HREF="manual.html#luaL_setmetatable">luaL_setmetatable</A><BR>
|
||||
<A HREF="manual.html#luaL_testudata">luaL_testudata</A><BR>
|
||||
<A HREF="manual.html#luaL_tolstring">luaL_tolstring</A><BR>
|
||||
<A HREF="manual.html#luaL_traceback">luaL_traceback</A><BR>
|
||||
<A HREF="manual.html#luaL_typename">luaL_typename</A><BR>
|
||||
<A HREF="manual.html#luaL_unref">luaL_unref</A><BR>
|
||||
<A HREF="manual.html#luaL_where">luaL_where</A><BR>
|
||||
|
||||
</TD>
|
||||
</TR>
|
||||
</TABLE>
|
||||
|
||||
<HR>
|
||||
<SMALL CLASS="footer">
|
||||
Last update:
|
||||
Tue Mar 12 11:22:18 BRT 2013
|
||||
</SMALL>
|
||||
<!--
|
||||
Last change: revised for Lua 5.2.2
|
||||
-->
|
||||
|
||||
</BODY>
|
||||
</HTML>
|
BIN
AppPkg/Applications/Lua/doc/logo.gif
Normal file
Binary file not shown.
After Width: | Height: | Size: 4.1 KiB |
116
AppPkg/Applications/Lua/doc/lua.1
Normal file
@@ -0,0 +1,116 @@
|
||||
.\" $Id: lua.man,v 1.13 2011/11/16 17:16:53 lhf Exp $
|
||||
.TH LUA 1 "$Date: 2011/11/16 17:16:53 $"
|
||||
.SH NAME
|
||||
lua \- Lua interpreter
|
||||
.SH SYNOPSIS
|
||||
.B lua
|
||||
[
|
||||
.I options
|
||||
]
|
||||
[
|
||||
.I script
|
||||
[
|
||||
.I args
|
||||
]
|
||||
]
|
||||
.SH DESCRIPTION
|
||||
.B lua
|
||||
is the standalone Lua interpreter.
|
||||
It loads and executes Lua programs,
|
||||
either in textual source form or
|
||||
in precompiled binary form.
|
||||
(Precompiled binaries are output by
|
||||
.BR luac ,
|
||||
the Lua compiler.)
|
||||
.B lua
|
||||
can be used as a batch interpreter and also interactively.
|
||||
.LP
|
||||
The given
|
||||
.I options
|
||||
are handled in order and then
|
||||
the Lua program in file
|
||||
.I script
|
||||
is loaded and executed.
|
||||
The given
|
||||
.I args
|
||||
are available to
|
||||
.I script
|
||||
as strings in a global table named
|
||||
.BR arg .
|
||||
If no options or arguments are given,
|
||||
then
|
||||
.B "\-v \-i"
|
||||
is assumed when the standard input is a terminal;
|
||||
otherwise,
|
||||
.B "\-"
|
||||
is assumed.
|
||||
.LP
|
||||
In interactive mode,
|
||||
.B lua
|
||||
prompts the user,
|
||||
reads lines from the standard input,
|
||||
and executes them as they are read.
|
||||
If a line does not contain a complete statement,
|
||||
then a secondary prompt is displayed and
|
||||
lines are read until a complete statement is formed or
|
||||
a syntax error is found.
|
||||
If a line starts with
|
||||
.BR '=' ,
|
||||
then
|
||||
.B lua
|
||||
evaluates and displays
|
||||
the values of the expressions in the remainder of the line.
|
||||
.LP
|
||||
At the very start,
|
||||
before even handling the command line,
|
||||
.B lua
|
||||
checks the contents of the environment variables
|
||||
.B LUA_INIT_5_2
|
||||
or
|
||||
.BR LUA_INIT ,
|
||||
in that order.
|
||||
If the contents is of the form
|
||||
.RI '@ filename ',
|
||||
then
|
||||
.I filename
|
||||
is executed.
|
||||
Otherwise, the string is assumed to be a Lua statement and is executed.
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
.BI \-e " stat"
|
||||
execute statement
|
||||
.IR stat .
|
||||
.TP
|
||||
.B \-i
|
||||
enter interactive mode after executing
|
||||
.IR script .
|
||||
.TP
|
||||
.BI \-l " name"
|
||||
execute the equivalent of
|
||||
.IB name =require(' name ')
|
||||
before executing
|
||||
.IR script .
|
||||
.TP
|
||||
.B \-v
|
||||
show version information.
|
||||
.TP
|
||||
.B \-E
|
||||
ignore environment variables.
|
||||
.TP
|
||||
.B \-\-
|
||||
stop handling options.
|
||||
.TP
|
||||
.B \-
|
||||
stop handling options and execute the standard input as a file.
|
||||
.SH "SEE ALSO"
|
||||
.BR luac (1)
|
||||
.br
|
||||
The documentation at lua.org,
|
||||
especially section 7 of the reference manual.
|
||||
.SH DIAGNOSTICS
|
||||
Error messages should be self explanatory.
|
||||
.SH AUTHORS
|
||||
R. Ierusalimschy,
|
||||
L. H. de Figueiredo,
|
||||
W. Celes
|
||||
.\" EOF
|
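The LUA_INIT handling described in the man page above ('@filename' runs the named file, anything else is executed as a Lua statement) maps directly onto two auxiliary-library macros. A sketch under Lua 5.2, assuming an already-created state L; the helper name is invented here.

```c
/* Sketch of the LUA_INIT behaviour described above: '@filename' names a
 * file to run, otherwise the value itself is run as a Lua statement. */
#include <stdlib.h>
#include <Lua/lua.h>
#include <Lua/lauxlib.h>

static int
handle_lua_init(lua_State *L)
{
  const char *init = getenv("LUA_INIT_5_2");

  if (init == NULL) {
    init = getenv("LUA_INIT");        /* fall back, in the documented order */
  }
  if (init == NULL) {
    return 0;                         /* nothing to do                      */
  }
  if (init[0] == '@') {
    return luaL_dofile(L, init + 1);  /* '@filename': run that file         */
  }
  return luaL_dostring(L, init);      /* otherwise: run the string          */
}
```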
96
AppPkg/Applications/Lua/doc/lua.css
Normal file
@@ -0,0 +1,96 @@
|
||||
html {
|
||||
background-color: #F8F8F8 ;
|
||||
}
|
||||
|
||||
body {
|
||||
border: solid #a0a0a0 1px ;
|
||||
border-radius: 20px ;
|
||||
padding: 26px ;
|
||||
margin: 16px ;
|
||||
color: #000000 ;
|
||||
background-color: #FFFFFF ;
|
||||
font-family: Helvetica, Arial, sans-serif ;
|
||||
text-align: justify ;
|
||||
}
|
||||
|
||||
h1, h2, h3, h4 {
|
||||
font-family: Verdana, Geneva, sans-serif ;
|
||||
font-weight: normal ;
|
||||
font-style: normal ;
|
||||
}
|
||||
|
||||
h2 {
|
||||
padding-top: 0.4em ;
|
||||
padding-bottom: 0.4em ;
|
||||
padding-left: 0.8em ;
|
||||
padding-right: 0.8em ;
|
||||
background-color: #D0D0FF ;
|
||||
border-radius: 8px ;
|
||||
border: solid #a0a0a0 1px ;
|
||||
}
|
||||
|
||||
h3 {
|
||||
padding-left: 0.5em ;
|
||||
border-left: solid #D0D0FF 1em ;
|
||||
}
|
||||
|
||||
table h3 {
|
||||
padding-left: 0px ;
|
||||
border-left: none ;
|
||||
}
|
||||
|
||||
a:link {
|
||||
color: #000080 ;
|
||||
background-color: inherit ;
|
||||
text-decoration: none ;
|
||||
}
|
||||
|
||||
a:visited {
|
||||
background-color: inherit ;
|
||||
text-decoration: none ;
|
||||
}
|
||||
|
||||
a:link:hover, a:visited:hover {
|
||||
color: #000080 ;
|
||||
background-color: #D0D0FF ;
|
||||
}
|
||||
|
||||
a:link:active, a:visited:active {
|
||||
color: #FF0000 ;
|
||||
}
|
||||
|
||||
hr {
|
||||
border: 0 ;
|
||||
height: 1px ;
|
||||
color: #a0a0a0 ;
|
||||
background-color: #a0a0a0 ;
|
||||
display: none ;
|
||||
}
|
||||
|
||||
table hr {
|
||||
display: block ;
|
||||
}
|
||||
|
||||
:target {
|
||||
background-color: #F8F8F8 ;
|
||||
padding: 8px ;
|
||||
border: solid #a0a0a0 2px ;
|
||||
border-radius: 8px ;
|
||||
}
|
||||
|
||||
.footer {
|
||||
color: gray ;
|
||||
font-size: x-small ;
|
||||
}
|
||||
|
||||
input[type=text] {
|
||||
border: solid #a0a0a0 2px ;
|
||||
border-radius: 2em ;
|
||||
-moz-border-radius: 2em ;
|
||||
background-image: url('images/search.png') ;
|
||||
background-repeat: no-repeat;
|
||||
background-position: 4px center ;
|
||||
padding-left: 20px ;
|
||||
height: 2em ;
|
||||
}
|
||||
|
118
AppPkg/Applications/Lua/doc/luac.1
Normal file
@@ -0,0 +1,118 @@
|
||||
.\" $Id: luac.man,v 1.29 2011/11/16 13:53:40 lhf Exp $
|
||||
.TH LUAC 1 "$Date: 2011/11/16 13:53:40 $"
|
||||
.SH NAME
|
||||
luac \- Lua compiler
|
||||
.SH SYNOPSIS
|
||||
.B luac
|
||||
[
|
||||
.I options
|
||||
] [
|
||||
.I filenames
|
||||
]
|
||||
.SH DESCRIPTION
|
||||
.B luac
|
||||
is the Lua compiler.
|
||||
It translates programs written in the Lua programming language
|
||||
into binary files containing precompiled chunks
|
||||
that can be later loaded and executed.
|
||||
.LP
|
||||
The main advantages of precompiling chunks are:
|
||||
faster loading,
|
||||
protecting source code from accidental user changes,
|
||||
and
|
||||
off-line syntax checking.
|
||||
Precompiling does not imply faster execution
|
||||
because in Lua chunks are always compiled into bytecodes before being executed.
|
||||
.B luac
|
||||
simply allows those bytecodes to be saved in a file for later execution.
|
||||
Precompiled chunks are not necessarily smaller than the corresponding source.
|
||||
The main goal in precompiling is faster loading.
|
||||
.LP
|
||||
In the command line,
|
||||
you can mix
|
||||
text files containing Lua source and
|
||||
binary files containing precompiled chunks.
|
||||
.B luac
|
||||
produces a single output file containing the combined bytecodes
|
||||
for all files given.
|
||||
Executing the combined file is equivalent to executing the given files.
|
||||
By default,
|
||||
the output file is named
|
||||
.BR luac.out ,
|
||||
but you can change this with the
|
||||
.B \-o
|
||||
option.
|
||||
.LP
|
||||
Precompiled chunks are
|
||||
.I not
|
||||
portable across different architectures.
|
||||
Moreover,
|
||||
the internal format of precompiled chunks
|
||||
is likely to change when a new version of Lua is released.
|
||||
Make sure you save the source files of all Lua programs that you precompile.
|
||||
.LP
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
.B \-l
|
||||
produce a listing of the compiled bytecode for Lua's virtual machine.
|
||||
Listing bytecodes is useful to learn about Lua's virtual machine.
|
||||
If no files are given, then
|
||||
.B luac
|
||||
loads
|
||||
.B luac.out
|
||||
and lists its contents.
|
||||
Use
|
||||
.B \-l \-l
|
||||
for a full listing.
|
||||
.TP
|
||||
.BI \-o " file"
|
||||
output to
|
||||
.IR file ,
|
||||
instead of the default
|
||||
.BR luac.out .
|
||||
(You can use
|
||||
.B "'\-'"
|
||||
for standard output,
|
||||
but not on platforms that open standard output in text mode.)
|
||||
The output file may be one of the given files because
|
||||
all files are loaded before the output file is written.
|
||||
Be careful not to overwrite precious files.
|
||||
.TP
|
||||
.B \-p
|
||||
load files but do not generate any output file.
|
||||
Used mainly for syntax checking and for testing precompiled chunks:
|
||||
corrupted files will probably generate errors when loaded.
|
||||
If no files are given, then
|
||||
.B luac
|
||||
loads
|
||||
.B luac.out
|
||||
and tests its contents.
|
||||
No messages are displayed if the file loads without errors.
|
||||
.TP
|
||||
.B \-s
|
||||
strip debug information before writing the output file.
|
||||
This saves some space in very large chunks,
|
||||
but if errors occur when running a stripped chunk,
|
||||
then the error messages may not contain the full information they usually do.
|
||||
In particular,
|
||||
line numbers and names of local variables are lost.
|
||||
.TP
|
||||
.B \-v
|
||||
show version information.
|
||||
.TP
|
||||
.B \-\-
|
||||
stop handling options.
|
||||
.TP
|
||||
.B \-
|
||||
stop handling options and process standard input.
|
||||
.SH "SEE ALSO"
|
||||
.BR lua (1)
|
||||
.br
|
||||
The documentation at lua.org.
|
||||
.SH DIAGNOSTICS
|
||||
Error messages should be self explanatory.
|
||||
.SH AUTHORS
|
||||
R. Ierusalimschy,
|
||||
L. H. de Figueiredo,
|
||||
W. Celes
|
||||
.\" EOF
|
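As the luac page above notes, a precompiled chunk is simply saved bytecode. The same result can be produced from C with luaL_loadfile and lua_dump, both listed in the reference manual's C API index; the sketch below assumes the Lua 5.2 lua_dump signature, and the function and file names are placeholders.

```c
/* Sketch: write a precompiled chunk from C, roughly what "luac -o" does. */
#include <stdio.h>
#include <Lua/lua.h>
#include <Lua/lauxlib.h>

/* lua_Writer callback: append each block of bytecode to the output file. */
static int
writer(lua_State *L, const void *p, size_t sz, void *ud)
{
  (void)L;
  return (fwrite(p, sz, 1, (FILE *)ud) != 1) && (sz != 0);
}

int
compile_to_file(lua_State *L, const char *src, const char *out)
{
  FILE *f;
  int   err;

  if (luaL_loadfile(L, src) != LUA_OK) {  /* compile; leaves a function on the stack */
    return -1;                            /* error message is left on the stack      */
  }
  f = fopen(out, "wb");
  if (f == NULL) {
    return -1;
  }
  err = lua_dump(L, writer, f);           /* dump the function as a binary chunk     */
  fclose(f);
  return err;
}
```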
27
AppPkg/Applications/Lua/doc/manual.css
Normal file
@@ -0,0 +1,27 @@
h3 code {
	font-family: inherit ;
	font-size: inherit ;
}

pre, code {
	font-size: 12pt ;
}

span.apii {
	float: right ;
	font-family: inherit ;
	font-style: normal ;
	font-size: small ;
	color: gray ;
}

p+h1, ul+h1 {
	font-style: normal ;
	padding-top: 0.4em ;
	padding-bottom: 0.4em ;
	padding-left: 16px ;
	margin-left: -16px ;
	background-color: #D0D0FF ;
	border-radius: 8px ;
	border: solid #000080 1px ;
}
10507
AppPkg/Applications/Lua/doc/manual.html
Normal file
File diff suppressed because it is too large
BIN
AppPkg/Applications/Lua/doc/osi-certified-72x60.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 3.7 KiB |
413
AppPkg/Applications/Lua/doc/readme.html
Normal file
@@ -0,0 +1,413 @@
|
||||
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
|
||||
<HTML>
|
||||
<HEAD>
|
||||
<TITLE>Lua 5.2 readme</TITLE>
|
||||
<LINK REL="stylesheet" TYPE="text/css" HREF="lua.css">
|
||||
<META HTTP-EQUIV="content-type" CONTENT="text/html; charset=iso-8859-1">
|
||||
<STYLE TYPE="text/css">
|
||||
blockquote, .display {
|
||||
border: solid #a0a0a0 2px ;
|
||||
border-radius: 8px ;
|
||||
padding: 1em ;
|
||||
margin: 0px ;
|
||||
}
|
||||
|
||||
.display {
|
||||
word-spacing: 0.25em ;
|
||||
}
|
||||
|
||||
dl.display dd {
|
||||
padding-bottom: 0.2em ;
|
||||
}
|
||||
|
||||
tt, kbd, code {
|
||||
font-size: 12pt ;
|
||||
}
|
||||
</STYLE>
|
||||
</HEAD>
|
||||
|
||||
<BODY>
|
||||
|
||||
<HR>
|
||||
<H1>
|
||||
<A HREF="http://www.lua.org/"><IMG SRC="logo.gif" ALT="Lua" BORDER=0></A>
|
||||
Welcome to Lua 5.2
|
||||
</H1>
|
||||
|
||||
<P>
|
||||
<A HREF="#about">about</A>
|
||||
·
|
||||
<A HREF="#install">installation</A>
|
||||
·
|
||||
<A HREF="#changes">changes</A>
|
||||
·
|
||||
<A HREF="#license">license</A>
|
||||
·
|
||||
<A HREF="contents.html">reference manual</A>
|
||||
|
||||
<H2><A NAME="about">About Lua</A></H2>
|
||||
|
||||
<P>
|
||||
Lua is a powerful, fast, lightweight, embeddable scripting language
|
||||
developed by a
|
||||
<A HREF="http://www.lua.org/authors.html">team</A>
|
||||
at
|
||||
<A HREF="http://www.puc-rio.br/">PUC-Rio</A>,
|
||||
the Pontifical Catholic University of Rio de Janeiro in Brazil.
|
||||
Lua is
|
||||
<A HREF="#license">free software</A>
|
||||
used in many products and projects around the world.
|
||||
|
||||
<P>
|
||||
Lua's
|
||||
<A HREF="http://www.lua.org/">official web site</A>
|
||||
provides complete information
|
||||
about Lua,
|
||||
including
|
||||
an
|
||||
<A HREF="http://www.lua.org/about.html">executive summary</A>
|
||||
and
|
||||
updated
|
||||
<A HREF="http://www.lua.org/docs.html">documentation</A>,
|
||||
especially the
|
||||
<A HREF="http://www.lua.org/manual/5.2/">reference manual</A>,
|
||||
which may differ slightly from the
|
||||
<A HREF="contents.html">local copy</A>
|
||||
distributed in this package.
|
||||
|
||||
<H2><A NAME="install">Installing Lua</A></H2>
|
||||
|
||||
<P>
|
||||
Lua is distributed in
|
||||
<A HREF="http://www.lua.org/ftp/">source</A>
|
||||
form.
|
||||
You need to build it before using it.
|
||||
Building Lua should be straightforward
|
||||
because
|
||||
Lua is implemented in pure ANSI C and compiles unmodified in all known
|
||||
platforms that have an ANSI C compiler.
|
||||
Lua also compiles unmodified as C++.
|
||||
The instructions given below for building Lua are for Unix-like platforms.
|
||||
See also
|
||||
<A HREF="#other">instructions for other systems</A>
|
||||
and
|
||||
<A HREF="#customization">customization options</A>.
|
||||
|
||||
<P>
|
||||
If you don't have the time or the inclination to compile Lua yourself,
|
||||
get a binary from
|
||||
<A HREF="http://lua-users.org/wiki/LuaBinaries">LuaBinaries</A>.
|
||||
Try also
|
||||
<A HREF="http://luaforwindows.luaforge.net/">Lua for Windows</A>,
|
||||
an easy-to-use distribution of Lua that includes many useful libraries.
|
||||
|
||||
<H3>Building Lua</H3>
|
||||
|
||||
<P>
|
||||
In most Unix-like platforms, simply do "<KBD>make</KBD>" with a suitable target.
|
||||
Here are the details.
|
||||
|
||||
<OL>
|
||||
<LI>
|
||||
Open a terminal window and move to
|
||||
the top-level directory, which is named <TT>lua-5.2.3</TT>.
|
||||
The Makefile there controls both the build process and the installation process.
|
||||
<P>
|
||||
<LI>
|
||||
Do "<KBD>make</KBD>" and see if your platform is listed.
|
||||
The platforms currently supported are:
|
||||
<P>
|
||||
<P CLASS="display">
|
||||
aix ansi bsd freebsd generic linux macosx mingw posix solaris
|
||||
</P>
|
||||
<P>
|
||||
If your platform is listed, just do "<KBD>make xxx</KBD>", where xxx
|
||||
is your platform name.
|
||||
<P>
|
||||
If your platform is not listed, try the closest one or posix, generic,
|
||||
ansi, in this order.
|
||||
<P>
|
||||
<LI>
|
||||
The compilation takes only a few moments
|
||||
and produces three files in the <TT>src</TT> directory:
|
||||
lua (the interpreter),
|
||||
luac (the compiler),
|
||||
and liblua.a (the library).
|
||||
<P>
|
||||
<LI>
|
||||
To check that Lua has been built correctly, do "<KBD>make test</KBD>"
|
||||
after building Lua. This will run the interpreter and print its version string.
|
||||
</OL>
|
||||
<P>
|
||||
If you're running Linux and get compilation errors,
|
||||
make sure you have installed the <TT>readline</TT> development package.
|
||||
If you get link errors after that,
|
||||
then try "<KBD>make linux MYLIBS=-ltermcap</KBD>".
|
||||
|
||||
<H3>Installing Lua</H3>
|
||||
<P>
|
||||
Once you have built Lua, you may want to install it in an official
|
||||
place in your system. In this case, do "<KBD>make install</KBD>". The official
|
||||
place and the way to install files are defined in the Makefile. You'll
|
||||
probably need the right permissions to install files.
|
||||
|
||||
<P>
|
||||
To build and install Lua in one step, do "<KBD>make xxx install</KBD>",
|
||||
where xxx is your platform name.
|
||||
|
||||
<P>
|
||||
To install Lua locally, do "<KBD>make local</KBD>".
|
||||
This will create a directory <TT>install</TT> with subdirectories
|
||||
<TT>bin</TT>, <TT>include</TT>, <TT>lib</TT>, <TT>man</TT>,
|
||||
and install Lua as listed below.
|
||||
|
||||
To install Lua locally, but in some other directory, do
|
||||
"<KBD>make install INSTALL_TOP=xxx</KBD>", where xxx is your chosen directory.
|
||||
|
||||
<DL CLASS="display">
|
||||
<DT>
|
||||
bin:
|
||||
<DD>
|
||||
lua luac
|
||||
<DT>
|
||||
include:
|
||||
<DD>
|
||||
lua.h luaconf.h lualib.h lauxlib.h lua.hpp
|
||||
<DT>
|
||||
lib:
|
||||
<DD>
|
||||
liblua.a
|
||||
<DT>
|
||||
man/man1:
|
||||
<DD>
|
||||
lua.1 luac.1
|
||||
</DL>
|
||||
|
||||
<P>
|
||||
These are the only directories you need for development.
|
||||
If you only want to run Lua programs,
|
||||
you only need the files in bin and man.
|
||||
The files in include and lib are needed for
|
||||
embedding Lua in C or C++ programs.
|
||||
|
||||
<H3><A NAME="customization">Customization</A></H3>
|
||||
<P>
|
||||
Three kinds of things can be customized by editing a file:
|
||||
<UL>
|
||||
<LI> Where and how to install Lua — edit <TT>Makefile</TT>.
|
||||
<LI> How to build Lua — edit <TT>src/Makefile</TT>.
|
||||
<LI> Lua features — edit <TT>src/luaconf.h</TT>.
|
||||
</UL>
|
||||
|
||||
<P>
|
||||
You don't actually need to edit the Makefiles because you may set the
|
||||
relevant variables in the command line when invoking make.
|
||||
Nevertheless, it's probably best to edit and save the Makefiles to
|
||||
record the changes you need.
|
||||
|
||||
<P>
|
||||
On the other hand, if you need to customize some Lua features, you'll need
|
||||
to edit <TT>src/luaconf.h</TT> before building and installing Lua.
|
||||
The edited file will be the one installed, and
|
||||
it will be used by any Lua clients that you build, to ensure consistency.
|
||||
Further customization is available to experts by editing the Lua sources.
|
||||
|
||||
<P>
|
||||
We strongly recommend that you enable dynamic loading in <TT>src/luaconf.h</TT>.
|
||||
This is done automatically for all platforms listed above that have
|
||||
this feature and also for Windows.
|
||||
|
||||
<H3><A NAME="other">Building Lua on other systems</A></H3>
|
||||
|
||||
<P>
|
||||
If you're not using the usual Unix tools, then the instructions for
|
||||
building Lua depend on the compiler you use. You'll need to create
|
||||
projects (or whatever your compiler uses) for building the library,
|
||||
the interpreter, and the compiler, as follows:
|
||||
|
||||
<DL CLASS="display">
|
||||
<DT>
|
||||
library:
|
||||
<DD>
|
||||
lapi.c lcode.c lctype.c ldebug.c ldo.c ldump.c lfunc.c lgc.c llex.c
|
||||
lmem.c lobject.c lopcodes.c lparser.c lstate.c lstring.c ltable.c
|
||||
ltm.c lundump.c lvm.c lzio.c
|
||||
lauxlib.c lbaselib.c lbitlib.c lcorolib.c ldblib.c liolib.c
|
||||
lmathlib.c loslib.c lstrlib.c ltablib.c loadlib.c linit.c
|
||||
<DT>
|
||||
interpreter:
|
||||
<DD>
|
||||
library, lua.c
|
||||
<DT>
|
||||
compiler:
|
||||
<DD>
|
||||
library, luac.c
|
||||
</DL>
|
||||
|
||||
<P>
|
||||
To use Lua as a library in your own programs you'll need to know how to
|
||||
create and use libraries with your compiler. Moreover, to dynamically load
|
||||
C libraries for Lua you'll need to know how to create dynamic libraries
|
||||
and you'll need to make sure that the Lua API functions are accessible to
|
||||
those dynamic libraries — but <EM>don't</EM> link the Lua library
|
||||
into each dynamic library. For Unix, we recommend that the Lua library
|
||||
be linked statically into the host program and its symbols exported for
|
||||
dynamic linking; <TT>src/Makefile</TT> does this for the Lua interpreter.
|
||||
For Windows, we recommend that the Lua library be a DLL.
|
||||
|
||||
<P>
|
||||
As mentioned above, you may edit <TT>src/luaconf.h</TT> to customize
|
||||
some features before building Lua.
|
||||
|
||||
<H2><A NAME="changes">Changes since Lua 5.1</A></H2>
|
||||
|
||||
<P>
|
||||
Here are the main changes introduced in Lua 5.2.
|
||||
The
|
||||
<A HREF="contents.html">reference manual</A>
|
||||
lists the
|
||||
<A HREF="manual.html#8">incompatibilities</A> that had to be introduced.
|
||||
|
||||
<H3>Main changes</H3>
|
||||
<UL>
|
||||
<LI> yieldable pcall and metamethods
|
||||
<LI> new lexical scheme for globals
|
||||
<LI> ephemeron tables
|
||||
<LI> new library for bitwise operations
|
||||
<LI> light C functions
|
||||
<LI> emergency garbage collector
|
||||
<LI> <CODE>goto</CODE> statement
|
||||
<LI> finalizers for tables
|
||||
</UL>
|
||||
|
||||
Here are the other changes introduced in Lua 5.2:
|
||||
<H3>Language</H3>
|
||||
<UL>
|
||||
<LI> no more fenv for threads or functions
|
||||
<LI> tables honor the <CODE>__len</CODE> metamethod
|
||||
<LI> hex and <CODE>\z</CODE> escapes in strings
|
||||
<LI> support for hexadecimal floats
|
||||
<LI> order metamethods work for different types
|
||||
<LI> no more verification of opcode consistency
|
||||
<LI> hook event "tail return" replaced by "tail call"
|
||||
<LI> empty statement
|
||||
<LI> <CODE>break</CODE> statement may appear in the middle of a block
|
||||
</UL>
|
||||
|
||||
<H3>Libraries</H3>
|
||||
<UL>
|
||||
<LI> arguments for function called through <CODE>xpcall</CODE>
|
||||
<LI> optional 'mode' argument to load and loadfile (to control binary x text)
|
||||
<LI> optional 'env' argument to load and loadfile (environment for loaded chunk)
|
||||
<LI> <CODE>loadlib</CODE> may load libraries with global names (RTLD_GLOBAL)
|
||||
<LI> new function <CODE>package.searchpath</CODE>
|
||||
<LI> modules receive their paths when loaded
|
||||
<LI> optional base in <CODE>math.log</CODE>
|
||||
<LI> optional separator in <CODE>string.rep</CODE>
|
||||
<LI> <CODE>file:write</CODE> returns <CODE>file</CODE>
|
||||
<LI> closing a pipe returns exit status
|
||||
<LI> <CODE>os.exit</CODE> may close state
|
||||
<LI> new metamethods <CODE>__pairs</CODE> and <CODE>__ipairs</CODE>
|
||||
<LI> new option 'isrunning' for <CODE>collectgarbage</CODE> and <CODE>lua_gc</CODE>
|
||||
<LI> frontier patterns
|
||||
<LI> <CODE>\0</CODE> in patterns
|
||||
<LI> new option <CODE>*L</CODE> for <CODE>io.read</CODE>
|
||||
<LI> options for <CODE>io.lines</CODE>
|
||||
<LI> <CODE>debug.getlocal</CODE> can access function varargs
|
||||
</UL>
|
||||
|
||||
<H3>C API</H3>
|
||||
<UL>
|
||||
<LI> main thread predefined in the registry
|
||||
<LI> new functions
|
||||
<CODE>lua_absindex</CODE>,
|
||||
<CODE>lua_arith</CODE>,
|
||||
<CODE>lua_compare</CODE>,
|
||||
<CODE>lua_copy</CODE>,
|
||||
<CODE>lua_len</CODE>,
|
||||
<CODE>lua_rawgetp</CODE>,
|
||||
<CODE>lua_rawsetp</CODE>,
|
||||
<CODE>lua_upvalueid</CODE>,
|
||||
<CODE>lua_upvaluejoin</CODE>,
|
||||
<CODE>lua_version</CODE>.
|
||||
<LI> new functions
|
||||
<CODE>luaL_checkversion</CODE>,
|
||||
<CODE>luaL_setmetatable</CODE>,
|
||||
<CODE>luaL_testudata</CODE>,
|
||||
<CODE>luaL_tolstring</CODE>.
|
||||
<LI> <CODE>lua_pushstring</CODE> and <CODE>pushlstring</CODE> return string
|
||||
<LI> <CODE>nparams</CODE> and <CODE>isvararg</CODE> available in debug API
|
||||
<LI> new <CODE>lua_Unsigned</CODE>
|
||||
</UL>
|
||||
|
||||
<H3>Implementation</H3>
|
||||
<UL>
|
||||
<LI> max constants per function raised to 2<SUP>26</SUP>
|
||||
<LI> generational mode for garbage collection (experimental)
|
||||
<LI> NaN trick (experimental)
|
||||
<LI> internal (immutable) version of ctypes
|
||||
<LI> simpler implementation for string buffers
|
||||
<LI> parser uses much less C-stack space (no more auto arrays)
|
||||
</UL>
|
||||
|
||||
<H3>Lua standalone interpreter</H3>
|
||||
<UL>
|
||||
<LI> new <CODE>-E</CODE> option to avoid environment variables
|
||||
<LI> handling of non-string error messages
|
||||
</UL>
|
||||
|
||||
<H2><A NAME="license">License</A></H2>
|
||||
<A HREF="http://www.opensource.org/docs/definition.php">
|
||||
<IMG SRC="osi-certified-72x60.png" ALIGN="right" BORDER="0" ALT="[osi certified]" STYLE="padding-left: 30px ;">
|
||||
</A>
|
||||
|
||||
<P>
|
||||
Lua is free software distributed under the terms of the
|
||||
<A HREF="http://www.opensource.org/licenses/mit-license.html">MIT license</A>
|
||||
reproduced below;
|
||||
it may be used for any purpose, including commercial purposes,
|
||||
at absolutely no cost without having to ask us.
|
||||
|
||||
The only requirement is that if you do use Lua,
|
||||
then you should give us credit by including the appropriate copyright notice somewhere in your product or its documentation.
|
||||
|
||||
For details, see
|
||||
<A HREF="http://www.lua.org/license.html">this</A>.
|
||||
|
||||
<BLOCKQUOTE STYLE="padding-bottom: 0em">
|
||||
Copyright © 1994–2013 Lua.org, PUC-Rio.
|
||||
|
||||
<P>
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
<P>
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
<P>
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
</BLOCKQUOTE>
|
||||
<P>
|
||||
|
||||
<HR>
|
||||
<SMALL CLASS="footer">
|
||||
Last update:
|
||||
Sat Nov 9 22:39:16 BRST 2013
|
||||
</SMALL>
|
||||
<!--
|
||||
Last change: revised for Lua 5.2.3
|
||||
-->
|
||||
|
||||
</BODY>
|
||||
</HTML>
|
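The C API additions listed in the readme above (luaL_setmetatable, luaL_checkversion, and friends) go hand in hand with the 5.2 module-registration idiom built on luaL_Reg and luaL_newlib from the auxiliary library. A sketch follows; the module name "uefi" and its single function are invented for illustration only.

```c
/* Sketch of the Lua 5.2 module-registration idiom (luaL_Reg + luaL_newlib).
 * The module and function names are illustrative, not part of this tree. */
#include <Lua/lua.h>
#include <Lua/lauxlib.h>

static int
l_banner(lua_State *L)
{
  lua_pushstring(L, "Hello from a C module");   /* push one result string */
  return 1;                                     /* number of results      */
}

static const luaL_Reg uefilib[] = {
  { "banner", l_banner },
  { NULL, NULL }                                /* sentinel entry         */
};

/* Loader: register with luaL_requiref from the host, or let require()
 * find it when the module is linked in. */
int
luaopen_uefi(lua_State *L)
{
  luaL_newlib(L, uefilib);   /* create the module table and register its functions */
  return 1;
}
```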
12
AppPkg/Applications/Lua/scripts/Fact.lua
Normal file
@@ -0,0 +1,12 @@
-- defines a factorial function
function fact (n)
  if n == 0 then
    return 1
  else
    return n * fact(n-1)
  end
end

print("enter a number:")
a = io.read("*number")        -- read a number
print(fact(a))
1
AppPkg/Applications/Lua/scripts/Hello.lua
Normal file
@@ -0,0 +1 @@
print("Hello UEFI World")
187
AppPkg/Applications/Lua/src/Makefile
Normal file
@@ -0,0 +1,187 @@
|
||||
# Makefile for building Lua
|
||||
# See ../doc/readme.html for installation and customization instructions.
|
||||
|
||||
# == CHANGE THE SETTINGS BELOW TO SUIT YOUR ENVIRONMENT =======================
|
||||
|
||||
# Your platform. See PLATS for possible values.
|
||||
PLAT= none
|
||||
|
||||
CC= gcc
|
||||
CFLAGS= -O2 -Wall -DLUA_COMPAT_ALL $(SYSCFLAGS) $(MYCFLAGS)
|
||||
LDFLAGS= $(SYSLDFLAGS) $(MYLDFLAGS)
|
||||
LIBS= -lm $(SYSLIBS) $(MYLIBS)
|
||||
|
||||
AR= ar rcu
|
||||
RANLIB= ranlib
|
||||
RM= rm -f
|
||||
|
||||
SYSCFLAGS=
|
||||
SYSLDFLAGS=
|
||||
SYSLIBS=
|
||||
|
||||
MYCFLAGS=
|
||||
MYLDFLAGS=
|
||||
MYLIBS=
|
||||
MYOBJS=
|
||||
|
||||
# == END OF USER SETTINGS -- NO NEED TO CHANGE ANYTHING BELOW THIS LINE =======
|
||||
|
||||
PLATS= aix ansi bsd freebsd generic linux macosx mingw posix solaris
|
||||
|
||||
LUA_A= liblua.a
|
||||
CORE_O= lapi.o lcode.o lctype.o ldebug.o ldo.o ldump.o lfunc.o lgc.o llex.o \
|
||||
lmem.o lobject.o lopcodes.o lparser.o lstate.o lstring.o ltable.o \
|
||||
ltm.o lundump.o lvm.o lzio.o
|
||||
LIB_O= lauxlib.o lbaselib.o lbitlib.o lcorolib.o ldblib.o liolib.o \
|
||||
lmathlib.o loslib.o lstrlib.o ltablib.o loadlib.o linit.o
|
||||
BASE_O= $(CORE_O) $(LIB_O) $(MYOBJS)
|
||||
|
||||
LUA_T= lua
|
||||
LUA_O= lua.o
|
||||
|
||||
LUAC_T= luac
|
||||
LUAC_O= luac.o
|
||||
|
||||
ALL_O= $(BASE_O) $(LUA_O) $(LUAC_O)
|
||||
ALL_T= $(LUA_A) $(LUA_T) $(LUAC_T)
|
||||
ALL_A= $(LUA_A)
|
||||
|
||||
# Targets start here.
|
||||
default: $(PLAT)
|
||||
|
||||
all: $(ALL_T)
|
||||
|
||||
o: $(ALL_O)
|
||||
|
||||
a: $(ALL_A)
|
||||
|
||||
$(LUA_A): $(BASE_O)
|
||||
$(AR) $@ $(BASE_O)
|
||||
$(RANLIB) $@
|
||||
|
||||
$(LUA_T): $(LUA_O) $(LUA_A)
|
||||
$(CC) -o $@ $(LDFLAGS) $(LUA_O) $(LUA_A) $(LIBS)
|
||||
|
||||
$(LUAC_T): $(LUAC_O) $(LUA_A)
|
||||
$(CC) -o $@ $(LDFLAGS) $(LUAC_O) $(LUA_A) $(LIBS)
|
||||
|
||||
clean:
|
||||
$(RM) $(ALL_T) $(ALL_O)
|
||||
|
||||
depend:
|
||||
@$(CC) $(CFLAGS) -MM l*.c
|
||||
|
||||
echo:
|
||||
@echo "PLAT= $(PLAT)"
|
||||
@echo "CC= $(CC)"
|
||||
@echo "CFLAGS= $(CFLAGS)"
|
||||
@echo "LDFLAGS= $(SYSLDFLAGS)"
|
||||
@echo "LIBS= $(LIBS)"
|
||||
@echo "AR= $(AR)"
|
||||
@echo "RANLIB= $(RANLIB)"
|
||||
@echo "RM= $(RM)"
|
||||
|
||||
# Convenience targets for popular platforms
|
||||
ALL= all
|
||||
|
||||
none:
|
||||
@echo "Please do 'make PLATFORM' where PLATFORM is one of these:"
|
||||
@echo " $(PLATS)"
|
||||
|
||||
aix:
|
||||
$(MAKE) $(ALL) CC="xlc" CFLAGS="-O2 -DLUA_USE_POSIX -DLUA_USE_DLOPEN" SYSLIBS="-ldl" SYSLDFLAGS="-brtl -bexpall"
|
||||
|
||||
ansi:
|
||||
$(MAKE) $(ALL) SYSCFLAGS="-DLUA_ANSI"
|
||||
|
||||
bsd:
|
||||
$(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_POSIX -DLUA_USE_DLOPEN" SYSLIBS="-Wl,-E"
|
||||
|
||||
freebsd:
|
||||
$(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_LINUX" SYSLIBS="-Wl,-E -lreadline"
|
||||
|
||||
generic: $(ALL)
|
||||
|
||||
linux:
|
||||
$(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_LINUX" SYSLIBS="-Wl,-E -ldl -lreadline"
|
||||
|
||||
macosx:
|
||||
$(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_MACOSX" SYSLIBS="-lreadline" CC=cc
|
||||
|
||||
mingw:
|
||||
$(MAKE) "LUA_A=lua52.dll" "LUA_T=lua.exe" \
|
||||
"AR=$(CC) -shared -o" "RANLIB=strip --strip-unneeded" \
|
||||
"SYSCFLAGS=-DLUA_BUILD_AS_DLL" "SYSLIBS=" "SYSLDFLAGS=-s" lua.exe
|
||||
$(MAKE) "LUAC_T=luac.exe" luac.exe
|
||||
|
||||
posix:
|
||||
$(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_POSIX"
|
||||
|
||||
solaris:
|
||||
$(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_POSIX -DLUA_USE_DLOPEN" SYSLIBS="-ldl"
|
||||
|
||||
# list targets that do not create files (but not all makes understand .PHONY)
|
||||
.PHONY: all $(PLATS) default o a clean depend echo none
|
||||
|
||||
# DO NOT DELETE
|
||||
|
||||
lapi.o: lapi.c lua.h luaconf.h lapi.h llimits.h lstate.h lobject.h ltm.h \
|
||||
lzio.h lmem.h ldebug.h ldo.h lfunc.h lgc.h lstring.h ltable.h lundump.h \
|
||||
lvm.h
|
||||
lauxlib.o: lauxlib.c lua.h luaconf.h lauxlib.h
|
||||
lbaselib.o: lbaselib.c lua.h luaconf.h lauxlib.h lualib.h
|
||||
lbitlib.o: lbitlib.c lua.h luaconf.h lauxlib.h lualib.h
|
||||
lcode.o: lcode.c lua.h luaconf.h lcode.h llex.h lobject.h llimits.h \
|
||||
lzio.h lmem.h lopcodes.h lparser.h ldebug.h lstate.h ltm.h ldo.h lgc.h \
|
||||
lstring.h ltable.h lvm.h
|
||||
lcorolib.o: lcorolib.c lua.h luaconf.h lauxlib.h lualib.h
|
||||
lctype.o: lctype.c lctype.h lua.h luaconf.h llimits.h
|
||||
ldblib.o: ldblib.c lua.h luaconf.h lauxlib.h lualib.h
|
||||
ldebug.o: ldebug.c lua.h luaconf.h lapi.h llimits.h lstate.h lobject.h \
|
||||
ltm.h lzio.h lmem.h lcode.h llex.h lopcodes.h lparser.h ldebug.h ldo.h \
|
||||
lfunc.h lstring.h lgc.h ltable.h lvm.h
|
||||
ldo.o: ldo.c lua.h luaconf.h lapi.h llimits.h lstate.h lobject.h ltm.h \
|
||||
lzio.h lmem.h ldebug.h ldo.h lfunc.h lgc.h lopcodes.h lparser.h \
|
||||
lstring.h ltable.h lundump.h lvm.h
|
||||
ldump.o: ldump.c lua.h luaconf.h lobject.h llimits.h lstate.h ltm.h \
|
||||
lzio.h lmem.h lundump.h
|
||||
lfunc.o: lfunc.c lua.h luaconf.h lfunc.h lobject.h llimits.h lgc.h \
|
||||
lstate.h ltm.h lzio.h lmem.h
|
||||
lgc.o: lgc.c lua.h luaconf.h ldebug.h lstate.h lobject.h llimits.h ltm.h \
|
||||
lzio.h lmem.h ldo.h lfunc.h lgc.h lstring.h ltable.h
|
||||
linit.o: linit.c lua.h luaconf.h lualib.h lauxlib.h
|
||||
liolib.o: liolib.c lua.h luaconf.h lauxlib.h lualib.h
|
||||
llex.o: llex.c lua.h luaconf.h lctype.h llimits.h ldo.h lobject.h \
|
||||
lstate.h ltm.h lzio.h lmem.h llex.h lparser.h lstring.h lgc.h ltable.h
|
||||
lmathlib.o: lmathlib.c lua.h luaconf.h lauxlib.h lualib.h
|
||||
lmem.o: lmem.c lua.h luaconf.h ldebug.h lstate.h lobject.h llimits.h \
|
||||
ltm.h lzio.h lmem.h ldo.h lgc.h
|
||||
loadlib.o: loadlib.c lua.h luaconf.h lauxlib.h lualib.h
|
||||
lobject.o: lobject.c lua.h luaconf.h lctype.h llimits.h ldebug.h lstate.h \
|
||||
lobject.h ltm.h lzio.h lmem.h ldo.h lstring.h lgc.h lvm.h
|
||||
lopcodes.o: lopcodes.c lopcodes.h llimits.h lua.h luaconf.h
|
||||
loslib.o: loslib.c lua.h luaconf.h lauxlib.h lualib.h
|
||||
lparser.o: lparser.c lua.h luaconf.h lcode.h llex.h lobject.h llimits.h \
|
||||
lzio.h lmem.h lopcodes.h lparser.h ldebug.h lstate.h ltm.h ldo.h lfunc.h \
|
||||
lstring.h lgc.h ltable.h
|
||||
lstate.o: lstate.c lua.h luaconf.h lapi.h llimits.h lstate.h lobject.h \
|
||||
ltm.h lzio.h lmem.h ldebug.h ldo.h lfunc.h lgc.h llex.h lstring.h \
|
||||
ltable.h
|
||||
lstring.o: lstring.c lua.h luaconf.h lmem.h llimits.h lobject.h lstate.h \
|
||||
ltm.h lzio.h lstring.h lgc.h
|
||||
lstrlib.o: lstrlib.c lua.h luaconf.h lauxlib.h lualib.h
|
||||
ltable.o: ltable.c lua.h luaconf.h ldebug.h lstate.h lobject.h llimits.h \
|
||||
ltm.h lzio.h lmem.h ldo.h lgc.h lstring.h ltable.h lvm.h
|
||||
ltablib.o: ltablib.c lua.h luaconf.h lauxlib.h lualib.h
|
||||
ltm.o: ltm.c lua.h luaconf.h lobject.h llimits.h lstate.h ltm.h lzio.h \
|
||||
lmem.h lstring.h lgc.h ltable.h
|
||||
lua.o: lua.c lua.h luaconf.h lauxlib.h lualib.h
|
||||
luac.o: luac.c lua.h luaconf.h lauxlib.h lobject.h llimits.h lstate.h \
|
||||
ltm.h lzio.h lmem.h lundump.h ldebug.h lopcodes.h
|
||||
lundump.o: lundump.c lua.h luaconf.h ldebug.h lstate.h lobject.h \
|
||||
llimits.h ltm.h lzio.h lmem.h ldo.h lfunc.h lstring.h lgc.h lundump.h
|
||||
lvm.o: lvm.c lua.h luaconf.h ldebug.h lstate.h lobject.h llimits.h ltm.h \
|
||||
lzio.h lmem.h ldo.h lfunc.h lgc.h lopcodes.h lstring.h ltable.h lvm.h
|
||||
lzio.o: lzio.c lua.h luaconf.h llimits.h lmem.h lstate.h lobject.h ltm.h \
|
||||
lzio.h
|
||||
|
1287
AppPkg/Applications/Lua/src/lapi.c
Normal file
File diff suppressed because it is too large
24
AppPkg/Applications/Lua/src/lapi.h
Normal file
@ -0,0 +1,24 @@
/*
** $Id: lapi.h,v 2.7.1.1 2013/04/12 18:48:47 roberto Exp $
** Auxiliary functions from Lua API
** See Copyright Notice in lua.h
*/

#ifndef lapi_h
#define lapi_h


#include "llimits.h"
#include "lstate.h"

#define api_incr_top(L)   {L->top++; api_check(L, L->top <= L->ci->top, \
                "stack overflow");}

#define adjustresults(L,nres) \
    { if ((nres) == LUA_MULTRET && L->ci->top < L->top) L->ci->top = L->top; }

#define api_checknelems(L,n)  api_check(L, (n) < (L->top - L->ci->func), \
                  "not enough elements in the stack")


#endif
959
AppPkg/Applications/Lua/src/lauxlib.c
Normal file
@ -0,0 +1,959 @@
|
||||
/*
|
||||
** $Id: lauxlib.c,v 1.248.1.1 2013/04/12 18:48:47 roberto Exp $
|
||||
** Auxiliary functions for building Lua libraries
|
||||
** See Copyright Notice in lua.h
|
||||
*/
|
||||
|
||||
|
||||
#include <errno.h>
|
||||
#include <stdarg.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
|
||||
/* This file uses only the official API of Lua.
|
||||
** Any function declared here could be written as an application function.
|
||||
*/
|
||||
|
||||
#define lauxlib_c
|
||||
#define LUA_LIB
|
||||
|
||||
#include "lua.h"
|
||||
|
||||
#include "lauxlib.h"
|
||||
|
||||
|
||||
/*
|
||||
** {======================================================
|
||||
** Traceback
|
||||
** =======================================================
|
||||
*/
|
||||
|
||||
|
||||
#define LEVELS1 12 /* size of the first part of the stack */
|
||||
#define LEVELS2 10 /* size of the second part of the stack */
|
||||
|
||||
|
||||
|
||||
/*
|
||||
** search for 'objidx' in table at index -1.
|
||||
** return 1 + string at top if find a good name.
|
||||
*/
|
||||
static int findfield (lua_State *L, int objidx, int level) {
|
||||
if (level == 0 || !lua_istable(L, -1))
|
||||
return 0; /* not found */
|
||||
lua_pushnil(L); /* start 'next' loop */
|
||||
while (lua_next(L, -2)) { /* for each pair in table */
|
||||
if (lua_type(L, -2) == LUA_TSTRING) { /* ignore non-string keys */
|
||||
if (lua_rawequal(L, objidx, -1)) { /* found object? */
|
||||
lua_pop(L, 1); /* remove value (but keep name) */
|
||||
return 1;
|
||||
}
|
||||
else if (findfield(L, objidx, level - 1)) { /* try recursively */
|
||||
lua_remove(L, -2); /* remove table (but keep name) */
|
||||
lua_pushliteral(L, ".");
|
||||
lua_insert(L, -2); /* place '.' between the two names */
|
||||
lua_concat(L, 3);
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
lua_pop(L, 1); /* remove value */
|
||||
}
|
||||
return 0; /* not found */
|
||||
}
|
||||
|
||||
|
||||
static int pushglobalfuncname (lua_State *L, lua_Debug *ar) {
|
||||
int top = lua_gettop(L);
|
||||
lua_getinfo(L, "f", ar); /* push function */
|
||||
lua_pushglobaltable(L);
|
||||
if (findfield(L, top + 1, 2)) {
|
||||
lua_copy(L, -1, top + 1); /* move name to proper place */
|
||||
lua_pop(L, 2); /* remove pushed values */
|
||||
return 1;
|
||||
}
|
||||
else {
|
||||
lua_settop(L, top); /* remove function and global table */
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static void pushfuncname (lua_State *L, lua_Debug *ar) {
|
||||
if (*ar->namewhat != '\0') /* is there a name? */
|
||||
lua_pushfstring(L, "function " LUA_QS, ar->name);
|
||||
else if (*ar->what == 'm') /* main? */
|
||||
lua_pushliteral(L, "main chunk");
|
||||
else if (*ar->what == 'C') {
|
||||
if (pushglobalfuncname(L, ar)) {
|
||||
lua_pushfstring(L, "function " LUA_QS, lua_tostring(L, -1));
|
||||
lua_remove(L, -2); /* remove name */
|
||||
}
|
||||
else
|
||||
lua_pushliteral(L, "?");
|
||||
}
|
||||
else
|
||||
lua_pushfstring(L, "function <%s:%d>", ar->short_src, ar->linedefined);
|
||||
}
|
||||
|
||||
|
||||
static int countlevels (lua_State *L) {
|
||||
lua_Debug ar;
|
||||
int li = 1, le = 1;
|
||||
/* find an upper bound */
|
||||
while (lua_getstack(L, le, &ar)) { li = le; le *= 2; }
|
||||
/* do a binary search */
|
||||
while (li < le) {
|
||||
int m = (li + le)/2;
|
||||
if (lua_getstack(L, m, &ar)) li = m + 1;
|
||||
else le = m;
|
||||
}
|
||||
return le - 1;
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_traceback (lua_State *L, lua_State *L1,
|
||||
const char *msg, int level) {
|
||||
lua_Debug ar;
|
||||
int top = lua_gettop(L);
|
||||
int numlevels = countlevels(L1);
|
||||
int mark = (numlevels > LEVELS1 + LEVELS2) ? LEVELS1 : 0;
|
||||
if (msg) lua_pushfstring(L, "%s\n", msg);
|
||||
lua_pushliteral(L, "stack traceback:");
|
||||
while (lua_getstack(L1, level++, &ar)) {
|
||||
if (level == mark) { /* too many levels? */
|
||||
lua_pushliteral(L, "\n\t..."); /* add a '...' */
|
||||
level = numlevels - LEVELS2; /* and skip to last ones */
|
||||
}
|
||||
else {
|
||||
lua_getinfo(L1, "Slnt", &ar);
|
||||
lua_pushfstring(L, "\n\t%s:", ar.short_src);
|
||||
if (ar.currentline > 0)
|
||||
lua_pushfstring(L, "%d:", ar.currentline);
|
||||
lua_pushliteral(L, " in ");
|
||||
pushfuncname(L, &ar);
|
||||
if (ar.istailcall)
|
||||
lua_pushliteral(L, "\n\t(...tail calls...)");
|
||||
lua_concat(L, lua_gettop(L) - top);
|
||||
}
|
||||
}
|
||||
lua_concat(L, lua_gettop(L) - top);
|
||||
}
|
||||
|
||||
/* }====================================================== */
|
||||
|
||||
|
||||
/*
|
||||
** {======================================================
|
||||
** Error-report functions
|
||||
** =======================================================
|
||||
*/
|
||||
|
||||
LUALIB_API int luaL_argerror (lua_State *L, int narg, const char *extramsg) {
|
||||
lua_Debug ar;
|
||||
if (!lua_getstack(L, 0, &ar)) /* no stack frame? */
|
||||
return luaL_error(L, "bad argument #%d (%s)", narg, extramsg);
|
||||
lua_getinfo(L, "n", &ar);
|
||||
if (strcmp(ar.namewhat, "method") == 0) {
|
||||
narg--; /* do not count `self' */
|
||||
if (narg == 0) /* error is in the self argument itself? */
|
||||
return luaL_error(L, "calling " LUA_QS " on bad self (%s)",
|
||||
ar.name, extramsg);
|
||||
}
|
||||
if (ar.name == NULL)
|
||||
ar.name = (pushglobalfuncname(L, &ar)) ? lua_tostring(L, -1) : "?";
|
||||
return luaL_error(L, "bad argument #%d to " LUA_QS " (%s)",
|
||||
narg, ar.name, extramsg);
|
||||
}
|
||||
|
||||
|
||||
static int typeerror (lua_State *L, int narg, const char *tname) {
|
||||
const char *msg = lua_pushfstring(L, "%s expected, got %s",
|
||||
tname, luaL_typename(L, narg));
|
||||
return luaL_argerror(L, narg, msg);
|
||||
}
|
||||
|
||||
|
||||
static void tag_error (lua_State *L, int narg, int tag) {
|
||||
typeerror(L, narg, lua_typename(L, tag));
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_where (lua_State *L, int level) {
|
||||
lua_Debug ar;
|
||||
if (lua_getstack(L, level, &ar)) { /* check function at level */
|
||||
lua_getinfo(L, "Sl", &ar); /* get info about it */
|
||||
if (ar.currentline > 0) { /* is there info? */
|
||||
lua_pushfstring(L, "%s:%d: ", ar.short_src, ar.currentline);
|
||||
return;
|
||||
}
|
||||
}
|
||||
lua_pushliteral(L, ""); /* else, no information available... */
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API int luaL_error (lua_State *L, const char *fmt, ...) {
|
||||
va_list argp;
|
||||
va_start(argp, fmt);
|
||||
luaL_where(L, 1);
|
||||
lua_pushvfstring(L, fmt, argp);
|
||||
va_end(argp);
|
||||
lua_concat(L, 2);
|
||||
return lua_error(L);
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API int luaL_fileresult (lua_State *L, int stat, const char *fname) {
|
||||
int en = errno; /* calls to Lua API may change this value */
|
||||
if (stat) {
|
||||
lua_pushboolean(L, 1);
|
||||
return 1;
|
||||
}
|
||||
else {
|
||||
lua_pushnil(L);
|
||||
if (fname)
|
||||
lua_pushfstring(L, "%s: %s", fname, strerror(en));
|
||||
else
|
||||
lua_pushstring(L, strerror(en));
|
||||
lua_pushinteger(L, en);
|
||||
return 3;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#if !defined(inspectstat) /* { */
|
||||
|
||||
#if defined(LUA_USE_POSIX)
|
||||
|
||||
#include <sys/wait.h>
|
||||
|
||||
/*
|
||||
** use appropriate macros to interpret 'pclose' return status
|
||||
*/
|
||||
#define inspectstat(stat,what) \
|
||||
if (WIFEXITED(stat)) { stat = WEXITSTATUS(stat); } \
|
||||
else if (WIFSIGNALED(stat)) { stat = WTERMSIG(stat); what = "signal"; }
|
||||
|
||||
#else
|
||||
|
||||
#define inspectstat(stat,what) /* no op */
|
||||
|
||||
#endif
|
||||
|
||||
#endif /* } */
|
||||
|
||||
|
||||
LUALIB_API int luaL_execresult (lua_State *L, int stat) {
|
||||
const char *what = "exit"; /* type of termination */
|
||||
if (stat == -1) /* error? */
|
||||
return luaL_fileresult(L, 0, NULL);
|
||||
else {
|
||||
inspectstat(stat, what); /* interpret result */
|
||||
if (*what == 'e' && stat == 0) /* successful termination? */
|
||||
lua_pushboolean(L, 1);
|
||||
else
|
||||
lua_pushnil(L);
|
||||
lua_pushstring(L, what);
|
||||
lua_pushinteger(L, stat);
|
||||
return 3; /* return true/nil,what,code */
|
||||
}
|
||||
}
|
||||
|
||||
/* }====================================================== */
|
||||
|
||||
|
||||
/*
|
||||
** {======================================================
|
||||
** Userdata's metatable manipulation
|
||||
** =======================================================
|
||||
*/
|
||||
|
||||
LUALIB_API int luaL_newmetatable (lua_State *L, const char *tname) {
|
||||
luaL_getmetatable(L, tname); /* try to get metatable */
|
||||
if (!lua_isnil(L, -1)) /* name already in use? */
|
||||
return 0; /* leave previous value on top, but return 0 */
|
||||
lua_pop(L, 1);
|
||||
lua_newtable(L); /* create metatable */
|
||||
lua_pushvalue(L, -1);
|
||||
lua_setfield(L, LUA_REGISTRYINDEX, tname); /* registry.name = metatable */
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_setmetatable (lua_State *L, const char *tname) {
|
||||
luaL_getmetatable(L, tname);
|
||||
lua_setmetatable(L, -2);
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void *luaL_testudata (lua_State *L, int ud, const char *tname) {
|
||||
void *p = lua_touserdata(L, ud);
|
||||
if (p != NULL) { /* value is a userdata? */
|
||||
if (lua_getmetatable(L, ud)) { /* does it have a metatable? */
|
||||
luaL_getmetatable(L, tname); /* get correct metatable */
|
||||
if (!lua_rawequal(L, -1, -2)) /* not the same? */
|
||||
p = NULL; /* value is a userdata with wrong metatable */
|
||||
lua_pop(L, 2); /* remove both metatables */
|
||||
return p;
|
||||
}
|
||||
}
|
||||
return NULL; /* value is not a userdata with a metatable */
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void *luaL_checkudata (lua_State *L, int ud, const char *tname) {
|
||||
void *p = luaL_testudata(L, ud, tname);
|
||||
if (p == NULL) typeerror(L, ud, tname);
|
||||
return p;
|
||||
}
|
||||
|
||||
/* }====================================================== */
|
||||
|
||||
|
||||
/*
|
||||
** {======================================================
|
||||
** Argument check functions
|
||||
** =======================================================
|
||||
*/
|
||||
|
||||
LUALIB_API int luaL_checkoption (lua_State *L, int narg, const char *def,
|
||||
const char *const lst[]) {
|
||||
const char *name = (def) ? luaL_optstring(L, narg, def) :
|
||||
luaL_checkstring(L, narg);
|
||||
int i;
|
||||
for (i=0; lst[i]; i++)
|
||||
if (strcmp(lst[i], name) == 0)
|
||||
return i;
|
||||
return luaL_argerror(L, narg,
|
||||
lua_pushfstring(L, "invalid option " LUA_QS, name));
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_checkstack (lua_State *L, int space, const char *msg) {
|
||||
/* keep some extra space to run error routines, if needed */
|
||||
const int extra = LUA_MINSTACK;
|
||||
if (!lua_checkstack(L, space + extra)) {
|
||||
if (msg)
|
||||
luaL_error(L, "stack overflow (%s)", msg);
|
||||
else
|
||||
luaL_error(L, "stack overflow");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_checktype (lua_State *L, int narg, int t) {
|
||||
if (lua_type(L, narg) != t)
|
||||
tag_error(L, narg, t);
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_checkany (lua_State *L, int narg) {
|
||||
if (lua_type(L, narg) == LUA_TNONE)
|
||||
luaL_argerror(L, narg, "value expected");
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API const char *luaL_checklstring (lua_State *L, int narg, size_t *len) {
|
||||
const char *s = lua_tolstring(L, narg, len);
|
||||
if (!s) tag_error(L, narg, LUA_TSTRING);
|
||||
return s;
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API const char *luaL_optlstring (lua_State *L, int narg,
|
||||
const char *def, size_t *len) {
|
||||
if (lua_isnoneornil(L, narg)) {
|
||||
if (len)
|
||||
*len = (def ? strlen(def) : 0);
|
||||
return def;
|
||||
}
|
||||
else return luaL_checklstring(L, narg, len);
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API lua_Number luaL_checknumber (lua_State *L, int narg) {
|
||||
int isnum;
|
||||
lua_Number d = lua_tonumberx(L, narg, &isnum);
|
||||
if (!isnum)
|
||||
tag_error(L, narg, LUA_TNUMBER);
|
||||
return d;
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API lua_Number luaL_optnumber (lua_State *L, int narg, lua_Number def) {
|
||||
return luaL_opt(L, luaL_checknumber, narg, def);
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API lua_Integer luaL_checkinteger (lua_State *L, int narg) {
|
||||
int isnum;
|
||||
lua_Integer d = lua_tointegerx(L, narg, &isnum);
|
||||
if (!isnum)
|
||||
tag_error(L, narg, LUA_TNUMBER);
|
||||
return d;
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API lua_Unsigned luaL_checkunsigned (lua_State *L, int narg) {
|
||||
int isnum;
|
||||
lua_Unsigned d = lua_tounsignedx(L, narg, &isnum);
|
||||
if (!isnum)
|
||||
tag_error(L, narg, LUA_TNUMBER);
|
||||
return d;
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API lua_Integer luaL_optinteger (lua_State *L, int narg,
|
||||
lua_Integer def) {
|
||||
return luaL_opt(L, luaL_checkinteger, narg, def);
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API lua_Unsigned luaL_optunsigned (lua_State *L, int narg,
|
||||
lua_Unsigned def) {
|
||||
return luaL_opt(L, luaL_checkunsigned, narg, def);
|
||||
}
|
||||
|
||||
/* }====================================================== */
|
||||
|
||||
|
||||
/*
|
||||
** {======================================================
|
||||
** Generic Buffer manipulation
|
||||
** =======================================================
|
||||
*/
|
||||
|
||||
/*
|
||||
** check whether buffer is using a userdata on the stack as a temporary
|
||||
** buffer
|
||||
*/
|
||||
#define buffonstack(B) ((B)->b != (B)->initb)
|
||||
|
||||
|
||||
/*
|
||||
** returns a pointer to a free area with at least 'sz' bytes
|
||||
*/
|
||||
LUALIB_API char *luaL_prepbuffsize (luaL_Buffer *B, size_t sz) {
|
||||
lua_State *L = B->L;
|
||||
if (B->size - B->n < sz) { /* not enough space? */
|
||||
char *newbuff;
|
||||
size_t newsize = B->size * 2; /* double buffer size */
|
||||
if (newsize - B->n < sz) /* not big enough? */
|
||||
newsize = B->n + sz;
|
||||
if (newsize < B->n || newsize - B->n < sz)
|
||||
luaL_error(L, "buffer too large");
|
||||
/* create larger buffer */
|
||||
newbuff = (char *)lua_newuserdata(L, newsize * sizeof(char));
|
||||
/* move content to new buffer */
|
||||
memcpy(newbuff, B->b, B->n * sizeof(char));
|
||||
if (buffonstack(B))
|
||||
lua_remove(L, -2); /* remove old buffer */
|
||||
B->b = newbuff;
|
||||
B->size = newsize;
|
||||
}
|
||||
return &B->b[B->n];
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_addlstring (luaL_Buffer *B, const char *s, size_t l) {
|
||||
char *b = luaL_prepbuffsize(B, l);
|
||||
memcpy(b, s, l * sizeof(char));
|
||||
luaL_addsize(B, l);
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_addstring (luaL_Buffer *B, const char *s) {
|
||||
luaL_addlstring(B, s, strlen(s));
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_pushresult (luaL_Buffer *B) {
|
||||
lua_State *L = B->L;
|
||||
lua_pushlstring(L, B->b, B->n);
|
||||
if (buffonstack(B))
|
||||
lua_remove(L, -2); /* remove old buffer */
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_pushresultsize (luaL_Buffer *B, size_t sz) {
|
||||
luaL_addsize(B, sz);
|
||||
luaL_pushresult(B);
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_addvalue (luaL_Buffer *B) {
|
||||
lua_State *L = B->L;
|
||||
size_t l;
|
||||
const char *s = lua_tolstring(L, -1, &l);
|
||||
if (buffonstack(B))
|
||||
lua_insert(L, -2); /* put value below buffer */
|
||||
luaL_addlstring(B, s, l);
|
||||
lua_remove(L, (buffonstack(B)) ? -2 : -1); /* remove value */
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_buffinit (lua_State *L, luaL_Buffer *B) {
|
||||
B->L = L;
|
||||
B->b = B->initb;
|
||||
B->n = 0;
|
||||
B->size = LUAL_BUFFERSIZE;
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API char *luaL_buffinitsize (lua_State *L, luaL_Buffer *B, size_t sz) {
|
||||
luaL_buffinit(L, B);
|
||||
return luaL_prepbuffsize(B, sz);
|
||||
}
|
||||
|
||||
/* }====================================================== */
|
||||
|
||||
|
||||
/*
|
||||
** {======================================================
|
||||
** Reference system
|
||||
** =======================================================
|
||||
*/
|
||||
|
||||
/* index of free-list header */
|
||||
#define freelist 0
|
||||
|
||||
|
||||
LUALIB_API int luaL_ref (lua_State *L, int t) {
|
||||
int ref;
|
||||
if (lua_isnil(L, -1)) {
|
||||
lua_pop(L, 1); /* remove from stack */
|
||||
return LUA_REFNIL; /* `nil' has a unique fixed reference */
|
||||
}
|
||||
t = lua_absindex(L, t);
|
||||
lua_rawgeti(L, t, freelist); /* get first free element */
|
||||
ref = (int)lua_tointeger(L, -1); /* ref = t[freelist] */
|
||||
lua_pop(L, 1); /* remove it from stack */
|
||||
if (ref != 0) { /* any free element? */
|
||||
lua_rawgeti(L, t, ref); /* remove it from list */
|
||||
lua_rawseti(L, t, freelist); /* (t[freelist] = t[ref]) */
|
||||
}
|
||||
else /* no free elements */
|
||||
ref = (int)lua_rawlen(L, t) + 1; /* get a new reference */
|
||||
lua_rawseti(L, t, ref);
|
||||
return ref;
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_unref (lua_State *L, int t, int ref) {
|
||||
if (ref >= 0) {
|
||||
t = lua_absindex(L, t);
|
||||
lua_rawgeti(L, t, freelist);
|
||||
lua_rawseti(L, t, ref); /* t[ref] = t[freelist] */
|
||||
lua_pushinteger(L, ref);
|
||||
lua_rawseti(L, t, freelist); /* t[freelist] = ref */
|
||||
}
|
||||
}
|
||||
|
||||
/* }====================================================== */
|
||||
|
||||
|
||||
/*
|
||||
** {======================================================
|
||||
** Load functions
|
||||
** =======================================================
|
||||
*/
|
||||
|
||||
typedef struct LoadF {
|
||||
int n; /* number of pre-read characters */
|
||||
FILE *f; /* file being read */
|
||||
char buff[LUAL_BUFFERSIZE]; /* area for reading file */
|
||||
} LoadF;
|
||||
|
||||
|
||||
static const char *getF (lua_State *L, void *ud, size_t *size) {
|
||||
LoadF *lf = (LoadF *)ud;
|
||||
(void)L; /* not used */
|
||||
if (lf->n > 0) { /* are there pre-read characters to be read? */
|
||||
*size = lf->n; /* return them (chars already in buffer) */
|
||||
lf->n = 0; /* no more pre-read characters */
|
||||
}
|
||||
else { /* read a block from file */
|
||||
/* 'fread' can return > 0 *and* set the EOF flag. If next call to
|
||||
'getF' called 'fread', it might still wait for user input.
|
||||
The next check avoids this problem. */
|
||||
if (feof(lf->f)) return NULL;
|
||||
*size = fread(lf->buff, 1, sizeof(lf->buff), lf->f); /* read block */
|
||||
}
|
||||
return lf->buff;
|
||||
}
|
||||
|
||||
|
||||
static int errfile (lua_State *L, const char *what, int fnameindex) {
|
||||
const char *serr = strerror(errno);
|
||||
const char *filename = lua_tostring(L, fnameindex) + 1;
|
||||
lua_pushfstring(L, "cannot %s %s: %s", what, filename, serr);
|
||||
lua_remove(L, fnameindex);
|
||||
return LUA_ERRFILE;
|
||||
}
|
||||
|
||||
|
||||
static int skipBOM (LoadF *lf) {
|
||||
const char *p = "\xEF\xBB\xBF"; /* Utf8 BOM mark */
|
||||
int c;
|
||||
lf->n = 0;
|
||||
do {
|
||||
c = getc(lf->f);
|
||||
if (c == EOF || c != *(const unsigned char *)p++) return c;
|
||||
lf->buff[lf->n++] = (char)c; /* to be read by the parser */
|
||||
} while (*p != '\0');
|
||||
lf->n = 0; /* prefix matched; discard it */
|
||||
return getc(lf->f); /* return next character */
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
** reads the first character of file 'f' and skips an optional BOM mark
|
||||
** in its beginning plus its first line if it starts with '#'. Returns
|
||||
** true if it skipped the first line. In any case, '*cp' has the
|
||||
** first "valid" character of the file (after the optional BOM and
|
||||
** a first-line comment).
|
||||
*/
|
||||
static int skipcomment (LoadF *lf, int *cp) {
|
||||
int c = *cp = skipBOM(lf);
|
||||
if (c == '#') { /* first line is a comment (Unix exec. file)? */
|
||||
do { /* skip first line */
|
||||
c = getc(lf->f);
|
||||
} while (c != EOF && c != '\n') ;
|
||||
*cp = getc(lf->f); /* skip end-of-line, if present */
|
||||
return 1; /* there was a comment */
|
||||
}
|
||||
else return 0; /* no comment */
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API int luaL_loadfilex (lua_State *L, const char *filename,
|
||||
const char *mode) {
|
||||
LoadF lf;
|
||||
int status, readstatus;
|
||||
int c;
|
||||
int fnameindex = lua_gettop(L) + 1; /* index of filename on the stack */
|
||||
if (filename == NULL) {
|
||||
lua_pushliteral(L, "=stdin");
|
||||
lf.f = stdin;
|
||||
}
|
||||
else {
|
||||
lua_pushfstring(L, "@%s", filename);
|
||||
lf.f = fopen(filename, "r");
|
||||
if (lf.f == NULL) return errfile(L, "open", fnameindex);
|
||||
}
|
||||
if (skipcomment(&lf, &c)) /* read initial portion */
|
||||
lf.buff[lf.n++] = '\n'; /* add line to correct line numbers */
|
||||
if (c == LUA_SIGNATURE[0] && filename) { /* binary file? */
|
||||
lf.f = freopen(filename, "rb", lf.f); /* reopen in binary mode */
|
||||
if (lf.f == NULL) return errfile(L, "reopen", fnameindex);
|
||||
skipcomment(&lf, &c); /* re-read initial portion */
|
||||
}
|
||||
if (c != EOF)
|
||||
lf.buff[lf.n++] = (char)c; /* 'c' is the first character of the stream */
|
||||
status = lua_load(L, getF, &lf, lua_tostring(L, -1), mode);
|
||||
readstatus = ferror(lf.f);
|
||||
if (filename) fclose(lf.f); /* close file (even in case of errors) */
|
||||
if (readstatus) {
|
||||
lua_settop(L, fnameindex); /* ignore results from `lua_load' */
|
||||
return errfile(L, "read", fnameindex);
|
||||
}
|
||||
lua_remove(L, fnameindex);
|
||||
return status;
|
||||
}
|
||||
|
||||
|
||||
typedef struct LoadS {
|
||||
const char *s;
|
||||
size_t size;
|
||||
} LoadS;
|
||||
|
||||
|
||||
static const char *getS (lua_State *L, void *ud, size_t *size) {
|
||||
LoadS *ls = (LoadS *)ud;
|
||||
(void)L; /* not used */
|
||||
if (ls->size == 0) return NULL;
|
||||
*size = ls->size;
|
||||
ls->size = 0;
|
||||
return ls->s;
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API int luaL_loadbufferx (lua_State *L, const char *buff, size_t size,
|
||||
const char *name, const char *mode) {
|
||||
LoadS ls;
|
||||
ls.s = buff;
|
||||
ls.size = size;
|
||||
return lua_load(L, getS, &ls, name, mode);
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API int luaL_loadstring (lua_State *L, const char *s) {
|
||||
return luaL_loadbuffer(L, s, strlen(s), s);
|
||||
}
|
||||
|
||||
/* }====================================================== */
|
||||
|
||||
|
||||
|
||||
LUALIB_API int luaL_getmetafield (lua_State *L, int obj, const char *event) {
|
||||
if (!lua_getmetatable(L, obj)) /* no metatable? */
|
||||
return 0;
|
||||
lua_pushstring(L, event);
|
||||
lua_rawget(L, -2);
|
||||
if (lua_isnil(L, -1)) {
|
||||
lua_pop(L, 2); /* remove metatable and metafield */
|
||||
return 0;
|
||||
}
|
||||
else {
|
||||
lua_remove(L, -2); /* remove only metatable */
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API int luaL_callmeta (lua_State *L, int obj, const char *event) {
|
||||
obj = lua_absindex(L, obj);
|
||||
if (!luaL_getmetafield(L, obj, event)) /* no metafield? */
|
||||
return 0;
|
||||
lua_pushvalue(L, obj);
|
||||
lua_call(L, 1, 1);
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API int luaL_len (lua_State *L, int idx) {
|
||||
int l;
|
||||
int isnum;
|
||||
lua_len(L, idx);
|
||||
l = (int)lua_tointegerx(L, -1, &isnum);
|
||||
if (!isnum)
|
||||
luaL_error(L, "object length is not a number");
|
||||
lua_pop(L, 1); /* remove object */
|
||||
return l;
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API const char *luaL_tolstring (lua_State *L, int idx, size_t *len) {
|
||||
if (!luaL_callmeta(L, idx, "__tostring")) { /* no metafield? */
|
||||
switch (lua_type(L, idx)) {
|
||||
case LUA_TNUMBER:
|
||||
case LUA_TSTRING:
|
||||
lua_pushvalue(L, idx);
|
||||
break;
|
||||
case LUA_TBOOLEAN:
|
||||
lua_pushstring(L, (lua_toboolean(L, idx) ? "true" : "false"));
|
||||
break;
|
||||
case LUA_TNIL:
|
||||
lua_pushliteral(L, "nil");
|
||||
break;
|
||||
default:
|
||||
lua_pushfstring(L, "%s: %p", luaL_typename(L, idx),
|
||||
lua_topointer(L, idx));
|
||||
break;
|
||||
}
|
||||
}
|
||||
return lua_tolstring(L, -1, len);
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
** {======================================================
|
||||
** Compatibility with 5.1 module functions
|
||||
** =======================================================
|
||||
*/
|
||||
#if defined(LUA_COMPAT_MODULE)
|
||||
|
||||
static const char *luaL_findtable (lua_State *L, int idx,
|
||||
const char *fname, int szhint) {
|
||||
const char *e;
|
||||
if (idx) lua_pushvalue(L, idx);
|
||||
do {
|
||||
e = strchr(fname, '.');
|
||||
if (e == NULL) e = fname + strlen(fname);
|
||||
lua_pushlstring(L, fname, e - fname);
|
||||
lua_rawget(L, -2);
|
||||
if (lua_isnil(L, -1)) { /* no such field? */
|
||||
lua_pop(L, 1); /* remove this nil */
|
||||
lua_createtable(L, 0, (*e == '.' ? 1 : szhint)); /* new table for field */
|
||||
lua_pushlstring(L, fname, e - fname);
|
||||
lua_pushvalue(L, -2);
|
||||
lua_settable(L, -4); /* set new table into field */
|
||||
}
|
||||
else if (!lua_istable(L, -1)) { /* field has a non-table value? */
|
||||
lua_pop(L, 2); /* remove table and value */
|
||||
return fname; /* return problematic part of the name */
|
||||
}
|
||||
lua_remove(L, -2); /* remove previous table */
|
||||
fname = e + 1;
|
||||
} while (*e == '.');
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
** Count number of elements in a luaL_Reg list.
|
||||
*/
|
||||
static int libsize (const luaL_Reg *l) {
|
||||
int size = 0;
|
||||
for (; l && l->name; l++) size++;
|
||||
return size;
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
** Find or create a module table with a given name. The function
|
||||
** first looks at the _LOADED table and, if that fails, try a
|
||||
** global variable with that name. In any case, leaves on the stack
|
||||
** the module table.
|
||||
*/
|
||||
LUALIB_API void luaL_pushmodule (lua_State *L, const char *modname,
|
||||
int sizehint) {
|
||||
luaL_findtable(L, LUA_REGISTRYINDEX, "_LOADED", 1); /* get _LOADED table */
|
||||
lua_getfield(L, -1, modname); /* get _LOADED[modname] */
|
||||
if (!lua_istable(L, -1)) { /* not found? */
|
||||
lua_pop(L, 1); /* remove previous result */
|
||||
/* try global variable (and create one if it does not exist) */
|
||||
lua_pushglobaltable(L);
|
||||
if (luaL_findtable(L, 0, modname, sizehint) != NULL)
|
||||
luaL_error(L, "name conflict for module " LUA_QS, modname);
|
||||
lua_pushvalue(L, -1);
|
||||
lua_setfield(L, -3, modname); /* _LOADED[modname] = new table */
|
||||
}
|
||||
lua_remove(L, -2); /* remove _LOADED table */
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_openlib (lua_State *L, const char *libname,
|
||||
const luaL_Reg *l, int nup) {
|
||||
luaL_checkversion(L);
|
||||
if (libname) {
|
||||
luaL_pushmodule(L, libname, libsize(l)); /* get/create library table */
|
||||
lua_insert(L, -(nup + 1)); /* move library table to below upvalues */
|
||||
}
|
||||
if (l)
|
||||
luaL_setfuncs(L, l, nup);
|
||||
else
|
||||
lua_pop(L, nup); /* remove upvalues */
|
||||
}
|
||||
|
||||
#endif
|
||||
/* }====================================================== */
|
||||
|
||||
/*
|
||||
** set functions from list 'l' into table at top - 'nup'; each
|
||||
** function gets the 'nup' elements at the top as upvalues.
|
||||
** Returns with only the table at the stack.
|
||||
*/
|
||||
LUALIB_API void luaL_setfuncs (lua_State *L, const luaL_Reg *l, int nup) {
|
||||
luaL_checkversion(L);
|
||||
luaL_checkstack(L, nup, "too many upvalues");
|
||||
for (; l->name != NULL; l++) { /* fill the table with given functions */
|
||||
int i;
|
||||
for (i = 0; i < nup; i++) /* copy upvalues to the top */
|
||||
lua_pushvalue(L, -nup);
|
||||
lua_pushcclosure(L, l->func, nup); /* closure with those upvalues */
|
||||
lua_setfield(L, -(nup + 2), l->name);
|
||||
}
|
||||
lua_pop(L, nup); /* remove upvalues */
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
** ensure that stack[idx][fname] has a table and push that table
|
||||
** into the stack
|
||||
*/
|
||||
LUALIB_API int luaL_getsubtable (lua_State *L, int idx, const char *fname) {
|
||||
lua_getfield(L, idx, fname);
|
||||
if (lua_istable(L, -1)) return 1; /* table already there */
|
||||
else {
|
||||
lua_pop(L, 1); /* remove previous result */
|
||||
idx = lua_absindex(L, idx);
|
||||
lua_newtable(L);
|
||||
lua_pushvalue(L, -1); /* copy to be left at top */
|
||||
lua_setfield(L, idx, fname); /* assign new table to field */
|
||||
return 0; /* false, because did not find table there */
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
** stripped-down 'require'. Calls 'openf' to open a module,
|
||||
** registers the result in 'package.loaded' table and, if 'glb'
|
||||
** is true, also registers the result in the global table.
|
||||
** Leaves resulting module on the top.
|
||||
*/
|
||||
LUALIB_API void luaL_requiref (lua_State *L, const char *modname,
|
||||
lua_CFunction openf, int glb) {
|
||||
lua_pushcfunction(L, openf);
|
||||
lua_pushstring(L, modname); /* argument to open function */
|
||||
lua_call(L, 1, 1); /* open module */
|
||||
luaL_getsubtable(L, LUA_REGISTRYINDEX, "_LOADED");
|
||||
lua_pushvalue(L, -2); /* make copy of module (call result) */
|
||||
lua_setfield(L, -2, modname); /* _LOADED[modname] = module */
|
||||
lua_pop(L, 1); /* remove _LOADED table */
|
||||
if (glb) {
|
||||
lua_pushvalue(L, -1); /* copy of 'mod' */
|
||||
lua_setglobal(L, modname); /* _G[modname] = module */
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API const char *luaL_gsub (lua_State *L, const char *s, const char *p,
|
||||
const char *r) {
|
||||
const char *wild;
|
||||
size_t l = strlen(p);
|
||||
luaL_Buffer b;
|
||||
luaL_buffinit(L, &b);
|
||||
while ((wild = strstr(s, p)) != NULL) {
|
||||
luaL_addlstring(&b, s, wild - s); /* push prefix */
|
||||
luaL_addstring(&b, r); /* push replacement in place of pattern */
|
||||
s = wild + l; /* continue after `p' */
|
||||
}
|
||||
luaL_addstring(&b, s); /* push last suffix */
|
||||
luaL_pushresult(&b);
|
||||
return lua_tostring(L, -1);
|
||||
}
|
||||
|
||||
|
||||
static void *l_alloc (void *ud, void *ptr, size_t osize, size_t nsize) {
|
||||
(void)ud; (void)osize; /* not used */
|
||||
if (nsize == 0) {
|
||||
free(ptr);
|
||||
return NULL;
|
||||
}
|
||||
else
|
||||
return realloc(ptr, nsize);
|
||||
}
|
||||
|
||||
|
||||
static int panic (lua_State *L) {
|
||||
luai_writestringerror("PANIC: unprotected error in call to Lua API (%s)\n",
|
||||
lua_tostring(L, -1));
|
||||
return 0; /* return to Lua to abort */
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API lua_State *luaL_newstate (void) {
|
||||
lua_State *L = lua_newstate(l_alloc, NULL);
|
||||
if (L) lua_atpanic(L, &panic);
|
||||
return L;
|
||||
}
|
||||
|
||||
|
||||
LUALIB_API void luaL_checkversion_ (lua_State *L, lua_Number ver) {
|
||||
const lua_Number *v = lua_version(L);
|
||||
if (v != lua_version(NULL))
|
||||
luaL_error(L, "multiple Lua VMs detected");
|
||||
else if (*v != ver)
|
||||
luaL_error(L, "version mismatch: app. needs %f, Lua core provides %f",
|
||||
ver, *v);
|
||||
/* check conversions number -> integer types */
|
||||
lua_pushnumber(L, -(lua_Number)0x1234);
|
||||
if (lua_tointeger(L, -1) != -0x1234 ||
|
||||
lua_tounsigned(L, -1) != (lua_Unsigned)-0x1234)
|
||||
luaL_error(L, "bad conversion number->int;"
|
||||
" must recompile Lua with proper settings");
|
||||
lua_pop(L, 1);
|
||||
}
|
||||
|
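The string-buffer routines above (luaL_buffinit, luaL_prepbuffsize, luaL_addlstring, luaL_pushresult) are designed to be used together from C library functions. A minimal sketch of that pattern follows; the helper name l_joinwords and the joining behavior are illustrative assumptions, not part of this port:

#include <stddef.h>

#include "lua.h"
#include "lauxlib.h"

/* Hypothetical helper: concatenates all string arguments with a single
   space, using the luaL_Buffer API defined in lauxlib.c above. */
static int l_joinwords (lua_State *L) {
  int i, n = lua_gettop(L);            /* number of arguments */
  luaL_Buffer b;
  luaL_buffinit(L, &b);                /* buffer starts in its internal area */
  for (i = 1; i <= n; i++) {
    size_t l;
    const char *s = luaL_checklstring(L, i, &l);
    luaL_addlstring(&b, s, l);         /* may grow the buffer onto the stack */
    if (i < n)
      luaL_addchar(&b, ' ');
  }
  luaL_pushresult(&b);                 /* push final string, drop temporary buffer */
  return 1;
}

Such a function would normally be registered through a luaL_Reg array and luaL_setfuncs, as the libraries in this diff do.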
15
AppPkg/Applications/Lua/src/lauxlib.h
Normal file
@ -0,0 +1,15 @@
/** @file
  Stub header to allow the "real" headers to reside in StdLib/Include/Lua
  yet have the Lua sources build without having to change the include
  directives in each file.

  Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>
  This program and the accompanying materials are licensed and made available under
  the terms and conditions of the BSD License that accompanies this distribution.
  The full text of the license may be found at
  http://opensource.org/licenses/bsd-license.

  THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
  WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
**/
#include <Lua/lauxlib.h>
458
AppPkg/Applications/Lua/src/lbaselib.c
Normal file
@ -0,0 +1,458 @@
|
||||
/*
|
||||
** $Id: lbaselib.c,v 1.276.1.1 2013/04/12 18:48:47 roberto Exp $
|
||||
** Basic library
|
||||
** See Copyright Notice in lua.h
|
||||
*/
|
||||
|
||||
|
||||
|
||||
#include <ctype.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
#define lbaselib_c
|
||||
#define LUA_LIB
|
||||
|
||||
#include "lua.h"
|
||||
|
||||
#include "lauxlib.h"
|
||||
#include "lualib.h"
|
||||
|
||||
|
||||
static int luaB_print (lua_State *L) {
|
||||
int n = lua_gettop(L); /* number of arguments */
|
||||
int i;
|
||||
lua_getglobal(L, "tostring");
|
||||
for (i=1; i<=n; i++) {
|
||||
const char *s;
|
||||
size_t l;
|
||||
lua_pushvalue(L, -1); /* function to be called */
|
||||
lua_pushvalue(L, i); /* value to print */
|
||||
lua_call(L, 1, 1);
|
||||
s = lua_tolstring(L, -1, &l); /* get result */
|
||||
if (s == NULL)
|
||||
return luaL_error(L,
|
||||
LUA_QL("tostring") " must return a string to " LUA_QL("print"));
|
||||
if (i>1) luai_writestring("\t", 1);
|
||||
luai_writestring(s, l);
|
||||
lua_pop(L, 1); /* pop result */
|
||||
}
|
||||
luai_writeline();
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
#define SPACECHARS " \f\n\r\t\v"
|
||||
|
||||
static int luaB_tonumber (lua_State *L) {
|
||||
if (lua_isnoneornil(L, 2)) { /* standard conversion */
|
||||
int isnum;
|
||||
lua_Number n = lua_tonumberx(L, 1, &isnum);
|
||||
if (isnum) {
|
||||
lua_pushnumber(L, n);
|
||||
return 1;
|
||||
} /* else not a number; must be something */
|
||||
luaL_checkany(L, 1);
|
||||
}
|
||||
else {
|
||||
size_t l;
|
||||
const char *s = luaL_checklstring(L, 1, &l);
|
||||
const char *e = s + l; /* end point for 's' */
|
||||
int base = luaL_checkint(L, 2);
|
||||
int neg = 0;
|
||||
luaL_argcheck(L, 2 <= base && base <= 36, 2, "base out of range");
|
||||
s += strspn(s, SPACECHARS); /* skip initial spaces */
|
||||
if (*s == '-') { s++; neg = 1; } /* handle signal */
|
||||
else if (*s == '+') s++;
|
||||
if (isalnum((unsigned char)*s)) {
|
||||
lua_Number n = 0;
|
||||
do {
|
||||
int digit = (isdigit((unsigned char)*s)) ? *s - '0'
|
||||
: toupper((unsigned char)*s) - 'A' + 10;
|
||||
if (digit >= base) break; /* invalid numeral; force a fail */
|
||||
n = n * (lua_Number)base + (lua_Number)digit;
|
||||
s++;
|
||||
} while (isalnum((unsigned char)*s));
|
||||
s += strspn(s, SPACECHARS); /* skip trailing spaces */
|
||||
if (s == e) { /* no invalid trailing characters? */
|
||||
lua_pushnumber(L, (neg) ? -n : n);
|
||||
return 1;
|
||||
} /* else not a number */
|
||||
} /* else not a number */
|
||||
}
|
||||
lua_pushnil(L); /* not a number */
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
static int luaB_error (lua_State *L) {
|
||||
int level = luaL_optint(L, 2, 1);
|
||||
lua_settop(L, 1);
|
||||
if (lua_isstring(L, 1) && level > 0) { /* add extra information? */
|
||||
luaL_where(L, level);
|
||||
lua_pushvalue(L, 1);
|
||||
lua_concat(L, 2);
|
||||
}
|
||||
return lua_error(L);
|
||||
}
|
||||
|
||||
|
||||
static int luaB_getmetatable (lua_State *L) {
|
||||
luaL_checkany(L, 1);
|
||||
if (!lua_getmetatable(L, 1)) {
|
||||
lua_pushnil(L);
|
||||
return 1; /* no metatable */
|
||||
}
|
||||
luaL_getmetafield(L, 1, "__metatable");
|
||||
return 1; /* returns either __metatable field (if present) or metatable */
|
||||
}
|
||||
|
||||
|
||||
static int luaB_setmetatable (lua_State *L) {
|
||||
int t = lua_type(L, 2);
|
||||
luaL_checktype(L, 1, LUA_TTABLE);
|
||||
luaL_argcheck(L, t == LUA_TNIL || t == LUA_TTABLE, 2,
|
||||
"nil or table expected");
|
||||
if (luaL_getmetafield(L, 1, "__metatable"))
|
||||
return luaL_error(L, "cannot change a protected metatable");
|
||||
lua_settop(L, 2);
|
||||
lua_setmetatable(L, 1);
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
static int luaB_rawequal (lua_State *L) {
|
||||
luaL_checkany(L, 1);
|
||||
luaL_checkany(L, 2);
|
||||
lua_pushboolean(L, lua_rawequal(L, 1, 2));
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
static int luaB_rawlen (lua_State *L) {
|
||||
int t = lua_type(L, 1);
|
||||
luaL_argcheck(L, t == LUA_TTABLE || t == LUA_TSTRING, 1,
|
||||
"table or string expected");
|
||||
lua_pushinteger(L, lua_rawlen(L, 1));
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
static int luaB_rawget (lua_State *L) {
|
||||
luaL_checktype(L, 1, LUA_TTABLE);
|
||||
luaL_checkany(L, 2);
|
||||
lua_settop(L, 2);
|
||||
lua_rawget(L, 1);
|
||||
return 1;
|
||||
}
|
||||
|
||||
static int luaB_rawset (lua_State *L) {
|
||||
luaL_checktype(L, 1, LUA_TTABLE);
|
||||
luaL_checkany(L, 2);
|
||||
luaL_checkany(L, 3);
|
||||
lua_settop(L, 3);
|
||||
lua_rawset(L, 1);
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
static int luaB_collectgarbage (lua_State *L) {
|
||||
static const char *const opts[] = {"stop", "restart", "collect",
|
||||
"count", "step", "setpause", "setstepmul",
|
||||
"setmajorinc", "isrunning", "generational", "incremental", NULL};
|
||||
static const int optsnum[] = {LUA_GCSTOP, LUA_GCRESTART, LUA_GCCOLLECT,
|
||||
LUA_GCCOUNT, LUA_GCSTEP, LUA_GCSETPAUSE, LUA_GCSETSTEPMUL,
|
||||
LUA_GCSETMAJORINC, LUA_GCISRUNNING, LUA_GCGEN, LUA_GCINC};
|
||||
int o = optsnum[luaL_checkoption(L, 1, "collect", opts)];
|
||||
int ex = luaL_optint(L, 2, 0);
|
||||
int res = lua_gc(L, o, ex);
|
||||
switch (o) {
|
||||
case LUA_GCCOUNT: {
|
||||
int b = lua_gc(L, LUA_GCCOUNTB, 0);
|
||||
lua_pushnumber(L, res + ((lua_Number)b/1024));
|
||||
lua_pushinteger(L, b);
|
||||
return 2;
|
||||
}
|
||||
case LUA_GCSTEP: case LUA_GCISRUNNING: {
|
||||
lua_pushboolean(L, res);
|
||||
return 1;
|
||||
}
|
||||
default: {
|
||||
lua_pushinteger(L, res);
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static int luaB_type (lua_State *L) {
|
||||
luaL_checkany(L, 1);
|
||||
lua_pushstring(L, luaL_typename(L, 1));
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
static int pairsmeta (lua_State *L, const char *method, int iszero,
|
||||
lua_CFunction iter) {
|
||||
if (!luaL_getmetafield(L, 1, method)) { /* no metamethod? */
|
||||
luaL_checktype(L, 1, LUA_TTABLE); /* argument must be a table */
|
||||
lua_pushcfunction(L, iter); /* will return generator, */
|
||||
lua_pushvalue(L, 1); /* state, */
|
||||
if (iszero) lua_pushinteger(L, 0); /* and initial value */
|
||||
else lua_pushnil(L);
|
||||
}
|
||||
else {
|
||||
lua_pushvalue(L, 1); /* argument 'self' to metamethod */
|
||||
lua_call(L, 1, 3); /* get 3 values from metamethod */
|
||||
}
|
||||
return 3;
|
||||
}
|
||||
|
||||
|
||||
static int luaB_next (lua_State *L) {
|
||||
luaL_checktype(L, 1, LUA_TTABLE);
|
||||
lua_settop(L, 2); /* create a 2nd argument if there isn't one */
|
||||
if (lua_next(L, 1))
|
||||
return 2;
|
||||
else {
|
||||
lua_pushnil(L);
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static int luaB_pairs (lua_State *L) {
|
||||
return pairsmeta(L, "__pairs", 0, luaB_next);
|
||||
}
|
||||
|
||||
|
||||
static int ipairsaux (lua_State *L) {
|
||||
int i = luaL_checkint(L, 2);
|
||||
luaL_checktype(L, 1, LUA_TTABLE);
|
||||
i++; /* next value */
|
||||
lua_pushinteger(L, i);
|
||||
lua_rawgeti(L, 1, i);
|
||||
return (lua_isnil(L, -1)) ? 1 : 2;
|
||||
}
|
||||
|
||||
|
||||
static int luaB_ipairs (lua_State *L) {
|
||||
return pairsmeta(L, "__ipairs", 1, ipairsaux);
|
||||
}
|
||||
|
||||
|
||||
static int load_aux (lua_State *L, int status, int envidx) {
|
||||
if (status == LUA_OK) {
|
||||
if (envidx != 0) { /* 'env' parameter? */
|
||||
lua_pushvalue(L, envidx); /* environment for loaded function */
|
||||
if (!lua_setupvalue(L, -2, 1)) /* set it as 1st upvalue */
|
||||
lua_pop(L, 1); /* remove 'env' if not used by previous call */
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
else { /* error (message is on top of the stack) */
|
||||
lua_pushnil(L);
|
||||
lua_insert(L, -2); /* put before error message */
|
||||
return 2; /* return nil plus error message */
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static int luaB_loadfile (lua_State *L) {
|
||||
const char *fname = luaL_optstring(L, 1, NULL);
|
||||
const char *mode = luaL_optstring(L, 2, NULL);
|
||||
int env = (!lua_isnone(L, 3) ? 3 : 0); /* 'env' index or 0 if no 'env' */
|
||||
int status = luaL_loadfilex(L, fname, mode);
|
||||
return load_aux(L, status, env);
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
** {======================================================
|
||||
** Generic Read function
|
||||
** =======================================================
|
||||
*/
|
||||
|
||||
|
||||
/*
|
||||
** reserved slot, above all arguments, to hold a copy of the returned
|
||||
** string to avoid it being collected while parsed. 'load' has four
|
||||
** optional arguments (chunk, source name, mode, and environment).
|
||||
*/
|
||||
#define RESERVEDSLOT 5
|
||||
|
||||
|
||||
/*
|
||||
** Reader for generic `load' function: `lua_load' uses the
|
||||
** stack for internal stuff, so the reader cannot change the
|
||||
** stack top. Instead, it keeps its resulting string in a
|
||||
** reserved slot inside the stack.
|
||||
*/
|
||||
static const char *generic_reader (lua_State *L, void *ud, size_t *size) {
|
||||
(void)(ud); /* not used */
|
||||
luaL_checkstack(L, 2, "too many nested functions");
|
||||
lua_pushvalue(L, 1); /* get function */
|
||||
lua_call(L, 0, 1); /* call it */
|
||||
if (lua_isnil(L, -1)) {
|
||||
lua_pop(L, 1); /* pop result */
|
||||
*size = 0;
|
||||
return NULL;
|
||||
}
|
||||
else if (!lua_isstring(L, -1))
|
||||
luaL_error(L, "reader function must return a string");
|
||||
lua_replace(L, RESERVEDSLOT); /* save string in reserved slot */
|
||||
return lua_tolstring(L, RESERVEDSLOT, size);
|
||||
}
|
||||
|
||||
|
||||
static int luaB_load (lua_State *L) {
|
||||
int status;
|
||||
size_t l;
|
||||
const char *s = lua_tolstring(L, 1, &l);
|
||||
const char *mode = luaL_optstring(L, 3, "bt");
|
||||
int env = (!lua_isnone(L, 4) ? 4 : 0); /* 'env' index or 0 if no 'env' */
|
||||
if (s != NULL) { /* loading a string? */
|
||||
const char *chunkname = luaL_optstring(L, 2, s);
|
||||
status = luaL_loadbufferx(L, s, l, chunkname, mode);
|
||||
}
|
||||
else { /* loading from a reader function */
|
||||
const char *chunkname = luaL_optstring(L, 2, "=(load)");
|
||||
luaL_checktype(L, 1, LUA_TFUNCTION);
|
||||
lua_settop(L, RESERVEDSLOT); /* create reserved slot */
|
||||
status = lua_load(L, generic_reader, NULL, chunkname, mode);
|
||||
}
|
||||
return load_aux(L, status, env);
|
||||
}
|
||||
|
||||
/* }====================================================== */
|
||||
|
||||
|
||||
static int dofilecont (lua_State *L) {
|
||||
return lua_gettop(L) - 1;
|
||||
}
|
||||
|
||||
|
||||
static int luaB_dofile (lua_State *L) {
|
||||
const char *fname = luaL_optstring(L, 1, NULL);
|
||||
lua_settop(L, 1);
|
||||
if (luaL_loadfile(L, fname) != LUA_OK)
|
||||
return lua_error(L);
|
||||
lua_callk(L, 0, LUA_MULTRET, 0, dofilecont);
|
||||
return dofilecont(L);
|
||||
}
|
||||
|
||||
|
||||
static int luaB_assert (lua_State *L) {
|
||||
if (!lua_toboolean(L, 1))
|
||||
return luaL_error(L, "%s", luaL_optstring(L, 2, "assertion failed!"));
|
||||
return lua_gettop(L);
|
||||
}
|
||||
|
||||
|
||||
static int luaB_select (lua_State *L) {
|
||||
int n = lua_gettop(L);
|
||||
if (lua_type(L, 1) == LUA_TSTRING && *lua_tostring(L, 1) == '#') {
|
||||
lua_pushinteger(L, n-1);
|
||||
return 1;
|
||||
}
|
||||
else {
|
||||
int i = luaL_checkint(L, 1);
|
||||
if (i < 0) i = n + i;
|
||||
else if (i > n) i = n;
|
||||
luaL_argcheck(L, 1 <= i, 1, "index out of range");
|
||||
return n - i;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static int finishpcall (lua_State *L, int status) {
|
||||
if (!lua_checkstack(L, 1)) { /* no space for extra boolean? */
|
||||
lua_settop(L, 0); /* create space for return values */
|
||||
lua_pushboolean(L, 0);
|
||||
lua_pushstring(L, "stack overflow");
|
||||
return 2; /* return false, msg */
|
||||
}
|
||||
lua_pushboolean(L, status); /* first result (status) */
|
||||
lua_replace(L, 1); /* put first result in first slot */
|
||||
return lua_gettop(L);
|
||||
}
|
||||
|
||||
|
||||
static int pcallcont (lua_State *L) {
|
||||
int status = lua_getctx(L, NULL);
|
||||
return finishpcall(L, (status == LUA_YIELD));
|
||||
}
|
||||
|
||||
|
||||
static int luaB_pcall (lua_State *L) {
|
||||
int status;
|
||||
luaL_checkany(L, 1);
|
||||
lua_pushnil(L);
|
||||
lua_insert(L, 1); /* create space for status result */
|
||||
status = lua_pcallk(L, lua_gettop(L) - 2, LUA_MULTRET, 0, 0, pcallcont);
|
||||
return finishpcall(L, (status == LUA_OK));
|
||||
}
|
||||
|
||||
|
||||
static int luaB_xpcall (lua_State *L) {
|
||||
int status;
|
||||
int n = lua_gettop(L);
|
||||
luaL_argcheck(L, n >= 2, 2, "value expected");
|
||||
lua_pushvalue(L, 1); /* exchange function... */
|
||||
lua_copy(L, 2, 1); /* ...and error handler */
|
||||
lua_replace(L, 2);
|
||||
status = lua_pcallk(L, n - 2, LUA_MULTRET, 1, 0, pcallcont);
|
||||
return finishpcall(L, (status == LUA_OK));
|
||||
}
|
||||
|
||||
|
||||
static int luaB_tostring (lua_State *L) {
|
||||
luaL_checkany(L, 1);
|
||||
luaL_tolstring(L, 1, NULL);
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
static const luaL_Reg base_funcs[] = {
|
||||
{"assert", luaB_assert},
|
||||
{"collectgarbage", luaB_collectgarbage},
|
||||
{"dofile", luaB_dofile},
|
||||
{"error", luaB_error},
|
||||
{"getmetatable", luaB_getmetatable},
|
||||
{"ipairs", luaB_ipairs},
|
||||
{"loadfile", luaB_loadfile},
|
||||
{"load", luaB_load},
|
||||
#if defined(LUA_COMPAT_LOADSTRING)
|
||||
{"loadstring", luaB_load},
|
||||
#endif
|
||||
{"next", luaB_next},
|
||||
{"pairs", luaB_pairs},
|
||||
{"pcall", luaB_pcall},
|
||||
{"print", luaB_print},
|
||||
{"rawequal", luaB_rawequal},
|
||||
{"rawlen", luaB_rawlen},
|
||||
{"rawget", luaB_rawget},
|
||||
{"rawset", luaB_rawset},
|
||||
{"select", luaB_select},
|
||||
{"setmetatable", luaB_setmetatable},
|
||||
{"tonumber", luaB_tonumber},
|
||||
{"tostring", luaB_tostring},
|
||||
{"type", luaB_type},
|
||||
{"xpcall", luaB_xpcall},
|
||||
{NULL, NULL}
|
||||
};
|
||||
|
||||
|
||||
LUAMOD_API int luaopen_base (lua_State *L) {
|
||||
/* set global _G */
|
||||
lua_pushglobaltable(L);
|
||||
lua_pushglobaltable(L);
|
||||
lua_setfield(L, -2, "_G");
|
||||
/* open lib into global table */
|
||||
luaL_setfuncs(L, base_funcs, 0);
|
||||
lua_pushliteral(L, LUA_VERSION);
|
||||
lua_setfield(L, -2, "_VERSION"); /* set global _VERSION */
|
||||
return 1;
|
||||
}
|
||||
|
212
AppPkg/Applications/Lua/src/lbitlib.c
Normal file
212
AppPkg/Applications/Lua/src/lbitlib.c
Normal file
@ -0,0 +1,212 @@
|
||||
/*
|
||||
** $Id: lbitlib.c,v 1.18.1.2 2013/07/09 18:01:41 roberto Exp $
|
||||
** Standard library for bitwise operations
|
||||
** See Copyright Notice in lua.h
|
||||
*/
|
||||
|
||||
#define lbitlib_c
|
||||
#define LUA_LIB
|
||||
|
||||
#include "lua.h"
|
||||
|
||||
#include "lauxlib.h"
|
||||
#include "lualib.h"
|
||||
|
||||
|
||||
/* number of bits to consider in a number */
|
||||
#if !defined(LUA_NBITS)
|
||||
#define LUA_NBITS 32
|
||||
#endif
|
||||
|
||||
|
||||
#define ALLONES (~(((~(lua_Unsigned)0) << (LUA_NBITS - 1)) << 1))
|
||||
|
||||
/* macro to trim extra bits */
|
||||
#define trim(x) ((x) & ALLONES)
|
||||
|
||||
|
||||
/* builds a number with 'n' ones (1 <= n <= LUA_NBITS) */
|
||||
#define mask(n) (~((ALLONES << 1) << ((n) - 1)))

typedef lua_Unsigned b_uint;



static b_uint andaux (lua_State *L) {
  int i, n = lua_gettop(L);
  b_uint r = ~(b_uint)0;
  for (i = 1; i <= n; i++)
    r &= luaL_checkunsigned(L, i);
  return trim(r);
}


static int b_and (lua_State *L) {
  b_uint r = andaux(L);
  lua_pushunsigned(L, r);
  return 1;
}


static int b_test (lua_State *L) {
  b_uint r = andaux(L);
  lua_pushboolean(L, r != 0);
  return 1;
}


static int b_or (lua_State *L) {
  int i, n = lua_gettop(L);
  b_uint r = 0;
  for (i = 1; i <= n; i++)
    r |= luaL_checkunsigned(L, i);
  lua_pushunsigned(L, trim(r));
  return 1;
}


static int b_xor (lua_State *L) {
  int i, n = lua_gettop(L);
  b_uint r = 0;
  for (i = 1; i <= n; i++)
    r ^= luaL_checkunsigned(L, i);
  lua_pushunsigned(L, trim(r));
  return 1;
}


static int b_not (lua_State *L) {
  b_uint r = ~luaL_checkunsigned(L, 1);
  lua_pushunsigned(L, trim(r));
  return 1;
}


static int b_shift (lua_State *L, b_uint r, int i) {
  if (i < 0) {  /* shift right? */
    i = -i;
    r = trim(r);
    if (i >= LUA_NBITS) r = 0;
    else r >>= i;
  }
  else {  /* shift left */
    if (i >= LUA_NBITS) r = 0;
    else r <<= i;
    r = trim(r);
  }
  lua_pushunsigned(L, r);
  return 1;
}


static int b_lshift (lua_State *L) {
  return b_shift(L, luaL_checkunsigned(L, 1), luaL_checkint(L, 2));
}


static int b_rshift (lua_State *L) {
  return b_shift(L, luaL_checkunsigned(L, 1), -luaL_checkint(L, 2));
}


static int b_arshift (lua_State *L) {
  b_uint r = luaL_checkunsigned(L, 1);
  int i = luaL_checkint(L, 2);
  if (i < 0 || !(r & ((b_uint)1 << (LUA_NBITS - 1))))
    return b_shift(L, r, -i);
  else {  /* arithmetic shift for 'negative' number */
    if (i >= LUA_NBITS) r = ALLONES;
    else
      r = trim((r >> i) | ~(~(b_uint)0 >> i));  /* add signal bit */
    lua_pushunsigned(L, r);
    return 1;
  }
}


static int b_rot (lua_State *L, int i) {
  b_uint r = luaL_checkunsigned(L, 1);
  i &= (LUA_NBITS - 1);  /* i = i % NBITS */
  r = trim(r);
  if (i != 0)  /* avoid undefined shift of LUA_NBITS when i == 0 */
    r = (r << i) | (r >> (LUA_NBITS - i));
  lua_pushunsigned(L, trim(r));
  return 1;
}


static int b_lrot (lua_State *L) {
  return b_rot(L, luaL_checkint(L, 2));
}


static int b_rrot (lua_State *L) {
  return b_rot(L, -luaL_checkint(L, 2));
}


/*
** get field and width arguments for field-manipulation functions,
** checking whether they are valid.
** ('luaL_error' called without 'return' to avoid later warnings about
** 'width' being used uninitialized.)
*/
static int fieldargs (lua_State *L, int farg, int *width) {
  int f = luaL_checkint(L, farg);
  int w = luaL_optint(L, farg + 1, 1);
  luaL_argcheck(L, 0 <= f, farg, "field cannot be negative");
  luaL_argcheck(L, 0 < w, farg + 1, "width must be positive");
  if (f + w > LUA_NBITS)
    luaL_error(L, "trying to access non-existent bits");
  *width = w;
  return f;
}


static int b_extract (lua_State *L) {
  int w;
  b_uint r = luaL_checkunsigned(L, 1);
  int f = fieldargs(L, 2, &w);
  r = (r >> f) & mask(w);
  lua_pushunsigned(L, r);
  return 1;
}


static int b_replace (lua_State *L) {
  int w;
  b_uint r = luaL_checkunsigned(L, 1);
  b_uint v = luaL_checkunsigned(L, 2);
  int f = fieldargs(L, 3, &w);
  int m = mask(w);
  v &= m;  /* erase bits outside given width */
  r = (r & ~(m << f)) | (v << f);
  lua_pushunsigned(L, r);
  return 1;
}


static const luaL_Reg bitlib[] = {
  {"arshift", b_arshift},
  {"band", b_and},
  {"bnot", b_not},
  {"bor", b_or},
  {"bxor", b_xor},
  {"btest", b_test},
  {"extract", b_extract},
  {"lrotate", b_lrot},
  {"lshift", b_lshift},
  {"replace", b_replace},
  {"rrotate", b_rrot},
  {"rshift", b_rshift},
  {NULL, NULL}
};



LUAMOD_API int luaopen_bit32 (lua_State *L) {
  luaL_newlib(L, bitlib);
  return 1;
}
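The functions above form the tail of `lbitlib.c`: every operation funnels its result through `trim()`/`mask()` so values stay within `LUA_NBITS` (32) bits. As a quick illustration of how that surfaces to scripts, here is a minimal host-side sketch (not part of the files in this diff), assuming a stock Lua 5.2 build where `luaL_openlibs` registers the `bit32` table created by `luaopen_bit32`; expected outputs are noted in comments.

```c
#include <stdio.h>
#include "lua.h"
#include "lauxlib.h"
#include "lualib.h"

int main(void) {
  lua_State *L = luaL_newstate();
  luaL_openlibs(L);  /* in Lua 5.2 this also opens the bit32 library */
  /* band/extract/bnot correspond to b_and/b_extract/b_not above */
  if (luaL_dostring(L,
        "return bit32.band(0xFF, 0x0F),"
        "       bit32.extract(0xABCD, 4, 8),"
        "       bit32.bnot(0)") != LUA_OK) {
    fprintf(stderr, "%s\n", lua_tostring(L, -1));
    lua_close(L);
    return 1;
  }
  printf("band    = 0x%lX\n", (unsigned long)lua_tounsigned(L, -3));  /* 0xF */
  printf("extract = 0x%lX\n", (unsigned long)lua_tounsigned(L, -2));  /* 0xBC */
  printf("bnot(0) = 0x%lX\n", (unsigned long)lua_tounsigned(L, -1));  /* 0xFFFFFFFF */
  lua_close(L);
  return 0;
}
```

Each printed value is already masked to 32 bits, which is exactly the guarantee `trim(r)` provides in the C implementations above.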
881 AppPkg/Applications/Lua/src/lcode.c Normal file
@ -0,0 +1,881 @@
/*
** $Id: lcode.c,v 2.62.1.1 2013/04/12 18:48:47 roberto Exp $
** Code generator for Lua
** See Copyright Notice in lua.h
*/


#include <stdlib.h>

#define lcode_c
#define LUA_CORE

#include "lua.h"

#include "lcode.h"
#include "ldebug.h"
#include "ldo.h"
#include "lgc.h"
#include "llex.h"
#include "lmem.h"
#include "lobject.h"
#include "lopcodes.h"
#include "lparser.h"
#include "lstring.h"
#include "ltable.h"
#include "lvm.h"


#define hasjumps(e)  ((e)->t != (e)->f)


static int isnumeral(expdesc *e) {
  return (e->k == VKNUM && e->t == NO_JUMP && e->f == NO_JUMP);
}


void luaK_nil (FuncState *fs, int from, int n) {
  Instruction *previous;
  int l = from + n - 1;  /* last register to set nil */
  if (fs->pc > fs->lasttarget) {  /* no jumps to current position? */
    previous = &fs->f->code[fs->pc-1];
    if (GET_OPCODE(*previous) == OP_LOADNIL) {
      int pfrom = GETARG_A(*previous);
      int pl = pfrom + GETARG_B(*previous);
      if ((pfrom <= from && from <= pl + 1) ||
          (from <= pfrom && pfrom <= l + 1)) {  /* can connect both? */
        if (pfrom < from) from = pfrom;  /* from = min(from, pfrom) */
        if (pl > l) l = pl;  /* l = max(l, pl) */
        SETARG_A(*previous, from);
        SETARG_B(*previous, l - from);
        return;
      }
    }  /* else go through */
  }
  luaK_codeABC(fs, OP_LOADNIL, from, n - 1, 0);  /* else no optimization */
}


int luaK_jump (FuncState *fs) {
  int jpc = fs->jpc;  /* save list of jumps to here */
  int j;
  fs->jpc = NO_JUMP;
  j = luaK_codeAsBx(fs, OP_JMP, 0, NO_JUMP);
  luaK_concat(fs, &j, jpc);  /* keep them on hold */
  return j;
}


void luaK_ret (FuncState *fs, int first, int nret) {
  luaK_codeABC(fs, OP_RETURN, first, nret+1, 0);
}


static int condjump (FuncState *fs, OpCode op, int A, int B, int C) {
  luaK_codeABC(fs, op, A, B, C);
  return luaK_jump(fs);
}


static void fixjump (FuncState *fs, int pc, int dest) {
  Instruction *jmp = &fs->f->code[pc];
  int offset = dest-(pc+1);
  lua_assert(dest != NO_JUMP);
  if (abs(offset) > MAXARG_sBx)
    luaX_syntaxerror(fs->ls, "control structure too long");
  SETARG_sBx(*jmp, offset);
}


/*
** returns current `pc' and marks it as a jump target (to avoid wrong
** optimizations with consecutive instructions not in the same basic block).
*/
int luaK_getlabel (FuncState *fs) {
  fs->lasttarget = fs->pc;
  return fs->pc;
}


static int getjump (FuncState *fs, int pc) {
  int offset = GETARG_sBx(fs->f->code[pc]);
  if (offset == NO_JUMP)  /* point to itself represents end of list */
    return NO_JUMP;  /* end of list */
  else
    return (pc+1)+offset;  /* turn offset into absolute position */
}


static Instruction *getjumpcontrol (FuncState *fs, int pc) {
  Instruction *pi = &fs->f->code[pc];
  if (pc >= 1 && testTMode(GET_OPCODE(*(pi-1))))
    return pi-1;
  else
    return pi;
}


/*
** check whether list has any jump that do not produce a value
** (or produce an inverted value)
*/
static int need_value (FuncState *fs, int list) {
  for (; list != NO_JUMP; list = getjump(fs, list)) {
    Instruction i = *getjumpcontrol(fs, list);
    if (GET_OPCODE(i) != OP_TESTSET) return 1;
  }
  return 0;  /* not found */
}


static int patchtestreg (FuncState *fs, int node, int reg) {
  Instruction *i = getjumpcontrol(fs, node);
  if (GET_OPCODE(*i) != OP_TESTSET)
    return 0;  /* cannot patch other instructions */
  if (reg != NO_REG && reg != GETARG_B(*i))
    SETARG_A(*i, reg);
  else  /* no register to put value or register already has the value */
    *i = CREATE_ABC(OP_TEST, GETARG_B(*i), 0, GETARG_C(*i));

  return 1;
}


static void removevalues (FuncState *fs, int list) {
  for (; list != NO_JUMP; list = getjump(fs, list))
    patchtestreg(fs, list, NO_REG);
}


static void patchlistaux (FuncState *fs, int list, int vtarget, int reg,
                          int dtarget) {
  while (list != NO_JUMP) {
    int next = getjump(fs, list);
    if (patchtestreg(fs, list, reg))
      fixjump(fs, list, vtarget);
    else
      fixjump(fs, list, dtarget);  /* jump to default target */
    list = next;
  }
}


static void dischargejpc (FuncState *fs) {
  patchlistaux(fs, fs->jpc, fs->pc, NO_REG, fs->pc);
  fs->jpc = NO_JUMP;
}


void luaK_patchlist (FuncState *fs, int list, int target) {
  if (target == fs->pc)
    luaK_patchtohere(fs, list);
  else {
    lua_assert(target < fs->pc);
    patchlistaux(fs, list, target, NO_REG, target);
  }
}


LUAI_FUNC void luaK_patchclose (FuncState *fs, int list, int level) {
  level++;  /* argument is +1 to reserve 0 as non-op */
  while (list != NO_JUMP) {
    int next = getjump(fs, list);
    lua_assert(GET_OPCODE(fs->f->code[list]) == OP_JMP &&
                (GETARG_A(fs->f->code[list]) == 0 ||
                 GETARG_A(fs->f->code[list]) >= level));
    SETARG_A(fs->f->code[list], level);
    list = next;
  }
}


void luaK_patchtohere (FuncState *fs, int list) {
  luaK_getlabel(fs);
  luaK_concat(fs, &fs->jpc, list);
}


void luaK_concat (FuncState *fs, int *l1, int l2) {
  if (l2 == NO_JUMP) return;
  else if (*l1 == NO_JUMP)
    *l1 = l2;
  else {
    int list = *l1;
    int next;
    while ((next = getjump(fs, list)) != NO_JUMP)  /* find last element */
      list = next;
    fixjump(fs, list, l2);
  }
}


static int luaK_code (FuncState *fs, Instruction i) {
  Proto *f = fs->f;
  dischargejpc(fs);  /* `pc' will change */
  /* put new instruction in code array */
  luaM_growvector(fs->ls->L, f->code, fs->pc, f->sizecode, Instruction,
                  MAX_INT, "opcodes");
  f->code[fs->pc] = i;
  /* save corresponding line information */
  luaM_growvector(fs->ls->L, f->lineinfo, fs->pc, f->sizelineinfo, int,
                  MAX_INT, "opcodes");
  f->lineinfo[fs->pc] = fs->ls->lastline;
  return fs->pc++;
}


int luaK_codeABC (FuncState *fs, OpCode o, int a, int b, int c) {
  lua_assert(getOpMode(o) == iABC);
  lua_assert(getBMode(o) != OpArgN || b == 0);
  lua_assert(getCMode(o) != OpArgN || c == 0);
  lua_assert(a <= MAXARG_A && b <= MAXARG_B && c <= MAXARG_C);
  return luaK_code(fs, CREATE_ABC(o, a, b, c));
}


int luaK_codeABx (FuncState *fs, OpCode o, int a, unsigned int bc) {
  lua_assert(getOpMode(o) == iABx || getOpMode(o) == iAsBx);
  lua_assert(getCMode(o) == OpArgN);
  lua_assert(a <= MAXARG_A && bc <= MAXARG_Bx);
  return luaK_code(fs, CREATE_ABx(o, a, bc));
}


static int codeextraarg (FuncState *fs, int a) {
  lua_assert(a <= MAXARG_Ax);
  return luaK_code(fs, CREATE_Ax(OP_EXTRAARG, a));
}


int luaK_codek (FuncState *fs, int reg, int k) {
  if (k <= MAXARG_Bx)
    return luaK_codeABx(fs, OP_LOADK, reg, k);
  else {
    int p = luaK_codeABx(fs, OP_LOADKX, reg, 0);
    codeextraarg(fs, k);
    return p;
  }
}


void luaK_checkstack (FuncState *fs, int n) {
  int newstack = fs->freereg + n;
  if (newstack > fs->f->maxstacksize) {
    if (newstack >= MAXSTACK)
      luaX_syntaxerror(fs->ls, "function or expression too complex");
    fs->f->maxstacksize = cast_byte(newstack);
  }
}


void luaK_reserveregs (FuncState *fs, int n) {
  luaK_checkstack(fs, n);
  fs->freereg += (lu_byte)n;
}


static void freereg (FuncState *fs, int reg) {
  if (!ISK(reg) && reg >= fs->nactvar) {
    fs->freereg--;
    lua_assert(reg == fs->freereg);
  }
}


static void freeexp (FuncState *fs, expdesc *e) {
  if (e->k == VNONRELOC)
    freereg(fs, e->u.info);
}


static int addk (FuncState *fs, TValue *key, TValue *v) {
  lua_State *L = fs->ls->L;
  TValue *idx = luaH_set(L, fs->h, key);
  Proto *f = fs->f;
  int k, oldsize;
  if (ttisnumber(idx)) {
    lua_Number n = nvalue(idx);
    lua_number2int(k, n);
    if (luaV_rawequalobj(&f->k[k], v))
      return k;
    /* else may be a collision (e.g., between 0.0 and "\0\0\0\0\0\0\0\0");
       go through and create a new entry for this value */
  }
  /* constant not found; create a new entry */
  oldsize = f->sizek;
  k = fs->nk;
  /* numerical value does not need GC barrier;
     table has no metatable, so it does not need to invalidate cache */
  setnvalue(idx, cast_num(k));
  luaM_growvector(L, f->k, k, f->sizek, TValue, MAXARG_Ax, "constants");
  while (oldsize < f->sizek) setnilvalue(&f->k[oldsize++]);
  setobj(L, &f->k[k], v);
  fs->nk++;
  luaC_barrier(L, f, v);
  return k;
}


int luaK_stringK (FuncState *fs, TString *s) {
  TValue o;
  setsvalue(fs->ls->L, &o, s);
  return addk(fs, &o, &o);
}


int luaK_numberK (FuncState *fs, lua_Number r) {
  int n;
  lua_State *L = fs->ls->L;
  TValue o;
  setnvalue(&o, r);
  if (r == 0 || luai_numisnan(NULL, r)) {  /* handle -0 and NaN */
    /* use raw representation as key to avoid numeric problems */
    setsvalue(L, L->top++, luaS_newlstr(L, (char *)&r, sizeof(r)));
    n = addk(fs, L->top - 1, &o);
    L->top--;
  }
  else
    n = addk(fs, &o, &o);  /* regular case */
  return n;
}


static int boolK (FuncState *fs, int b) {
  TValue o;
  setbvalue(&o, b);
  return addk(fs, &o, &o);
}


static int nilK (FuncState *fs) {
  TValue k, v;
  setnilvalue(&v);
  /* cannot use nil as key; instead use table itself to represent nil */
  sethvalue(fs->ls->L, &k, fs->h);
  return addk(fs, &k, &v);
}


void luaK_setreturns (FuncState *fs, expdesc *e, int nresults) {
  if (e->k == VCALL) {  /* expression is an open function call? */
    SETARG_C(getcode(fs, e), nresults+1);
  }
  else if (e->k == VVARARG) {
    SETARG_B(getcode(fs, e), nresults+1);
    SETARG_A(getcode(fs, e), fs->freereg);
    luaK_reserveregs(fs, 1);
  }
}


void luaK_setoneret (FuncState *fs, expdesc *e) {
  if (e->k == VCALL) {  /* expression is an open function call? */
    e->k = VNONRELOC;
    e->u.info = GETARG_A(getcode(fs, e));
  }
  else if (e->k == VVARARG) {
    SETARG_B(getcode(fs, e), 2);
    e->k = VRELOCABLE;  /* can relocate its simple result */
  }
}


void luaK_dischargevars (FuncState *fs, expdesc *e) {
  switch (e->k) {
    case VLOCAL: {
      e->k = VNONRELOC;
      break;
    }
    case VUPVAL: {
      e->u.info = luaK_codeABC(fs, OP_GETUPVAL, 0, e->u.info, 0);
      e->k = VRELOCABLE;
      break;
    }
    case VINDEXED: {
      OpCode op = OP_GETTABUP;  /* assume 't' is in an upvalue */
      freereg(fs, e->u.ind.idx);
      if (e->u.ind.vt == VLOCAL) {  /* 't' is in a register? */
        freereg(fs, e->u.ind.t);
        op = OP_GETTABLE;
      }
      e->u.info = luaK_codeABC(fs, op, 0, e->u.ind.t, e->u.ind.idx);
      e->k = VRELOCABLE;
      break;
    }
    case VVARARG:
    case VCALL: {
      luaK_setoneret(fs, e);
      break;
    }
    default: break;  /* there is one value available (somewhere) */
  }
}


static int code_label (FuncState *fs, int A, int b, int jump) {
  luaK_getlabel(fs);  /* those instructions may be jump targets */
  return luaK_codeABC(fs, OP_LOADBOOL, A, b, jump);
}


static void discharge2reg (FuncState *fs, expdesc *e, int reg) {
  luaK_dischargevars(fs, e);
  switch (e->k) {
    case VNIL: {
      luaK_nil(fs, reg, 1);
      break;
    }
    case VFALSE: case VTRUE: {
      luaK_codeABC(fs, OP_LOADBOOL, reg, e->k == VTRUE, 0);
      break;
    }
    case VK: {
      luaK_codek(fs, reg, e->u.info);
      break;
    }
    case VKNUM: {
      luaK_codek(fs, reg, luaK_numberK(fs, e->u.nval));
      break;
    }
    case VRELOCABLE: {
      Instruction *pc = &getcode(fs, e);
      SETARG_A(*pc, reg);
      break;
    }
    case VNONRELOC: {
      if (reg != e->u.info)
        luaK_codeABC(fs, OP_MOVE, reg, e->u.info, 0);
      break;
    }
    default: {
      lua_assert(e->k == VVOID || e->k == VJMP);
      return;  /* nothing to do... */
    }
  }
  e->u.info = reg;
  e->k = VNONRELOC;
}


static void discharge2anyreg (FuncState *fs, expdesc *e) {
  if (e->k != VNONRELOC) {
    luaK_reserveregs(fs, 1);
    discharge2reg(fs, e, fs->freereg-1);
  }
}


static void exp2reg (FuncState *fs, expdesc *e, int reg) {
  discharge2reg(fs, e, reg);
  if (e->k == VJMP)
    luaK_concat(fs, &e->t, e->u.info);  /* put this jump in `t' list */
  if (hasjumps(e)) {
    int final;  /* position after whole expression */
    int p_f = NO_JUMP;  /* position of an eventual LOAD false */
    int p_t = NO_JUMP;  /* position of an eventual LOAD true */
    if (need_value(fs, e->t) || need_value(fs, e->f)) {
      int fj = (e->k == VJMP) ? NO_JUMP : luaK_jump(fs);
      p_f = code_label(fs, reg, 0, 1);
      p_t = code_label(fs, reg, 1, 0);
      luaK_patchtohere(fs, fj);
    }
    final = luaK_getlabel(fs);
    patchlistaux(fs, e->f, final, reg, p_f);
    patchlistaux(fs, e->t, final, reg, p_t);
  }
  e->f = e->t = NO_JUMP;
  e->u.info = reg;
  e->k = VNONRELOC;
}


void luaK_exp2nextreg (FuncState *fs, expdesc *e) {
  luaK_dischargevars(fs, e);
  freeexp(fs, e);
  luaK_reserveregs(fs, 1);
  exp2reg(fs, e, fs->freereg - 1);
}


int luaK_exp2anyreg (FuncState *fs, expdesc *e) {
  luaK_dischargevars(fs, e);
  if (e->k == VNONRELOC) {
    if (!hasjumps(e)) return e->u.info;  /* exp is already in a register */
    if (e->u.info >= fs->nactvar) {  /* reg. is not a local? */
      exp2reg(fs, e, e->u.info);  /* put value on it */
      return e->u.info;
    }
  }
  luaK_exp2nextreg(fs, e);  /* default */
  return e->u.info;
}


void luaK_exp2anyregup (FuncState *fs, expdesc *e) {
  if (e->k != VUPVAL || hasjumps(e))
    luaK_exp2anyreg(fs, e);
}


void luaK_exp2val (FuncState *fs, expdesc *e) {
  if (hasjumps(e))
    luaK_exp2anyreg(fs, e);
  else
    luaK_dischargevars(fs, e);
}


int luaK_exp2RK (FuncState *fs, expdesc *e) {
  luaK_exp2val(fs, e);
  switch (e->k) {
    case VTRUE:
    case VFALSE:
    case VNIL: {
      if (fs->nk <= MAXINDEXRK) {  /* constant fits in RK operand? */
        e->u.info = (e->k == VNIL) ? nilK(fs) : boolK(fs, (e->k == VTRUE));
        e->k = VK;
        return RKASK(e->u.info);
      }
      else break;
    }
    case VKNUM: {
      e->u.info = luaK_numberK(fs, e->u.nval);
      e->k = VK;
      /* go through */
    }
    case VK: {
      if (e->u.info <= MAXINDEXRK)  /* constant fits in argC? */
        return RKASK(e->u.info);
      else break;
    }
    default: break;
  }
  /* not a constant in the right range: put it in a register */
  return luaK_exp2anyreg(fs, e);
}


void luaK_storevar (FuncState *fs, expdesc *var, expdesc *ex) {
  switch (var->k) {
    case VLOCAL: {
      freeexp(fs, ex);
      exp2reg(fs, ex, var->u.info);
      return;
    }
    case VUPVAL: {
      int e = luaK_exp2anyreg(fs, ex);
      luaK_codeABC(fs, OP_SETUPVAL, e, var->u.info, 0);
      break;
    }
    case VINDEXED: {
      OpCode op = (var->u.ind.vt == VLOCAL) ? OP_SETTABLE : OP_SETTABUP;
      int e = luaK_exp2RK(fs, ex);
      luaK_codeABC(fs, op, var->u.ind.t, var->u.ind.idx, e);
      break;
    }
    default: {
      lua_assert(0);  /* invalid var kind to store */
      break;
    }
  }
  freeexp(fs, ex);
}


void luaK_self (FuncState *fs, expdesc *e, expdesc *key) {
  int ereg;
  luaK_exp2anyreg(fs, e);
  ereg = e->u.info;  /* register where 'e' was placed */
  freeexp(fs, e);
  e->u.info = fs->freereg;  /* base register for op_self */
  e->k = VNONRELOC;
  luaK_reserveregs(fs, 2);  /* function and 'self' produced by op_self */
  luaK_codeABC(fs, OP_SELF, e->u.info, ereg, luaK_exp2RK(fs, key));
  freeexp(fs, key);
}


static void invertjump (FuncState *fs, expdesc *e) {
  Instruction *pc = getjumpcontrol(fs, e->u.info);
  lua_assert(testTMode(GET_OPCODE(*pc)) && GET_OPCODE(*pc) != OP_TESTSET &&
                                           GET_OPCODE(*pc) != OP_TEST);
  SETARG_A(*pc, !(GETARG_A(*pc)));
}


static int jumponcond (FuncState *fs, expdesc *e, int cond) {
  if (e->k == VRELOCABLE) {
    Instruction ie = getcode(fs, e);
    if (GET_OPCODE(ie) == OP_NOT) {
      fs->pc--;  /* remove previous OP_NOT */
      return condjump(fs, OP_TEST, GETARG_B(ie), 0, !cond);
    }
    /* else go through */
  }
  discharge2anyreg(fs, e);
  freeexp(fs, e);
  return condjump(fs, OP_TESTSET, NO_REG, e->u.info, cond);
}


void luaK_goiftrue (FuncState *fs, expdesc *e) {
  int pc;  /* pc of last jump */
  luaK_dischargevars(fs, e);
  switch (e->k) {
    case VJMP: {
      invertjump(fs, e);
      pc = e->u.info;
      break;
    }
    case VK: case VKNUM: case VTRUE: {
      pc = NO_JUMP;  /* always true; do nothing */
      break;
    }
    default: {
      pc = jumponcond(fs, e, 0);
      break;
    }
  }
  luaK_concat(fs, &e->f, pc);  /* insert last jump in `f' list */
  luaK_patchtohere(fs, e->t);
  e->t = NO_JUMP;
}


void luaK_goiffalse (FuncState *fs, expdesc *e) {
  int pc;  /* pc of last jump */
  luaK_dischargevars(fs, e);
  switch (e->k) {
    case VJMP: {
      pc = e->u.info;
      break;
    }
    case VNIL: case VFALSE: {
      pc = NO_JUMP;  /* always false; do nothing */
      break;
    }
    default: {
      pc = jumponcond(fs, e, 1);
      break;
    }
  }
  luaK_concat(fs, &e->t, pc);  /* insert last jump in `t' list */
  luaK_patchtohere(fs, e->f);
  e->f = NO_JUMP;
}


static void codenot (FuncState *fs, expdesc *e) {
  luaK_dischargevars(fs, e);
  switch (e->k) {
    case VNIL: case VFALSE: {
      e->k = VTRUE;
      break;
    }
    case VK: case VKNUM: case VTRUE: {
      e->k = VFALSE;
      break;
    }
    case VJMP: {
      invertjump(fs, e);
      break;
    }
    case VRELOCABLE:
    case VNONRELOC: {
      discharge2anyreg(fs, e);
      freeexp(fs, e);
      e->u.info = luaK_codeABC(fs, OP_NOT, 0, e->u.info, 0);
      e->k = VRELOCABLE;
      break;
    }
    default: {
      lua_assert(0);  /* cannot happen */
      break;
    }
  }
  /* interchange true and false lists */
  { int temp = e->f; e->f = e->t; e->t = temp; }
  removevalues(fs, e->f);
  removevalues(fs, e->t);
}


void luaK_indexed (FuncState *fs, expdesc *t, expdesc *k) {
  lua_assert(!hasjumps(t));
  t->u.ind.t = (lu_byte)t->u.info;
  t->u.ind.idx = (short)luaK_exp2RK(fs, k);
  t->u.ind.vt = (t->k == VUPVAL) ? VUPVAL
                                 : check_exp(vkisinreg(t->k), VLOCAL);
  t->k = VINDEXED;
}


static int constfolding (OpCode op, expdesc *e1, expdesc *e2) {
  lua_Number r;
  if (!isnumeral(e1) || !isnumeral(e2)) return 0;
  if ((op == OP_DIV || op == OP_MOD) && e2->u.nval == 0)
    return 0;  /* do not attempt to divide by 0 */
  r = luaO_arith(op - OP_ADD + LUA_OPADD, e1->u.nval, e2->u.nval);
  e1->u.nval = r;
  return 1;
}


static void codearith (FuncState *fs, OpCode op,
                       expdesc *e1, expdesc *e2, int line) {
  if (constfolding(op, e1, e2))
    return;
  else {
    int o2 = (op != OP_UNM && op != OP_LEN) ? luaK_exp2RK(fs, e2) : 0;
    int o1 = luaK_exp2RK(fs, e1);
    if (o1 > o2) {
      freeexp(fs, e1);
      freeexp(fs, e2);
    }
    else {
      freeexp(fs, e2);
      freeexp(fs, e1);
    }
    e1->u.info = luaK_codeABC(fs, op, 0, o1, o2);
    e1->k = VRELOCABLE;
    luaK_fixline(fs, line);
  }
}


static void codecomp (FuncState *fs, OpCode op, int cond, expdesc *e1,
                      expdesc *e2) {
  int o1 = luaK_exp2RK(fs, e1);
  int o2 = luaK_exp2RK(fs, e2);
  freeexp(fs, e2);
  freeexp(fs, e1);
  if (cond == 0 && op != OP_EQ) {
    int temp;  /* exchange args to replace by `<' or `<=' */
    temp = o1; o1 = o2; o2 = temp;  /* o1 <==> o2 */
    cond = 1;
  }
  e1->u.info = condjump(fs, op, cond, o1, o2);
  e1->k = VJMP;
}


void luaK_prefix (FuncState *fs, UnOpr op, expdesc *e, int line) {
  expdesc e2;
  e2.t = e2.f = NO_JUMP; e2.k = VKNUM; e2.u.nval = 0;
  switch (op) {
    case OPR_MINUS: {
      if (isnumeral(e))  /* minus constant? */
        e->u.nval = luai_numunm(NULL, e->u.nval);  /* fold it */
      else {
        luaK_exp2anyreg(fs, e);
        codearith(fs, OP_UNM, e, &e2, line);
      }
      break;
    }
    case OPR_NOT: codenot(fs, e); break;
    case OPR_LEN: {
      luaK_exp2anyreg(fs, e);  /* cannot operate on constants */
      codearith(fs, OP_LEN, e, &e2, line);
      break;
    }
    default: lua_assert(0);
  }
}


void luaK_infix (FuncState *fs, BinOpr op, expdesc *v) {
  switch (op) {
    case OPR_AND: {
      luaK_goiftrue(fs, v);
      break;
    }
    case OPR_OR: {
      luaK_goiffalse(fs, v);
      break;
    }
    case OPR_CONCAT: {
      luaK_exp2nextreg(fs, v);  /* operand must be on the `stack' */
      break;
    }
    case OPR_ADD: case OPR_SUB: case OPR_MUL: case OPR_DIV:
    case OPR_MOD: case OPR_POW: {
      if (!isnumeral(v)) luaK_exp2RK(fs, v);
      break;
    }
    default: {
      luaK_exp2RK(fs, v);
      break;
    }
  }
}


void luaK_posfix (FuncState *fs, BinOpr op,
                  expdesc *e1, expdesc *e2, int line) {
  switch (op) {
    case OPR_AND: {
      lua_assert(e1->t == NO_JUMP);  /* list must be closed */
      luaK_dischargevars(fs, e2);
      luaK_concat(fs, &e2->f, e1->f);
      *e1 = *e2;
      break;
    }
    case OPR_OR: {
      lua_assert(e1->f == NO_JUMP);  /* list must be closed */
      luaK_dischargevars(fs, e2);
      luaK_concat(fs, &e2->t, e1->t);
      *e1 = *e2;
      break;
    }
    case OPR_CONCAT: {
      luaK_exp2val(fs, e2);
      if (e2->k == VRELOCABLE && GET_OPCODE(getcode(fs, e2)) == OP_CONCAT) {
        lua_assert(e1->u.info == GETARG_B(getcode(fs, e2))-1);
        freeexp(fs, e1);
        SETARG_B(getcode(fs, e2), e1->u.info);
        e1->k = VRELOCABLE; e1->u.info = e2->u.info;
      }
      else {
        luaK_exp2nextreg(fs, e2);  /* operand must be on the 'stack' */
        codearith(fs, OP_CONCAT, e1, e2, line);
      }
      break;
    }
    case OPR_ADD: case OPR_SUB: case OPR_MUL: case OPR_DIV:
    case OPR_MOD: case OPR_POW: {
      codearith(fs, cast(OpCode, op - OPR_ADD + OP_ADD), e1, e2, line);
      break;
    }
    case OPR_EQ: case OPR_LT: case OPR_LE: {
      codecomp(fs, cast(OpCode, op - OPR_EQ + OP_EQ), 1, e1, e2);
      break;
    }
    case OPR_NE: case OPR_GT: case OPR_GE: {
      codecomp(fs, cast(OpCode, op - OPR_NE + OP_EQ), 0, e1, e2);
      break;
    }
    default: lua_assert(0);
  }
}


void luaK_fixline (FuncState *fs, int line) {
  fs->f->lineinfo[fs->pc - 1] = line;
}


void luaK_setlist (FuncState *fs, int base, int nelems, int tostore) {
  int c = (nelems - 1)/LFIELDS_PER_FLUSH + 1;
  int b = (tostore == LUA_MULTRET) ? 0 : tostore;
  lua_assert(tostore != 0);
  if (c <= MAXARG_C)
    luaK_codeABC(fs, OP_SETLIST, base, b, c);
  else if (c <= MAXARG_Ax) {
    luaK_codeABC(fs, OP_SETLIST, base, b, 0);
    codeextraarg(fs, c);
  }
  else
    luaX_syntaxerror(fs->ls, "constructor too long");
  fs->freereg = (lu_byte)base + 1;  /* free registers with list values */
}
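One detail of `lcode.c` that is easy to miss is how signed jump offsets are stored: `fixjump` writes `dest - (pc + 1)` into the sBx field, and the `luaK_codeAsBx` macro (declared in `lcode.h` below) biases that signed value by `MAXARG_sBx` so it fits an unsigned bitfield. The standalone sketch below is not part of this diff; the 18-bit field width matches stock Lua 5.2 but is used here only for illustration of the excess-K round trip and of why `NO_JUMP` (-1) marks the end of a patch list.

```c
#include <assert.h>
#include <stdio.h>

#define SIZE_Bx_DEMO    18                          /* bits in the Bx field (illustrative) */
#define MAXARG_Bx_DEMO  ((1 << SIZE_Bx_DEMO) - 1)
#define MAXARG_sBx_DEMO (MAXARG_Bx_DEMO >> 1)       /* the excess-K bias */

/* encode a signed jump offset into the unsigned Bx field */
static unsigned encode_sBx(int offset) { return (unsigned)(offset + MAXARG_sBx_DEMO); }
/* decode it back; GETARG_sBx performs the same subtraction */
static int decode_sBx(unsigned bx) { return (int)bx - MAXARG_sBx_DEMO; }

int main(void) {
  /* a jump at pc that lands on dest stores dest-(pc+1), cf. fixjump() */
  int pc = 10, dest = 4;
  unsigned field = encode_sBx(dest - (pc + 1));
  assert(decode_sBx(field) == -7);
  /* an offset of NO_JUMP (-1) targets pc+1-1 == pc, i.e. the jump points to
     itself, which getjump() treats as the end of a patch list */
  printf("offset=%d target=%d\n", decode_sBx(field), pc + 1 + decode_sBx(field));
  return 0;
}
```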
83 AppPkg/Applications/Lua/src/lcode.h Normal file
@ -0,0 +1,83 @@
/*
** $Id: lcode.h,v 1.58.1.1 2013/04/12 18:48:47 roberto Exp $
** Code generator for Lua
** See Copyright Notice in lua.h
*/

#ifndef lcode_h
#define lcode_h

#include "llex.h"
#include "lobject.h"
#include "lopcodes.h"
#include "lparser.h"


/*
** Marks the end of a patch list. It is an invalid value both as an absolute
** address, and as a list link (would link an element to itself).
*/
#define NO_JUMP (-1)


/*
** grep "ORDER OPR" if you change these enums  (ORDER OP)
*/
typedef enum BinOpr {
  OPR_ADD, OPR_SUB, OPR_MUL, OPR_DIV, OPR_MOD, OPR_POW,
  OPR_CONCAT,
  OPR_EQ, OPR_LT, OPR_LE,
  OPR_NE, OPR_GT, OPR_GE,
  OPR_AND, OPR_OR,
  OPR_NOBINOPR
} BinOpr;


typedef enum UnOpr { OPR_MINUS, OPR_NOT, OPR_LEN, OPR_NOUNOPR } UnOpr;


#define getcode(fs,e)  ((fs)->f->code[(e)->u.info])

#define luaK_codeAsBx(fs,o,A,sBx)  luaK_codeABx(fs,o,A,(sBx)+MAXARG_sBx)

#define luaK_setmultret(fs,e)  luaK_setreturns(fs, e, LUA_MULTRET)

#define luaK_jumpto(fs,t)  luaK_patchlist(fs, luaK_jump(fs), t)

LUAI_FUNC int luaK_codeABx (FuncState *fs, OpCode o, int A, unsigned int Bx);
LUAI_FUNC int luaK_codeABC (FuncState *fs, OpCode o, int A, int B, int C);
LUAI_FUNC int luaK_codek (FuncState *fs, int reg, int k);
LUAI_FUNC void luaK_fixline (FuncState *fs, int line);
LUAI_FUNC void luaK_nil (FuncState *fs, int from, int n);
LUAI_FUNC void luaK_reserveregs (FuncState *fs, int n);
LUAI_FUNC void luaK_checkstack (FuncState *fs, int n);
LUAI_FUNC int luaK_stringK (FuncState *fs, TString *s);
LUAI_FUNC int luaK_numberK (FuncState *fs, lua_Number r);
LUAI_FUNC void luaK_dischargevars (FuncState *fs, expdesc *e);
LUAI_FUNC int luaK_exp2anyreg (FuncState *fs, expdesc *e);
LUAI_FUNC void luaK_exp2anyregup (FuncState *fs, expdesc *e);
LUAI_FUNC void luaK_exp2nextreg (FuncState *fs, expdesc *e);
LUAI_FUNC void luaK_exp2val (FuncState *fs, expdesc *e);
LUAI_FUNC int luaK_exp2RK (FuncState *fs, expdesc *e);
LUAI_FUNC void luaK_self (FuncState *fs, expdesc *e, expdesc *key);
LUAI_FUNC void luaK_indexed (FuncState *fs, expdesc *t, expdesc *k);
LUAI_FUNC void luaK_goiftrue (FuncState *fs, expdesc *e);
LUAI_FUNC void luaK_goiffalse (FuncState *fs, expdesc *e);
LUAI_FUNC void luaK_storevar (FuncState *fs, expdesc *var, expdesc *e);
LUAI_FUNC void luaK_setreturns (FuncState *fs, expdesc *e, int nresults);
LUAI_FUNC void luaK_setoneret (FuncState *fs, expdesc *e);
LUAI_FUNC int luaK_jump (FuncState *fs);
LUAI_FUNC void luaK_ret (FuncState *fs, int first, int nret);
LUAI_FUNC void luaK_patchlist (FuncState *fs, int list, int target);
LUAI_FUNC void luaK_patchtohere (FuncState *fs, int list);
LUAI_FUNC void luaK_patchclose (FuncState *fs, int list, int level);
LUAI_FUNC void luaK_concat (FuncState *fs, int *l1, int l2);
LUAI_FUNC int luaK_getlabel (FuncState *fs);
LUAI_FUNC void luaK_prefix (FuncState *fs, UnOpr op, expdesc *v, int line);
LUAI_FUNC void luaK_infix (FuncState *fs, BinOpr op, expdesc *v);
LUAI_FUNC void luaK_posfix (FuncState *fs, BinOpr op, expdesc *v1,
                            expdesc *v2, int line);
LUAI_FUNC void luaK_setlist (FuncState *fs, int base, int nelems, int tostore);


#endif
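The `BinOpr` enum above carries the `grep "ORDER OPR"` warning because `luaK_posfix` converts operators to opcodes purely by offset arithmetic (`cast(OpCode, op - OPR_ADD + OP_ADD)`). The toy sketch below is not taken from this diff; it uses hypothetical `DEMO_` enums with an arbitrary base value to show the invariant that both enumerations must keep the same member order for that mapping to hold.

```c
#include <assert.h>

/* two hypothetical enums kept in the same order, like BinOpr and OpCode */
typedef enum { DEMO_OPR_ADD, DEMO_OPR_SUB, DEMO_OPR_MUL, DEMO_OPR_DIV } DemoBinOpr;
typedef enum { DEMO_OP_ADD = 13, DEMO_OP_SUB, DEMO_OP_MUL, DEMO_OP_DIV } DemoOpCode;

static DemoOpCode arith_opcode(DemoBinOpr op) {
  /* same offset trick as luaK_posfix: valid only while the orders match */
  return (DemoOpCode)(op - DEMO_OPR_ADD + DEMO_OP_ADD);
}

int main(void) {
  assert(arith_opcode(DEMO_OPR_SUB) == DEMO_OP_SUB);
  assert(arith_opcode(DEMO_OPR_DIV) == DEMO_OP_DIV);
  return 0;
}
```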
Some files were not shown because too many files have changed in this diff.