Revert BaseTools: PYTHON3 migration
This reverts commit 6693f359b3c213513c5096a06c6f67244a44dc52..
678f851312
.
The Python3 migration is a fundamental change. It requires every developer
to install Python3. Before this migration, good communication and wide
verification must be done. But currently, most people are not aware of this
change and have not tried it. So, the Python3 migration is reverted and moved
to the edk2-staging Python3 branch for edk2 user evaluation.
Contributed-under: TianoCore Contribution Agreement 1.1
Signed-off-by: Liming Gao <liming.gao@intel.com>
This commit is contained in:
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a $PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a $PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a $PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a $PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a $PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a $PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
#python `dirname $0`/RunToolFromSource.py `basename $0` $*
|
||||||
|
|
||||||
# If a PYTHON3 command is available, use it in preference to python
|
# If a python2 command is available, use it in preference to python
|
||||||
if command -v $PYTHON3 >/dev/null 2>&1; then
|
if command -v python2 >/dev/null 2>&1; then
|
||||||
python_exe=$PYTHON3
|
python_exe=python2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
full_cmd=${BASH_SOURCE:-$0} # see http://mywiki.wooledge.org/BashFAQ/028 for a discussion of why $0 is not a good choice here
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
@setlocal
|
@setlocal
|
||||||
@set ToolName=%~n0%
|
@set ToolName=%~n0%
|
||||||
@set PYTHONPATH=%PYTHONPATH%;%BASE_TOOLS_PATH%\Source\Python
|
@set PYTHONPATH=%PYTHONPATH%;%BASE_TOOLS_PATH%\Source\Python
|
||||||
@%PYTHON3% -m %ToolName%.%ToolName% %*
|
@%PYTHON_HOME%\python.exe -m %ToolName%.%ToolName% %*
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
@setlocal
|
@setlocal
|
||||||
@set ToolName=%~n0%
|
@set ToolName=%~n0%
|
||||||
@set PYTHONPATH=%PYTHONPATH%;%BASE_TOOLS_PATH%\Source\Python
|
@set PYTHONPATH=%PYTHONPATH%;%BASE_TOOLS_PATH%\Source\Python
|
||||||
@%PYTHON3% -m %ToolName%.EccMain %*
|
@%PYTHON_HOME%\python.exe -m %ToolName%.EccMain %*
|
||||||
|
@ -1,3 +1,3 @@
|
|||||||
@setlocal
|
@setlocal
|
||||||
@set ToolName=%~n0%
|
@set ToolName=%~n0%
|
||||||
@%PYTHON3% %BASE_TOOLS_PATH%\Source\Python\AutoGen\%ToolName%.py %*
|
@%PYTHON_HOME%\python.exe %BASE_TOOLS_PATH%\Source\Python\AutoGen\%ToolName%.py %*
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
@setlocal
|
@setlocal
|
||||||
@set ToolName=%~n0%
|
@set ToolName=%~n0%
|
||||||
@set PYTHONPATH=%PYTHONPATH%;%BASE_TOOLS_PATH%\Source\Python
|
@set PYTHONPATH=%PYTHONPATH%;%BASE_TOOLS_PATH%\Source\Python
|
||||||
@%PYTHON3% -m %ToolName%.%ToolName% %*
|
@%PYTHON_HOME%\python.exe -m %ToolName%.%ToolName% %*
|
||||||
|
@ -1,3 +1,3 @@
|
|||||||
@setlocal
|
@setlocal
|
||||||
@set ToolName=%~n0%
|
@set ToolName=%~n0%
|
||||||
@%PYTHON3% %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
@%PYTHON_HOME%\python.exe %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
||||||
|
@ -1 +1 @@
|
|||||||
@%PYTHON3% %BASE_TOOLS_PATH%\Source\Python\Capsule\GenerateCapsule.py %*
|
@%PYTHON_HOME%\python.exe %BASE_TOOLS_PATH%\Source\Python\Capsule\GenerateCapsule.py %*
|
||||||
|
@ -1,3 +1,3 @@
|
|||||||
@setlocal
|
@setlocal
|
||||||
@set ToolName=%~n0%
|
@set ToolName=%~n0%
|
||||||
@%PYTHON3% %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
@%PYTHON_HOME%\python.exe %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
||||||
|
@ -1,3 +1,3 @@
|
|||||||
@setlocal
|
@setlocal
|
||||||
@set ToolName=%~n0%
|
@set ToolName=%~n0%
|
||||||
@%PYTHON3% %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
@%PYTHON_HOME%\python.exe %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
||||||
|
@ -1 +1 @@
|
|||||||
@%PYTHON3% %BASE_TOOLS_PATH%\Source\Python\Rsa2048Sha256Sign\Rsa2048Sha256GenerateKeys.py %*
|
@%PYTHON_HOME%\python.exe %BASE_TOOLS_PATH%\Source\Python\Rsa2048Sha256Sign\Rsa2048Sha256GenerateKeys.py %*
|
||||||
|
@ -1,3 +1,3 @@
|
|||||||
@setlocal
|
@setlocal
|
||||||
@set ToolName=%~n0%
|
@set ToolName=%~n0%
|
||||||
@%PYTHON3% %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
@%PYTHON_HOME%\python.exe %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
||||||
|
@ -1,3 +1,3 @@
|
|||||||
@setlocal
|
@setlocal
|
||||||
@set ToolName=%~n0%
|
@set ToolName=%~n0%
|
||||||
@%PYTHON3% %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
@%PYTHON_HOME%\python.exe %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
||||||
|
@ -1,3 +1,3 @@
|
|||||||
@setlocal
|
@setlocal
|
||||||
@set ToolName=%~n0%
|
@set ToolName=%~n0%
|
||||||
@%PYTHON3% %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
@%PYTHON_HOME%\python.exe %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
||||||
|
@ -1,3 +1,3 @@
|
|||||||
@setlocal
|
@setlocal
|
||||||
@set ToolName=%~n0%
|
@set ToolName=%~n0%
|
||||||
@%PYTHON3% %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
@%PYTHON_HOME%\python.exe %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
||||||
|
@ -1,3 +1,3 @@
|
|||||||
@setlocal
|
@setlocal
|
||||||
@set ToolName=%~n0%
|
@set ToolName=%~n0%
|
||||||
@%PYTHON3% %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
@%PYTHON_HOME%\python.exe %BASE_TOOLS_PATH%\Source\Python\%ToolName%\%ToolName%.py %*
|
||||||
|
@ -15,24 +15,24 @@
|
|||||||
!ERROR "BASE_TOOLS_PATH is not set! Please run toolsetup.bat first!"
|
!ERROR "BASE_TOOLS_PATH is not set! Please run toolsetup.bat first!"
|
||||||
!ENDIF
|
!ENDIF
|
||||||
|
|
||||||
SUBDIRS = $(BASE_TOOLS_PATH)\Source\C
|
SUBDIRS = $(BASE_TOOLS_PATH)\Source\C $(BASE_TOOLS_PATH)\Source\Python
|
||||||
|
|
||||||
all: c
|
all: c python
|
||||||
|
|
||||||
c :
|
c :
|
||||||
@$(PYTHON3) $(BASE_TOOLS_PATH)\Source\C\Makefiles\NmakeSubdirs.py all $(BASE_TOOLS_PATH)\Source\C
|
@$(PYTHON_HOME)\python.exe $(BASE_TOOLS_PATH)\Source\C\Makefiles\NmakeSubdirs.py all $(BASE_TOOLS_PATH)\Source\C
|
||||||
|
|
||||||
python:
|
python:
|
||||||
@$(PYTHON3) $(BASE_TOOLS_PATH)\Source\C\Makefiles\NmakeSubdirs.py all $(BASE_TOOLS_PATH)\Source\Python
|
@$(PYTHON_HOME)\python.exe $(BASE_TOOLS_PATH)\Source\C\Makefiles\NmakeSubdirs.py all $(BASE_TOOLS_PATH)\Source\Python
|
||||||
|
|
||||||
subdirs: $(SUBDIRS)
|
subdirs: $(SUBDIRS)
|
||||||
@$(PYTHON3) $(BASE_TOOLS_PATH)\Source\C\Makefiles\NmakeSubdirs.py all $**
|
@$(PYTHON_HOME)\python.exe $(BASE_TOOLS_PATH)\Source\C\Makefiles\NmakeSubdirs.py all $**
|
||||||
|
|
||||||
.PHONY: clean
|
.PHONY: clean
|
||||||
clean:
|
clean:
|
||||||
$(PYTHON3) $(BASE_TOOLS_PATH)\Source\C\Makefiles\NmakeSubdirs.py clean $(SUBDIRS)
|
$(PYTHON_HOME)\python.exe $(BASE_TOOLS_PATH)\Source\C\Makefiles\NmakeSubdirs.py clean $(SUBDIRS)
|
||||||
|
|
||||||
.PHONY: cleanall
|
.PHONY: cleanall
|
||||||
cleanall:
|
cleanall:
|
||||||
$(PYTHON3) $(BASE_TOOLS_PATH)\Source\C\Makefiles\NmakeSubdirs.py cleanall $(SUBDIRS)
|
$(PYTHON_HOME)\python.exe $(BASE_TOOLS_PATH)\Source\C\Makefiles\NmakeSubdirs.py cleanall $(SUBDIRS)
|
||||||
|
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
Prerequisite Tools:
|
Prerequisite Tools:
|
||||||
1. Install Python 3.6.6 from https://www.python.org/downloads/release/python-366/
|
1. Install Python 2.7.3 from https://www.python.org/download/releases/2.7.3/
|
||||||
2. Install wxPython 4.0.3 from https://pypi.org/project/wxPython/#files
|
2. Install wxPython 2.8.12.1 from https://sourceforge.net/projects/wxpython/files/wxPython/2.8.12.1/
|
||||||
generally the libraries will be installed at python's subfolder, for example in windows: c:\python27\Lib\site-packages\
|
generally the libraries will be installed at python's subfolder, for example in windows: c:\python27\Lib\site-packages\
|
||||||
3. Install DoxyGen 1.8.6 from https://sourceforge.net/projects/doxygen/files/rel-1.8.6/
|
3. Install DoxyGen 1.8.6 from https://sourceforge.net/projects/doxygen/files/rel-1.8.6/
|
||||||
4. (Windows only) Install Htmlhelp tool from https://msdn.microsoft.com/en-us/library/windows/desktop/ms669985(v=vs.85).aspx
|
4. (Windows only) Install Htmlhelp tool from https://msdn.microsoft.com/en-us/library/windows/desktop/ms669985(v=vs.85).aspx
|
||||||
|
@ -12,11 +12,12 @@
|
|||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
#
|
#
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
import os, sys, logging, traceback, subprocess
|
import os, sys, logging, traceback, subprocess
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
|
|
||||||
from .plugins.EdkPlugins.edk2.model import baseobject
|
import plugins.EdkPlugins.edk2.model.baseobject as baseobject
|
||||||
from .plugins.EdkPlugins.edk2.model import doxygengen
|
import plugins.EdkPlugins.edk2.model.doxygengen as doxygengen
|
||||||
|
|
||||||
gArchMarcoDict = {'ALL' : 'MDE_CPU_IA32 MDE_CPU_X64 MDE_CPU_EBC MDE_CPU_IPF _MSC_EXTENSIONS __GNUC__ __INTEL_COMPILER',
|
gArchMarcoDict = {'ALL' : 'MDE_CPU_IA32 MDE_CPU_X64 MDE_CPU_EBC MDE_CPU_IPF _MSC_EXTENSIONS __GNUC__ __INTEL_COMPILER',
|
||||||
'IA32_MSFT': 'MDE_CPU_IA32 _MSC_EXTENSIONS',
|
'IA32_MSFT': 'MDE_CPU_IA32 _MSC_EXTENSIONS',
|
||||||
@ -37,7 +38,7 @@ def parseCmdArgs():
|
|||||||
help='Specify the absolute path of doxygen tools installation. For example: C:\\Program Files\\doxygen\bin\doxygen.exe')
|
help='Specify the absolute path of doxygen tools installation. For example: C:\\Program Files\\doxygen\bin\doxygen.exe')
|
||||||
parser.add_option('-o', '--output', action='store', dest='OutputPath',
|
parser.add_option('-o', '--output', action='store', dest='OutputPath',
|
||||||
help='Specify the document output path. For example: c:\\docoutput')
|
help='Specify the document output path. For example: c:\\docoutput')
|
||||||
parser.add_option('-a', '--arch', action='store', dest='Arch', choices=list(gArchMarcoDict.keys()),
|
parser.add_option('-a', '--arch', action='store', dest='Arch', choices=gArchMarcoDict.keys(),
|
||||||
help='Specify the architecture used in preprocess package\'s source. For example: -a IA32_MSFT')
|
help='Specify the architecture used in preprocess package\'s source. For example: -a IA32_MSFT')
|
||||||
parser.add_option('-m', '--mode', action='store', dest='DocumentMode', choices=['CHM', 'HTML'],
|
parser.add_option('-m', '--mode', action='store', dest='DocumentMode', choices=['CHM', 'HTML'],
|
||||||
help='Specify the document mode from : CHM or HTML')
|
help='Specify the document mode from : CHM or HTML')
|
||||||
|
@ -18,8 +18,8 @@ import os, sys, wx, logging
|
|||||||
import wx.stc
|
import wx.stc
|
||||||
import wx.lib.newevent
|
import wx.lib.newevent
|
||||||
import wx.lib.agw.genericmessagedialog as GMD
|
import wx.lib.agw.genericmessagedialog as GMD
|
||||||
from plugins.EdkPlugins.edk2.model import baseobject
|
import plugins.EdkPlugins.edk2.model.baseobject as baseobject
|
||||||
from plugins.EdkPlugins.edk2.model import doxygengen
|
import plugins.EdkPlugins.edk2.model.doxygengen as doxygengen
|
||||||
|
|
||||||
if hasattr(sys, "frozen"):
|
if hasattr(sys, "frozen"):
|
||||||
appPath = os.path.abspath(os.path.dirname(sys.executable))
|
appPath = os.path.abspath(os.path.dirname(sys.executable))
|
||||||
@ -42,7 +42,7 @@ class PackageDocApp(wx.App):
|
|||||||
|
|
||||||
frame.Show(True)
|
frame.Show(True)
|
||||||
|
|
||||||
self.Bind(EVT_APP_CALLBACK, self.OnAppCallBack)
|
EVT_APP_CALLBACK( self, self.OnAppCallBack)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def GetLogger(self):
|
def GetLogger(self):
|
||||||
@ -60,13 +60,13 @@ class PackageDocApp(wx.App):
|
|||||||
|
|
||||||
class PackageDocMainFrame(wx.Frame):
|
class PackageDocMainFrame(wx.Frame):
|
||||||
def __init__(self, parent, title):
|
def __init__(self, parent, title):
|
||||||
wx.Frame.__init__(self, parent, -1, title, size=(550, 350), style=wx.MINIMIZE_BOX|wx.SYSTEM_MENU|wx.CAPTION|wx.CLOSE_BOX )
|
wx.Frame.__init__(self, parent, -1, title, size=(550, 290), style=wx.MINIMIZE_BOX|wx.SYSTEM_MENU|wx.CAPTION|wx.CLOSE_BOX )
|
||||||
|
|
||||||
panel = wx.Panel(self)
|
panel = wx.Panel(self)
|
||||||
sizer = wx.BoxSizer(wx.VERTICAL)
|
sizer = wx.BoxSizer(wx.VERTICAL)
|
||||||
|
|
||||||
subsizer = wx.GridBagSizer(5, 10)
|
subsizer = wx.GridBagSizer(5, 10)
|
||||||
subsizer.AddGrowableCol(0)
|
subsizer.AddGrowableCol(1)
|
||||||
subsizer.Add(wx.StaticText(panel, -1, "Workspace Location : "), (0, 0), flag=wx.ALIGN_CENTER_VERTICAL)
|
subsizer.Add(wx.StaticText(panel, -1, "Workspace Location : "), (0, 0), flag=wx.ALIGN_CENTER_VERTICAL)
|
||||||
self._workspacePathCtrl = wx.ComboBox(panel, -1)
|
self._workspacePathCtrl = wx.ComboBox(panel, -1)
|
||||||
list = self.GetConfigure("WorkspacePath")
|
list = self.GetConfigure("WorkspacePath")
|
||||||
@ -76,9 +76,9 @@ class PackageDocMainFrame(wx.Frame):
|
|||||||
self._workspacePathCtrl.SetValue(list[len(list) - 1])
|
self._workspacePathCtrl.SetValue(list[len(list) - 1])
|
||||||
|
|
||||||
subsizer.Add(self._workspacePathCtrl, (0, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
|
subsizer.Add(self._workspacePathCtrl, (0, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
|
||||||
self._workspacePathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider.GetBitmap(wx.ART_FILE_OPEN))
|
self._workspacePathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider_GetBitmap(wx.ART_FILE_OPEN))
|
||||||
subsizer.Add(self._workspacePathBt, (0, 2), flag=wx.ALIGN_CENTER_VERTICAL)
|
subsizer.Add(self._workspacePathBt, (0, 2), flag=wx.ALIGN_CENTER_VERTICAL)
|
||||||
self.Bind(wx.EVT_BUTTON, self.OnBrowsePath, self._workspacePathBt)
|
wx.EVT_BUTTON(self._workspacePathBt, self._workspacePathBt.GetId(), self.OnBrowsePath)
|
||||||
|
|
||||||
subsizer.Add(wx.StaticText(panel, -1, "Package DEC Location : "), (1, 0), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
|
subsizer.Add(wx.StaticText(panel, -1, "Package DEC Location : "), (1, 0), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
|
||||||
self._packagePathCtrl = wx.ComboBox(panel, -1)
|
self._packagePathCtrl = wx.ComboBox(panel, -1)
|
||||||
@ -88,9 +88,9 @@ class PackageDocMainFrame(wx.Frame):
|
|||||||
self._packagePathCtrl.Append(item)
|
self._packagePathCtrl.Append(item)
|
||||||
self._packagePathCtrl.SetValue(list[len(list) - 1])
|
self._packagePathCtrl.SetValue(list[len(list) - 1])
|
||||||
subsizer.Add(self._packagePathCtrl, (1, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
|
subsizer.Add(self._packagePathCtrl, (1, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
|
||||||
self._packagePathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider.GetBitmap(wx.ART_FILE_OPEN))
|
self._packagePathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider_GetBitmap(wx.ART_FILE_OPEN))
|
||||||
subsizer.Add(self._packagePathBt, (1, 2), flag=wx.ALIGN_CENTER_VERTICAL)
|
subsizer.Add(self._packagePathBt, (1, 2), flag=wx.ALIGN_CENTER_VERTICAL)
|
||||||
self.Bind(wx.EVT_BUTTON, self.OnBrowsePath, self._packagePathBt)
|
wx.EVT_BUTTON(self._packagePathBt, self._packagePathBt.GetId(), self.OnBrowsePath)
|
||||||
|
|
||||||
subsizer.Add(wx.StaticText(panel, -1, "Doxygen Tool Location : "), (2, 0), flag=wx.ALIGN_CENTER_VERTICAL)
|
subsizer.Add(wx.StaticText(panel, -1, "Doxygen Tool Location : "), (2, 0), flag=wx.ALIGN_CENTER_VERTICAL)
|
||||||
self._doxygenPathCtrl = wx.TextCtrl(panel, -1)
|
self._doxygenPathCtrl = wx.TextCtrl(panel, -1)
|
||||||
@ -103,10 +103,10 @@ class PackageDocMainFrame(wx.Frame):
|
|||||||
else:
|
else:
|
||||||
self._doxygenPathCtrl.SetValue('/usr/bin/doxygen')
|
self._doxygenPathCtrl.SetValue('/usr/bin/doxygen')
|
||||||
|
|
||||||
self._doxygenPathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider.GetBitmap(wx.ART_FILE_OPEN))
|
self._doxygenPathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider_GetBitmap(wx.ART_FILE_OPEN))
|
||||||
subsizer.Add(self._doxygenPathCtrl, (2, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
|
subsizer.Add(self._doxygenPathCtrl, (2, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
|
||||||
subsizer.Add(self._doxygenPathBt, (2, 2), flag=wx.ALIGN_CENTER_VERTICAL)
|
subsizer.Add(self._doxygenPathBt, (2, 2), flag=wx.ALIGN_CENTER_VERTICAL)
|
||||||
self.Bind(wx.EVT_BUTTON, self.OnBrowsePath, self._doxygenPathBt)
|
wx.EVT_BUTTON(self._doxygenPathBt, self._doxygenPathBt.GetId(), self.OnBrowsePath)
|
||||||
|
|
||||||
subsizer.Add(wx.StaticText(panel, -1, "CHM Tool Location : "), (3, 0), flag=wx.ALIGN_CENTER_VERTICAL)
|
subsizer.Add(wx.StaticText(panel, -1, "CHM Tool Location : "), (3, 0), flag=wx.ALIGN_CENTER_VERTICAL)
|
||||||
self._chmPathCtrl = wx.TextCtrl(panel, -1)
|
self._chmPathCtrl = wx.TextCtrl(panel, -1)
|
||||||
@ -116,10 +116,10 @@ class PackageDocMainFrame(wx.Frame):
|
|||||||
else:
|
else:
|
||||||
self._chmPathCtrl.SetValue('C:\\Program Files\\HTML Help Workshop\\hhc.exe')
|
self._chmPathCtrl.SetValue('C:\\Program Files\\HTML Help Workshop\\hhc.exe')
|
||||||
|
|
||||||
self._chmPathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider.GetBitmap(wx.ART_FILE_OPEN))
|
self._chmPathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider_GetBitmap(wx.ART_FILE_OPEN))
|
||||||
subsizer.Add(self._chmPathCtrl, (3, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
|
subsizer.Add(self._chmPathCtrl, (3, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
|
||||||
subsizer.Add(self._chmPathBt, (3, 2), flag=wx.ALIGN_CENTER_VERTICAL)
|
subsizer.Add(self._chmPathBt, (3, 2), flag=wx.ALIGN_CENTER_VERTICAL)
|
||||||
self.Bind(wx.EVT_BUTTON, self.OnBrowsePath, self._chmPathBt)
|
wx.EVT_BUTTON(self._chmPathBt, self._chmPathBt.GetId(), self.OnBrowsePath)
|
||||||
|
|
||||||
subsizer.Add(wx.StaticText(panel, -1, "Output Location : "), (4, 0), flag=wx.ALIGN_CENTER_VERTICAL)
|
subsizer.Add(wx.StaticText(panel, -1, "Output Location : "), (4, 0), flag=wx.ALIGN_CENTER_VERTICAL)
|
||||||
self._outputPathCtrl = wx.ComboBox(panel, -1)
|
self._outputPathCtrl = wx.ComboBox(panel, -1)
|
||||||
@ -130,9 +130,9 @@ class PackageDocMainFrame(wx.Frame):
|
|||||||
self._outputPathCtrl.SetValue(list[len(list) - 1])
|
self._outputPathCtrl.SetValue(list[len(list) - 1])
|
||||||
|
|
||||||
subsizer.Add(self._outputPathCtrl, (4, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
|
subsizer.Add(self._outputPathCtrl, (4, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
|
||||||
self._outputPathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider.GetBitmap(wx.ART_FILE_OPEN))
|
self._outputPathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider_GetBitmap(wx.ART_FILE_OPEN))
|
||||||
subsizer.Add(self._outputPathBt, (4, 2), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
|
subsizer.Add(self._outputPathBt, (4, 2), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
|
||||||
self.Bind(wx.EVT_BUTTON, self.OnBrowsePath, self._outputPathBt)
|
wx.EVT_BUTTON(self._outputPathBt, self._outputPathBt.GetId(), self.OnBrowsePath)
|
||||||
|
|
||||||
subsizer.Add(wx.StaticText(panel, -1, "Architecture Specified : "), (5, 0), flag=wx.ALIGN_CENTER_VERTICAL)
|
subsizer.Add(wx.StaticText(panel, -1, "Architecture Specified : "), (5, 0), flag=wx.ALIGN_CENTER_VERTICAL)
|
||||||
self._archCtrl = wx.ComboBox(panel, -1, value='ALL', choices=['ALL', 'IA32/MSFT', 'IA32/GNU', 'X64/INTEL', 'X64/GNU', 'IPF/MSFT', 'IPF/GNU', 'EBC/INTEL'],
|
self._archCtrl = wx.ComboBox(panel, -1, value='ALL', choices=['ALL', 'IA32/MSFT', 'IA32/GNU', 'X64/INTEL', 'X64/GNU', 'IPF/MSFT', 'IPF/GNU', 'EBC/INTEL'],
|
||||||
@ -164,7 +164,7 @@ class PackageDocMainFrame(wx.Frame):
|
|||||||
def SaveConfigure(self, name, value):
|
def SaveConfigure(self, name, value):
|
||||||
if value ==None or len(value) == 0:
|
if value ==None or len(value) == 0:
|
||||||
return
|
return
|
||||||
config = wx.ConfigBase.Get()
|
config = wx.ConfigBase_Get()
|
||||||
oldvalues = config.Read(name, '').split(';')
|
oldvalues = config.Read(name, '').split(';')
|
||||||
if len(oldvalues) >= 10:
|
if len(oldvalues) >= 10:
|
||||||
oldvalues.remove(oldvalues[0])
|
oldvalues.remove(oldvalues[0])
|
||||||
@ -177,7 +177,7 @@ class PackageDocMainFrame(wx.Frame):
|
|||||||
config.Write(name, ';'.join(oldvalues))
|
config.Write(name, ';'.join(oldvalues))
|
||||||
|
|
||||||
def GetConfigure(self, name):
|
def GetConfigure(self, name):
|
||||||
config = wx.ConfigBase.Get()
|
config = wx.ConfigBase_Get()
|
||||||
values = config.Read(name, '').split(';')
|
values = config.Read(name, '').split(';')
|
||||||
list = []
|
list = []
|
||||||
for item in values:
|
for item in values:
|
||||||
@ -416,10 +416,10 @@ class ProgressDialog(wx.Dialog):
|
|||||||
logging.getLogger('').addHandler(self._loghandle)
|
logging.getLogger('').addHandler(self._loghandle)
|
||||||
logging.getLogger('app').addHandler(self._loghandle)
|
logging.getLogger('app').addHandler(self._loghandle)
|
||||||
|
|
||||||
self.Bind(wx.EVT_BUTTON, self.OnButtonClose, self._closeBt)
|
wx.EVT_BUTTON(self._closeBt, self._closeBt.GetId(), self.OnButtonClose)
|
||||||
self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateCloseButton)
|
wx.EVT_UPDATE_UI(self, self._closeBt.GetId(), self.OnUpdateCloseButton)
|
||||||
self.Bind(wx.EVT_BUTTON, self.OnGotoOutput, self._gotoOuputBt)
|
wx.EVT_BUTTON(self._gotoOuputBt, self._gotoOuputBt.GetId(), self.OnGotoOutput)
|
||||||
self.Bind(EVT_LOG, self.OnPostLog)
|
EVT_LOG(self, self.OnPostLog)
|
||||||
|
|
||||||
self._process = None
|
self._process = None
|
||||||
self._pid = None
|
self._pid = None
|
||||||
@ -720,7 +720,7 @@ class ProgressDialog(wx.Dialog):
|
|||||||
lines = f.readlines()
|
lines = f.readlines()
|
||||||
f.close()
|
f.close()
|
||||||
bfound = False
|
bfound = False
|
||||||
for index in range(len(lines)):
|
for index in xrange(len(lines)):
|
||||||
if lines[index].find('<a class="el" href="files.html" target="basefrm">File List</a>') != -1:
|
if lines[index].find('<a class="el" href="files.html" target="basefrm">File List</a>') != -1:
|
||||||
lines[index] = "<!-- %s" % lines[index]
|
lines[index] = "<!-- %s" % lines[index]
|
||||||
bfound = True
|
bfound = True
|
||||||
@ -745,7 +745,7 @@ class ProgressDialog(wx.Dialog):
|
|||||||
lines = f.readlines()
|
lines = f.readlines()
|
||||||
f.close()
|
f.close()
|
||||||
bfound = False
|
bfound = False
|
||||||
for index in range(len(lines)):
|
for index in xrange(len(lines)):
|
||||||
if not bfound:
|
if not bfound:
|
||||||
if lines[index].find('<param name="Local" value="files.html">') != -1:
|
if lines[index].find('<param name="Local" value="files.html">') != -1:
|
||||||
lines[index] = '<!-- %s' % lines[index]
|
lines[index] = '<!-- %s' % lines[index]
|
||||||
@ -969,7 +969,7 @@ class ProgressDialog(wx.Dialog):
|
|||||||
fd = open(path, 'r')
|
fd = open(path, 'r')
|
||||||
text = fd.read()
|
text = fd.read()
|
||||||
fd.close()
|
fd.close()
|
||||||
except Exception as e:
|
except Exception, e:
|
||||||
self.LogMessage (" <<<Fail to open file %s" % path)
|
self.LogMessage (" <<<Fail to open file %s" % path)
|
||||||
return
|
return
|
||||||
text = text.replace ('.s.dox', '.s')
|
text = text.replace ('.s.dox', '.s')
|
||||||
@ -982,7 +982,7 @@ class ProgressDialog(wx.Dialog):
|
|||||||
fd = open(path, 'w')
|
fd = open(path, 'w')
|
||||||
fd.write(text)
|
fd.write(text)
|
||||||
fd.close()
|
fd.close()
|
||||||
except Exception as e:
|
except Exception, e:
|
||||||
self.LogMessage (" <<<Fail to fixup file %s" % path)
|
self.LogMessage (" <<<Fail to fixup file %s" % path)
|
||||||
return
|
return
|
||||||
self.LogMessage(' >>> Finish to fixup .dox postfix for file %s \n' % path)
|
self.LogMessage(' >>> Finish to fixup .dox postfix for file %s \n' % path)
|
||||||
@ -993,7 +993,7 @@ class ProgressDialog(wx.Dialog):
|
|||||||
fd = open(path, 'r')
|
fd = open(path, 'r')
|
||||||
lines = fd.readlines()
|
lines = fd.readlines()
|
||||||
fd.close()
|
fd.close()
|
||||||
except Exception as e:
|
except Exception, e:
|
||||||
self.LogMessage (" <<<Fail to open file %s" % path)
|
self.LogMessage (" <<<Fail to open file %s" % path)
|
||||||
return
|
return
|
||||||
for line in lines:
|
for line in lines:
|
||||||
@ -1004,7 +1004,7 @@ class ProgressDialog(wx.Dialog):
|
|||||||
fd = open(path, 'w')
|
fd = open(path, 'w')
|
||||||
fd.write("".join(lines))
|
fd.write("".join(lines))
|
||||||
fd.close()
|
fd.close()
|
||||||
except Exception as e:
|
except Exception, e:
|
||||||
self.LogMessage (" <<<Fail to fixup file %s" % path)
|
self.LogMessage (" <<<Fail to fixup file %s" % path)
|
||||||
return
|
return
|
||||||
self.LogMessage(' >>> Finish to fixup .decdoxygen postfix for file %s \n' % path)
|
self.LogMessage(' >>> Finish to fixup .decdoxygen postfix for file %s \n' % path)
|
||||||
|
@ -11,6 +11,8 @@
|
|||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
#
|
#
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
from __future__ import absolute_import
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from .message import *
|
from .message import *
|
||||||
@ -65,7 +67,7 @@ class Page(BaseDoxygeItem):
|
|||||||
|
|
||||||
def AddSection(self, section):
|
def AddSection(self, section):
|
||||||
self.mSections.append(section)
|
self.mSections.append(section)
|
||||||
self.mSections.sort(key=lambda x: x.mName.lower())
|
self.mSections.sort(cmp=lambda x, y: cmp(x.mName.lower(), y.mName.lower()))
|
||||||
|
|
||||||
def Generate(self):
|
def Generate(self):
|
||||||
if self.mIsMainPage:
|
if self.mIsMainPage:
|
||||||
@ -78,7 +80,7 @@ class Page(BaseDoxygeItem):
|
|||||||
self.mText.append(self.mDescription)
|
self.mText.append(self.mDescription)
|
||||||
endIndex = len(self.mText)
|
endIndex = len(self.mText)
|
||||||
|
|
||||||
self.mSections.sort(key=lambda x: x.mName.lower())
|
self.mSections.sort()
|
||||||
for sect in self.mSections:
|
for sect in self.mSections:
|
||||||
self.mText += sect.Generate()
|
self.mText += sect.Generate()
|
||||||
|
|
||||||
@ -90,7 +92,7 @@ class Page(BaseDoxygeItem):
|
|||||||
self.mText.insert(endIndex, '<ul>')
|
self.mText.insert(endIndex, '<ul>')
|
||||||
endIndex += 1
|
endIndex += 1
|
||||||
if self.mIsSort:
|
if self.mIsSort:
|
||||||
self.mSubPages.sort(key=lambda x: x.mName.lower())
|
self.mSubPages.sort(cmp=lambda x, y: cmp(x.mName.lower(), y.mName.lower()))
|
||||||
for page in self.mSubPages:
|
for page in self.mSubPages:
|
||||||
self.mText.insert(endIndex, '<li>\subpage %s \"%s\" </li>' % (page.mTag, page.mName))
|
self.mText.insert(endIndex, '<li>\subpage %s \"%s\" </li>' % (page.mTag, page.mName))
|
||||||
endIndex += 1
|
endIndex += 1
|
||||||
|
@ -11,6 +11,7 @@
|
|||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
#
|
#
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
import array
|
import array
|
||||||
import uuid
|
import uuid
|
||||||
import re
|
import re
|
||||||
|
@ -11,6 +11,7 @@
|
|||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
#
|
#
|
||||||
|
|
||||||
|
from __future__ import absolute_import
|
||||||
from .message import *
|
from .message import *
|
||||||
import re
|
import re
|
||||||
import os
|
import os
|
||||||
@ -24,7 +25,7 @@ class BaseINIFile(object):
|
|||||||
@return: instance of this class
|
@return: instance of this class
|
||||||
|
|
||||||
"""
|
"""
|
||||||
if len(args) == 0: return object.__new__(cls)
|
if len(args) == 0: return object.__new__(cls, *args, **kwargs)
|
||||||
filename = args[0]
|
filename = args[0]
|
||||||
parent = None
|
parent = None
|
||||||
if len(args) > 1:
|
if len(args) > 1:
|
||||||
@ -32,7 +33,7 @@ class BaseINIFile(object):
|
|||||||
|
|
||||||
key = os.path.normpath(filename)
|
key = os.path.normpath(filename)
|
||||||
if key not in cls._objs.keys():
|
if key not in cls._objs.keys():
|
||||||
cls._objs[key] = object.__new__(cls)
|
cls._objs[key] = object.__new__(cls, *args, **kwargs)
|
||||||
|
|
||||||
if parent is not None:
|
if parent is not None:
|
||||||
cls._objs[key].AddParent(parent)
|
cls._objs[key].AddParent(parent)
|
||||||
|
@ -10,12 +10,12 @@
|
|||||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
|
|
||||||
from ...basemodel import ini as ini
|
import plugins.EdkPlugins.basemodel.ini as ini
|
||||||
from ..model import dsc as dsc
|
import plugins.EdkPlugins.edk2.model.dsc as dsc
|
||||||
from ..model import inf as inf
|
import plugins.EdkPlugins.edk2.model.inf as inf
|
||||||
from ..model import dec as dec
|
import plugins.EdkPlugins.edk2.model.dec as dec
|
||||||
import os
|
import os
|
||||||
from ...basemodel.message import *
|
from plugins.EdkPlugins.basemodel.message import *
|
||||||
|
|
||||||
class SurfaceObject(object):
|
class SurfaceObject(object):
|
||||||
_objs = {}
|
_objs = {}
|
||||||
@ -25,7 +25,7 @@ class SurfaceObject(object):
|
|||||||
@return: instance of this class
|
@return: instance of this class
|
||||||
|
|
||||||
"""
|
"""
|
||||||
obj = object.__new__(cls)
|
obj = object.__new__(cls, *args, **kwargs)
|
||||||
if "None" not in cls._objs:
|
if "None" not in cls._objs:
|
||||||
cls._objs["None"] = []
|
cls._objs["None"] = []
|
||||||
cls._objs["None"].append(obj)
|
cls._objs["None"].append(obj)
|
||||||
@ -846,7 +846,7 @@ class SurfaceItem(object):
|
|||||||
ErrorMsg("%s item is duplicated defined in packages: %s and %s" %
|
ErrorMsg("%s item is duplicated defined in packages: %s and %s" %
|
||||||
(name, parent.GetFilename(), cls._objs[name].GetParent().GetFilename()))
|
(name, parent.GetFilename(), cls._objs[name].GetParent().GetFilename()))
|
||||||
return None
|
return None
|
||||||
obj = object.__new__(cls)
|
obj = object.__new__(cls, *args, **kwargs)
|
||||||
cls._objs[name] = obj
|
cls._objs[name] = obj
|
||||||
return obj
|
return obj
|
||||||
elif issubclass(parent.__class__, Module):
|
elif issubclass(parent.__class__, Module):
|
||||||
|
@ -11,9 +11,9 @@
|
|||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
#
|
#
|
||||||
|
|
||||||
from ...basemodel import ini
|
import plugins.EdkPlugins.basemodel.ini as ini
|
||||||
import re, os
|
import re, os
|
||||||
from ...basemodel.message import *
|
from plugins.EdkPlugins.basemodel.message import *
|
||||||
|
|
||||||
class DECFile(ini.BaseINIFile):
|
class DECFile(ini.BaseINIFile):
|
||||||
|
|
||||||
|
@ -16,7 +16,7 @@
|
|||||||
"""This file produce action class to generate doxygen document for edk2 codebase.
|
"""This file produce action class to generate doxygen document for edk2 codebase.
|
||||||
The action classes are shared by GUI and command line tools.
|
The action classes are shared by GUI and command line tools.
|
||||||
"""
|
"""
|
||||||
from ...basemodel import doxygen
|
import plugins.EdkPlugins.basemodel.doxygen as doxygen
|
||||||
import os
|
import os
|
||||||
try:
|
try:
|
||||||
import wx
|
import wx
|
||||||
@ -24,9 +24,9 @@ try:
|
|||||||
except:
|
except:
|
||||||
gInGui = False
|
gInGui = False
|
||||||
import re
|
import re
|
||||||
from ..model import inf
|
import plugins.EdkPlugins.edk2.model.inf as inf
|
||||||
from ..model import dec
|
import plugins.EdkPlugins.edk2.model.dec as dec
|
||||||
from ...basemodel.message import *
|
from plugins.EdkPlugins.basemodel.message import *
|
||||||
|
|
||||||
_ignore_dir = ['.svn', '_svn', 'cvs']
|
_ignore_dir = ['.svn', '_svn', 'cvs']
|
||||||
_inf_key_description_mapping_table = {
|
_inf_key_description_mapping_table = {
|
||||||
@ -386,7 +386,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||||||
configFile.AddFile(path)
|
configFile.AddFile(path)
|
||||||
|
|
||||||
no = 0
|
no = 0
|
||||||
for no in range(len(lines)):
|
for no in xrange(len(lines)):
|
||||||
if len(lines[no].strip()) == 0:
|
if len(lines[no].strip()) == 0:
|
||||||
continue
|
continue
|
||||||
if lines[no].strip()[:2] in ['##', '//', '/*', '*/']:
|
if lines[no].strip()[:2] in ['##', '//', '/*', '*/']:
|
||||||
@ -1000,7 +1000,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||||||
#file = textfile.TextFile(path)
|
#file = textfile.TextFile(path)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
file = open(path, 'r')
|
file = open(path, 'rb')
|
||||||
except (IOError, OSError) as msg:
|
except (IOError, OSError) as msg:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@ -13,7 +13,7 @@
|
|||||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
|
|
||||||
from ...basemodel import doxygen
|
import plugins.EdkPlugins.basemodel.doxygen as doxygen
|
||||||
import os
|
import os
|
||||||
try:
|
try:
|
||||||
import wx
|
import wx
|
||||||
@ -21,9 +21,9 @@ try:
|
|||||||
except:
|
except:
|
||||||
gInGui = False
|
gInGui = False
|
||||||
import re
|
import re
|
||||||
from ..model import inf
|
import plugins.EdkPlugins.edk2.model.inf as inf
|
||||||
from ..model import dec
|
import plugins.EdkPlugins.edk2.model.dec as dec
|
||||||
from ...basemodel.message import *
|
from plugins.EdkPlugins.basemodel.message import *
|
||||||
|
|
||||||
_ignore_dir = ['.svn', '_svn', 'cvs']
|
_ignore_dir = ['.svn', '_svn', 'cvs']
|
||||||
_inf_key_description_mapping_table = {
|
_inf_key_description_mapping_table = {
|
||||||
@ -388,7 +388,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||||||
configFile.AddFile(path)
|
configFile.AddFile(path)
|
||||||
return
|
return
|
||||||
no = 0
|
no = 0
|
||||||
for no in range(len(lines)):
|
for no in xrange(len(lines)):
|
||||||
if len(lines[no].strip()) == 0:
|
if len(lines[no].strip()) == 0:
|
||||||
continue
|
continue
|
||||||
if lines[no].strip()[:2] in ['##', '//', '/*', '*/']:
|
if lines[no].strip()[:2] in ['##', '//', '/*', '*/']:
|
||||||
@ -1003,7 +1003,7 @@ class PackageDocumentAction(DoxygenAction):
|
|||||||
#file = textfile.TextFile(path)
|
#file = textfile.TextFile(path)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
file = open(path, 'r')
|
file = open(path, 'rb')
|
||||||
except (IOError, OSError) as msg:
|
except (IOError, OSError) as msg:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@ -11,9 +11,9 @@
|
|||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
#
|
#
|
||||||
|
|
||||||
from ...basemodel import ini
|
import plugins.EdkPlugins.basemodel.ini as ini
|
||||||
import re, os
|
import re, os
|
||||||
from ...basemodel.message import *
|
from plugins.EdkPlugins.basemodel.message import *
|
||||||
|
|
||||||
class DSCFile(ini.BaseINIFile):
|
class DSCFile(ini.BaseINIFile):
|
||||||
def GetSectionInstance(self, parent, name, isCombined=False):
|
def GetSectionInstance(self, parent, name, isCombined=False):
|
||||||
|
@ -11,9 +11,9 @@
|
|||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
#
|
#
|
||||||
|
|
||||||
from ...basemodel import ini
|
import plugins.EdkPlugins.basemodel.ini as ini
|
||||||
import re, os
|
import re, os
|
||||||
from ...basemodel.message import *
|
from plugins.EdkPlugins.basemodel.message import *
|
||||||
|
|
||||||
class INFFile(ini.BaseINIFile):
|
class INFFile(ini.BaseINIFile):
|
||||||
_libobjs = {}
|
_libobjs = {}
|
||||||
|
@ -43,7 +43,7 @@ libs: $(LIBRARIES)
|
|||||||
@echo # Build libraries
|
@echo # Build libraries
|
||||||
@echo ######################
|
@echo ######################
|
||||||
@if not exist $(LIB_PATH) mkdir $(LIB_PATH)
|
@if not exist $(LIB_PATH) mkdir $(LIB_PATH)
|
||||||
@$(PYTHON3) Makefiles\NmakeSubdirs.py all $**
|
@$(PYTHON_HOME)\python.exe Makefiles\NmakeSubdirs.py all $**
|
||||||
|
|
||||||
apps: $(APPLICATIONS)
|
apps: $(APPLICATIONS)
|
||||||
@echo.
|
@echo.
|
||||||
@ -51,7 +51,7 @@ apps: $(APPLICATIONS)
|
|||||||
@echo # Build executables
|
@echo # Build executables
|
||||||
@echo ######################
|
@echo ######################
|
||||||
@if not exist $(BIN_PATH) mkdir $(BIN_PATH)
|
@if not exist $(BIN_PATH) mkdir $(BIN_PATH)
|
||||||
@$(PYTHON3) Makefiles\NmakeSubdirs.py all $**
|
@$(PYTHON_HOME)\python.exe Makefiles\NmakeSubdirs.py all $**
|
||||||
|
|
||||||
install: $(LIB_PATH) $(BIN_PATH)
|
install: $(LIB_PATH) $(BIN_PATH)
|
||||||
@echo.
|
@echo.
|
||||||
@ -65,11 +65,11 @@ install: $(LIB_PATH) $(BIN_PATH)
|
|||||||
|
|
||||||
.PHONY: clean
|
.PHONY: clean
|
||||||
clean:
|
clean:
|
||||||
@$(PYTHON3) Makefiles\NmakeSubdirs.py clean $(LIBRARIES) $(APPLICATIONS)
|
@$(PYTHON_HOME)\python.exe Makefiles\NmakeSubdirs.py clean $(LIBRARIES) $(APPLICATIONS)
|
||||||
|
|
||||||
.PHONY: cleanall
|
.PHONY: cleanall
|
||||||
cleanall:
|
cleanall:
|
||||||
@$(PYTHON3) Makefiles\NmakeSubdirs.py cleanall $(LIBRARIES) $(APPLICATIONS)
|
@$(PYTHON_HOME)\python.exe Makefiles\NmakeSubdirs.py cleanall $(LIBRARIES) $(APPLICATIONS)
|
||||||
|
|
||||||
!INCLUDE Makefiles\ms.rule
|
!INCLUDE Makefiles\ms.rule
|
||||||
|
|
||||||
|
25
BaseTools/Source/C/PyUtility/Makefile
Normal file
25
BaseTools/Source/C/PyUtility/Makefile
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
## @file
|
||||||
|
# Makefile
|
||||||
|
#
|
||||||
|
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
|
||||||
|
# This program and the accompanying materials
|
||||||
|
# are licensed and made available under the terms and conditions of the BSD License
|
||||||
|
# which accompanies this distribution. The full text of the license may be found at
|
||||||
|
# http://opensource.org/licenses/bsd-license.php
|
||||||
|
#
|
||||||
|
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
|
#
|
||||||
|
|
||||||
|
!INCLUDE ..\Makefiles\ms.common
|
||||||
|
|
||||||
|
APPNAME = GenSec
|
||||||
|
|
||||||
|
LIBS = $(LIB_PATH)\Common.lib
|
||||||
|
|
||||||
|
OBJECTS = PyUtility.obj
|
||||||
|
|
||||||
|
#CFLAGS = $(CFLAGS) /nodefaultlib:libc.lib
|
||||||
|
|
||||||
|
!INCLUDE ..\Makefiles\ms.app
|
||||||
|
|
106
BaseTools/Source/C/PyUtility/PyUtility.c
Normal file
106
BaseTools/Source/C/PyUtility/PyUtility.c
Normal file
@ -0,0 +1,106 @@
|
|||||||
|
/** @file
|
||||||
|
Python Utility
|
||||||
|
|
||||||
|
Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
|
||||||
|
This program and the accompanying materials are licensed and made available
|
||||||
|
under the terms and conditions of the BSD License which accompanies this
|
||||||
|
distribution. The full text of the license may be found at
|
||||||
|
http://opensource.org/licenses/bsd-license.php
|
||||||
|
|
||||||
|
THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
|
|
||||||
|
**/
|
||||||
|
|
||||||
|
#include <Python.h>
|
||||||
|
#include <Windows.h>
|
||||||
|
#include <Common/UefiBaseTypes.h>
|
||||||
|
|
||||||
|
/*
|
||||||
|
SaveFileToDisk(FilePath, Content)
|
||||||
|
*/
|
||||||
|
STATIC
|
||||||
|
PyObject*
|
||||||
|
SaveFileToDisk (
|
||||||
|
PyObject *Self,
|
||||||
|
PyObject *Args
|
||||||
|
)
|
||||||
|
{
|
||||||
|
CHAR8 *File;
|
||||||
|
UINT8 *Data;
|
||||||
|
UINTN DataLength;
|
||||||
|
UINTN WriteBytes;
|
||||||
|
UINTN Status;
|
||||||
|
HANDLE FileHandle;
|
||||||
|
PyObject *ReturnValue = Py_False;
|
||||||
|
|
||||||
|
Status = PyArg_ParseTuple(
|
||||||
|
Args,
|
||||||
|
"ss#",
|
||||||
|
&File,
|
||||||
|
&Data,
|
||||||
|
&DataLength
|
||||||
|
);
|
||||||
|
if (Status == 0) {
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
FileHandle = CreateFile(
|
||||||
|
File,
|
||||||
|
GENERIC_WRITE,
|
||||||
|
FILE_SHARE_WRITE|FILE_SHARE_READ|FILE_SHARE_DELETE,
|
||||||
|
NULL,
|
||||||
|
CREATE_ALWAYS,
|
||||||
|
FILE_ATTRIBUTE_NORMAL,
|
||||||
|
NULL
|
||||||
|
);
|
||||||
|
if (FileHandle == INVALID_HANDLE_VALUE) {
|
||||||
|
PyErr_SetString(PyExc_Exception, "File creation failure");
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
while (WriteFile(FileHandle, Data, DataLength, &WriteBytes, NULL)) {
|
||||||
|
if (DataLength <= WriteBytes) {
|
||||||
|
DataLength = 0;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
Data += WriteBytes;
|
||||||
|
DataLength -= WriteBytes;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (DataLength != 0) {
|
||||||
|
// file saved unsuccessfully
|
||||||
|
PyErr_SetString(PyExc_Exception, "File write failure");
|
||||||
|
goto Done;
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Flush buffer may slow down the whole build performance (average 10s slower)
|
||||||
|
//
|
||||||
|
//if (!FlushFileBuffers(FileHandle)) {
|
||||||
|
// PyErr_SetString(PyExc_Exception, "File flush failure");
|
||||||
|
// goto Done;
|
||||||
|
//}
|
||||||
|
|
||||||
|
// success!
|
||||||
|
ReturnValue = Py_True;
|
||||||
|
|
||||||
|
Done:
|
||||||
|
CloseHandle(FileHandle);
|
||||||
|
return ReturnValue;
|
||||||
|
}
|
||||||
|
|
||||||
|
STATIC INT8 SaveFileToDiskDocs[] = "SaveFileToDisk(): Make sure the file is saved to disk\n";
|
||||||
|
|
||||||
|
STATIC PyMethodDef PyUtility_Funcs[] = {
|
||||||
|
{"SaveFileToDisk", (PyCFunction)SaveFileToDisk, METH_VARARGS, SaveFileToDiskDocs},
|
||||||
|
{NULL, NULL, 0, NULL}
|
||||||
|
};
|
||||||
|
|
||||||
|
PyMODINIT_FUNC
|
||||||
|
initPyUtility(VOID) {
|
||||||
|
Py_InitModule3("PyUtility", PyUtility_Funcs, "Utilties Module Implemented C Language");
|
||||||
|
}
|
||||||
|
|
||||||
|
|
42
BaseTools/Source/C/PyUtility/setup.py
Normal file
42
BaseTools/Source/C/PyUtility/setup.py
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
## @file
|
||||||
|
# package and install PyEfiCompressor extension
|
||||||
|
#
|
||||||
|
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
|
||||||
|
#
|
||||||
|
# This program and the accompanying materials
|
||||||
|
# are licensed and made available under the terms and conditions of the BSD License
|
||||||
|
# which accompanies this distribution. The full text of the license may be found at
|
||||||
|
# http://opensource.org/licenses/bsd-license.php
|
||||||
|
#
|
||||||
|
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
|
#
|
||||||
|
|
||||||
|
##
|
||||||
|
# Import Modules
|
||||||
|
#
|
||||||
|
from distutils.core import setup, Extension
|
||||||
|
import os
|
||||||
|
|
||||||
|
if 'BASE_TOOLS_PATH' not in os.environ:
|
||||||
|
raise "Please define BASE_TOOLS_PATH to the root of base tools tree"
|
||||||
|
|
||||||
|
BaseToolsDir = os.environ['BASE_TOOLS_PATH']
|
||||||
|
setup(
|
||||||
|
name="PyUtility",
|
||||||
|
version="0.01",
|
||||||
|
ext_modules=[
|
||||||
|
Extension(
|
||||||
|
'PyUtility',
|
||||||
|
sources=[
|
||||||
|
'PyUtility.c'
|
||||||
|
],
|
||||||
|
include_dirs=[
|
||||||
|
os.path.join(BaseToolsDir, 'Source', 'C', 'Include'),
|
||||||
|
os.path.join(BaseToolsDir, 'Source', 'C', 'Include', 'Ia32'),
|
||||||
|
os.path.join(BaseToolsDir, 'Source', 'C', 'Common')
|
||||||
|
],
|
||||||
|
)
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
@ -17,7 +17,6 @@ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
|||||||
**/
|
**/
|
||||||
|
|
||||||
#include "Compress.h"
|
#include "Compress.h"
|
||||||
#include "Decompress.h"
|
|
||||||
#include "TianoCompress.h"
|
#include "TianoCompress.h"
|
||||||
#include "EfiUtilityMsgs.h"
|
#include "EfiUtilityMsgs.h"
|
||||||
#include "ParseInf.h"
|
#include "ParseInf.h"
|
||||||
@ -66,7 +65,6 @@ static BOOLEAN QuietMode = FALSE;
|
|||||||
//
|
//
|
||||||
STATIC BOOLEAN ENCODE = FALSE;
|
STATIC BOOLEAN ENCODE = FALSE;
|
||||||
STATIC BOOLEAN DECODE = FALSE;
|
STATIC BOOLEAN DECODE = FALSE;
|
||||||
STATIC BOOLEAN UEFIMODE = FALSE;
|
|
||||||
STATIC UINT8 *mSrc, *mDst, *mSrcUpperLimit, *mDstUpperLimit;
|
STATIC UINT8 *mSrc, *mDst, *mSrcUpperLimit, *mDstUpperLimit;
|
||||||
STATIC UINT8 *mLevel, *mText, *mChildCount, *mBuf, mCLen[NC], mPTLen[NPT], *mLen;
|
STATIC UINT8 *mLevel, *mText, *mChildCount, *mBuf, mCLen[NC], mPTLen[NPT], *mLen;
|
||||||
STATIC INT16 mHeap[NC + 1];
|
STATIC INT16 mHeap[NC + 1];
|
||||||
@ -1705,8 +1703,6 @@ Returns:
|
|||||||
// Details Option
|
// Details Option
|
||||||
//
|
//
|
||||||
fprintf (stdout, "Options:\n");
|
fprintf (stdout, "Options:\n");
|
||||||
fprintf (stdout, " --uefi\n\
|
|
||||||
Enable UefiCompress, use TianoCompress when without this option\n");
|
|
||||||
fprintf (stdout, " -o FileName, --output FileName\n\
|
fprintf (stdout, " -o FileName, --output FileName\n\
|
||||||
File will be created to store the ouput content.\n");
|
File will be created to store the ouput content.\n");
|
||||||
fprintf (stdout, " -v, --verbose\n\
|
fprintf (stdout, " -v, --verbose\n\
|
||||||
@ -1826,13 +1822,6 @@ Returns:
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (stricmp(argv[0], "--uefi") == 0) {
|
|
||||||
UEFIMODE = TRUE;
|
|
||||||
argc--;
|
|
||||||
argv++;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (stricmp (argv[0], "--debug") == 0) {
|
if (stricmp (argv[0], "--debug") == 0) {
|
||||||
argc-=2;
|
argc-=2;
|
||||||
argv++;
|
argv++;
|
||||||
@ -1950,11 +1939,7 @@ Returns:
|
|||||||
if (DebugMode) {
|
if (DebugMode) {
|
||||||
DebugMsg(UTILITY_NAME, 0, DebugLevel, "Encoding", NULL);
|
DebugMsg(UTILITY_NAME, 0, DebugLevel, "Encoding", NULL);
|
||||||
}
|
}
|
||||||
if (UEFIMODE) {
|
|
||||||
Status = EfiCompress ((UINT8 *)FileBuffer, InputLength, OutBuffer, &DstSize);
|
|
||||||
} else {
|
|
||||||
Status = TianoCompress ((UINT8 *)FileBuffer, InputLength, OutBuffer, &DstSize);
|
Status = TianoCompress ((UINT8 *)FileBuffer, InputLength, OutBuffer, &DstSize);
|
||||||
}
|
|
||||||
|
|
||||||
if (Status == EFI_BUFFER_TOO_SMALL) {
|
if (Status == EFI_BUFFER_TOO_SMALL) {
|
||||||
OutBuffer = (UINT8 *) malloc (DstSize);
|
OutBuffer = (UINT8 *) malloc (DstSize);
|
||||||
@ -1964,11 +1949,7 @@ Returns:
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (UEFIMODE) {
|
|
||||||
Status = EfiCompress ((UINT8 *)FileBuffer, InputLength, OutBuffer, &DstSize);
|
|
||||||
} else {
|
|
||||||
Status = TianoCompress ((UINT8 *)FileBuffer, InputLength, OutBuffer, &DstSize);
|
Status = TianoCompress ((UINT8 *)FileBuffer, InputLength, OutBuffer, &DstSize);
|
||||||
}
|
|
||||||
if (Status != EFI_SUCCESS) {
|
if (Status != EFI_SUCCESS) {
|
||||||
Error (NULL, 0, 0007, "Error compressing file", NULL);
|
Error (NULL, 0, 0007, "Error compressing file", NULL);
|
||||||
goto ERROR;
|
goto ERROR;
|
||||||
@ -1998,14 +1979,6 @@ Returns:
|
|||||||
if (DebugMode) {
|
if (DebugMode) {
|
||||||
DebugMsg(UTILITY_NAME, 0, DebugLevel, "Decoding\n", NULL);
|
DebugMsg(UTILITY_NAME, 0, DebugLevel, "Decoding\n", NULL);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (UEFIMODE) {
|
|
||||||
Status = Extract((VOID *)FileBuffer, InputLength, (VOID *)&OutBuffer, &DstSize, 1);
|
|
||||||
if (Status != EFI_SUCCESS) {
|
|
||||||
goto ERROR;
|
|
||||||
}
|
|
||||||
fwrite(OutBuffer, (size_t)(DstSize), 1, OutputFile);
|
|
||||||
} else {
|
|
||||||
//
|
//
|
||||||
// Get Compressed file original size
|
// Get Compressed file original size
|
||||||
//
|
//
|
||||||
@ -2021,23 +1994,17 @@ Returns:
|
|||||||
goto ERROR;
|
goto ERROR;
|
||||||
}
|
}
|
||||||
|
|
||||||
Status = TDecompress((VOID *)FileBuffer, (VOID *)OutBuffer, (VOID *)Scratch, 2);
|
Status = Decompress((VOID *)FileBuffer, (VOID *)OutBuffer, (VOID *)Scratch, 2);
|
||||||
if (Status != EFI_SUCCESS) {
|
if (Status != EFI_SUCCESS) {
|
||||||
goto ERROR;
|
goto ERROR;
|
||||||
}
|
}
|
||||||
|
|
||||||
fwrite(OutBuffer, (size_t)(Scratch->mOrigSize), 1, OutputFile);
|
fwrite(OutBuffer, (size_t)(Scratch->mOrigSize), 1, OutputFile);
|
||||||
}
|
|
||||||
fclose(OutputFile);
|
fclose(OutputFile);
|
||||||
fclose(InputFile);
|
fclose(InputFile);
|
||||||
if (Scratch != NULL) {
|
|
||||||
free(Scratch);
|
free(Scratch);
|
||||||
}
|
|
||||||
if (FileBuffer != NULL) {
|
|
||||||
free(FileBuffer);
|
free(FileBuffer);
|
||||||
}
|
|
||||||
if (OutBuffer != NULL) {
|
|
||||||
free(OutBuffer);
|
free(OutBuffer);
|
||||||
}
|
|
||||||
|
|
||||||
if (DebugMode) {
|
if (DebugMode) {
|
||||||
DebugMsg(UTILITY_NAME, 0, DebugLevel, "Encoding successful!\n", NULL);
|
DebugMsg(UTILITY_NAME, 0, DebugLevel, "Encoding successful!\n", NULL);
|
||||||
@ -2667,7 +2634,7 @@ Done:
|
|||||||
|
|
||||||
RETURN_STATUS
|
RETURN_STATUS
|
||||||
EFIAPI
|
EFIAPI
|
||||||
TDecompress (
|
Decompress (
|
||||||
IN VOID *Source,
|
IN VOID *Source,
|
||||||
IN OUT VOID *Destination,
|
IN OUT VOID *Destination,
|
||||||
IN OUT VOID *Scratch,
|
IN OUT VOID *Scratch,
|
||||||
|
@ -427,7 +427,7 @@ Decode (
|
|||||||
|
|
||||||
RETURN_STATUS
|
RETURN_STATUS
|
||||||
EFIAPI
|
EFIAPI
|
||||||
TDecompress (
|
Decompress (
|
||||||
IN VOID *Source,
|
IN VOID *Source,
|
||||||
IN OUT VOID *Destination,
|
IN OUT VOID *Destination,
|
||||||
IN OUT VOID *Scratch,
|
IN OUT VOID *Scratch,
|
||||||
|
@ -15,6 +15,8 @@
|
|||||||
|
|
||||||
## Import Modules
|
## Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import print_function
|
||||||
|
from __future__ import absolute_import
|
||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
import re
|
import re
|
||||||
import os.path as path
|
import os.path as path
|
||||||
@ -198,11 +200,11 @@ class AutoGen(object):
|
|||||||
# if it exists, just return it directly
|
# if it exists, just return it directly
|
||||||
return cls.__ObjectCache[Key]
|
return cls.__ObjectCache[Key]
|
||||||
# it didnt exist. create it, cache it, then return it
|
# it didnt exist. create it, cache it, then return it
|
||||||
RetVal = cls.__ObjectCache[Key] = super().__new__(cls)
|
RetVal = cls.__ObjectCache[Key] = super(AutoGen, cls).__new__(cls)
|
||||||
return RetVal
|
return RetVal
|
||||||
|
|
||||||
def __init__ (self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
|
def __init__ (self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
|
||||||
super().__init__()
|
super(AutoGen, self).__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
|
||||||
|
|
||||||
## hash() operator
|
## hash() operator
|
||||||
#
|
#
|
||||||
@ -235,7 +237,7 @@ class WorkspaceAutoGen(AutoGen):
|
|||||||
# call super().__init__ then call the worker function with different parameter count
|
# call super().__init__ then call the worker function with different parameter count
|
||||||
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
|
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
|
||||||
if not hasattr(self, "_Init"):
|
if not hasattr(self, "_Init"):
|
||||||
super().__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
|
super(WorkspaceAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
|
||||||
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
|
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
|
||||||
self._Init = True
|
self._Init = True
|
||||||
|
|
||||||
@ -295,7 +297,7 @@ class WorkspaceAutoGen(AutoGen):
|
|||||||
SkippedArchList = set(self.ArchList).symmetric_difference(set(self.Platform.SupArchList))
|
SkippedArchList = set(self.ArchList).symmetric_difference(set(self.Platform.SupArchList))
|
||||||
EdkLogger.verbose("\nArch [%s] is ignored because the platform supports [%s] only!"
|
EdkLogger.verbose("\nArch [%s] is ignored because the platform supports [%s] only!"
|
||||||
% (" ".join(SkippedArchList), " ".join(self.Platform.SupArchList)))
|
% (" ".join(SkippedArchList), " ".join(self.Platform.SupArchList)))
|
||||||
self.ArchList = tuple(sorted(ArchList))
|
self.ArchList = tuple(ArchList)
|
||||||
|
|
||||||
# Validate build target
|
# Validate build target
|
||||||
if self.BuildTarget not in self.Platform.BuildTargets:
|
if self.BuildTarget not in self.Platform.BuildTargets:
|
||||||
@ -518,7 +520,7 @@ class WorkspaceAutoGen(AutoGen):
|
|||||||
for BuildData in PGen.BuildDatabase._CACHE_.values():
|
for BuildData in PGen.BuildDatabase._CACHE_.values():
|
||||||
if BuildData.Arch != Arch:
|
if BuildData.Arch != Arch:
|
||||||
continue
|
continue
|
||||||
for key in list(BuildData.Pcds.keys()):
|
for key in BuildData.Pcds:
|
||||||
for SinglePcd in GlobalData.MixedPcd:
|
for SinglePcd in GlobalData.MixedPcd:
|
||||||
if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) == SinglePcd:
|
if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) == SinglePcd:
|
||||||
for item in GlobalData.MixedPcd[SinglePcd]:
|
for item in GlobalData.MixedPcd[SinglePcd]:
|
||||||
@ -616,17 +618,17 @@ class WorkspaceAutoGen(AutoGen):
|
|||||||
#
|
#
|
||||||
content = 'gCommandLineDefines: '
|
content = 'gCommandLineDefines: '
|
||||||
content += str(GlobalData.gCommandLineDefines)
|
content += str(GlobalData.gCommandLineDefines)
|
||||||
content += "\n"
|
content += os.linesep
|
||||||
content += 'BuildOptionPcd: '
|
content += 'BuildOptionPcd: '
|
||||||
content += str(GlobalData.BuildOptionPcd)
|
content += str(GlobalData.BuildOptionPcd)
|
||||||
content += "\n"
|
content += os.linesep
|
||||||
content += 'Active Platform: '
|
content += 'Active Platform: '
|
||||||
content += str(self.Platform)
|
content += str(self.Platform)
|
||||||
content += "\n"
|
content += os.linesep
|
||||||
if self.FdfFile:
|
if self.FdfFile:
|
||||||
content += 'Flash Image Definition: '
|
content += 'Flash Image Definition: '
|
||||||
content += str(self.FdfFile)
|
content += str(self.FdfFile)
|
||||||
content += "\n"
|
content += os.linesep
|
||||||
SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)
|
SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)
|
||||||
|
|
||||||
#
|
#
|
||||||
@ -636,7 +638,7 @@ class WorkspaceAutoGen(AutoGen):
|
|||||||
if Pa.PcdTokenNumber:
|
if Pa.PcdTokenNumber:
|
||||||
if Pa.DynamicPcdList:
|
if Pa.DynamicPcdList:
|
||||||
for Pcd in Pa.DynamicPcdList:
|
for Pcd in Pa.DynamicPcdList:
|
||||||
PcdTokenNumber += "\n"
|
PcdTokenNumber += os.linesep
|
||||||
PcdTokenNumber += str((Pcd.TokenCName, Pcd.TokenSpaceGuidCName))
|
PcdTokenNumber += str((Pcd.TokenCName, Pcd.TokenSpaceGuidCName))
|
||||||
PcdTokenNumber += ' : '
|
PcdTokenNumber += ' : '
|
||||||
PcdTokenNumber += str(Pa.PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName])
|
PcdTokenNumber += str(Pa.PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName])
|
||||||
@ -661,7 +663,7 @@ class WorkspaceAutoGen(AutoGen):
|
|||||||
for files in AllWorkSpaceMetaFiles:
|
for files in AllWorkSpaceMetaFiles:
|
||||||
if files.endswith('.dec'):
|
if files.endswith('.dec'):
|
||||||
continue
|
continue
|
||||||
f = open(files, 'rb')
|
f = open(files, 'r')
|
||||||
Content = f.read()
|
Content = f.read()
|
||||||
f.close()
|
f.close()
|
||||||
m.update(Content)
|
m.update(Content)
|
||||||
@ -677,7 +679,7 @@ class WorkspaceAutoGen(AutoGen):
|
|||||||
if not os.path.exists(self.BuildDir):
|
if not os.path.exists(self.BuildDir):
|
||||||
os.makedirs(self.BuildDir)
|
os.makedirs(self.BuildDir)
|
||||||
with open(os.path.join(self.BuildDir, 'AutoGen'), 'w+') as file:
|
with open(os.path.join(self.BuildDir, 'AutoGen'), 'w+') as file:
|
||||||
for f in sorted(AllWorkSpaceMetaFiles):
|
for f in AllWorkSpaceMetaFiles:
|
||||||
print(f, file=file)
|
print(f, file=file)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@ -690,7 +692,7 @@ class WorkspaceAutoGen(AutoGen):
|
|||||||
HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')
|
HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')
|
||||||
m = hashlib.md5()
|
m = hashlib.md5()
|
||||||
# Get .dec file's hash value
|
# Get .dec file's hash value
|
||||||
f = open(Pkg.MetaFile.Path, 'rb')
|
f = open(Pkg.MetaFile.Path, 'r')
|
||||||
Content = f.read()
|
Content = f.read()
|
||||||
f.close()
|
f.close()
|
||||||
m.update(Content)
|
m.update(Content)
|
||||||
@ -700,7 +702,7 @@ class WorkspaceAutoGen(AutoGen):
|
|||||||
for Root, Dirs, Files in os.walk(str(inc)):
|
for Root, Dirs, Files in os.walk(str(inc)):
|
||||||
for File in sorted(Files):
|
for File in sorted(Files):
|
||||||
File_Path = os.path.join(Root, File)
|
File_Path = os.path.join(Root, File)
|
||||||
f = open(File_Path, 'rb')
|
f = open(File_Path, 'r')
|
||||||
Content = f.read()
|
Content = f.read()
|
||||||
f.close()
|
f.close()
|
||||||
m.update(Content)
|
m.update(Content)
|
||||||
@ -864,7 +866,7 @@ class WorkspaceAutoGen(AutoGen):
|
|||||||
def _CheckAllPcdsTokenValueConflict(self):
|
def _CheckAllPcdsTokenValueConflict(self):
|
||||||
for Pa in self.AutoGenObjectList:
|
for Pa in self.AutoGenObjectList:
|
||||||
for Package in Pa.PackageList:
|
for Package in Pa.PackageList:
|
||||||
PcdList = list(Package.Pcds.values())
|
PcdList = Package.Pcds.values()
|
||||||
PcdList.sort(key=lambda x: int(x.TokenValue, 0))
|
PcdList.sort(key=lambda x: int(x.TokenValue, 0))
|
||||||
Count = 0
|
Count = 0
|
||||||
while (Count < len(PcdList) - 1) :
|
while (Count < len(PcdList) - 1) :
|
||||||
@ -910,7 +912,7 @@ class WorkspaceAutoGen(AutoGen):
|
|||||||
Count += SameTokenValuePcdListCount
|
Count += SameTokenValuePcdListCount
|
||||||
Count += 1
|
Count += 1
|
||||||
|
|
||||||
PcdList = list(Package.Pcds.values())
|
PcdList = Package.Pcds.values()
|
||||||
PcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))
|
PcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))
|
||||||
Count = 0
|
Count = 0
|
||||||
while (Count < len(PcdList) - 1) :
|
while (Count < len(PcdList) - 1) :
|
||||||
@ -973,7 +975,7 @@ class PlatformAutoGen(AutoGen):
|
|||||||
# call super().__init__ then call the worker function with different parameter count
|
# call super().__init__ then call the worker function with different parameter count
|
||||||
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
|
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
|
||||||
if not hasattr(self, "_Init"):
|
if not hasattr(self, "_Init"):
|
||||||
super().__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
|
super(PlatformAutoGen, self).__init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
|
||||||
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)
|
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)
|
||||||
self._Init = True
|
self._Init = True
|
||||||
#
|
#
|
||||||
@ -1179,7 +1181,7 @@ class PlatformAutoGen(AutoGen):
|
|||||||
if os.path.exists(VpdMapFilePath):
|
if os.path.exists(VpdMapFilePath):
|
||||||
OrgVpdFile.Read(VpdMapFilePath)
|
OrgVpdFile.Read(VpdMapFilePath)
|
||||||
PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0])
|
PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0])
|
||||||
NvStoreOffset = list(PcdItems.values())[0].strip() if PcdItems else '0'
|
NvStoreOffset = PcdItems.values()[0].strip() if PcdItems else '0'
|
||||||
else:
|
else:
|
||||||
EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
|
EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
|
||||||
|
|
||||||
@ -1229,7 +1231,7 @@ class PlatformAutoGen(AutoGen):
|
|||||||
FdfModuleList.append(os.path.normpath(InfName))
|
FdfModuleList.append(os.path.normpath(InfName))
|
||||||
for M in self._MaList:
|
for M in self._MaList:
|
||||||
# F is the Module for which M is the module autogen
|
# F is the Module for which M is the module autogen
|
||||||
for PcdFromModule in list(M.ModulePcdList) + list(M.LibraryPcdList):
|
for PcdFromModule in M.ModulePcdList + M.LibraryPcdList:
|
||||||
# make sure that the "VOID*" kind of datum has MaxDatumSize set
|
# make sure that the "VOID*" kind of datum has MaxDatumSize set
|
||||||
if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:
|
if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:
|
||||||
NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, M.MetaFile))
|
NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, M.MetaFile))
|
||||||
@ -1378,7 +1380,7 @@ class PlatformAutoGen(AutoGen):
|
|||||||
if (self.Workspace.ArchList[-1] == self.Arch):
|
if (self.Workspace.ArchList[-1] == self.Arch):
|
||||||
for Pcd in self._DynamicPcdList:
|
for Pcd in self._DynamicPcdList:
|
||||||
# just pick the a value to determine whether is unicode string type
|
# just pick the a value to determine whether is unicode string type
|
||||||
Sku = list(Pcd.SkuInfoList.values())[0]
|
Sku = Pcd.SkuInfoList.values()[0]
|
||||||
Sku.VpdOffset = Sku.VpdOffset.strip()
|
Sku.VpdOffset = Sku.VpdOffset.strip()
|
||||||
|
|
||||||
if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
|
if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
|
||||||
@ -1477,7 +1479,7 @@ class PlatformAutoGen(AutoGen):
|
|||||||
if not FoundFlag :
|
if not FoundFlag :
|
||||||
# just pick the a value to determine whether is unicode string type
|
# just pick the a value to determine whether is unicode string type
|
||||||
SkuValueMap = {}
|
SkuValueMap = {}
|
||||||
SkuObjList = list(DscPcdEntry.SkuInfoList.items())
|
SkuObjList = DscPcdEntry.SkuInfoList.items()
|
||||||
DefaultSku = DscPcdEntry.SkuInfoList.get(TAB_DEFAULT)
|
DefaultSku = DscPcdEntry.SkuInfoList.get(TAB_DEFAULT)
|
||||||
if DefaultSku:
|
if DefaultSku:
|
||||||
defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))
|
defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))
|
||||||
@ -1503,7 +1505,7 @@ class PlatformAutoGen(AutoGen):
|
|||||||
DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]
|
DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]
|
||||||
# Only fix the value while no value provided in DSC file.
|
# Only fix the value while no value provided in DSC file.
|
||||||
if not Sku.DefaultValue:
|
if not Sku.DefaultValue:
|
||||||
DscPcdEntry.SkuInfoList[list(DscPcdEntry.SkuInfoList.keys())[0]].DefaultValue = DecPcdEntry.DefaultValue
|
DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]].DefaultValue = DecPcdEntry.DefaultValue
|
||||||
|
|
||||||
if DscPcdEntry not in self._DynamicPcdList:
|
if DscPcdEntry not in self._DynamicPcdList:
|
||||||
self._DynamicPcdList.append(DscPcdEntry)
|
self._DynamicPcdList.append(DscPcdEntry)
|
||||||
@ -1579,7 +1581,7 @@ class PlatformAutoGen(AutoGen):
|
|||||||
# Delete the DynamicPcdList At the last time enter into this function
|
# Delete the DynamicPcdList At the last time enter into this function
|
||||||
for Pcd in self._DynamicPcdList:
|
for Pcd in self._DynamicPcdList:
|
||||||
# just pick the a value to determine whether is unicode string type
|
# just pick the a value to determine whether is unicode string type
|
||||||
Sku = list(Pcd.SkuInfoList.values())[0]
|
Sku = Pcd.SkuInfoList.values()[0]
|
||||||
Sku.VpdOffset = Sku.VpdOffset.strip()
|
Sku.VpdOffset = Sku.VpdOffset.strip()
|
||||||
|
|
||||||
if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
|
if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
|
||||||
@ -1598,14 +1600,11 @@ class PlatformAutoGen(AutoGen):
|
|||||||
self._DynamicPcdList.extend(list(UnicodePcdArray))
|
self._DynamicPcdList.extend(list(UnicodePcdArray))
|
||||||
self._DynamicPcdList.extend(list(HiiPcdArray))
|
self._DynamicPcdList.extend(list(HiiPcdArray))
|
||||||
self._DynamicPcdList.extend(list(OtherPcdArray))
|
self._DynamicPcdList.extend(list(OtherPcdArray))
|
||||||
#python3.6 set is not ordered at all
|
|
||||||
self._DynamicPcdList = sorted(self._DynamicPcdList, key=lambda x:(x.TokenSpaceGuidCName, x.TokenCName))
|
|
||||||
self._NonDynamicPcdList = sorted(self._NonDynamicPcdList, key=lambda x: (x.TokenSpaceGuidCName, x.TokenCName))
|
|
||||||
allskuset = [(SkuName, Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName, Sku) in pcd.SkuInfoList.items()]
|
allskuset = [(SkuName, Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName, Sku) in pcd.SkuInfoList.items()]
|
||||||
for pcd in self._DynamicPcdList:
|
for pcd in self._DynamicPcdList:
|
||||||
if len(pcd.SkuInfoList) == 1:
|
if len(pcd.SkuInfoList) == 1:
|
||||||
for (SkuName, SkuId) in allskuset:
|
for (SkuName, SkuId) in allskuset:
|
||||||
if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0:
|
if type(SkuId) in (str, unicode) and eval(SkuId) == 0 or SkuId == 0:
|
||||||
continue
|
continue
|
||||||
pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])
|
pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])
|
||||||
pcd.SkuInfoList[SkuName].SkuId = SkuId
|
pcd.SkuInfoList[SkuName].SkuId = SkuId
|
||||||
@ -2147,7 +2146,7 @@ class PlatformAutoGen(AutoGen):
|
|||||||
Pcd.MaxDatumSize = str(len(Value.split(',')))
|
Pcd.MaxDatumSize = str(len(Value.split(',')))
|
||||||
else:
|
else:
|
||||||
Pcd.MaxDatumSize = str(len(Value) - 1)
|
Pcd.MaxDatumSize = str(len(Value) - 1)
|
||||||
return list(Pcds.values())
|
return Pcds.values()
|
||||||
|
|
||||||
## Resolve library names to library modules
|
## Resolve library names to library modules
|
||||||
#
|
#
|
||||||
@ -2251,7 +2250,7 @@ class PlatformAutoGen(AutoGen):
|
|||||||
# Use the highest priority value.
|
# Use the highest priority value.
|
||||||
#
|
#
|
||||||
if (len(OverrideList) >= 2):
|
if (len(OverrideList) >= 2):
|
||||||
KeyList = list(OverrideList.keys())
|
KeyList = OverrideList.keys()
|
||||||
for Index in range(len(KeyList)):
|
for Index in range(len(KeyList)):
|
||||||
NowKey = KeyList[Index]
|
NowKey = KeyList[Index]
|
||||||
Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
|
Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
|
||||||
@ -2373,11 +2372,11 @@ class PlatformAutoGen(AutoGen):
|
|||||||
if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
|
if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
|
||||||
BuildRuleOrder = Options[Tool][Attr]
|
BuildRuleOrder = Options[Tool][Attr]
|
||||||
|
|
||||||
AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +
|
AllTools = set(ModuleOptions.keys() + PlatformOptions.keys() +
|
||||||
list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +
|
PlatformModuleOptions.keys() + ModuleTypeOptions.keys() +
|
||||||
list(self.ToolDefinition.keys()))
|
self.ToolDefinition.keys())
|
||||||
BuildOptions = defaultdict(lambda: defaultdict(str))
|
BuildOptions = defaultdict(lambda: defaultdict(str))
|
||||||
for Tool in sorted(AllTools):
|
for Tool in AllTools:
|
||||||
for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
|
for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
|
||||||
if Tool not in Options:
|
if Tool not in Options:
|
||||||
continue
|
continue
|
||||||
@ -2428,7 +2427,7 @@ class ModuleAutoGen(AutoGen):
|
|||||||
# call super().__init__ then call the worker function with different parameter count
|
# call super().__init__ then call the worker function with different parameter count
|
||||||
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
|
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
|
||||||
if not hasattr(self, "_Init"):
|
if not hasattr(self, "_Init"):
|
||||||
super().__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
|
super(ModuleAutoGen, self).__init__(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
|
||||||
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
|
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
|
||||||
self._Init = True
|
self._Init = True
|
||||||
|
|
||||||
@ -2442,7 +2441,7 @@ class ModuleAutoGen(AutoGen):
|
|||||||
EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
|
EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
|
||||||
% (MetaFile, Arch))
|
% (MetaFile, Arch))
|
||||||
return None
|
return None
|
||||||
return super().__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
|
return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
|
||||||
|
|
||||||
## Initialize ModuleAutoGen
|
## Initialize ModuleAutoGen
|
||||||
#
|
#
|
||||||
@ -3159,12 +3158,12 @@ class ModuleAutoGen(AutoGen):
|
|||||||
@cached_property
|
@cached_property
|
||||||
def IntroTargetList(self):
|
def IntroTargetList(self):
|
||||||
self.Targets
|
self.Targets
|
||||||
return sorted(self._IntroBuildTargetList, key=lambda x: str(x.Target))
|
return self._IntroBuildTargetList
|
||||||
|
|
||||||
@cached_property
|
@cached_property
|
||||||
def CodaTargetList(self):
|
def CodaTargetList(self):
|
||||||
self.Targets
|
self.Targets
|
||||||
return sorted(self._FinalBuildTargetList, key=lambda x: str(x.Target))
|
return self._FinalBuildTargetList
|
||||||
|
|
||||||
@cached_property
|
@cached_property
|
||||||
def FileTypes(self):
|
def FileTypes(self):
|
||||||
@ -3210,7 +3209,7 @@ class ModuleAutoGen(AutoGen):
|
|||||||
AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
|
AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
|
||||||
RetVal[AutoFile] = str(StringH)
|
RetVal[AutoFile] = str(StringH)
|
||||||
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
|
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
|
||||||
if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
|
if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":
|
||||||
AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
|
AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
|
||||||
RetVal[AutoFile] = UniStringBinBuffer.getvalue()
|
RetVal[AutoFile] = UniStringBinBuffer.getvalue()
|
||||||
AutoFile.IsBinary = True
|
AutoFile.IsBinary = True
|
||||||
@ -3221,7 +3220,7 @@ class ModuleAutoGen(AutoGen):
|
|||||||
AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
|
AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
|
||||||
RetVal[AutoFile] = str(StringIdf)
|
RetVal[AutoFile] = str(StringIdf)
|
||||||
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
|
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
|
||||||
if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
|
if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":
|
||||||
AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
|
AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
|
||||||
RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
|
RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
|
||||||
AutoFile.IsBinary = True
|
AutoFile.IsBinary = True
|
||||||
@ -3439,7 +3438,7 @@ class ModuleAutoGen(AutoGen):
|
|||||||
return None
|
return None
|
||||||
MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
|
MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
|
||||||
EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
|
EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
|
||||||
VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
|
VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, VfrUniBaseName.values())
|
||||||
if not VfrUniOffsetList:
|
if not VfrUniOffsetList:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@ -3452,7 +3451,7 @@ class ModuleAutoGen(AutoGen):
|
|||||||
EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)
|
EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)
|
||||||
|
|
||||||
# Use a instance of BytesIO to cache data
|
# Use a instance of BytesIO to cache data
|
||||||
fStringIO = BytesIO()
|
fStringIO = BytesIO('')
|
||||||
|
|
||||||
for Item in VfrUniOffsetList:
|
for Item in VfrUniOffsetList:
|
||||||
if (Item[0].find("Strings") != -1):
|
if (Item[0].find("Strings") != -1):
|
||||||
@ -3462,7 +3461,8 @@ class ModuleAutoGen(AutoGen):
|
|||||||
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
|
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
|
||||||
#
|
#
|
||||||
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
|
UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
|
||||||
fStringIO.write(bytes(UniGuid))
|
UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
|
||||||
|
fStringIO.write(''.join(UniGuid))
|
||||||
UniValue = pack ('Q', int (Item[1], 16))
|
UniValue = pack ('Q', int (Item[1], 16))
|
||||||
fStringIO.write (UniValue)
|
fStringIO.write (UniValue)
|
||||||
else:
|
else:
|
||||||
@ -3472,7 +3472,8 @@ class ModuleAutoGen(AutoGen):
|
|||||||
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
|
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
|
||||||
#
|
#
|
||||||
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
|
VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
|
||||||
fStringIO.write(bytes(VfrGuid))
|
VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
|
||||||
|
fStringIO.write(''.join(VfrGuid))
|
||||||
VfrValue = pack ('Q', int (Item[1], 16))
|
VfrValue = pack ('Q', int (Item[1], 16))
|
||||||
fStringIO.write (VfrValue)
|
fStringIO.write (VfrValue)
|
||||||
#
|
#
|
||||||
@ -3524,7 +3525,7 @@ class ModuleAutoGen(AutoGen):
|
|||||||
Packages = []
|
Packages = []
|
||||||
PcdCheckList = []
|
PcdCheckList = []
|
||||||
PcdTokenSpaceList = []
|
PcdTokenSpaceList = []
|
||||||
for Pcd in list(self.ModulePcdList) + list(self.LibraryPcdList):
|
for Pcd in self.ModulePcdList + self.LibraryPcdList:
|
||||||
if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
|
if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
|
||||||
PatchablePcds.append(Pcd)
|
PatchablePcds.append(Pcd)
|
||||||
PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
|
PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
|
||||||
@ -3636,10 +3637,6 @@ class ModuleAutoGen(AutoGen):
|
|||||||
AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
|
AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
|
||||||
else:
|
else:
|
||||||
AsBuiltInfDict['binary_item'].append('BIN|' + File)
|
AsBuiltInfDict['binary_item'].append('BIN|' + File)
|
||||||
if not self.DepexGenerated:
|
|
||||||
DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
|
|
||||||
if os.path.exists(DepexFile):
|
|
||||||
self.DepexGenerated = True
|
|
||||||
if self.DepexGenerated:
|
if self.DepexGenerated:
|
||||||
self.OutputFile.add(self.Name + '.depex')
|
self.OutputFile.add(self.Name + '.depex')
|
||||||
if self.ModuleType in [SUP_MODULE_PEIM]:
|
if self.ModuleType in [SUP_MODULE_PEIM]:
|
||||||
@ -3736,7 +3733,7 @@ class ModuleAutoGen(AutoGen):
|
|||||||
Padding = '0x00, '
|
Padding = '0x00, '
|
||||||
if Unicode:
|
if Unicode:
|
||||||
Padding = Padding * 2
|
Padding = Padding * 2
|
||||||
ArraySize = ArraySize // 2
|
ArraySize = ArraySize / 2
|
||||||
if ArraySize < (len(PcdValue) + 1):
|
if ArraySize < (len(PcdValue) + 1):
|
||||||
if Pcd.MaxSizeUserSet:
|
if Pcd.MaxSizeUserSet:
|
||||||
EdkLogger.error("build", AUTOGEN_ERROR,
|
EdkLogger.error("build", AUTOGEN_ERROR,
|
||||||
@ -3896,7 +3893,7 @@ class ModuleAutoGen(AutoGen):
|
|||||||
if os.path.exists (self.TimeStampPath):
|
if os.path.exists (self.TimeStampPath):
|
||||||
os.remove (self.TimeStampPath)
|
os.remove (self.TimeStampPath)
|
||||||
with open(self.TimeStampPath, 'w+') as file:
|
with open(self.TimeStampPath, 'w+') as file:
|
||||||
for f in sorted(FileSet):
|
for f in FileSet:
|
||||||
print(f, file=file)
|
print(f, file=file)
|
||||||
|
|
||||||
# Ignore generating makefile when it is a binary module
|
# Ignore generating makefile when it is a binary module
|
||||||
@ -4024,29 +4021,29 @@ class ModuleAutoGen(AutoGen):
|
|||||||
GlobalData.gModuleHash[self.Arch] = {}
|
GlobalData.gModuleHash[self.Arch] = {}
|
||||||
m = hashlib.md5()
|
m = hashlib.md5()
|
||||||
# Add Platform level hash
|
# Add Platform level hash
|
||||||
m.update(GlobalData.gPlatformHash.encode('utf-8'))
|
m.update(GlobalData.gPlatformHash)
|
||||||
# Add Package level hash
|
# Add Package level hash
|
||||||
if self.DependentPackageList:
|
if self.DependentPackageList:
|
||||||
for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
|
for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
|
||||||
if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]:
|
if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]:
|
||||||
m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName].encode('utf-8'))
|
m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName])
|
||||||
|
|
||||||
# Add Library hash
|
# Add Library hash
|
||||||
if self.LibraryAutoGenList:
|
if self.LibraryAutoGenList:
|
||||||
for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
|
for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):
|
||||||
if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
|
if Lib.Name not in GlobalData.gModuleHash[self.Arch]:
|
||||||
Lib.GenModuleHash()
|
Lib.GenModuleHash()
|
||||||
m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))
|
m.update(GlobalData.gModuleHash[self.Arch][Lib.Name])
|
||||||
|
|
||||||
# Add Module self
|
# Add Module self
|
||||||
f = open(str(self.MetaFile), 'rb')
|
f = open(str(self.MetaFile), 'r')
|
||||||
Content = f.read()
|
Content = f.read()
|
||||||
f.close()
|
f.close()
|
||||||
m.update(Content)
|
m.update(Content)
|
||||||
# Add Module's source files
|
# Add Module's source files
|
||||||
if self.SourceFileList:
|
if self.SourceFileList:
|
||||||
for File in sorted(self.SourceFileList, key=lambda x: str(x)):
|
for File in sorted(self.SourceFileList, key=lambda x: str(x)):
|
||||||
f = open(str(File), 'rb')
|
f = open(str(File), 'r')
|
||||||
Content = f.read()
|
Content = f.read()
|
||||||
f.close()
|
f.close()
|
||||||
m.update(Content)
|
m.update(Content)
|
||||||
|
@ -14,6 +14,7 @@
|
|||||||
##
|
##
|
||||||
# Import Modules
|
# Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import print_function
|
||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
import re
|
import re
|
||||||
import copy
|
import copy
|
||||||
|
@ -13,6 +13,7 @@
|
|||||||
|
|
||||||
## Import Modules
|
## Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import absolute_import
|
||||||
import string
|
import string
|
||||||
import collections
|
import collections
|
||||||
import struct
|
import struct
|
||||||
@ -936,7 +937,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
|
|||||||
if Info.IsLibrary:
|
if Info.IsLibrary:
|
||||||
PcdList = Info.LibraryPcdList
|
PcdList = Info.LibraryPcdList
|
||||||
else:
|
else:
|
||||||
PcdList = list(Info.ModulePcdList) + list(Info.LibraryPcdList)
|
PcdList = Info.ModulePcdList + Info.LibraryPcdList
|
||||||
PcdExCNameTest = 0
|
PcdExCNameTest = 0
|
||||||
for PcdModule in PcdList:
|
for PcdModule in PcdList:
|
||||||
if PcdModule.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
|
if PcdModule.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
|
||||||
@ -970,7 +971,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
|
|||||||
AutoGenH.Append('#define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
|
AutoGenH.Append('#define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
|
||||||
elif Pcd.Type in PCD_DYNAMIC_TYPE_SET:
|
elif Pcd.Type in PCD_DYNAMIC_TYPE_SET:
|
||||||
PcdCNameTest = 0
|
PcdCNameTest = 0
|
||||||
for PcdModule in list(Info.LibraryPcdList) + list(Info.ModulePcdList):
|
for PcdModule in Info.LibraryPcdList + Info.ModulePcdList:
|
||||||
if PcdModule.Type in PCD_DYNAMIC_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
|
if PcdModule.Type in PCD_DYNAMIC_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
|
||||||
PcdCNameTest += 1
|
PcdCNameTest += 1
|
||||||
# get out early once we found > 1...
|
# get out early once we found > 1...
|
||||||
@ -1010,9 +1011,6 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
|
|||||||
if Value.upper().endswith('L'):
|
if Value.upper().endswith('L'):
|
||||||
Value = Value[:-1]
|
Value = Value[:-1]
|
||||||
ValueNumber = int (Value, 0)
|
ValueNumber = int (Value, 0)
|
||||||
except:
|
|
||||||
try:
|
|
||||||
ValueNumber = int(Value.lstrip('0'))
|
|
||||||
except:
|
except:
|
||||||
EdkLogger.error("build", AUTOGEN_ERROR,
|
EdkLogger.error("build", AUTOGEN_ERROR,
|
||||||
"PCD value is not valid dec or hex number for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
|
"PCD value is not valid dec or hex number for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
|
||||||
@ -1053,7 +1051,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
|
|||||||
else:
|
else:
|
||||||
NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ', '
|
NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ', '
|
||||||
if Unicode:
|
if Unicode:
|
||||||
ArraySize = ArraySize // 2
|
ArraySize = ArraySize / 2
|
||||||
Value = NewValue + '0 }'
|
Value = NewValue + '0 }'
|
||||||
if ArraySize < ValueSize:
|
if ArraySize < ValueSize:
|
||||||
if Pcd.MaxSizeUserSet:
|
if Pcd.MaxSizeUserSet:
|
||||||
@ -1063,7 +1061,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
|
|||||||
else:
|
else:
|
||||||
ArraySize = Pcd.GetPcdSize()
|
ArraySize = Pcd.GetPcdSize()
|
||||||
if Unicode:
|
if Unicode:
|
||||||
ArraySize = ArraySize // 2
|
ArraySize = ArraySize / 2
|
||||||
Array = '[%d]' % ArraySize
|
Array = '[%d]' % ArraySize
|
||||||
#
|
#
|
||||||
# skip casting for fixed at build since it breaks ARM assembly.
|
# skip casting for fixed at build since it breaks ARM assembly.
|
||||||
@ -1798,7 +1796,7 @@ def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer):
|
|||||||
TempBuffer += Buffer
|
TempBuffer += Buffer
|
||||||
elif File.Ext.upper() == '.JPG':
|
elif File.Ext.upper() == '.JPG':
|
||||||
ImageType, = struct.unpack('4s', Buffer[6:10])
|
ImageType, = struct.unpack('4s', Buffer[6:10])
|
||||||
if ImageType != b'JFIF':
|
if ImageType != 'JFIF':
|
||||||
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path)
|
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path)
|
||||||
TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG)
|
TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG)
|
||||||
TempBuffer += pack('I', len(Buffer))
|
TempBuffer += pack('I', len(Buffer))
|
||||||
@ -1898,7 +1896,7 @@ def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer):
|
|||||||
|
|
||||||
def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
|
def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
|
||||||
ImageType, = struct.unpack('2s', Buffer[0:2])
|
ImageType, = struct.unpack('2s', Buffer[0:2])
|
||||||
if ImageType!= b'BM': # BMP file type is 'BM'
|
if ImageType!= 'BM': # BMP file type is 'BM'
|
||||||
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path)
|
EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path)
|
||||||
BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant'])
|
BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant'])
|
||||||
BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII')
|
BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII')
|
||||||
@ -1922,7 +1920,7 @@ def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
|
|||||||
else:
|
else:
|
||||||
ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_1BIT)
|
ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_1BIT)
|
||||||
ImageBuffer += pack('B', PaletteIndex)
|
ImageBuffer += pack('B', PaletteIndex)
|
||||||
Width = (BmpHeader.biWidth + 7)//8
|
Width = (BmpHeader.biWidth + 7)/8
|
||||||
if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
|
if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
|
||||||
PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
|
PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
|
||||||
elif BmpHeader.biBitCount == 4:
|
elif BmpHeader.biBitCount == 4:
|
||||||
@ -1931,7 +1929,7 @@ def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
|
|||||||
else:
|
else:
|
||||||
ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_4BIT)
|
ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_4BIT)
|
||||||
ImageBuffer += pack('B', PaletteIndex)
|
ImageBuffer += pack('B', PaletteIndex)
|
||||||
Width = (BmpHeader.biWidth + 1)//2
|
Width = (BmpHeader.biWidth + 1)/2
|
||||||
if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
|
if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
|
||||||
PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
|
PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
|
||||||
elif BmpHeader.biBitCount == 8:
|
elif BmpHeader.biBitCount == 8:
|
||||||
@ -1970,7 +1968,7 @@ def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
|
|||||||
for Index in range(0, len(PaletteBuffer)):
|
for Index in range(0, len(PaletteBuffer)):
|
||||||
if Index % 4 == 3:
|
if Index % 4 == 3:
|
||||||
continue
|
continue
|
||||||
PaletteTemp += bytes([PaletteBuffer[Index]])
|
PaletteTemp += PaletteBuffer[Index]
|
||||||
PaletteBuffer = PaletteTemp[1:]
|
PaletteBuffer = PaletteTemp[1:]
|
||||||
return ImageBuffer, PaletteBuffer
|
return ImageBuffer, PaletteBuffer
|
||||||
|
|
||||||
@ -2068,7 +2066,7 @@ def CreateCode(Info, AutoGenC, AutoGenH, StringH, UniGenCFlag, UniGenBinBuffer,
|
|||||||
if Guid in Info.Module.GetGuidsUsedByPcd():
|
if Guid in Info.Module.GetGuidsUsedByPcd():
|
||||||
continue
|
continue
|
||||||
GuidMacros.append('#define %s %s' % (Guid, Info.Module.Guids[Guid]))
|
GuidMacros.append('#define %s %s' % (Guid, Info.Module.Guids[Guid]))
|
||||||
for Guid, Value in list(Info.Module.Protocols.items()) + list(Info.Module.Ppis.items()):
|
for Guid, Value in Info.Module.Protocols.items() + Info.Module.Ppis.items():
|
||||||
GuidMacros.append('#define %s %s' % (Guid, Value))
|
GuidMacros.append('#define %s %s' % (Guid, Value))
|
||||||
# supports FixedAtBuild and FeaturePcd usage in VFR file
|
# supports FixedAtBuild and FeaturePcd usage in VFR file
|
||||||
if Info.VfrFileList and Info.ModulePcdList:
|
if Info.VfrFileList and Info.ModulePcdList:
|
||||||
|
@ -13,6 +13,7 @@
|
|||||||
|
|
||||||
## Import Modules
|
## Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import absolute_import
|
||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
import sys
|
import sys
|
||||||
import string
|
import string
|
||||||
@ -491,7 +492,7 @@ cleanlib:
|
|||||||
# EdkII modules always use "_ModuleEntryPoint" as entry point
|
# EdkII modules always use "_ModuleEntryPoint" as entry point
|
||||||
ImageEntryPoint = "_ModuleEntryPoint"
|
ImageEntryPoint = "_ModuleEntryPoint"
|
||||||
|
|
||||||
for k, v in MyAgo.Module.Defines.items():
|
for k, v in MyAgo.Module.Defines.iteritems():
|
||||||
if k not in MyAgo.Macros:
|
if k not in MyAgo.Macros:
|
||||||
MyAgo.Macros[k] = v
|
MyAgo.Macros[k] = v
|
||||||
|
|
||||||
@ -503,7 +504,7 @@ cleanlib:
|
|||||||
MyAgo.Macros['IMAGE_ENTRY_POINT'] = ImageEntryPoint
|
MyAgo.Macros['IMAGE_ENTRY_POINT'] = ImageEntryPoint
|
||||||
|
|
||||||
PCI_COMPRESS_Flag = False
|
PCI_COMPRESS_Flag = False
|
||||||
for k, v in MyAgo.Module.Defines.items():
|
for k, v in MyAgo.Module.Defines.iteritems():
|
||||||
if 'PCI_COMPRESS' == k and 'TRUE' == v:
|
if 'PCI_COMPRESS' == k and 'TRUE' == v:
|
||||||
PCI_COMPRESS_Flag = True
|
PCI_COMPRESS_Flag = True
|
||||||
|
|
||||||
@ -654,7 +655,7 @@ cleanlib:
|
|||||||
"module_relative_directory" : MyAgo.SourceDir,
|
"module_relative_directory" : MyAgo.SourceDir,
|
||||||
"module_dir" : mws.join (self.Macros["WORKSPACE"], MyAgo.SourceDir),
|
"module_dir" : mws.join (self.Macros["WORKSPACE"], MyAgo.SourceDir),
|
||||||
"package_relative_directory": package_rel_dir,
|
"package_relative_directory": package_rel_dir,
|
||||||
"module_extra_defines" : ["%s = %s" % (k, v) for k, v in MyAgo.Module.Defines.items()],
|
"module_extra_defines" : ["%s = %s" % (k, v) for k, v in MyAgo.Module.Defines.iteritems()],
|
||||||
|
|
||||||
"architecture" : MyAgo.Arch,
|
"architecture" : MyAgo.Arch,
|
||||||
"toolchain_tag" : MyAgo.ToolChain,
|
"toolchain_tag" : MyAgo.ToolChain,
|
||||||
@ -668,8 +669,8 @@ cleanlib:
|
|||||||
"separator" : Separator,
|
"separator" : Separator,
|
||||||
"module_tool_definitions" : ToolsDef,
|
"module_tool_definitions" : ToolsDef,
|
||||||
|
|
||||||
"shell_command_code" : list(self._SHELL_CMD_[self._FileType].keys()),
|
"shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
|
||||||
"shell_command" : list(self._SHELL_CMD_[self._FileType].values()),
|
"shell_command" : self._SHELL_CMD_[self._FileType].values(),
|
||||||
|
|
||||||
"module_entry_point" : ModuleEntryPoint,
|
"module_entry_point" : ModuleEntryPoint,
|
||||||
"image_entry_point" : ImageEntryPoint,
|
"image_entry_point" : ImageEntryPoint,
|
||||||
@ -917,7 +918,7 @@ cleanlib:
|
|||||||
#
|
#
|
||||||
# Extract common files list in the dependency files
|
# Extract common files list in the dependency files
|
||||||
#
|
#
|
||||||
for File in sorted(DepSet, key=lambda x: str(x)):
|
for File in DepSet:
|
||||||
self.CommonFileDependency.append(self.PlaceMacro(File.Path, self.Macros))
|
self.CommonFileDependency.append(self.PlaceMacro(File.Path, self.Macros))
|
||||||
|
|
||||||
for File in FileDependencyDict:
|
for File in FileDependencyDict:
|
||||||
@ -926,11 +927,11 @@ cleanlib:
|
|||||||
continue
|
continue
|
||||||
NewDepSet = set(FileDependencyDict[File])
|
NewDepSet = set(FileDependencyDict[File])
|
||||||
NewDepSet -= DepSet
|
NewDepSet -= DepSet
|
||||||
FileDependencyDict[File] = ["$(COMMON_DEPS)"] + sorted(NewDepSet, key=lambda x: str(x))
|
FileDependencyDict[File] = ["$(COMMON_DEPS)"] + list(NewDepSet)
|
||||||
|
|
||||||
# Convert target description object to target string in makefile
|
# Convert target description object to target string in makefile
|
||||||
for Type in self._AutoGenObject.Targets:
|
for Type in self._AutoGenObject.Targets:
|
||||||
for T in sorted(self._AutoGenObject.Targets[Type], key=lambda x: str(x)):
|
for T in self._AutoGenObject.Targets[Type]:
|
||||||
# Generate related macros if needed
|
# Generate related macros if needed
|
||||||
if T.GenFileListMacro and T.FileListMacro not in self.FileListMacros:
|
if T.GenFileListMacro and T.FileListMacro not in self.FileListMacros:
|
||||||
self.FileListMacros[T.FileListMacro] = []
|
self.FileListMacros[T.FileListMacro] = []
|
||||||
@ -1031,7 +1032,7 @@ cleanlib:
|
|||||||
CurrentFileDependencyList = DepDb[F]
|
CurrentFileDependencyList = DepDb[F]
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
Fd = open(F.Path, 'rb')
|
Fd = open(F.Path, 'r')
|
||||||
except BaseException as X:
|
except BaseException as X:
|
||||||
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))
|
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))
|
||||||
|
|
||||||
@ -1041,14 +1042,8 @@ cleanlib:
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
if FileContent[0] == 0xff or FileContent[0] == 0xfe:
|
if FileContent[0] == 0xff or FileContent[0] == 0xfe:
|
||||||
FileContent = str(FileContent, encoding="utf-16")
|
FileContent = unicode(FileContent, "utf-16")
|
||||||
IncludedFileList = gIncludePattern.findall(FileContent)
|
IncludedFileList = gIncludePattern.findall(FileContent)
|
||||||
else:
|
|
||||||
try:
|
|
||||||
FileContent = str(FileContent, encoding="utf-8")
|
|
||||||
IncludedFileList = gIncludePattern.findall(FileContent)
|
|
||||||
except:
|
|
||||||
continue
|
|
||||||
|
|
||||||
for Inc in IncludedFileList:
|
for Inc in IncludedFileList:
|
||||||
Inc = Inc.strip()
|
Inc = Inc.strip()
|
||||||
@ -1097,7 +1092,7 @@ cleanlib:
|
|||||||
DependencySet.update(ForceList)
|
DependencySet.update(ForceList)
|
||||||
if File in DependencySet:
|
if File in DependencySet:
|
||||||
DependencySet.remove(File)
|
DependencySet.remove(File)
|
||||||
DependencyList = sorted(DependencySet, key=lambda x: str(x)) # remove duplicate ones
|
DependencyList = list(DependencySet) # remove duplicate ones
|
||||||
|
|
||||||
return DependencyList
|
return DependencyList
|
||||||
|
|
||||||
@ -1274,8 +1269,8 @@ ${BEGIN}\t-@${create_directory_command}\n${END}\
|
|||||||
"separator" : Separator,
|
"separator" : Separator,
|
||||||
"module_tool_definitions" : ToolsDef,
|
"module_tool_definitions" : ToolsDef,
|
||||||
|
|
||||||
"shell_command_code" : list(self._SHELL_CMD_[self._FileType].keys()),
|
"shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
|
||||||
"shell_command" : list(self._SHELL_CMD_[self._FileType].values()),
|
"shell_command" : self._SHELL_CMD_[self._FileType].values(),
|
||||||
|
|
||||||
"create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
|
"create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
|
||||||
"custom_makefile_content" : CustomMakefile
|
"custom_makefile_content" : CustomMakefile
|
||||||
@ -1448,8 +1443,8 @@ cleanlib:
|
|||||||
|
|
||||||
"toolchain_tag" : MyAgo.ToolChain,
|
"toolchain_tag" : MyAgo.ToolChain,
|
||||||
"build_target" : MyAgo.BuildTarget,
|
"build_target" : MyAgo.BuildTarget,
|
||||||
"shell_command_code" : list(self._SHELL_CMD_[self._FileType].keys()),
|
"shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
|
||||||
"shell_command" : list(self._SHELL_CMD_[self._FileType].values()),
|
"shell_command" : self._SHELL_CMD_[self._FileType].values(),
|
||||||
"build_architecture_list" : MyAgo.Arch,
|
"build_architecture_list" : MyAgo.Arch,
|
||||||
"architecture" : MyAgo.Arch,
|
"architecture" : MyAgo.Arch,
|
||||||
"separator" : Separator,
|
"separator" : Separator,
|
||||||
@ -1584,8 +1579,8 @@ class TopLevelMakefile(BuildFile):
|
|||||||
|
|
||||||
"toolchain_tag" : MyAgo.ToolChain,
|
"toolchain_tag" : MyAgo.ToolChain,
|
||||||
"build_target" : MyAgo.BuildTarget,
|
"build_target" : MyAgo.BuildTarget,
|
||||||
"shell_command_code" : list(self._SHELL_CMD_[self._FileType].keys()),
|
"shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
|
||||||
"shell_command" : list(self._SHELL_CMD_[self._FileType].values()),
|
"shell_command" : self._SHELL_CMD_[self._FileType].values(),
|
||||||
'arch' : list(MyAgo.ArchList),
|
'arch' : list(MyAgo.ArchList),
|
||||||
"build_architecture_list" : ','.join(MyAgo.ArchList),
|
"build_architecture_list" : ','.join(MyAgo.ArchList),
|
||||||
"separator" : Separator,
|
"separator" : Separator,
|
||||||
|
@ -10,6 +10,7 @@
|
|||||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
#
|
#
|
||||||
|
from __future__ import absolute_import
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
from Common.Misc import *
|
from Common.Misc import *
|
||||||
from Common.StringUtils import StringToArray
|
from Common.StringUtils import StringToArray
|
||||||
@ -257,7 +258,7 @@ class DbItemList:
|
|||||||
# Variable length, need to calculate one by one
|
# Variable length, need to calculate one by one
|
||||||
#
|
#
|
||||||
assert(Index < len(self.RawDataList))
|
assert(Index < len(self.RawDataList))
|
||||||
for ItemIndex in range(Index):
|
for ItemIndex in xrange(Index):
|
||||||
Offset += len(self.RawDataList[ItemIndex])
|
Offset += len(self.RawDataList[ItemIndex])
|
||||||
else:
|
else:
|
||||||
Offset = self.ItemSize * Index
|
Offset = self.ItemSize * Index
|
||||||
@ -291,7 +292,7 @@ class DbItemList:
|
|||||||
|
|
||||||
PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
|
PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
|
||||||
|
|
||||||
Buffer = bytearray()
|
Buffer = ''
|
||||||
for Datas in self.RawDataList:
|
for Datas in self.RawDataList:
|
||||||
if type(Datas) in (list, tuple):
|
if type(Datas) in (list, tuple):
|
||||||
for Data in Datas:
|
for Data in Datas:
|
||||||
@ -316,7 +317,7 @@ class DbExMapTblItemList (DbItemList):
|
|||||||
DbItemList.__init__(self, ItemSize, DataList, RawDataList)
|
DbItemList.__init__(self, ItemSize, DataList, RawDataList)
|
||||||
|
|
||||||
def PackData(self):
|
def PackData(self):
|
||||||
Buffer = bytearray()
|
Buffer = ''
|
||||||
PackStr = "=LHH"
|
PackStr = "=LHH"
|
||||||
for Datas in self.RawDataList:
|
for Datas in self.RawDataList:
|
||||||
Buffer += pack(PackStr,
|
Buffer += pack(PackStr,
|
||||||
@ -344,7 +345,7 @@ class DbComItemList (DbItemList):
|
|||||||
assert(False)
|
assert(False)
|
||||||
else:
|
else:
|
||||||
assert(Index < len(self.RawDataList))
|
assert(Index < len(self.RawDataList))
|
||||||
for ItemIndex in range(Index):
|
for ItemIndex in xrange(Index):
|
||||||
Offset += len(self.RawDataList[ItemIndex]) * self.ItemSize
|
Offset += len(self.RawDataList[ItemIndex]) * self.ItemSize
|
||||||
|
|
||||||
return Offset
|
return Offset
|
||||||
@ -365,7 +366,7 @@ class DbComItemList (DbItemList):
|
|||||||
def PackData(self):
|
def PackData(self):
|
||||||
PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
|
PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
|
||||||
|
|
||||||
Buffer = bytearray()
|
Buffer = ''
|
||||||
for DataList in self.RawDataList:
|
for DataList in self.RawDataList:
|
||||||
for Data in DataList:
|
for Data in DataList:
|
||||||
if type(Data) in (list, tuple):
|
if type(Data) in (list, tuple):
|
||||||
@ -386,7 +387,7 @@ class DbVariableTableItemList (DbComItemList):
|
|||||||
|
|
||||||
def PackData(self):
|
def PackData(self):
|
||||||
PackStr = "=LLHHLHH"
|
PackStr = "=LLHHLHH"
|
||||||
Buffer = bytearray()
|
Buffer = ''
|
||||||
for DataList in self.RawDataList:
|
for DataList in self.RawDataList:
|
||||||
for Data in DataList:
|
for Data in DataList:
|
||||||
Buffer += pack(PackStr,
|
Buffer += pack(PackStr,
|
||||||
@ -410,7 +411,7 @@ class DbStringHeadTableItemList(DbItemList):
|
|||||||
# Variable length, need to calculate one by one
|
# Variable length, need to calculate one by one
|
||||||
#
|
#
|
||||||
assert(Index < len(self.RawDataList))
|
assert(Index < len(self.RawDataList))
|
||||||
for ItemIndex in range(Index):
|
for ItemIndex in xrange(Index):
|
||||||
Offset += len(self.RawDataList[ItemIndex])
|
Offset += len(self.RawDataList[ItemIndex])
|
||||||
else:
|
else:
|
||||||
for innerIndex in range(Index):
|
for innerIndex in range(Index):
|
||||||
@ -447,7 +448,7 @@ class DbSkuHeadTableItemList (DbItemList):
|
|||||||
|
|
||||||
def PackData(self):
|
def PackData(self):
|
||||||
PackStr = "=LL"
|
PackStr = "=LL"
|
||||||
Buffer = bytearray()
|
Buffer = ''
|
||||||
for Data in self.RawDataList:
|
for Data in self.RawDataList:
|
||||||
Buffer += pack(PackStr,
|
Buffer += pack(PackStr,
|
||||||
GetIntegerValue(Data[0]),
|
GetIntegerValue(Data[0]),
|
||||||
@ -469,7 +470,7 @@ class DbSizeTableItemList (DbItemList):
|
|||||||
return length * self.ItemSize
|
return length * self.ItemSize
|
||||||
def PackData(self):
|
def PackData(self):
|
||||||
PackStr = "=H"
|
PackStr = "=H"
|
||||||
Buffer = bytearray()
|
Buffer = ''
|
||||||
for Data in self.RawDataList:
|
for Data in self.RawDataList:
|
||||||
Buffer += pack(PackStr,
|
Buffer += pack(PackStr,
|
||||||
GetIntegerValue(Data[0]))
|
GetIntegerValue(Data[0]))
|
||||||
@ -494,14 +495,14 @@ class DbStringItemList (DbComItemList):
|
|||||||
assert(len(RawDataList) == len(LenList))
|
assert(len(RawDataList) == len(LenList))
|
||||||
DataList = []
|
DataList = []
|
||||||
# adjust DataList according to the LenList
|
# adjust DataList according to the LenList
|
||||||
for Index in range(len(RawDataList)):
|
for Index in xrange(len(RawDataList)):
|
||||||
Len = LenList[Index]
|
Len = LenList[Index]
|
||||||
RawDatas = RawDataList[Index]
|
RawDatas = RawDataList[Index]
|
||||||
assert(Len >= len(RawDatas))
|
assert(Len >= len(RawDatas))
|
||||||
ActualDatas = []
|
ActualDatas = []
|
||||||
for i in range(len(RawDatas)):
|
for i in xrange(len(RawDatas)):
|
||||||
ActualDatas.append(RawDatas[i])
|
ActualDatas.append(RawDatas[i])
|
||||||
for i in range(len(RawDatas), Len):
|
for i in xrange(len(RawDatas), Len):
|
||||||
ActualDatas.append(0)
|
ActualDatas.append(0)
|
||||||
DataList.append(ActualDatas)
|
DataList.append(ActualDatas)
|
||||||
self.LenList = LenList
|
self.LenList = LenList
|
||||||
@ -510,7 +511,7 @@ class DbStringItemList (DbComItemList):
|
|||||||
Offset = 0
|
Offset = 0
|
||||||
|
|
||||||
assert(Index < len(self.LenList))
|
assert(Index < len(self.LenList))
|
||||||
for ItemIndex in range(Index):
|
for ItemIndex in xrange(Index):
|
||||||
Offset += self.LenList[ItemIndex]
|
Offset += self.LenList[ItemIndex]
|
||||||
|
|
||||||
return Offset
|
return Offset
|
||||||
@ -611,7 +612,7 @@ def BuildExDataBase(Dict):
|
|||||||
DbVardefValueUint32 = DbItemList(4, RawDataList = VardefValueUint32)
|
DbVardefValueUint32 = DbItemList(4, RawDataList = VardefValueUint32)
|
||||||
VpdHeadValue = Dict['VPD_DB_VALUE']
|
VpdHeadValue = Dict['VPD_DB_VALUE']
|
||||||
DbVpdHeadValue = DbComItemList(4, RawDataList = VpdHeadValue)
|
DbVpdHeadValue = DbComItemList(4, RawDataList = VpdHeadValue)
|
||||||
ExMapTable = list(zip(Dict['EXMAPPING_TABLE_EXTOKEN'], Dict['EXMAPPING_TABLE_LOCAL_TOKEN'], Dict['EXMAPPING_TABLE_GUID_INDEX']))
|
ExMapTable = zip(Dict['EXMAPPING_TABLE_EXTOKEN'], Dict['EXMAPPING_TABLE_LOCAL_TOKEN'], Dict['EXMAPPING_TABLE_GUID_INDEX'])
|
||||||
DbExMapTable = DbExMapTblItemList(8, RawDataList = ExMapTable)
|
DbExMapTable = DbExMapTblItemList(8, RawDataList = ExMapTable)
|
||||||
LocalTokenNumberTable = Dict['LOCAL_TOKEN_NUMBER_DB_VALUE']
|
LocalTokenNumberTable = Dict['LOCAL_TOKEN_NUMBER_DB_VALUE']
|
||||||
DbLocalTokenNumberTable = DbItemList(4, RawDataList = LocalTokenNumberTable)
|
DbLocalTokenNumberTable = DbItemList(4, RawDataList = LocalTokenNumberTable)
|
||||||
@ -645,7 +646,7 @@ def BuildExDataBase(Dict):
|
|||||||
PcdNameOffsetTable = Dict['PCD_NAME_OFFSET']
|
PcdNameOffsetTable = Dict['PCD_NAME_OFFSET']
|
||||||
DbPcdNameOffsetTable = DbItemList(4, RawDataList = PcdNameOffsetTable)
|
DbPcdNameOffsetTable = DbItemList(4, RawDataList = PcdNameOffsetTable)
|
||||||
|
|
||||||
SizeTableValue = list(zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH']))
|
SizeTableValue = zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH'])
|
||||||
DbSizeTableValue = DbSizeTableItemList(2, RawDataList = SizeTableValue)
|
DbSizeTableValue = DbSizeTableItemList(2, RawDataList = SizeTableValue)
|
||||||
InitValueUint16 = Dict['INIT_DB_VALUE_UINT16']
|
InitValueUint16 = Dict['INIT_DB_VALUE_UINT16']
|
||||||
DbInitValueUint16 = DbComItemList(2, RawDataList = InitValueUint16)
|
DbInitValueUint16 = DbComItemList(2, RawDataList = InitValueUint16)
|
||||||
@ -698,7 +699,7 @@ def BuildExDataBase(Dict):
|
|||||||
|
|
||||||
# Get offset of SkuId table in the database
|
# Get offset of SkuId table in the database
|
||||||
SkuIdTableOffset = FixedHeaderLen
|
SkuIdTableOffset = FixedHeaderLen
|
||||||
for DbIndex in range(len(DbTotal)):
|
for DbIndex in xrange(len(DbTotal)):
|
||||||
if DbTotal[DbIndex] is SkuidValue:
|
if DbTotal[DbIndex] is SkuidValue:
|
||||||
break
|
break
|
||||||
SkuIdTableOffset += DbItemTotal[DbIndex].GetListSize()
|
SkuIdTableOffset += DbItemTotal[DbIndex].GetListSize()
|
||||||
@ -710,7 +711,7 @@ def BuildExDataBase(Dict):
|
|||||||
for (LocalTokenNumberTableIndex, (Offset, Table)) in enumerate(LocalTokenNumberTable):
|
for (LocalTokenNumberTableIndex, (Offset, Table)) in enumerate(LocalTokenNumberTable):
|
||||||
DbIndex = 0
|
DbIndex = 0
|
||||||
DbOffset = FixedHeaderLen
|
DbOffset = FixedHeaderLen
|
||||||
for DbIndex in range(len(DbTotal)):
|
for DbIndex in xrange(len(DbTotal)):
|
||||||
if DbTotal[DbIndex] is Table:
|
if DbTotal[DbIndex] is Table:
|
||||||
DbOffset += DbItemTotal[DbIndex].GetInterOffset(Offset)
|
DbOffset += DbItemTotal[DbIndex].GetInterOffset(Offset)
|
||||||
break
|
break
|
||||||
@ -736,7 +737,7 @@ def BuildExDataBase(Dict):
|
|||||||
(VariableHeadGuidIndex, VariableHeadStringIndex, SKUVariableOffset, VariableOffset, VariableRefTable, VariableAttribute) = VariableEntryPerSku[:]
|
(VariableHeadGuidIndex, VariableHeadStringIndex, SKUVariableOffset, VariableOffset, VariableRefTable, VariableAttribute) = VariableEntryPerSku[:]
|
||||||
DbIndex = 0
|
DbIndex = 0
|
||||||
DbOffset = FixedHeaderLen
|
DbOffset = FixedHeaderLen
|
||||||
for DbIndex in range(len(DbTotal)):
|
for DbIndex in xrange(len(DbTotal)):
|
||||||
if DbTotal[DbIndex] is VariableRefTable:
|
if DbTotal[DbIndex] is VariableRefTable:
|
||||||
DbOffset += DbItemTotal[DbIndex].GetInterOffset(VariableOffset)
|
DbOffset += DbItemTotal[DbIndex].GetInterOffset(VariableOffset)
|
||||||
break
|
break
|
||||||
@ -756,7 +757,7 @@ def BuildExDataBase(Dict):
|
|||||||
|
|
||||||
# calculate various table offset now
|
# calculate various table offset now
|
||||||
DbTotalLength = FixedHeaderLen
|
DbTotalLength = FixedHeaderLen
|
||||||
for DbIndex in range(len(DbItemTotal)):
|
for DbIndex in xrange(len(DbItemTotal)):
|
||||||
if DbItemTotal[DbIndex] is DbLocalTokenNumberTable:
|
if DbItemTotal[DbIndex] is DbLocalTokenNumberTable:
|
||||||
LocalTokenNumberTableOffset = DbTotalLength
|
LocalTokenNumberTableOffset = DbTotalLength
|
||||||
elif DbItemTotal[DbIndex] is DbExMapTable:
|
elif DbItemTotal[DbIndex] is DbExMapTable:
|
||||||
@ -849,7 +850,7 @@ def BuildExDataBase(Dict):
|
|||||||
Index = 0
|
Index = 0
|
||||||
for Item in DbItemTotal:
|
for Item in DbItemTotal:
|
||||||
Index +=1
|
Index +=1
|
||||||
b = bytes(Item.PackData())
|
b = Item.PackData()
|
||||||
Buffer += b
|
Buffer += b
|
||||||
if Index == InitTableNum:
|
if Index == InitTableNum:
|
||||||
if len(Buffer) % 8:
|
if len(Buffer) % 8:
|
||||||
@ -917,9 +918,9 @@ def CreatePcdDataBase(PcdDBData):
|
|||||||
totallenbuff = pack("=L", totallen)
|
totallenbuff = pack("=L", totallen)
|
||||||
newbuffer = databasebuff[:32]
|
newbuffer = databasebuff[:32]
|
||||||
for i in range(4):
|
for i in range(4):
|
||||||
newbuffer += bytes([totallenbuff[i]])
|
newbuffer += totallenbuff[i]
|
||||||
for i in range(36, totallen):
|
for i in range(36, totallen):
|
||||||
newbuffer += bytes([databasebuff[i]])
|
newbuffer += databasebuff[i]
|
||||||
|
|
||||||
return newbuffer
|
return newbuffer
|
||||||
|
|
||||||
@ -962,7 +963,7 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase):
|
|||||||
AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase)
|
AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase)
|
||||||
final_data = ()
|
final_data = ()
|
||||||
for item in PcdDbBuffer:
|
for item in PcdDbBuffer:
|
||||||
final_data += unpack("B", bytes([item]))
|
final_data += unpack("B", item)
|
||||||
PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data)
|
PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data)
|
||||||
PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC)
|
PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC)
|
||||||
VarCheckTableData[(skuname, skuid)] = VarCheckTab
|
VarCheckTableData[(skuname, skuid)] = VarCheckTab
|
||||||
@ -975,7 +976,7 @@ def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase):
|
|||||||
AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase)
|
AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase)
|
||||||
final_data = ()
|
final_data = ()
|
||||||
for item in PcdDbBuffer:
|
for item in PcdDbBuffer:
|
||||||
final_data += unpack("B", bytes([item]))
|
final_data += unpack("B", item)
|
||||||
PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data)
|
PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data)
|
||||||
|
|
||||||
return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData)
|
return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData)
|
||||||
@ -1348,7 +1349,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
|
|||||||
|
|
||||||
DbValueList.append(Sku.DefaultValue)
|
DbValueList.append(Sku.DefaultValue)
|
||||||
|
|
||||||
Pcd.TokenTypeList = sorted(set(Pcd.TokenTypeList))
|
Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
|
||||||
if Pcd.DatumType == TAB_VOID:
|
if Pcd.DatumType == TAB_VOID:
|
||||||
Dict['SIZE_TABLE_CNAME'].append(CName)
|
Dict['SIZE_TABLE_CNAME'].append(CName)
|
||||||
Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
|
Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
|
||||||
@ -1449,7 +1450,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
|
|||||||
Dict['PCD_CNAME_LENGTH'][GeneratedTokenNumber] = len(CNameBinArray.split(","))
|
Dict['PCD_CNAME_LENGTH'][GeneratedTokenNumber] = len(CNameBinArray.split(","))
|
||||||
|
|
||||||
|
|
||||||
Pcd.TokenTypeList = sorted(set(Pcd.TokenTypeList))
|
Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
|
||||||
|
|
||||||
# search the Offset and Table, used by LocalTokenNumberTableOffset
|
# search the Offset and Table, used by LocalTokenNumberTableOffset
|
||||||
if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
|
if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
|
||||||
|
@ -66,7 +66,7 @@ class VariableMgr(object):
|
|||||||
data = value_list[0]
|
data = value_list[0]
|
||||||
value_list = []
|
value_list = []
|
||||||
for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):
|
for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):
|
||||||
value_list.append(hex(unpack("B", bytes([data_byte]))[0]))
|
value_list.append(hex(unpack("B", data_byte)[0]))
|
||||||
newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = value_list
|
newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = value_list
|
||||||
try:
|
try:
|
||||||
newvaluestr = "{" + ",".join(VariableMgr.assemble_variable(newvalue)) +"}"
|
newvaluestr = "{" + ",".join(VariableMgr.assemble_variable(newvalue)) +"}"
|
||||||
@ -87,13 +87,13 @@ class VariableMgr(object):
|
|||||||
data = value_list[0]
|
data = value_list[0]
|
||||||
value_list = []
|
value_list = []
|
||||||
for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):
|
for data_byte in pack(data_flag, int(data, 16) if data.upper().startswith('0X') else int(data)):
|
||||||
value_list.append(hex(unpack("B", bytes([data_byte]))[0]))
|
value_list.append(hex(unpack("B", data_byte)[0]))
|
||||||
newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = (value_list,item.pcdname,item.PcdDscLine)
|
newvalue[int(item.var_offset, 16) if item.var_offset.upper().startswith("0X") else int(item.var_offset)] = (value_list,item.pcdname,item.PcdDscLine)
|
||||||
for offset in newvalue:
|
for offset in newvalue:
|
||||||
value_list,itemPcdname,itemPcdDscLine = newvalue[offset]
|
value_list,itemPcdname,itemPcdDscLine = newvalue[offset]
|
||||||
if offset > len(BaseValue) or (offset + len(value_list) > len(BaseValue)):
|
if offset > len(BaseValue) or (offset + len(value_list) > len(BaseValue)):
|
||||||
EdkLogger.error("build", AUTOGEN_ERROR, "The EFI Variable referred by PCD %s in line %s exceeds variable size: %s\n" % (itemPcdname,itemPcdDscLine,hex(len(BaseValue))))
|
EdkLogger.error("build", AUTOGEN_ERROR, "The EFI Variable referred by PCD %s in line %s exceeds variable size: %s\n" % (itemPcdname,itemPcdDscLine,hex(len(BaseValue))))
|
||||||
for i in range(len(value_list)):
|
for i in xrange(len(value_list)):
|
||||||
BaseValue[offset + i] = value_list[i]
|
BaseValue[offset + i] = value_list[i]
|
||||||
newvaluestr = "{" + ",".join(BaseValue) +"}"
|
newvaluestr = "{" + ",".join(BaseValue) +"}"
|
||||||
return newvaluestr
|
return newvaluestr
|
||||||
@ -129,7 +129,7 @@ class VariableMgr(object):
|
|||||||
for current_valuedict_key in ordered_valuedict_keys:
|
for current_valuedict_key in ordered_valuedict_keys:
|
||||||
if current_valuedict_key < len(var_value):
|
if current_valuedict_key < len(var_value):
|
||||||
raise
|
raise
|
||||||
for _ in range(current_valuedict_key - len(var_value)):
|
for _ in xrange(current_valuedict_key - len(var_value)):
|
||||||
var_value.append('0x00')
|
var_value.append('0x00')
|
||||||
var_value += valuedict[current_valuedict_key]
|
var_value += valuedict[current_valuedict_key]
|
||||||
return var_value
|
return var_value
|
||||||
@ -161,7 +161,7 @@ class VariableMgr(object):
|
|||||||
|
|
||||||
default_data_array = ()
|
default_data_array = ()
|
||||||
for item in default_data_buffer:
|
for item in default_data_buffer:
|
||||||
default_data_array += unpack("B", bytes([item]))
|
default_data_array += unpack("B", item)
|
||||||
|
|
||||||
var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)])
|
var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)])
|
||||||
|
|
||||||
@ -179,7 +179,7 @@ class VariableMgr(object):
|
|||||||
|
|
||||||
others_data_array = ()
|
others_data_array = ()
|
||||||
for item in others_data_buffer:
|
for item in others_data_buffer:
|
||||||
others_data_array += unpack("B", bytes([item]))
|
others_data_array += unpack("B", item)
|
||||||
|
|
||||||
data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array)
|
data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array)
|
||||||
|
|
||||||
@ -195,7 +195,7 @@ class VariableMgr(object):
|
|||||||
return []
|
return []
|
||||||
|
|
||||||
pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {})
|
pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {})
|
||||||
NvStoreDataBuffer = bytearray()
|
NvStoreDataBuffer = ""
|
||||||
var_data_offset = collections.OrderedDict()
|
var_data_offset = collections.OrderedDict()
|
||||||
offset = NvStorageHeaderSize
|
offset = NvStorageHeaderSize
|
||||||
for default_data, default_info in pcds_default_data.values():
|
for default_data, default_info in pcds_default_data.values():
|
||||||
@ -222,7 +222,7 @@ class VariableMgr(object):
|
|||||||
|
|
||||||
nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8)
|
nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8)
|
||||||
|
|
||||||
data_delta_structure_buffer = bytearray()
|
data_delta_structure_buffer = ""
|
||||||
for skuname, defaultstore in var_data:
|
for skuname, defaultstore in var_data:
|
||||||
if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
|
if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
|
||||||
continue
|
continue
|
||||||
@ -254,7 +254,7 @@ class VariableMgr(object):
|
|||||||
def unpack_data(data):
|
def unpack_data(data):
|
||||||
final_data = ()
|
final_data = ()
|
||||||
for item in data:
|
for item in data:
|
||||||
final_data += unpack("B", bytes([item]))
|
final_data += unpack("B", item)
|
||||||
return final_data
|
return final_data
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -322,7 +322,7 @@ class VariableMgr(object):
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def PACK_VARIABLES_DATA(var_value,data_type, tail = None):
|
def PACK_VARIABLES_DATA(var_value,data_type, tail = None):
|
||||||
Buffer = bytearray()
|
Buffer = ""
|
||||||
data_len = 0
|
data_len = 0
|
||||||
if data_type == DataType.TAB_VOID:
|
if data_type == DataType.TAB_VOID:
|
||||||
for value_char in var_value.strip("{").strip("}").split(","):
|
for value_char in var_value.strip("{").strip("}").split(","):
|
||||||
@ -352,7 +352,7 @@ class VariableMgr(object):
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value):
|
def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value):
|
||||||
Buffer = bytearray()
|
Buffer = ""
|
||||||
Buffer += pack("=L", 4+8+8)
|
Buffer += pack("=L", 4+8+8)
|
||||||
Buffer += pack("=Q", int(skuid))
|
Buffer += pack("=Q", int(skuid))
|
||||||
Buffer += pack("=Q", int(defaultstoragename))
|
Buffer += pack("=Q", int(defaultstoragename))
|
||||||
@ -377,7 +377,7 @@ class VariableMgr(object):
|
|||||||
def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list):
|
def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list):
|
||||||
skuid = self.GetSkuId(skuname)
|
skuid = self.GetSkuId(skuname)
|
||||||
defaultstorageid = self.GetDefaultStoreId(defaultstoragename)
|
defaultstorageid = self.GetDefaultStoreId(defaultstoragename)
|
||||||
Buffer = bytearray()
|
Buffer = ""
|
||||||
Buffer += pack("=L", 4+8+8)
|
Buffer += pack("=L", 4+8+8)
|
||||||
Buffer += pack("=Q", int(skuid))
|
Buffer += pack("=Q", int(skuid))
|
||||||
Buffer += pack("=Q", int(defaultstorageid))
|
Buffer += pack("=Q", int(defaultstorageid))
|
||||||
@ -400,7 +400,7 @@ class VariableMgr(object):
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def PACK_VARIABLE_NAME(var_name):
|
def PACK_VARIABLE_NAME(var_name):
|
||||||
Buffer = bytearray()
|
Buffer = ""
|
||||||
for name_char in var_name.strip("{").strip("}").split(","):
|
for name_char in var_name.strip("{").strip("}").split(","):
|
||||||
Buffer += pack("=B", int(name_char, 16))
|
Buffer += pack("=B", int(name_char, 16))
|
||||||
|
|
||||||
|
@ -13,6 +13,7 @@
|
|||||||
##
|
##
|
||||||
# Import Modules
|
# Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import absolute_import
|
||||||
import Common.EdkLogger as EdkLogger
|
import Common.EdkLogger as EdkLogger
|
||||||
from Common.BuildToolError import *
|
from Common.BuildToolError import *
|
||||||
from Common.StringUtils import GetLineNo
|
from Common.StringUtils import GetLineNo
|
||||||
|
@ -34,7 +34,7 @@ class InfSectionParser():
|
|||||||
SectionData = []
|
SectionData = []
|
||||||
|
|
||||||
try:
|
try:
|
||||||
FileLinesList = open(self._FilePath, "r").readlines()
|
FileLinesList = open(self._FilePath, "r", 0).readlines()
|
||||||
except BaseException:
|
except BaseException:
|
||||||
EdkLogger.error("build", AUTOGEN_ERROR, 'File %s is opened failed.' % self._FilePath)
|
EdkLogger.error("build", AUTOGEN_ERROR, 'File %s is opened failed.' % self._FilePath)
|
||||||
|
|
||||||
|
@ -14,6 +14,7 @@
|
|||||||
##
|
##
|
||||||
# Import Modules
|
# Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import absolute_import
|
||||||
import re
|
import re
|
||||||
import Common.EdkLogger as EdkLogger
|
import Common.EdkLogger as EdkLogger
|
||||||
from Common.BuildToolError import *
|
from Common.BuildToolError import *
|
||||||
@ -122,8 +123,6 @@ def DecToHexList(Dec, Digit = 8):
|
|||||||
# @retval: A list for formatted hex string
|
# @retval: A list for formatted hex string
|
||||||
#
|
#
|
||||||
def AscToHexList(Ascii):
|
def AscToHexList(Ascii):
|
||||||
if isinstance(Ascii, bytes):
|
|
||||||
return ['0x{0:02X}'.format(Item) for Item in Ascii]
|
|
||||||
return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii]
|
return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii]
|
||||||
|
|
||||||
## Create content of .h file
|
## Create content of .h file
|
||||||
@ -552,9 +551,9 @@ def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, Ski
|
|||||||
#
|
#
|
||||||
# support ISO 639-2 codes in .UNI files of EDK Shell
|
# support ISO 639-2 codes in .UNI files of EDK Shell
|
||||||
#
|
#
|
||||||
Uni = UniFileClassObject(sorted (UniFilList, key=lambda x: x.File), True, IncludePathList)
|
Uni = UniFileClassObject(sorted (UniFilList), True, IncludePathList)
|
||||||
else:
|
else:
|
||||||
Uni = UniFileClassObject(sorted (UniFilList, key=lambda x: x.File), IsCompatibleMode, IncludePathList)
|
Uni = UniFileClassObject(sorted (UniFilList), IsCompatibleMode, IncludePathList)
|
||||||
else:
|
else:
|
||||||
EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, 'No unicode files given')
|
EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, 'No unicode files given')
|
||||||
|
|
||||||
|
@ -16,6 +16,7 @@
|
|||||||
##
|
##
|
||||||
# Import Modules
|
# Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import print_function
|
||||||
import Common.LongFilePathOs as os, codecs, re
|
import Common.LongFilePathOs as os, codecs, re
|
||||||
import distutils.util
|
import distutils.util
|
||||||
import Common.EdkLogger as EdkLogger
|
import Common.EdkLogger as EdkLogger
|
||||||
@ -45,6 +46,18 @@ BACK_SLASH_PLACEHOLDER = u'\u0006'
|
|||||||
|
|
||||||
gIncludePattern = re.compile("^#include +[\"<]+([^\"< >]+)[>\"]+$", re.MULTILINE | re.UNICODE)
|
gIncludePattern = re.compile("^#include +[\"<]+([^\"< >]+)[>\"]+$", re.MULTILINE | re.UNICODE)
|
||||||
|
|
||||||
|
## Convert a python unicode string to a normal string
|
||||||
|
#
|
||||||
|
# Convert a python unicode string to a normal string
|
||||||
|
# UniToStr(u'I am a string') is 'I am a string'
|
||||||
|
#
|
||||||
|
# @param Uni: The python unicode string
|
||||||
|
#
|
||||||
|
# @retval: The formatted normal string
|
||||||
|
#
|
||||||
|
def UniToStr(Uni):
|
||||||
|
return repr(Uni)[2:-1]
|
||||||
|
|
||||||
## Convert a unicode string to a Hex list
|
## Convert a unicode string to a Hex list
|
||||||
#
|
#
|
||||||
# Convert a unicode string to a Hex list
|
# Convert a unicode string to a Hex list
|
||||||
@ -426,7 +439,7 @@ class UniFileClassObject(object):
|
|||||||
if EndPos != -1 and EndPos - StartPos == 6 :
|
if EndPos != -1 and EndPos - StartPos == 6 :
|
||||||
if g4HexChar.match(Line[StartPos + 2 : EndPos], re.UNICODE):
|
if g4HexChar.match(Line[StartPos + 2 : EndPos], re.UNICODE):
|
||||||
EndStr = Line[EndPos: ]
|
EndStr = Line[EndPos: ]
|
||||||
UniStr = Line[StartPos + 2: EndPos]
|
UniStr = ('\u' + (Line[StartPos + 2 : EndPos])).decode('unicode_escape')
|
||||||
if EndStr.startswith(u'\\x') and len(EndStr) >= 7:
|
if EndStr.startswith(u'\\x') and len(EndStr) >= 7:
|
||||||
if EndStr[6] == u'\\' and g4HexChar.match(EndStr[2 : 6], re.UNICODE):
|
if EndStr[6] == u'\\' and g4HexChar.match(EndStr[2 : 6], re.UNICODE):
|
||||||
Line = Line[0 : StartPos] + UniStr + EndStr
|
Line = Line[0 : StartPos] + UniStr + EndStr
|
||||||
|
@ -41,7 +41,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
|
|||||||
os.mkdir(dest)
|
os.mkdir(dest)
|
||||||
BinFileName = "PcdVarCheck.bin"
|
BinFileName = "PcdVarCheck.bin"
|
||||||
BinFilePath = os.path.join(dest, BinFileName)
|
BinFilePath = os.path.join(dest, BinFileName)
|
||||||
Buffer = bytearray()
|
Buffer = ''
|
||||||
index = 0
|
index = 0
|
||||||
for var_check_tab in self.var_check_info:
|
for var_check_tab in self.var_check_info:
|
||||||
index += 1
|
index += 1
|
||||||
@ -57,7 +57,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
|
|||||||
itemIndex += 1
|
itemIndex += 1
|
||||||
realLength += 5
|
realLength += 5
|
||||||
for v_data in item.data:
|
for v_data in item.data:
|
||||||
if isinstance(v_data, int):
|
if type(v_data) in (int, long):
|
||||||
realLength += item.StorageWidth
|
realLength += item.StorageWidth
|
||||||
else:
|
else:
|
||||||
realLength += item.StorageWidth
|
realLength += item.StorageWidth
|
||||||
@ -137,7 +137,7 @@ class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
|
|||||||
Buffer += b
|
Buffer += b
|
||||||
realLength += 1
|
realLength += 1
|
||||||
for v_data in item.data:
|
for v_data in item.data:
|
||||||
if isinstance(v_data, int):
|
if type(v_data) in (int, long):
|
||||||
b = pack(PACK_CODE_BY_SIZE[item.StorageWidth], v_data)
|
b = pack(PACK_CODE_BY_SIZE[item.StorageWidth], v_data)
|
||||||
Buffer += b
|
Buffer += b
|
||||||
realLength += item.StorageWidth
|
realLength += item.StorageWidth
|
||||||
@ -241,7 +241,7 @@ class VAR_CHECK_PCD_VALID_OBJ(object):
|
|||||||
|
|
||||||
class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
|
class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
|
||||||
def __init__(self, VarOffset, validlist, PcdDataType):
|
def __init__(self, VarOffset, validlist, PcdDataType):
|
||||||
super().__init__(VarOffset, validlist, PcdDataType)
|
super(VAR_CHECK_PCD_VALID_LIST, self).__init__(VarOffset, validlist, PcdDataType)
|
||||||
self.Type = 1
|
self.Type = 1
|
||||||
valid_num_list = []
|
valid_num_list = []
|
||||||
for item in self.rawdata:
|
for item in self.rawdata:
|
||||||
@ -261,7 +261,7 @@ class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
|
|||||||
|
|
||||||
class VAR_CHECK_PCD_VALID_RANGE(VAR_CHECK_PCD_VALID_OBJ):
|
class VAR_CHECK_PCD_VALID_RANGE(VAR_CHECK_PCD_VALID_OBJ):
|
||||||
def __init__(self, VarOffset, validrange, PcdDataType):
|
def __init__(self, VarOffset, validrange, PcdDataType):
|
||||||
super().__init__(VarOffset, validrange, PcdDataType)
|
super(VAR_CHECK_PCD_VALID_RANGE, self).__init__(VarOffset, validrange, PcdDataType)
|
||||||
self.Type = 2
|
self.Type = 2
|
||||||
RangeExpr = ""
|
RangeExpr = ""
|
||||||
i = 0
|
i = 0
|
||||||
|
@ -20,6 +20,8 @@
|
|||||||
##
|
##
|
||||||
# Import Modules
|
# Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import print_function
|
||||||
|
from __future__ import absolute_import
|
||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
import sys
|
import sys
|
||||||
import encodings.ascii
|
import encodings.ascii
|
||||||
|
@ -13,6 +13,7 @@
|
|||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
#
|
#
|
||||||
|
|
||||||
|
from __future__ import absolute_import
|
||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
from . import StringTable as st
|
from . import StringTable as st
|
||||||
@ -185,7 +186,7 @@ class PcdEntry:
|
|||||||
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
|
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
|
||||||
"PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
|
"PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
|
||||||
try:
|
try:
|
||||||
self.PcdValue = pack('%ds' % Size, bytes(ValueString, 'utf-8'))
|
self.PcdValue = pack('%ds' % Size, ValueString)
|
||||||
except:
|
except:
|
||||||
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
|
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
|
||||||
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
|
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
|
||||||
@ -211,7 +212,7 @@ class PcdEntry:
|
|||||||
|
|
||||||
ReturnArray = array.array('B')
|
ReturnArray = array.array('B')
|
||||||
|
|
||||||
for Index in range(len(ValueList)):
|
for Index in xrange(len(ValueList)):
|
||||||
Value = None
|
Value = None
|
||||||
if ValueList[Index].lower().startswith('0x'):
|
if ValueList[Index].lower().startswith('0x'):
|
||||||
# translate hex value
|
# translate hex value
|
||||||
@ -237,7 +238,7 @@ class PcdEntry:
|
|||||||
|
|
||||||
ReturnArray.append(Value)
|
ReturnArray.append(Value)
|
||||||
|
|
||||||
for Index in range(len(ValueList), Size):
|
for Index in xrange(len(ValueList), Size):
|
||||||
ReturnArray.append(0)
|
ReturnArray.append(0)
|
||||||
|
|
||||||
self.PcdValue = ReturnArray.tolist()
|
self.PcdValue = ReturnArray.tolist()
|
||||||
@ -272,7 +273,7 @@ class PcdEntry:
|
|||||||
"Invalid unicode character %s in unicode string %s(File: %s Line: %s)" % \
|
"Invalid unicode character %s in unicode string %s(File: %s Line: %s)" % \
|
||||||
(Value, UnicodeString, self.FileName, self.Lineno))
|
(Value, UnicodeString, self.FileName, self.Lineno))
|
||||||
|
|
||||||
for Index in range(len(UnicodeString) * 2, Size):
|
for Index in xrange(len(UnicodeString) * 2, Size):
|
||||||
ReturnArray.append(0)
|
ReturnArray.append(0)
|
||||||
|
|
||||||
self.PcdValue = ReturnArray.tolist()
|
self.PcdValue = ReturnArray.tolist()
|
||||||
@ -305,7 +306,7 @@ class GenVPD :
|
|||||||
self.PcdFixedOffsetSizeList = []
|
self.PcdFixedOffsetSizeList = []
|
||||||
self.PcdUnknownOffsetList = []
|
self.PcdUnknownOffsetList = []
|
||||||
try:
|
try:
|
||||||
fInputfile = open(InputFileName, "r")
|
fInputfile = open(InputFileName, "r", 0)
|
||||||
try:
|
try:
|
||||||
self.FileLinesList = fInputfile.readlines()
|
self.FileLinesList = fInputfile.readlines()
|
||||||
except:
|
except:
|
||||||
@ -430,7 +431,7 @@ class GenVPD :
|
|||||||
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, 'The offset value of PCD %s should be %s-byte aligned.' % (PCD.PcdCName, Alignment))
|
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, 'The offset value of PCD %s should be %s-byte aligned.' % (PCD.PcdCName, Alignment))
|
||||||
else:
|
else:
|
||||||
if PCD.PcdOccupySize % Alignment != 0:
|
if PCD.PcdOccupySize % Alignment != 0:
|
||||||
PCD.PcdOccupySize = (PCD.PcdOccupySize // Alignment + 1) * Alignment
|
PCD.PcdOccupySize = (PCD.PcdOccupySize / Alignment + 1) * Alignment
|
||||||
|
|
||||||
PackSize = PCD.PcdOccupySize
|
PackSize = PCD.PcdOccupySize
|
||||||
if PCD._IsBoolean(PCD.PcdValue, PCD.PcdSize):
|
if PCD._IsBoolean(PCD.PcdValue, PCD.PcdSize):
|
||||||
@ -508,7 +509,7 @@ class GenVPD :
|
|||||||
NowOffset = 0
|
NowOffset = 0
|
||||||
for Pcd in self.PcdUnknownOffsetList :
|
for Pcd in self.PcdUnknownOffsetList :
|
||||||
if NowOffset % Pcd.Alignment != 0:
|
if NowOffset % Pcd.Alignment != 0:
|
||||||
NowOffset = (NowOffset // Pcd.Alignment + 1) * Pcd.Alignment
|
NowOffset = (NowOffset/ Pcd.Alignment + 1) * Pcd.Alignment
|
||||||
Pcd.PcdBinOffset = NowOffset
|
Pcd.PcdBinOffset = NowOffset
|
||||||
Pcd.PcdOffset = str(hex(Pcd.PcdBinOffset))
|
Pcd.PcdOffset = str(hex(Pcd.PcdBinOffset))
|
||||||
NowOffset += Pcd.PcdOccupySize
|
NowOffset += Pcd.PcdOccupySize
|
||||||
@ -572,7 +573,7 @@ class GenVPD :
|
|||||||
# Not been fixed
|
# Not been fixed
|
||||||
if eachUnfixedPcd.PcdOffset == '*' :
|
if eachUnfixedPcd.PcdOffset == '*' :
|
||||||
if LastOffset % eachUnfixedPcd.Alignment != 0:
|
if LastOffset % eachUnfixedPcd.Alignment != 0:
|
||||||
LastOffset = (LastOffset // eachUnfixedPcd.Alignment + 1) * eachUnfixedPcd.Alignment
|
LastOffset = (LastOffset / eachUnfixedPcd.Alignment + 1) * eachUnfixedPcd.Alignment
|
||||||
# The offset un-fixed pcd can write into this free space
|
# The offset un-fixed pcd can write into this free space
|
||||||
if needFixPcdSize <= (NowOffset - LastOffset) :
|
if needFixPcdSize <= (NowOffset - LastOffset) :
|
||||||
# Change the offset value of un-fixed pcd
|
# Change the offset value of un-fixed pcd
|
||||||
@ -626,7 +627,7 @@ class GenVPD :
|
|||||||
|
|
||||||
NeedFixPcd.PcdBinOffset = LastPcd.PcdBinOffset + LastPcd.PcdOccupySize
|
NeedFixPcd.PcdBinOffset = LastPcd.PcdBinOffset + LastPcd.PcdOccupySize
|
||||||
if NeedFixPcd.PcdBinOffset % NeedFixPcd.Alignment != 0:
|
if NeedFixPcd.PcdBinOffset % NeedFixPcd.Alignment != 0:
|
||||||
NeedFixPcd.PcdBinOffset = (NeedFixPcd.PcdBinOffset // NeedFixPcd.Alignment + 1) * NeedFixPcd.Alignment
|
NeedFixPcd.PcdBinOffset = (NeedFixPcd.PcdBinOffset / NeedFixPcd.Alignment + 1) * NeedFixPcd.Alignment
|
||||||
|
|
||||||
NeedFixPcd.PcdOffset = str(hex(NeedFixPcd.PcdBinOffset))
|
NeedFixPcd.PcdOffset = str(hex(NeedFixPcd.PcdBinOffset))
|
||||||
|
|
||||||
@ -650,13 +651,13 @@ class GenVPD :
|
|||||||
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.VpdFileName, None)
|
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.VpdFileName, None)
|
||||||
|
|
||||||
try :
|
try :
|
||||||
fMapFile = open(MapFileName, "w")
|
fMapFile = open(MapFileName, "w", 0)
|
||||||
except:
|
except:
|
||||||
# Open failed
|
# Open failed
|
||||||
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)
|
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)
|
||||||
|
|
||||||
# Use a instance of BytesIO to cache data
|
# Use a instance of BytesIO to cache data
|
||||||
fStringIO = BytesIO()
|
fStringIO = BytesIO('')
|
||||||
|
|
||||||
# Write the header of map file.
|
# Write the header of map file.
|
||||||
try :
|
try :
|
||||||
@ -674,7 +675,8 @@ class GenVPD :
|
|||||||
# Write Vpd binary file
|
# Write Vpd binary file
|
||||||
fStringIO.seek (eachPcd.PcdBinOffset)
|
fStringIO.seek (eachPcd.PcdBinOffset)
|
||||||
if isinstance(eachPcd.PcdValue, list):
|
if isinstance(eachPcd.PcdValue, list):
|
||||||
fStringIO.write(bytes(eachPcd.PcdValue))
|
ValueList = [chr(Item) for Item in eachPcd.PcdValue]
|
||||||
|
fStringIO.write(''.join(ValueList))
|
||||||
else:
|
else:
|
||||||
fStringIO.write (eachPcd.PcdValue)
|
fStringIO.write (eachPcd.PcdValue)
|
||||||
|
|
||||||
|
@ -14,6 +14,7 @@
|
|||||||
##
|
##
|
||||||
# Import Modules
|
# Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import absolute_import
|
||||||
import sqlite3
|
import sqlite3
|
||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
|
|
||||||
|
@ -12,6 +12,7 @@
|
|||||||
#
|
#
|
||||||
|
|
||||||
## Import modules
|
## Import modules
|
||||||
|
from __future__ import absolute_import
|
||||||
import Common.LongFilePathOs as os, sys, logging
|
import Common.LongFilePathOs as os, sys, logging
|
||||||
import traceback
|
import traceback
|
||||||
from .BuildToolError import *
|
from .BuildToolError import *
|
||||||
|
@ -12,6 +12,8 @@
|
|||||||
|
|
||||||
## Import Modules
|
## Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import print_function
|
||||||
|
from __future__ import absolute_import
|
||||||
from Common.GlobalData import *
|
from Common.GlobalData import *
|
||||||
from CommonDataClass.Exceptions import BadExpression
|
from CommonDataClass.Exceptions import BadExpression
|
||||||
from CommonDataClass.Exceptions import WrnExpression
|
from CommonDataClass.Exceptions import WrnExpression
|
||||||
@ -204,7 +206,7 @@ SupportedInMacroList = ['TARGET', 'TOOL_CHAIN_TAG', 'ARCH', 'FAMILY']
|
|||||||
|
|
||||||
class BaseExpression(object):
|
class BaseExpression(object):
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
super().__init__()
|
super(BaseExpression, self).__init__()
|
||||||
|
|
||||||
# Check if current token matches the operators given from parameter
|
# Check if current token matches the operators given from parameter
|
||||||
def _IsOperator(self, OpSet):
|
def _IsOperator(self, OpSet):
|
||||||
@ -324,7 +326,7 @@ class ValueExpression(BaseExpression):
|
|||||||
return Val
|
return Val
|
||||||
|
|
||||||
def __init__(self, Expression, SymbolTable={}):
|
def __init__(self, Expression, SymbolTable={}):
|
||||||
super().__init__(self, Expression, SymbolTable)
|
super(ValueExpression, self).__init__(self, Expression, SymbolTable)
|
||||||
self._NoProcess = False
|
self._NoProcess = False
|
||||||
if not isinstance(Expression, type('')):
|
if not isinstance(Expression, type('')):
|
||||||
self._Expr = Expression
|
self._Expr = Expression
|
||||||
@ -425,13 +427,6 @@ class ValueExpression(BaseExpression):
|
|||||||
else:
|
else:
|
||||||
Val = Val3
|
Val = Val3
|
||||||
continue
|
continue
|
||||||
#
|
|
||||||
# PEP 238 -- Changing the Division Operator
|
|
||||||
# x/y to return a reasonable approximation of the mathematical result of the division ("true division")
|
|
||||||
# x//y to return the floor ("floor division")
|
|
||||||
#
|
|
||||||
if Op == '/':
|
|
||||||
Op = '//'
|
|
||||||
try:
|
try:
|
||||||
Val = self.Eval(Op, Val, EvalFunc())
|
Val = self.Eval(Op, Val, EvalFunc())
|
||||||
except WrnExpression as Warn:
|
except WrnExpression as Warn:
|
||||||
@ -905,7 +900,7 @@ class ValueExpressionEx(ValueExpression):
|
|||||||
if TmpValue.bit_length() == 0:
|
if TmpValue.bit_length() == 0:
|
||||||
PcdValue = '{0x00}'
|
PcdValue = '{0x00}'
|
||||||
else:
|
else:
|
||||||
for I in range((TmpValue.bit_length() + 7) // 8):
|
for I in range((TmpValue.bit_length() + 7) / 8):
|
||||||
TmpList.append('0x%02x' % ((TmpValue >> I * 8) & 0xff))
|
TmpList.append('0x%02x' % ((TmpValue >> I * 8) & 0xff))
|
||||||
PcdValue = '{' + ', '.join(TmpList) + '}'
|
PcdValue = '{' + ', '.join(TmpList) + '}'
|
||||||
except:
|
except:
|
||||||
@ -1033,7 +1028,7 @@ class ValueExpressionEx(ValueExpression):
|
|||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
pass
|
pass
|
||||||
while True:
|
while True:
|
||||||
input = input('Input expr: ')
|
input = raw_input('Input expr: ')
|
||||||
if input in 'qQ':
|
if input in 'qQ':
|
||||||
break
|
break
|
||||||
try:
|
try:
|
||||||
|
@ -11,9 +11,11 @@
|
|||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
#
|
#
|
||||||
|
|
||||||
|
from __future__ import absolute_import
|
||||||
import os
|
import os
|
||||||
from . import LongFilePathOsPath
|
from . import LongFilePathOsPath
|
||||||
from Common.LongFilePathSupport import LongFilePath
|
from Common.LongFilePathSupport import LongFilePath
|
||||||
|
from Common.LongFilePathSupport import UniToStr
|
||||||
import time
|
import time
|
||||||
|
|
||||||
path = LongFilePathOsPath
|
path = LongFilePathOsPath
|
||||||
@ -62,7 +64,7 @@ def listdir(path):
|
|||||||
List = []
|
List = []
|
||||||
uList = os.listdir(u"%s" % LongFilePath(path))
|
uList = os.listdir(u"%s" % LongFilePath(path))
|
||||||
for Item in uList:
|
for Item in uList:
|
||||||
List.append(Item)
|
List.append(UniToStr(Item))
|
||||||
return List
|
return List
|
||||||
|
|
||||||
environ = os.environ
|
environ = os.environ
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
## @file
|
## @file
|
||||||
# Override built in function file.open to provide support for long file path
|
# Override built in function file.open to provide support for long file path
|
||||||
#
|
#
|
||||||
# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
|
# Copyright (c) 2014 - 2015, Intel Corporation. All rights reserved.<BR>
|
||||||
# This program and the accompanying materials
|
# This program and the accompanying materials
|
||||||
# are licensed and made available under the terms and conditions of the BSD License
|
# are licensed and made available under the terms and conditions of the BSD License
|
||||||
# which accompanies this distribution. The full text of the license may be found at
|
# which accompanies this distribution. The full text of the license may be found at
|
||||||
@ -49,3 +49,15 @@ def CopyLongFilePath(src, dst):
|
|||||||
with open(LongFilePath(src), 'rb') as fsrc:
|
with open(LongFilePath(src), 'rb') as fsrc:
|
||||||
with open(LongFilePath(dst), 'wb') as fdst:
|
with open(LongFilePath(dst), 'wb') as fdst:
|
||||||
shutil.copyfileobj(fsrc, fdst)
|
shutil.copyfileobj(fsrc, fdst)
|
||||||
|
|
||||||
|
## Convert a python unicode string to a normal string
|
||||||
|
#
|
||||||
|
# Convert a python unicode string to a normal string
|
||||||
|
# UniToStr(u'I am a string') is 'I am a string'
|
||||||
|
#
|
||||||
|
# @param Uni: The python unicode string
|
||||||
|
#
|
||||||
|
# @retval: The formatted normal string
|
||||||
|
#
|
||||||
|
def UniToStr(Uni):
|
||||||
|
return repr(Uni)[2:-1]
|
||||||
|
@ -14,6 +14,7 @@
|
|||||||
##
|
##
|
||||||
# Import Modules
|
# Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import absolute_import
|
||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
import sys
|
import sys
|
||||||
import string
|
import string
|
||||||
@ -24,8 +25,8 @@ import pickle
|
|||||||
import array
|
import array
|
||||||
import shutil
|
import shutil
|
||||||
from struct import pack
|
from struct import pack
|
||||||
from collections import UserDict as IterableUserDict
|
from UserDict import IterableUserDict
|
||||||
from collections import OrderedDict
|
from UserList import UserList
|
||||||
|
|
||||||
from Common import EdkLogger as EdkLogger
|
from Common import EdkLogger as EdkLogger
|
||||||
from Common import GlobalData as GlobalData
|
from Common import GlobalData as GlobalData
|
||||||
@ -454,15 +455,12 @@ def RemoveDirectory(Directory, Recursively=False):
|
|||||||
# @retval False If the file content is the same
|
# @retval False If the file content is the same
|
||||||
#
|
#
|
||||||
def SaveFileOnChange(File, Content, IsBinaryFile=True):
|
def SaveFileOnChange(File, Content, IsBinaryFile=True):
|
||||||
|
if not IsBinaryFile:
|
||||||
|
Content = Content.replace("\n", os.linesep)
|
||||||
|
|
||||||
if os.path.exists(File):
|
if os.path.exists(File):
|
||||||
try:
|
try:
|
||||||
if isinstance(Content, bytes):
|
if Content == open(File, "rb").read():
|
||||||
with open(File, "rb") as f:
|
|
||||||
if Content == f.read():
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
with open(File, "r") as f:
|
|
||||||
if Content == f.read():
|
|
||||||
return False
|
return False
|
||||||
except:
|
except:
|
||||||
EdkLogger.error(None, FILE_OPEN_FAILURE, ExtraData=File)
|
EdkLogger.error(None, FILE_OPEN_FAILURE, ExtraData=File)
|
||||||
@ -477,12 +475,19 @@ def SaveFileOnChange(File, Content, IsBinaryFile=True):
|
|||||||
EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName)
|
EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if isinstance(Content, bytes):
|
if GlobalData.gIsWindows:
|
||||||
with open(File, "wb") as Fd:
|
try:
|
||||||
|
from .PyUtility import SaveFileToDisk
|
||||||
|
if not SaveFileToDisk(File, Content):
|
||||||
|
EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData=File)
|
||||||
|
except:
|
||||||
|
Fd = open(File, "wb")
|
||||||
Fd.write(Content)
|
Fd.write(Content)
|
||||||
|
Fd.close()
|
||||||
else:
|
else:
|
||||||
with open(File, "w") as Fd:
|
Fd = open(File, "wb")
|
||||||
Fd.write(Content)
|
Fd.write(Content)
|
||||||
|
Fd.close()
|
||||||
except IOError as X:
|
except IOError as X:
|
||||||
EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
|
EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
|
||||||
|
|
||||||
@ -641,7 +646,7 @@ def RealPath2(File, Dir='', OverrideDir=''):
|
|||||||
#
|
#
|
||||||
def GuidValue(CName, PackageList, Inffile = None):
|
def GuidValue(CName, PackageList, Inffile = None):
|
||||||
for P in PackageList:
|
for P in PackageList:
|
||||||
GuidKeys = list(P.Guids.keys())
|
GuidKeys = P.Guids.keys()
|
||||||
if Inffile and P._PrivateGuids:
|
if Inffile and P._PrivateGuids:
|
||||||
if not Inffile.startswith(P.MetaFile.Dir):
|
if not Inffile.startswith(P.MetaFile.Dir):
|
||||||
GuidKeys = [x for x in P.Guids if x not in P._PrivateGuids]
|
GuidKeys = [x for x in P.Guids if x not in P._PrivateGuids]
|
||||||
@ -660,7 +665,7 @@ def GuidValue(CName, PackageList, Inffile = None):
|
|||||||
#
|
#
|
||||||
def ProtocolValue(CName, PackageList, Inffile = None):
|
def ProtocolValue(CName, PackageList, Inffile = None):
|
||||||
for P in PackageList:
|
for P in PackageList:
|
||||||
ProtocolKeys = list(P.Protocols.keys())
|
ProtocolKeys = P.Protocols.keys()
|
||||||
if Inffile and P._PrivateProtocols:
|
if Inffile and P._PrivateProtocols:
|
||||||
if not Inffile.startswith(P.MetaFile.Dir):
|
if not Inffile.startswith(P.MetaFile.Dir):
|
||||||
ProtocolKeys = [x for x in P.Protocols if x not in P._PrivateProtocols]
|
ProtocolKeys = [x for x in P.Protocols if x not in P._PrivateProtocols]
|
||||||
@ -679,7 +684,7 @@ def ProtocolValue(CName, PackageList, Inffile = None):
|
|||||||
#
|
#
|
||||||
def PpiValue(CName, PackageList, Inffile = None):
|
def PpiValue(CName, PackageList, Inffile = None):
|
||||||
for P in PackageList:
|
for P in PackageList:
|
||||||
PpiKeys = list(P.Ppis.keys())
|
PpiKeys = P.Ppis.keys()
|
||||||
if Inffile and P._PrivatePpis:
|
if Inffile and P._PrivatePpis:
|
||||||
if not Inffile.startswith(P.MetaFile.Dir):
|
if not Inffile.startswith(P.MetaFile.Dir):
|
||||||
PpiKeys = [x for x in P.Ppis if x not in P._PrivatePpis]
|
PpiKeys = [x for x in P.Ppis if x not in P._PrivatePpis]
|
||||||
@ -975,7 +980,7 @@ class sdict(IterableUserDict):
|
|||||||
|
|
||||||
## append support
|
## append support
|
||||||
def append(self, sdict):
|
def append(self, sdict):
|
||||||
for key in sdict.keys():
|
for key in sdict:
|
||||||
if key not in self._key_list:
|
if key not in self._key_list:
|
||||||
self._key_list.append(key)
|
self._key_list.append(key)
|
||||||
IterableUserDict.__setitem__(self, key, sdict[key])
|
IterableUserDict.__setitem__(self, key, sdict[key])
|
||||||
@ -1015,11 +1020,11 @@ class sdict(IterableUserDict):
|
|||||||
|
|
||||||
## Keys interation support
|
## Keys interation support
|
||||||
def iterkeys(self):
|
def iterkeys(self):
|
||||||
return self.keys()
|
return iter(self.keys())
|
||||||
|
|
||||||
## Values interation support
|
## Values interation support
|
||||||
def itervalues(self):
|
def itervalues(self):
|
||||||
return self.values()
|
return iter(self.values())
|
||||||
|
|
||||||
## Return value related to a key, and remove the (key, value) from the dict
|
## Return value related to a key, and remove the (key, value) from the dict
|
||||||
def pop(self, key, *dv):
|
def pop(self, key, *dv):
|
||||||
@ -1028,7 +1033,7 @@ class sdict(IterableUserDict):
|
|||||||
value = self[key]
|
value = self[key]
|
||||||
self.__delitem__(key)
|
self.__delitem__(key)
|
||||||
elif len(dv) != 0 :
|
elif len(dv) != 0 :
|
||||||
value = dv[0]
|
value = kv[0]
|
||||||
return value
|
return value
|
||||||
|
|
||||||
## Return (key, value) pair, and remove the (key, value) from the dict
|
## Return (key, value) pair, and remove the (key, value) from the dict
|
||||||
@ -1292,12 +1297,12 @@ def ParseDevPathValue (Value):
|
|||||||
if err:
|
if err:
|
||||||
raise BadExpression("DevicePath: %s" % str(err))
|
raise BadExpression("DevicePath: %s" % str(err))
|
||||||
Size = len(out.split())
|
Size = len(out.split())
|
||||||
out = ','.join(out.decode(encoding='utf-8', errors='ignore').split())
|
out = ','.join(out.split())
|
||||||
return '{' + out + '}', Size
|
return '{' + out + '}', Size
|
||||||
|
|
||||||
def ParseFieldValue (Value):
|
def ParseFieldValue (Value):
|
||||||
if isinstance(Value, type(0)):
|
if isinstance(Value, type(0)):
|
||||||
return Value, (Value.bit_length() + 7) // 8
|
return Value, (Value.bit_length() + 7) / 8
|
||||||
if not isinstance(Value, type('')):
|
if not isinstance(Value, type('')):
|
||||||
raise BadExpression('Type %s is %s' %(Value, type(Value)))
|
raise BadExpression('Type %s is %s' %(Value, type(Value)))
|
||||||
Value = Value.strip()
|
Value = Value.strip()
|
||||||
@ -1331,7 +1336,7 @@ def ParseFieldValue (Value):
|
|||||||
if Value[0] == '"' and Value[-1] == '"':
|
if Value[0] == '"' and Value[-1] == '"':
|
||||||
Value = Value[1:-1]
|
Value = Value[1:-1]
|
||||||
try:
|
try:
|
||||||
Value = "{" + ','.join([str(i) for i in uuid.UUID(Value).bytes_le]) + "}"
|
Value = "'" + uuid.UUID(Value).get_bytes_le() + "'"
|
||||||
except ValueError as Message:
|
except ValueError as Message:
|
||||||
raise BadExpression(Message)
|
raise BadExpression(Message)
|
||||||
Value, Size = ParseFieldValue(Value)
|
Value, Size = ParseFieldValue(Value)
|
||||||
@ -1418,12 +1423,12 @@ def ParseFieldValue (Value):
|
|||||||
raise BadExpression("invalid hex value: %s" % Value)
|
raise BadExpression("invalid hex value: %s" % Value)
|
||||||
if Value == 0:
|
if Value == 0:
|
||||||
return 0, 1
|
return 0, 1
|
||||||
return Value, (Value.bit_length() + 7) // 8
|
return Value, (Value.bit_length() + 7) / 8
|
||||||
if Value[0].isdigit():
|
if Value[0].isdigit():
|
||||||
Value = int(Value, 10)
|
Value = int(Value, 10)
|
||||||
if Value == 0:
|
if Value == 0:
|
||||||
return 0, 1
|
return 0, 1
|
||||||
return Value, (Value.bit_length() + 7) // 8
|
return Value, (Value.bit_length() + 7) / 8
|
||||||
if Value.lower() == 'true':
|
if Value.lower() == 'true':
|
||||||
return 1, 1
|
return 1, 1
|
||||||
if Value.lower() == 'false':
|
if Value.lower() == 'false':
|
||||||
@ -1584,19 +1589,15 @@ def CheckPcdDatum(Type, Value):
|
|||||||
return False, "Invalid value [%s] of type [%s]; must be one of TRUE, True, true, 0x1, 0x01, 1"\
|
return False, "Invalid value [%s] of type [%s]; must be one of TRUE, True, true, 0x1, 0x01, 1"\
|
||||||
", FALSE, False, false, 0x0, 0x00, 0" % (Value, Type)
|
", FALSE, False, false, 0x0, 0x00, 0" % (Value, Type)
|
||||||
elif Type in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64]:
|
elif Type in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64]:
|
||||||
try:
|
if Value and int(Value, 0) < 0:
|
||||||
Val = int(Value, 0)
|
|
||||||
except:
|
|
||||||
try:
|
|
||||||
Val = int(Value.lstrip('0'))
|
|
||||||
except:
|
|
||||||
return False, "Invalid value [%s] of type [%s];" \
|
|
||||||
" must be a hexadecimal, decimal or octal in C language format." % (Value, Type)
|
|
||||||
if Val > MAX_VAL_TYPE[Type]:
|
|
||||||
return False, "Too large PCD value[%s] for datum type [%s]" % (Value, Type)
|
|
||||||
if Val < 0:
|
|
||||||
return False, "PCD can't be set to negative value[%s] for datum type [%s]" % (Value, Type)
|
return False, "PCD can't be set to negative value[%s] for datum type [%s]" % (Value, Type)
|
||||||
|
try:
|
||||||
|
Value = long(Value, 0)
|
||||||
|
if Value > MAX_VAL_TYPE[Type]:
|
||||||
|
return False, "Too large PCD value[%s] for datum type [%s]" % (Value, Type)
|
||||||
|
except:
|
||||||
|
return False, "Invalid value [%s] of type [%s];"\
|
||||||
|
" must be a hexadecimal, decimal or octal in C language format." % (Value, Type)
|
||||||
else:
|
else:
|
||||||
return True, "StructurePcd"
|
return True, "StructurePcd"
|
||||||
|
|
||||||
@ -1634,7 +1635,7 @@ def SplitOption(OptionString):
|
|||||||
def CommonPath(PathList):
|
def CommonPath(PathList):
|
||||||
P1 = min(PathList).split(os.path.sep)
|
P1 = min(PathList).split(os.path.sep)
|
||||||
P2 = max(PathList).split(os.path.sep)
|
P2 = max(PathList).split(os.path.sep)
|
||||||
for Index in range(min(len(P1), len(P2))):
|
for Index in xrange(min(len(P1), len(P2))):
|
||||||
if P1[Index] != P2[Index]:
|
if P1[Index] != P2[Index]:
|
||||||
return os.path.sep.join(P1[:Index])
|
return os.path.sep.join(P1[:Index])
|
||||||
return os.path.sep.join(P1)
|
return os.path.sep.join(P1)
|
||||||
@ -1859,7 +1860,7 @@ class PeImageClass():
|
|||||||
ByteArray = array.array('B')
|
ByteArray = array.array('B')
|
||||||
ByteArray.fromfile(PeObject, 4)
|
ByteArray.fromfile(PeObject, 4)
|
||||||
# PE signature should be 'PE\0\0'
|
# PE signature should be 'PE\0\0'
|
||||||
if ByteArray.tostring() != b'PE\0\0':
|
if ByteArray.tostring() != 'PE\0\0':
|
||||||
self.ErrorInfo = self.FileName + ' has no valid PE signature PE00'
|
self.ErrorInfo = self.FileName + ' has no valid PE signature PE00'
|
||||||
return
|
return
|
||||||
|
|
||||||
@ -1951,7 +1952,7 @@ class SkuClass():
|
|||||||
ExtraData = "SKU-ID [%s] value %s exceeds the max value of UINT64"
|
ExtraData = "SKU-ID [%s] value %s exceeds the max value of UINT64"
|
||||||
% (SkuName, SkuId))
|
% (SkuName, SkuId))
|
||||||
|
|
||||||
self.AvailableSkuIds = OrderedDict()
|
self.AvailableSkuIds = sdict()
|
||||||
self.SkuIdSet = []
|
self.SkuIdSet = []
|
||||||
self.SkuIdNumberSet = []
|
self.SkuIdNumberSet = []
|
||||||
self.SkuData = SkuIds
|
self.SkuData = SkuIds
|
||||||
@ -1961,7 +1962,7 @@ class SkuClass():
|
|||||||
self.SkuIdSet = ['DEFAULT']
|
self.SkuIdSet = ['DEFAULT']
|
||||||
self.SkuIdNumberSet = ['0U']
|
self.SkuIdNumberSet = ['0U']
|
||||||
elif SkuIdentifier == 'ALL':
|
elif SkuIdentifier == 'ALL':
|
||||||
self.SkuIdSet = list(SkuIds.keys())
|
self.SkuIdSet = SkuIds.keys()
|
||||||
self.SkuIdNumberSet = [num[0].strip() + 'U' for num in SkuIds.values()]
|
self.SkuIdNumberSet = [num[0].strip() + 'U' for num in SkuIds.values()]
|
||||||
else:
|
else:
|
||||||
r = SkuIdentifier.split('|')
|
r = SkuIdentifier.split('|')
|
||||||
@ -2081,7 +2082,7 @@ def PackRegistryFormatGuid(Guid):
|
|||||||
# @retval Value The integer value that the input represents
|
# @retval Value The integer value that the input represents
|
||||||
#
|
#
|
||||||
def GetIntegerValue(Input):
|
def GetIntegerValue(Input):
|
||||||
if isinstance(Input, int):
|
if type(Input) in (int, long):
|
||||||
return Input
|
return Input
|
||||||
String = Input
|
String = Input
|
||||||
if String.endswith("U"):
|
if String.endswith("U"):
|
||||||
|
@ -14,6 +14,7 @@
|
|||||||
##
|
##
|
||||||
# Import Modules
|
# Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import absolute_import
|
||||||
from .StringUtils import *
|
from .StringUtils import *
|
||||||
from CommonDataClass.DataClass import *
|
from CommonDataClass.DataClass import *
|
||||||
from .DataType import *
|
from .DataType import *
|
||||||
|
BIN
BaseTools/Source/Python/Common/PyUtility.pyd
Normal file
BIN
BaseTools/Source/Python/Common/PyUtility.pyd
Normal file
Binary file not shown.
@ -12,6 +12,7 @@
|
|||||||
|
|
||||||
# # Import Modules
|
# # Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import print_function
|
||||||
from Common.GlobalData import *
|
from Common.GlobalData import *
|
||||||
from CommonDataClass.Exceptions import BadExpression
|
from CommonDataClass.Exceptions import BadExpression
|
||||||
from CommonDataClass.Exceptions import WrnExpression
|
from CommonDataClass.Exceptions import WrnExpression
|
||||||
@ -347,7 +348,7 @@ class RangeExpression(BaseExpression):
|
|||||||
|
|
||||||
|
|
||||||
def __init__(self, Expression, PcdDataType, SymbolTable = {}):
|
def __init__(self, Expression, PcdDataType, SymbolTable = {}):
|
||||||
super().__init__(self, Expression, PcdDataType, SymbolTable)
|
super(RangeExpression, self).__init__(self, Expression, PcdDataType, SymbolTable)
|
||||||
self._NoProcess = False
|
self._NoProcess = False
|
||||||
if not isinstance(Expression, type('')):
|
if not isinstance(Expression, type('')):
|
||||||
self._Expr = Expression
|
self._Expr = Expression
|
||||||
|
@ -14,6 +14,7 @@
|
|||||||
##
|
##
|
||||||
# Import Modules
|
# Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import absolute_import
|
||||||
import re
|
import re
|
||||||
from . import DataType
|
from . import DataType
|
||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
@ -98,7 +99,7 @@ def GetSplitValueList(String, SplitTag=DataType.TAB_VALUE_SPLIT, MaxSplit= -1):
|
|||||||
# @retval list() A list for splitted string
|
# @retval list() A list for splitted string
|
||||||
#
|
#
|
||||||
def GetSplitList(String, SplitStr=DataType.TAB_VALUE_SPLIT, MaxSplit= -1):
|
def GetSplitList(String, SplitStr=DataType.TAB_VALUE_SPLIT, MaxSplit= -1):
|
||||||
return list(map(lambda l: l.strip(), String.split(SplitStr, MaxSplit)))
|
return map(lambda l: l.strip(), String.split(SplitStr, MaxSplit))
|
||||||
|
|
||||||
## MergeArches
|
## MergeArches
|
||||||
#
|
#
|
||||||
@ -544,7 +545,7 @@ def GetSingleValueOfKeyFromLines(Lines, Dictionary, CommentCharacter, KeySplitCh
|
|||||||
#
|
#
|
||||||
LineList[1] = CleanString(LineList[1], CommentCharacter)
|
LineList[1] = CleanString(LineList[1], CommentCharacter)
|
||||||
if ValueSplitFlag:
|
if ValueSplitFlag:
|
||||||
Value = list(map(string.strip, LineList[1].split(ValueSplitCharacter)))
|
Value = map(string.strip, LineList[1].split(ValueSplitCharacter))
|
||||||
else:
|
else:
|
||||||
Value = CleanString(LineList[1], CommentCharacter).splitlines()
|
Value = CleanString(LineList[1], CommentCharacter).splitlines()
|
||||||
|
|
||||||
@ -612,7 +613,7 @@ def PreCheck(FileName, FileContent, SupSectionTag):
|
|||||||
#
|
#
|
||||||
# Regenerate FileContent
|
# Regenerate FileContent
|
||||||
#
|
#
|
||||||
NewFileContent = NewFileContent + Line + '\n'
|
NewFileContent = NewFileContent + Line + '\r\n'
|
||||||
|
|
||||||
if IsFailed:
|
if IsFailed:
|
||||||
EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=EdkLogger.IsRaiseError)
|
EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=EdkLogger.IsRaiseError)
|
||||||
@ -750,7 +751,7 @@ def SplitString(String):
|
|||||||
# @param StringList: A list for strings to be converted
|
# @param StringList: A list for strings to be converted
|
||||||
#
|
#
|
||||||
def ConvertToSqlString(StringList):
|
def ConvertToSqlString(StringList):
|
||||||
return list(map(lambda s: s.replace("'", "''"), StringList))
|
return map(lambda s: s.replace("'", "''"), StringList)
|
||||||
|
|
||||||
## Convert To Sql String
|
## Convert To Sql String
|
||||||
#
|
#
|
||||||
@ -815,7 +816,11 @@ def GetHelpTextList(HelpTextClassList):
|
|||||||
return List
|
return List
|
||||||
|
|
||||||
def StringToArray(String):
|
def StringToArray(String):
|
||||||
if String.startswith('L"'):
|
if isinstance(String, unicode):
|
||||||
|
if len(unicode) == 0:
|
||||||
|
return "{0x00,0x00}"
|
||||||
|
return "{%s,0x00,0x00}" % ",".join("0x%02x,0x00" % ord(C) for C in String)
|
||||||
|
elif String.startswith('L"'):
|
||||||
if String == "L\"\"":
|
if String == "L\"\"":
|
||||||
return "{0x00,0x00}"
|
return "{0x00,0x00}"
|
||||||
else:
|
else:
|
||||||
@ -838,7 +843,9 @@ def StringToArray(String):
|
|||||||
return '{%s,0,0}' % ','.join(String.split())
|
return '{%s,0,0}' % ','.join(String.split())
|
||||||
|
|
||||||
def StringArrayLength(String):
|
def StringArrayLength(String):
|
||||||
if String.startswith('L"'):
|
if isinstance(String, unicode):
|
||||||
|
return (len(String) + 1) * 2 + 1;
|
||||||
|
elif String.startswith('L"'):
|
||||||
return (len(String) - 3 + 1) * 2
|
return (len(String) - 3 + 1) * 2
|
||||||
elif String.startswith('"'):
|
elif String.startswith('"'):
|
||||||
return (len(String) - 2 + 1)
|
return (len(String) - 2 + 1)
|
||||||
|
@ -14,6 +14,8 @@
|
|||||||
##
|
##
|
||||||
# Import Modules
|
# Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import print_function
|
||||||
|
from __future__ import absolute_import
|
||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
from . import EdkLogger
|
from . import EdkLogger
|
||||||
from . import DataType
|
from . import DataType
|
||||||
|
@ -14,6 +14,7 @@
|
|||||||
##
|
##
|
||||||
# Import Modules
|
# Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import absolute_import
|
||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
import re
|
import re
|
||||||
from . import EdkLogger
|
from . import EdkLogger
|
||||||
|
@ -15,6 +15,7 @@
|
|||||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
||||||
#
|
#
|
||||||
|
from __future__ import print_function
|
||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
import re
|
import re
|
||||||
import Common.EdkLogger as EdkLogger
|
import Common.EdkLogger as EdkLogger
|
||||||
@ -91,18 +92,18 @@ class VpdInfoFile:
|
|||||||
if (Vpd is None):
|
if (Vpd is None):
|
||||||
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
|
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
|
||||||
|
|
||||||
if not (Offset >= "0" or Offset == "*"):
|
if not (Offset >= 0 or Offset == "*"):
|
||||||
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)
|
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)
|
||||||
|
|
||||||
if Vpd.DatumType == TAB_VOID:
|
if Vpd.DatumType == TAB_VOID:
|
||||||
if Vpd.MaxDatumSize <= "0":
|
if Vpd.MaxDatumSize <= 0:
|
||||||
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
|
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
|
||||||
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
|
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
|
||||||
elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:
|
elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:
|
||||||
if not Vpd.MaxDatumSize:
|
if not Vpd.MaxDatumSize:
|
||||||
Vpd.MaxDatumSize = MAX_SIZE_TYPE[Vpd.DatumType]
|
Vpd.MaxDatumSize = MAX_SIZE_TYPE[Vpd.DatumType]
|
||||||
else:
|
else:
|
||||||
if Vpd.MaxDatumSize <= "0":
|
if Vpd.MaxDatumSize <= 0:
|
||||||
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
|
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
|
||||||
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
|
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
|
||||||
|
|
||||||
@ -126,7 +127,7 @@ class VpdInfoFile:
|
|||||||
"Invalid parameter FilePath: %s." % FilePath)
|
"Invalid parameter FilePath: %s." % FilePath)
|
||||||
|
|
||||||
Content = FILE_COMMENT_TEMPLATE
|
Content = FILE_COMMENT_TEMPLATE
|
||||||
Pcds = sorted(self._VpdArray.keys(), key=lambda x: x.TokenCName)
|
Pcds = sorted(self._VpdArray.keys())
|
||||||
for Pcd in Pcds:
|
for Pcd in Pcds:
|
||||||
i = 0
|
i = 0
|
||||||
PcdTokenCName = Pcd.TokenCName
|
PcdTokenCName = Pcd.TokenCName
|
||||||
@ -248,7 +249,7 @@ def CallExtenalBPDGTool(ToolPath, VpdFileName):
|
|||||||
except Exception as X:
|
except Exception as X:
|
||||||
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData=str(X))
|
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData=str(X))
|
||||||
(out, error) = PopenObject.communicate()
|
(out, error) = PopenObject.communicate()
|
||||||
print(out.decode(encoding='utf-8', errors='ignore'))
|
print(out)
|
||||||
while PopenObject.returncode is None :
|
while PopenObject.returncode is None :
|
||||||
PopenObject.wait()
|
PopenObject.wait()
|
||||||
|
|
||||||
|
@ -1,636 +0,0 @@
|
|||||||
/* @file
|
|
||||||
This file is used to be the grammar file of ECC tool
|
|
||||||
|
|
||||||
Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
|
|
||||||
This program and the accompanying materials
|
|
||||||
are licensed and made available under the terms and conditions of the BSD License
|
|
||||||
which accompanies this distribution. The full text of the license may be found at
|
|
||||||
http://opensource.org/licenses/bsd-license.php
|
|
||||||
|
|
||||||
THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
|
||||||
*/
|
|
||||||
|
|
||||||
|
|
||||||
grammar C;
|
|
||||||
options {
|
|
||||||
language=Python;
|
|
||||||
}
|
|
||||||
|
|
||||||
@header {
|
|
||||||
## @file
|
|
||||||
# The file defines the parser for C source files.
|
|
||||||
#
|
|
||||||
# THIS FILE IS AUTO-GENENERATED. PLEASE DON NOT MODIFY THIS FILE.
|
|
||||||
# This file is generated by running:
|
|
||||||
# java org.antlr.Tool C.g
|
|
||||||
#
|
|
||||||
# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
|
|
||||||
#
|
|
||||||
# This program and the accompanying materials are licensed and made available
|
|
||||||
# under the terms and conditions of the BSD License which accompanies this
|
|
||||||
# distribution. The full text of the license may be found at:
|
|
||||||
# http://opensource.org/licenses/bsd-license.php
|
|
||||||
#
|
|
||||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
|
||||||
#
|
|
||||||
##
|
|
||||||
|
|
||||||
import Ecc.CodeFragment as CodeFragment
|
|
||||||
import Ecc.FileProfile as FileProfile
|
|
||||||
}
|
|
||||||
|
|
||||||
@members {
|
|
||||||
|
|
||||||
def printTokenInfo(self, line, offset, tokenText):
|
|
||||||
print(str(line)+ ',' + str(offset) + ':' + str(tokenText))
|
|
||||||
|
|
||||||
def StorePredicateExpression(self, StartLine, StartOffset, EndLine, EndOffset, Text):
|
|
||||||
PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
|
|
||||||
FileProfile.PredicateExpressionList.append(PredExp)
|
|
||||||
|
|
||||||
def StoreEnumerationDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
|
|
||||||
EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
|
|
||||||
FileProfile.EnumerationDefinitionList.append(EnumDef)
|
|
||||||
|
|
||||||
def StoreStructUnionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
|
|
||||||
SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
|
|
||||||
FileProfile.StructUnionDefinitionList.append(SUDef)
|
|
||||||
|
|
||||||
def StoreTypedefDefinition(self, StartLine, StartOffset, EndLine, EndOffset, FromText, ToText):
|
|
||||||
Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
|
|
||||||
FileProfile.TypedefDefinitionList.append(Tdef)
|
|
||||||
|
|
||||||
def StoreFunctionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText, LeftBraceLine, LeftBraceOffset, DeclLine, DeclOffset):
|
|
||||||
FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
|
|
||||||
FileProfile.FunctionDefinitionList.append(FuncDef)
|
|
||||||
|
|
||||||
def StoreVariableDeclaration(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText):
|
|
||||||
VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
|
|
||||||
FileProfile.VariableDeclarationList.append(VarDecl)
|
|
||||||
|
|
||||||
def StoreFunctionCalling(self, StartLine, StartOffset, EndLine, EndOffset, FuncName, ParamList):
|
|
||||||
FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
|
|
||||||
FileProfile.FunctionCallingList.append(FuncCall)
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
translation_unit
|
|
||||||
: external_declaration*
|
|
||||||
;
|
|
||||||
|
|
||||||
|
|
||||||
external_declaration
|
|
||||||
: ( declaration_specifiers? declarator declaration* '{' )
|
|
||||||
| function_definition
|
|
||||||
| declaration
|
|
||||||
| macro_statement (';')?
|
|
||||||
;
|
|
||||||
|
|
||||||
function_definition
|
|
||||||
locals [String ModifierText = '', String DeclText = '', int LBLine = 0, int LBOffset = 0, int DeclLine = 0, int DeclOffset = 0]
|
|
||||||
@init {
|
|
||||||
ModifierText = '';
|
|
||||||
DeclText = '';
|
|
||||||
LBLine = 0;
|
|
||||||
LBOffset = 0;
|
|
||||||
DeclLine = 0;
|
|
||||||
DeclOffset = 0;
|
|
||||||
}
|
|
||||||
@after{
|
|
||||||
self.StoreFunctionDefinition(localctx.start.line, localctx.start.column, localctx.stop.line, localctx.stop.column, ModifierText, DeclText, LBLine, LBOffset, DeclLine, DeclOffset)
|
|
||||||
}
|
|
||||||
: d=declaration_specifiers? declarator
|
|
||||||
( declaration+ a=compound_statement // K&R style
|
|
||||||
| b=compound_statement // ANSI style
|
|
||||||
) {
|
|
||||||
if localctx.d != None:
|
|
||||||
ModifierText = $declaration_specifiers.text
|
|
||||||
else:
|
|
||||||
ModifierText = ''
|
|
||||||
DeclText = $declarator.text
|
|
||||||
DeclLine = $declarator.start.line
|
|
||||||
DeclOffset = $declarator.start.column
|
|
||||||
if localctx.a != None:
|
|
||||||
LBLine = $a.start.line
|
|
||||||
LBOffset = $a.start.column
|
|
||||||
else:
|
|
||||||
LBLine = $b.start.line
|
|
||||||
LBOffset = $b.start.column
|
|
||||||
}
|
|
||||||
;
|
|
||||||
|
|
||||||
|
|
||||||
declaration_specifiers
|
|
||||||
: ( storage_class_specifier
|
|
||||||
| type_specifier
|
|
||||||
| type_qualifier
|
|
||||||
)+
|
|
||||||
;
|
|
||||||
|
|
||||||
declaration
|
|
||||||
: a='typedef' b=declaration_specifiers? c=init_declarator_list d=';'
|
|
||||||
{
|
|
||||||
if localctx.b is not None:
|
|
||||||
self.StoreTypedefDefinition(localctx.a.line, localctx.a.column, $d.line, localctx.d.column, $b.text, $c.text)
|
|
||||||
else:
|
|
||||||
self.StoreTypedefDefinition(localctx.a.line, localctx.a.column, $d.line, localctx.d.column, '', $c.text)
|
|
||||||
}
|
|
||||||
| s=declaration_specifiers t=init_declarator_list? e=';'
|
|
||||||
{
|
|
||||||
if localctx.t is not None:
|
|
||||||
self.StoreVariableDeclaration($s.start.line, $s.start.column, $t.start.line, $t.start.column, $s.text, $t.text)
|
|
||||||
}
|
|
||||||
;
|
|
||||||
|
|
||||||
init_declarator_list
|
|
||||||
: init_declarator (',' init_declarator)*
|
|
||||||
;
|
|
||||||
|
|
||||||
init_declarator
|
|
||||||
: declarator ('=' initializer)?
|
|
||||||
;
|
|
||||||
|
|
||||||
storage_class_specifier
|
|
||||||
: 'extern'
|
|
||||||
| 'static'
|
|
||||||
| 'auto'
|
|
||||||
| 'register'
|
|
||||||
| 'STATIC'
|
|
||||||
;
|
|
||||||
|
|
||||||
type_specifier
|
|
||||||
: 'void'
|
|
||||||
| 'char'
|
|
||||||
| 'short'
|
|
||||||
| 'int'
|
|
||||||
| 'long'
|
|
||||||
| 'float'
|
|
||||||
| 'double'
|
|
||||||
| 'signed'
|
|
||||||
| 'unsigned'
|
|
||||||
| s=struct_or_union_specifier
|
|
||||||
{
|
|
||||||
if localctx.s.stop is not None:
|
|
||||||
self.StoreStructUnionDefinition($s.start.line, $s.start.column, $s.stop.line, $s.stop.column, $s.text)
|
|
||||||
}
|
|
||||||
| e=enum_specifier
|
|
||||||
{
|
|
||||||
if localctx.e.stop is not None:
|
|
||||||
self.StoreEnumerationDefinition($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)
|
|
||||||
}
|
|
||||||
| (IDENTIFIER type_qualifier* declarator)
|
|
||||||
| type_id
|
|
||||||
;
|
|
||||||
|
|
||||||
type_id
|
|
||||||
: IDENTIFIER
|
|
||||||
//{self.printTokenInfo($a.line, $a.pos, $a.text)}
|
|
||||||
;
|
|
||||||
|
|
||||||
struct_or_union_specifier
|
|
||||||
: struct_or_union IDENTIFIER? '{' struct_declaration_list '}'
|
|
||||||
| struct_or_union IDENTIFIER
|
|
||||||
;
|
|
||||||
|
|
||||||
struct_or_union
|
|
||||||
: 'struct'
|
|
||||||
| 'union'
|
|
||||||
;
|
|
||||||
|
|
||||||
struct_declaration_list
|
|
||||||
: struct_declaration+
|
|
||||||
;
|
|
||||||
|
|
||||||
struct_declaration
|
|
||||||
: specifier_qualifier_list struct_declarator_list ';'
|
|
||||||
;
|
|
||||||
|
|
||||||
specifier_qualifier_list
|
|
||||||
: ( type_qualifier | type_specifier )+
|
|
||||||
;
|
|
||||||
|
|
||||||
struct_declarator_list
|
|
||||||
: struct_declarator (',' struct_declarator)*
|
|
||||||
;
|
|
||||||
|
|
||||||
struct_declarator
|
|
||||||
: declarator (':' constant_expression)?
|
|
||||||
| ':' constant_expression
|
|
||||||
;
|
|
||||||
|
|
||||||
enum_specifier
|
|
||||||
: 'enum' '{' enumerator_list ','? '}'
|
|
||||||
| 'enum' IDENTIFIER '{' enumerator_list ','? '}'
|
|
||||||
| 'enum' IDENTIFIER
|
|
||||||
;
|
|
||||||
|
|
||||||
enumerator_list
|
|
||||||
: enumerator (',' enumerator)*
|
|
||||||
;
|
|
||||||
|
|
||||||
enumerator
|
|
||||||
: IDENTIFIER ('=' constant_expression)?
|
|
||||||
;
|
|
||||||
|
|
||||||
type_qualifier
|
|
||||||
: 'const'
|
|
||||||
| 'volatile'
|
|
||||||
| 'IN'
|
|
||||||
| 'OUT'
|
|
||||||
| 'OPTIONAL'
|
|
||||||
| 'CONST'
|
|
||||||
| 'UNALIGNED'
|
|
||||||
| 'VOLATILE'
|
|
||||||
| 'GLOBAL_REMOVE_IF_UNREFERENCED'
|
|
||||||
| 'EFIAPI'
|
|
||||||
| 'EFI_BOOTSERVICE'
|
|
||||||
| 'EFI_RUNTIMESERVICE'
|
|
||||||
| 'PACKED'
|
|
||||||
;
|
|
||||||
|
|
||||||
declarator
|
|
||||||
: pointer? ('EFIAPI')? ('EFI_BOOTSERVICE')? ('EFI_RUNTIMESERVICE')? direct_declarator
|
|
||||||
// | ('EFIAPI')? ('EFI_BOOTSERVICE')? ('EFI_RUNTIMESERVICE')? pointer? direct_declarator
|
|
||||||
| pointer
|
|
||||||
;
|
|
||||||
|
|
||||||
direct_declarator
|
|
||||||
: IDENTIFIER declarator_suffix*
|
|
||||||
| '(' ('EFIAPI')? declarator ')' declarator_suffix+
|
|
||||||
;
|
|
||||||
|
|
||||||
declarator_suffix
|
|
||||||
: '[' constant_expression ']'
|
|
||||||
| '[' ']'
|
|
||||||
| '(' parameter_type_list ')'
|
|
||||||
| '(' identifier_list ')'
|
|
||||||
| '(' ')'
|
|
||||||
;
|
|
||||||
|
|
||||||
pointer
|
|
||||||
: '*' type_qualifier+ pointer?
|
|
||||||
| '*' pointer
|
|
||||||
| '*'
|
|
||||||
;
|
|
||||||
|
|
||||||
parameter_type_list
|
|
||||||
: parameter_list (',' ('OPTIONAL')? '...')?
|
|
||||||
;
|
|
||||||
|
|
||||||
parameter_list
|
|
||||||
: parameter_declaration (',' ('OPTIONAL')? parameter_declaration)*
|
|
||||||
;
|
|
||||||
|
|
||||||
parameter_declaration
|
|
||||||
: declaration_specifiers (declarator|abstract_declarator)* ('OPTIONAL')?
|
|
||||||
//accomerdate user-defined type only, no declarator follow.
|
|
||||||
| pointer* IDENTIFIER
|
|
||||||
;
|
|
||||||
|
|
||||||
identifier_list
|
|
||||||
: IDENTIFIER
|
|
||||||
(',' IDENTIFIER)*
|
|
||||||
;
|
|
||||||
|
|
||||||
type_name
|
|
||||||
: specifier_qualifier_list abstract_declarator?
|
|
||||||
| type_id
|
|
||||||
;
|
|
||||||
|
|
||||||
abstract_declarator
|
|
||||||
: pointer direct_abstract_declarator?
|
|
||||||
| direct_abstract_declarator
|
|
||||||
;
|
|
||||||
|
|
||||||
direct_abstract_declarator
|
|
||||||
: ( '(' abstract_declarator ')' | abstract_declarator_suffix ) abstract_declarator_suffix*
|
|
||||||
;
|
|
||||||
|
|
||||||
abstract_declarator_suffix
|
|
||||||
: '[' ']'
|
|
||||||
| '[' constant_expression ']'
|
|
||||||
| '(' ')'
|
|
||||||
| '(' parameter_type_list ')'
|
|
||||||
;
|
|
||||||
|
|
||||||
initializer
|
|
||||||
|
|
||||||
: assignment_expression
|
|
||||||
| '{' initializer_list ','? '}'
|
|
||||||
;
|
|
||||||
|
|
||||||
initializer_list
|
|
||||||
: initializer (',' initializer )*
|
|
||||||
;
|
|
||||||
|
|
||||||
// E x p r e s s i o n s
|
|
||||||
|
|
||||||
argument_expression_list
|
|
||||||
: assignment_expression ('OPTIONAL')? (',' assignment_expression ('OPTIONAL')?)*
|
|
||||||
;
|
|
||||||
|
|
||||||
additive_expression
|
|
||||||
: (multiplicative_expression) ('+' multiplicative_expression | '-' multiplicative_expression)*
|
|
||||||
;
|
|
||||||
|
|
||||||
multiplicative_expression
|
|
||||||
: (cast_expression) ('*' cast_expression | '/' cast_expression | '%' cast_expression)*
|
|
||||||
;
|
|
||||||
|
|
||||||
cast_expression
|
|
||||||
: '(' type_name ')' cast_expression
|
|
||||||
| unary_expression
|
|
||||||
;
|
|
||||||
|
|
||||||
unary_expression
|
|
||||||
: postfix_expression
|
|
||||||
| '++' unary_expression
|
|
||||||
| '--' unary_expression
|
|
||||||
| unary_operator cast_expression
|
|
||||||
| 'sizeof' unary_expression
|
|
||||||
| 'sizeof' '(' type_name ')'
|
|
||||||
;
|
|
||||||
|
|
||||||
postfix_expression
|
|
||||||
locals [FuncCallText='']
|
|
||||||
@init
|
|
||||||
{
|
|
||||||
self.FuncCallText=''
|
|
||||||
}
|
|
||||||
: p=primary_expression {self.FuncCallText += $p.text}
|
|
||||||
( '[' expression ']'
|
|
||||||
| '(' a=')'{self.StoreFunctionCalling($p.start.line, $p.start.column, $a.line, localctx.a.column, self.FuncCallText, '')}
|
|
||||||
| '(' c=argument_expression_list b=')' {self.StoreFunctionCalling($p.start.line, $p.start.column, $b.line, localctx.b.column, self.FuncCallText, $c.text)}
|
|
||||||
| '(' macro_parameter_list ')'
|
|
||||||
| '.' x=IDENTIFIER {self.FuncCallText += '.' + $x.text}
|
|
||||||
| '*' y=IDENTIFIER {self.FuncCallText = $y.text}
|
|
||||||
| '->' z=IDENTIFIER {self.FuncCallText += '->' + $z.text}
|
|
||||||
| '++'
|
|
||||||
| '--'
|
|
||||||
)*
|
|
||||||
;
|
|
||||||
|
|
||||||
macro_parameter_list
|
|
||||||
: parameter_declaration (',' parameter_declaration)*
|
|
||||||
;
|
|
||||||
|
|
||||||
unary_operator
|
|
||||||
: '&'
|
|
||||||
| '*'
|
|
||||||
| '+'
|
|
||||||
| '-'
|
|
||||||
| '~'
|
|
||||||
| '!'
|
|
||||||
;
|
|
||||||
|
|
||||||
primary_expression
|
|
||||||
: IDENTIFIER
|
|
||||||
| constant
|
|
||||||
| '(' expression ')'
|
|
||||||
;
|
|
||||||
|
|
||||||
constant
|
|
||||||
: HEX_LITERAL
|
|
||||||
| OCTAL_LITERAL
|
|
||||||
| DECIMAL_LITERAL
|
|
||||||
| CHARACTER_LITERAL
|
|
||||||
| (IDENTIFIER* STRING_LITERAL+)+ IDENTIFIER*
|
|
||||||
| FLOATING_POINT_LITERAL
|
|
||||||
;
|
|
||||||
|
|
||||||
/////
|
|
||||||
|
|
||||||
expression
|
|
||||||
: assignment_expression (',' assignment_expression)*
|
|
||||||
;
|
|
||||||
|
|
||||||
constant_expression
|
|
||||||
: conditional_expression
|
|
||||||
;
|
|
||||||
|
|
||||||
assignment_expression
|
|
||||||
: lvalue assignment_operator assignment_expression
|
|
||||||
| conditional_expression
|
|
||||||
;
|
|
||||||
|
|
||||||
lvalue
|
|
||||||
: unary_expression
|
|
||||||
;
|
|
||||||
|
|
||||||
assignment_operator
|
|
||||||
: '='
|
|
||||||
| '*='
|
|
||||||
| '/='
|
|
||||||
| '%='
|
|
||||||
| '+='
|
|
||||||
| '-='
|
|
||||||
| '<<='
|
|
||||||
| '>>='
|
|
||||||
| '&='
|
|
||||||
| '^='
|
|
||||||
| '|='
|
|
||||||
;
|
|
||||||
|
|
||||||
conditional_expression
|
|
||||||
: e=logical_or_expression ('?' expression ':' conditional_expression {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)})?
|
|
||||||
;
|
|
||||||
|
|
||||||
logical_or_expression
|
|
||||||
: logical_and_expression ('||' logical_and_expression)*
|
|
||||||
;
|
|
||||||
|
|
||||||
logical_and_expression
|
|
||||||
: inclusive_or_expression ('&&' inclusive_or_expression)*
|
|
||||||
;
|
|
||||||
|
|
||||||
inclusive_or_expression
|
|
||||||
: exclusive_or_expression ('|' exclusive_or_expression)*
|
|
||||||
;
|
|
||||||
|
|
||||||
exclusive_or_expression
|
|
||||||
: and_expression ('^' and_expression)*
|
|
||||||
;
|
|
||||||
|
|
||||||
and_expression
|
|
||||||
: equality_expression ('&' equality_expression)*
|
|
||||||
;
|
|
||||||
equality_expression
|
|
||||||
: relational_expression (('=='|'!=') relational_expression )*
|
|
||||||
;
|
|
||||||
|
|
||||||
relational_expression
|
|
||||||
: shift_expression (('<'|'>'|'<='|'>=') shift_expression)*
|
|
||||||
;
|
|
||||||
|
|
||||||
shift_expression
|
|
||||||
: additive_expression (('<<'|'>>') additive_expression)*
|
|
||||||
;
|
|
||||||
|
|
||||||
// S t a t e m e n t s
|
|
||||||
|
|
||||||
statement
|
|
||||||
: labeled_statement
|
|
||||||
| compound_statement
|
|
||||||
| expression_statement
|
|
||||||
| selection_statement
|
|
||||||
| iteration_statement
|
|
||||||
| jump_statement
|
|
||||||
| macro_statement
|
|
||||||
| asm2_statement
|
|
||||||
| asm1_statement
|
|
||||||
| asm_statement
|
|
||||||
| declaration
|
|
||||||
;
|
|
||||||
|
|
||||||
asm2_statement
|
|
||||||
: '__asm__'? IDENTIFIER '(' (~(';'))* ')' ';'
|
|
||||||
;
|
|
||||||
|
|
||||||
asm1_statement
|
|
||||||
: '_asm' '{' (~('}'))* '}'
|
|
||||||
;
|
|
||||||
|
|
||||||
asm_statement
|
|
||||||
: '__asm' '{' (~('}'))* '}'
|
|
||||||
;
|
|
||||||
|
|
||||||
macro_statement
|
|
||||||
: IDENTIFIER '(' declaration* statement_list? expression? ')'
|
|
||||||
;
|
|
||||||
|
|
||||||
labeled_statement
|
|
||||||
: IDENTIFIER ':' statement
|
|
||||||
| 'case' constant_expression ':' statement
|
|
||||||
| 'default' ':' statement
|
|
||||||
;
|
|
||||||
|
|
||||||
compound_statement
|
|
||||||
: '{' declaration* statement_list? '}'
|
|
||||||
;
|
|
||||||
|
|
||||||
statement_list
|
|
||||||
: statement+
|
|
||||||
;
|
|
||||||
|
|
||||||
expression_statement
|
|
||||||
: ';'
|
|
||||||
| expression ';'
|
|
||||||
;
|
|
||||||
|
|
||||||
selection_statement
|
|
||||||
: 'if' '(' e=expression ')' {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)} statement (:'else' statement)?
|
|
||||||
| 'switch' '(' expression ')' statement
|
|
||||||
;
|
|
||||||
|
|
||||||
iteration_statement
|
|
||||||
: 'while' '(' e=expression ')' statement {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)}
|
|
||||||
| 'do' statement 'while' '(' e=expression ')' ';' {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)}
|
|
||||||
//| 'for' '(' expression_statement e=expression_statement expression? ')' statement {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)}
|
|
||||||
;
|
|
||||||
|
|
||||||
jump_statement
|
|
||||||
: 'goto' IDENTIFIER ';'
|
|
||||||
| 'continue' ';'
|
|
||||||
| 'break' ';'
|
|
||||||
| 'return' ';'
|
|
||||||
| 'return' expression ';'
|
|
||||||
;
|
|
||||||
|
|
||||||
IDENTIFIER
|
|
||||||
: LETTER (LETTER|'0'..'9')*
|
|
||||||
;
|
|
||||||
|
|
||||||
fragment
|
|
||||||
LETTER
|
|
||||||
: '$'
|
|
||||||
| 'A'..'Z'
|
|
||||||
| 'a'..'z'
|
|
||||||
| '_'
|
|
||||||
;
|
|
||||||
|
|
||||||
CHARACTER_LITERAL
|
|
||||||
: ('L')? '\'' ( EscapeSequence | ~('\''|'\\') ) '\''
|
|
||||||
;
|
|
||||||
|
|
||||||
STRING_LITERAL
|
|
||||||
: ('L')? '"' ( EscapeSequence | ~('\\'|'"') )* '"'
|
|
||||||
;
|
|
||||||
|
|
||||||
HEX_LITERAL : '0' ('x'|'X') HexDigit+ IntegerTypeSuffix? ;
|
|
||||||
|
|
||||||
DECIMAL_LITERAL : ('0' | '1'..'9' '0'..'9'*) IntegerTypeSuffix? ;
|
|
||||||
|
|
||||||
OCTAL_LITERAL : '0' ('0'..'7')+ IntegerTypeSuffix? ;
|
|
||||||
|
|
||||||
fragment
|
|
||||||
HexDigit : ('0'..'9'|'a'..'f'|'A'..'F') ;
|
|
||||||
|
|
||||||
fragment
|
|
||||||
IntegerTypeSuffix
|
|
||||||
: ('u'|'U')
|
|
||||||
| ('l'|'L')
|
|
||||||
| ('u'|'U') ('l'|'L')
|
|
||||||
| ('u'|'U') ('l'|'L') ('l'|'L')
|
|
||||||
;
|
|
||||||
|
|
||||||
FLOATING_POINT_LITERAL
|
|
||||||
: ('0'..'9')+ '.' ('0'..'9')* Exponent? FloatTypeSuffix?
|
|
||||||
| '.' ('0'..'9')+ Exponent? FloatTypeSuffix?
|
|
||||||
| ('0'..'9')+ Exponent FloatTypeSuffix?
|
|
||||||
| ('0'..'9')+ Exponent? FloatTypeSuffix
|
|
||||||
;
|
|
||||||
|
|
||||||
fragment
|
|
||||||
Exponent : ('e'|'E') ('+'|'-')? ('0'..'9')+ ;
|
|
||||||
|
|
||||||
fragment
|
|
||||||
FloatTypeSuffix : ('f'|'F'|'d'|'D') ;
|
|
||||||
|
|
||||||
fragment
|
|
||||||
EscapeSequence
|
|
||||||
: '\\' ('b'|'t'|'n'|'f'|'r'|'\''|'\\')
|
|
||||||
| OctalEscape
|
|
||||||
;
|
|
||||||
|
|
||||||
fragment
|
|
||||||
OctalEscape
|
|
||||||
: '\\' ('0'..'3') ('0'..'7') ('0'..'7')
|
|
||||||
| '\\' ('0'..'7') ('0'..'7')
|
|
||||||
| '\\' ('0'..'7')
|
|
||||||
;
|
|
||||||
|
|
||||||
fragment
|
|
||||||
UnicodeEscape
|
|
||||||
: '\\' 'u' HexDigit HexDigit HexDigit HexDigit
|
|
||||||
;
|
|
||||||
|
|
||||||
WS : (' '|'\r'|'\t'|'\u000C'|'\n')
|
|
||||||
-> channel(HIDDEN)
|
|
||||||
;
|
|
||||||
|
|
||||||
// ingore '\' of line concatenation
|
|
||||||
BS : ('\\')
|
|
||||||
-> channel(HIDDEN)
|
|
||||||
;
|
|
||||||
|
|
||||||
UnicodeVocabulary
|
|
||||||
: '\u0003'..'\uFFFE'
|
|
||||||
;
|
|
||||||
|
|
||||||
COMMENT
|
|
||||||
: '/*' .*? '*/'
|
|
||||||
-> channel(HIDDEN)
|
|
||||||
;
|
|
||||||
|
|
||||||
LINE_COMMENT
|
|
||||||
: '//' ~('\n'|'\r')* '\r'? '\n'
|
|
||||||
-> channel(HIDDEN)
|
|
||||||
;
|
|
||||||
|
|
||||||
// ignore #line info for now
|
|
||||||
LINE_COMMAND
|
|
||||||
: '#' ~('\n'|'\r')* '\r'? '\n'
|
|
||||||
-> channel(HIDDEN)
|
|
||||||
;
|
|
File diff suppressed because it is too large
Load Diff
@ -1,672 +0,0 @@
|
|||||||
# Generated from C.g4 by ANTLR 4.7.1
|
|
||||||
from antlr4 import *
|
|
||||||
if __name__ is not None and "." in __name__:
|
|
||||||
from .CParser import CParser
|
|
||||||
else:
|
|
||||||
from CParser import CParser
|
|
||||||
|
|
||||||
## @file
|
|
||||||
# The file defines the parser for C source files.
|
|
||||||
#
|
|
||||||
# THIS FILE IS AUTO-GENENERATED. PLEASE DON NOT MODIFY THIS FILE.
|
|
||||||
# This file is generated by running:
|
|
||||||
# java org.antlr.Tool C.g
|
|
||||||
#
|
|
||||||
# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
|
|
||||||
#
|
|
||||||
# This program and the accompanying materials are licensed and made available
|
|
||||||
# under the terms and conditions of the BSD License which accompanies this
|
|
||||||
# distribution. The full text of the license may be found at:
|
|
||||||
# http://opensource.org/licenses/bsd-license.php
|
|
||||||
#
|
|
||||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
|
||||||
#
|
|
||||||
##
|
|
||||||
|
|
||||||
import Ecc.CodeFragment as CodeFragment
|
|
||||||
import Ecc.FileProfile as FileProfile
|
|
||||||
|
|
||||||
|
|
||||||
# This class defines a complete listener for a parse tree produced by CParser.
|
|
||||||
class CListener(ParseTreeListener):
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#translation_unit.
|
|
||||||
def enterTranslation_unit(self, ctx:CParser.Translation_unitContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#translation_unit.
|
|
||||||
def exitTranslation_unit(self, ctx:CParser.Translation_unitContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#external_declaration.
|
|
||||||
def enterExternal_declaration(self, ctx:CParser.External_declarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#external_declaration.
|
|
||||||
def exitExternal_declaration(self, ctx:CParser.External_declarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#function_definition.
|
|
||||||
def enterFunction_definition(self, ctx:CParser.Function_definitionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#function_definition.
|
|
||||||
def exitFunction_definition(self, ctx:CParser.Function_definitionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#declaration_specifiers.
|
|
||||||
def enterDeclaration_specifiers(self, ctx:CParser.Declaration_specifiersContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#declaration_specifiers.
|
|
||||||
def exitDeclaration_specifiers(self, ctx:CParser.Declaration_specifiersContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#declaration.
|
|
||||||
def enterDeclaration(self, ctx:CParser.DeclarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#declaration.
|
|
||||||
def exitDeclaration(self, ctx:CParser.DeclarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#init_declarator_list.
|
|
||||||
def enterInit_declarator_list(self, ctx:CParser.Init_declarator_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#init_declarator_list.
|
|
||||||
def exitInit_declarator_list(self, ctx:CParser.Init_declarator_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#init_declarator.
|
|
||||||
def enterInit_declarator(self, ctx:CParser.Init_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#init_declarator.
|
|
||||||
def exitInit_declarator(self, ctx:CParser.Init_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#storage_class_specifier.
|
|
||||||
def enterStorage_class_specifier(self, ctx:CParser.Storage_class_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#storage_class_specifier.
|
|
||||||
def exitStorage_class_specifier(self, ctx:CParser.Storage_class_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#type_specifier.
|
|
||||||
def enterType_specifier(self, ctx:CParser.Type_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#type_specifier.
|
|
||||||
def exitType_specifier(self, ctx:CParser.Type_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#type_id.
|
|
||||||
def enterType_id(self, ctx:CParser.Type_idContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#type_id.
|
|
||||||
def exitType_id(self, ctx:CParser.Type_idContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#struct_or_union_specifier.
|
|
||||||
def enterStruct_or_union_specifier(self, ctx:CParser.Struct_or_union_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#struct_or_union_specifier.
|
|
||||||
def exitStruct_or_union_specifier(self, ctx:CParser.Struct_or_union_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#struct_or_union.
|
|
||||||
def enterStruct_or_union(self, ctx:CParser.Struct_or_unionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#struct_or_union.
|
|
||||||
def exitStruct_or_union(self, ctx:CParser.Struct_or_unionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#struct_declaration_list.
|
|
||||||
def enterStruct_declaration_list(self, ctx:CParser.Struct_declaration_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#struct_declaration_list.
|
|
||||||
def exitStruct_declaration_list(self, ctx:CParser.Struct_declaration_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#struct_declaration.
|
|
||||||
def enterStruct_declaration(self, ctx:CParser.Struct_declarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#struct_declaration.
|
|
||||||
def exitStruct_declaration(self, ctx:CParser.Struct_declarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#specifier_qualifier_list.
|
|
||||||
def enterSpecifier_qualifier_list(self, ctx:CParser.Specifier_qualifier_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#specifier_qualifier_list.
|
|
||||||
def exitSpecifier_qualifier_list(self, ctx:CParser.Specifier_qualifier_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#struct_declarator_list.
|
|
||||||
def enterStruct_declarator_list(self, ctx:CParser.Struct_declarator_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#struct_declarator_list.
|
|
||||||
def exitStruct_declarator_list(self, ctx:CParser.Struct_declarator_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#struct_declarator.
|
|
||||||
def enterStruct_declarator(self, ctx:CParser.Struct_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#struct_declarator.
|
|
||||||
def exitStruct_declarator(self, ctx:CParser.Struct_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#enum_specifier.
|
|
||||||
def enterEnum_specifier(self, ctx:CParser.Enum_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#enum_specifier.
|
|
||||||
def exitEnum_specifier(self, ctx:CParser.Enum_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#enumerator_list.
|
|
||||||
def enterEnumerator_list(self, ctx:CParser.Enumerator_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#enumerator_list.
|
|
||||||
def exitEnumerator_list(self, ctx:CParser.Enumerator_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#enumerator.
|
|
||||||
def enterEnumerator(self, ctx:CParser.EnumeratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#enumerator.
|
|
||||||
def exitEnumerator(self, ctx:CParser.EnumeratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#type_qualifier.
|
|
||||||
def enterType_qualifier(self, ctx:CParser.Type_qualifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#type_qualifier.
|
|
||||||
def exitType_qualifier(self, ctx:CParser.Type_qualifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#declarator.
|
|
||||||
def enterDeclarator(self, ctx:CParser.DeclaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#declarator.
|
|
||||||
def exitDeclarator(self, ctx:CParser.DeclaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#direct_declarator.
|
|
||||||
def enterDirect_declarator(self, ctx:CParser.Direct_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#direct_declarator.
|
|
||||||
def exitDirect_declarator(self, ctx:CParser.Direct_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#declarator_suffix.
|
|
||||||
def enterDeclarator_suffix(self, ctx:CParser.Declarator_suffixContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#declarator_suffix.
|
|
||||||
def exitDeclarator_suffix(self, ctx:CParser.Declarator_suffixContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#pointer.
|
|
||||||
def enterPointer(self, ctx:CParser.PointerContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#pointer.
|
|
||||||
def exitPointer(self, ctx:CParser.PointerContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#parameter_type_list.
|
|
||||||
def enterParameter_type_list(self, ctx:CParser.Parameter_type_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#parameter_type_list.
|
|
||||||
def exitParameter_type_list(self, ctx:CParser.Parameter_type_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#parameter_list.
|
|
||||||
def enterParameter_list(self, ctx:CParser.Parameter_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#parameter_list.
|
|
||||||
def exitParameter_list(self, ctx:CParser.Parameter_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#parameter_declaration.
|
|
||||||
def enterParameter_declaration(self, ctx:CParser.Parameter_declarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#parameter_declaration.
|
|
||||||
def exitParameter_declaration(self, ctx:CParser.Parameter_declarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#identifier_list.
|
|
||||||
def enterIdentifier_list(self, ctx:CParser.Identifier_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#identifier_list.
|
|
||||||
def exitIdentifier_list(self, ctx:CParser.Identifier_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#type_name.
|
|
||||||
def enterType_name(self, ctx:CParser.Type_nameContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#type_name.
|
|
||||||
def exitType_name(self, ctx:CParser.Type_nameContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#abstract_declarator.
|
|
||||||
def enterAbstract_declarator(self, ctx:CParser.Abstract_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#abstract_declarator.
|
|
||||||
def exitAbstract_declarator(self, ctx:CParser.Abstract_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#direct_abstract_declarator.
|
|
||||||
def enterDirect_abstract_declarator(self, ctx:CParser.Direct_abstract_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#direct_abstract_declarator.
|
|
||||||
def exitDirect_abstract_declarator(self, ctx:CParser.Direct_abstract_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#abstract_declarator_suffix.
|
|
||||||
def enterAbstract_declarator_suffix(self, ctx:CParser.Abstract_declarator_suffixContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#abstract_declarator_suffix.
|
|
||||||
def exitAbstract_declarator_suffix(self, ctx:CParser.Abstract_declarator_suffixContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#initializer.
|
|
||||||
def enterInitializer(self, ctx:CParser.InitializerContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#initializer.
|
|
||||||
def exitInitializer(self, ctx:CParser.InitializerContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#initializer_list.
|
|
||||||
def enterInitializer_list(self, ctx:CParser.Initializer_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#initializer_list.
|
|
||||||
def exitInitializer_list(self, ctx:CParser.Initializer_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#argument_expression_list.
|
|
||||||
def enterArgument_expression_list(self, ctx:CParser.Argument_expression_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#argument_expression_list.
|
|
||||||
def exitArgument_expression_list(self, ctx:CParser.Argument_expression_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#additive_expression.
|
|
||||||
def enterAdditive_expression(self, ctx:CParser.Additive_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#additive_expression.
|
|
||||||
def exitAdditive_expression(self, ctx:CParser.Additive_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#multiplicative_expression.
|
|
||||||
def enterMultiplicative_expression(self, ctx:CParser.Multiplicative_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#multiplicative_expression.
|
|
||||||
def exitMultiplicative_expression(self, ctx:CParser.Multiplicative_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#cast_expression.
|
|
||||||
def enterCast_expression(self, ctx:CParser.Cast_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#cast_expression.
|
|
||||||
def exitCast_expression(self, ctx:CParser.Cast_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#unary_expression.
|
|
||||||
def enterUnary_expression(self, ctx:CParser.Unary_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#unary_expression.
|
|
||||||
def exitUnary_expression(self, ctx:CParser.Unary_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#postfix_expression.
|
|
||||||
def enterPostfix_expression(self, ctx:CParser.Postfix_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#postfix_expression.
|
|
||||||
def exitPostfix_expression(self, ctx:CParser.Postfix_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#macro_parameter_list.
|
|
||||||
def enterMacro_parameter_list(self, ctx:CParser.Macro_parameter_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#macro_parameter_list.
|
|
||||||
def exitMacro_parameter_list(self, ctx:CParser.Macro_parameter_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#unary_operator.
|
|
||||||
def enterUnary_operator(self, ctx:CParser.Unary_operatorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#unary_operator.
|
|
||||||
def exitUnary_operator(self, ctx:CParser.Unary_operatorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#primary_expression.
|
|
||||||
def enterPrimary_expression(self, ctx:CParser.Primary_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#primary_expression.
|
|
||||||
def exitPrimary_expression(self, ctx:CParser.Primary_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#constant.
|
|
||||||
def enterConstant(self, ctx:CParser.ConstantContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#constant.
|
|
||||||
def exitConstant(self, ctx:CParser.ConstantContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#expression.
|
|
||||||
def enterExpression(self, ctx:CParser.ExpressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#expression.
|
|
||||||
def exitExpression(self, ctx:CParser.ExpressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#constant_expression.
|
|
||||||
def enterConstant_expression(self, ctx:CParser.Constant_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#constant_expression.
|
|
||||||
def exitConstant_expression(self, ctx:CParser.Constant_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#assignment_expression.
|
|
||||||
def enterAssignment_expression(self, ctx:CParser.Assignment_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#assignment_expression.
|
|
||||||
def exitAssignment_expression(self, ctx:CParser.Assignment_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#lvalue.
|
|
||||||
def enterLvalue(self, ctx:CParser.LvalueContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#lvalue.
|
|
||||||
def exitLvalue(self, ctx:CParser.LvalueContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#assignment_operator.
|
|
||||||
def enterAssignment_operator(self, ctx:CParser.Assignment_operatorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#assignment_operator.
|
|
||||||
def exitAssignment_operator(self, ctx:CParser.Assignment_operatorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#conditional_expression.
|
|
||||||
def enterConditional_expression(self, ctx:CParser.Conditional_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#conditional_expression.
|
|
||||||
def exitConditional_expression(self, ctx:CParser.Conditional_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#logical_or_expression.
|
|
||||||
def enterLogical_or_expression(self, ctx:CParser.Logical_or_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#logical_or_expression.
|
|
||||||
def exitLogical_or_expression(self, ctx:CParser.Logical_or_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#logical_and_expression.
|
|
||||||
def enterLogical_and_expression(self, ctx:CParser.Logical_and_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#logical_and_expression.
|
|
||||||
def exitLogical_and_expression(self, ctx:CParser.Logical_and_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#inclusive_or_expression.
|
|
||||||
def enterInclusive_or_expression(self, ctx:CParser.Inclusive_or_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#inclusive_or_expression.
|
|
||||||
def exitInclusive_or_expression(self, ctx:CParser.Inclusive_or_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#exclusive_or_expression.
|
|
||||||
def enterExclusive_or_expression(self, ctx:CParser.Exclusive_or_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#exclusive_or_expression.
|
|
||||||
def exitExclusive_or_expression(self, ctx:CParser.Exclusive_or_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#and_expression.
|
|
||||||
def enterAnd_expression(self, ctx:CParser.And_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#and_expression.
|
|
||||||
def exitAnd_expression(self, ctx:CParser.And_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#equality_expression.
|
|
||||||
def enterEquality_expression(self, ctx:CParser.Equality_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#equality_expression.
|
|
||||||
def exitEquality_expression(self, ctx:CParser.Equality_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#relational_expression.
|
|
||||||
def enterRelational_expression(self, ctx:CParser.Relational_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#relational_expression.
|
|
||||||
def exitRelational_expression(self, ctx:CParser.Relational_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#shift_expression.
|
|
||||||
def enterShift_expression(self, ctx:CParser.Shift_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#shift_expression.
|
|
||||||
def exitShift_expression(self, ctx:CParser.Shift_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#statement.
|
|
||||||
def enterStatement(self, ctx:CParser.StatementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#statement.
|
|
||||||
def exitStatement(self, ctx:CParser.StatementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#asm2_statement.
|
|
||||||
def enterAsm2_statement(self, ctx:CParser.Asm2_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#asm2_statement.
|
|
||||||
def exitAsm2_statement(self, ctx:CParser.Asm2_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#asm1_statement.
|
|
||||||
def enterAsm1_statement(self, ctx:CParser.Asm1_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#asm1_statement.
|
|
||||||
def exitAsm1_statement(self, ctx:CParser.Asm1_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#asm_statement.
|
|
||||||
def enterAsm_statement(self, ctx:CParser.Asm_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#asm_statement.
|
|
||||||
def exitAsm_statement(self, ctx:CParser.Asm_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#macro_statement.
|
|
||||||
def enterMacro_statement(self, ctx:CParser.Macro_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#macro_statement.
|
|
||||||
def exitMacro_statement(self, ctx:CParser.Macro_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#labeled_statement.
|
|
||||||
def enterLabeled_statement(self, ctx:CParser.Labeled_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#labeled_statement.
|
|
||||||
def exitLabeled_statement(self, ctx:CParser.Labeled_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#compound_statement.
|
|
||||||
def enterCompound_statement(self, ctx:CParser.Compound_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#compound_statement.
|
|
||||||
def exitCompound_statement(self, ctx:CParser.Compound_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#statement_list.
|
|
||||||
def enterStatement_list(self, ctx:CParser.Statement_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#statement_list.
|
|
||||||
def exitStatement_list(self, ctx:CParser.Statement_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#expression_statement.
|
|
||||||
def enterExpression_statement(self, ctx:CParser.Expression_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#expression_statement.
|
|
||||||
def exitExpression_statement(self, ctx:CParser.Expression_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#selection_statement.
|
|
||||||
def enterSelection_statement(self, ctx:CParser.Selection_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#selection_statement.
|
|
||||||
def exitSelection_statement(self, ctx:CParser.Selection_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#iteration_statement.
|
|
||||||
def enterIteration_statement(self, ctx:CParser.Iteration_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#iteration_statement.
|
|
||||||
def exitIteration_statement(self, ctx:CParser.Iteration_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#jump_statement.
|
|
||||||
def enterJump_statement(self, ctx:CParser.Jump_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#jump_statement.
|
|
||||||
def exitJump_statement(self, ctx:CParser.Jump_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
@ -223,7 +223,7 @@ class Check(object):
|
|||||||
IndexOfLine = 0
|
IndexOfLine = 0
|
||||||
for Line in op:
|
for Line in op:
|
||||||
IndexOfLine += 1
|
IndexOfLine += 1
|
||||||
if not bytes.decode(Line).endswith('\r\n'):
|
if not Line.endswith('\r\n'):
|
||||||
OtherMsg = "File %s has invalid line ending at line %s" % (Record[1], IndexOfLine)
|
OtherMsg = "File %s has invalid line ending at line %s" % (Record[1], IndexOfLine)
|
||||||
EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_INVALID_LINE_ENDING, OtherMsg=OtherMsg, BelongsToTable='File', BelongsToItem=Record[0])
|
EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_INVALID_LINE_ENDING, OtherMsg=OtherMsg, BelongsToTable='File', BelongsToItem=Record[0])
|
||||||
|
|
||||||
@ -235,7 +235,7 @@ class Check(object):
|
|||||||
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
|
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
|
||||||
for Record in RecordSet:
|
for Record in RecordSet:
|
||||||
if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:
|
if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:
|
||||||
op = open(Record[1], 'r').readlines()
|
op = open(Record[1], 'rb').readlines()
|
||||||
IndexOfLine = 0
|
IndexOfLine = 0
|
||||||
for Line in op:
|
for Line in op:
|
||||||
IndexOfLine += 1
|
IndexOfLine += 1
|
||||||
|
@ -22,7 +22,7 @@ import re
|
|||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
import antlr4
|
import antlr3
|
||||||
from Ecc.CLexer import CLexer
|
from Ecc.CLexer import CLexer
|
||||||
from Ecc.CParser import CParser
|
from Ecc.CParser import CParser
|
||||||
|
|
||||||
@ -499,14 +499,13 @@ class CodeFragmentCollector:
|
|||||||
def ParseFile(self):
|
def ParseFile(self):
|
||||||
self.PreprocessFile()
|
self.PreprocessFile()
|
||||||
# restore from ListOfList to ListOfString
|
# restore from ListOfList to ListOfString
|
||||||
# print(self.Profile.FileLinesList)
|
|
||||||
self.Profile.FileLinesList = ["".join(list) for list in self.Profile.FileLinesList]
|
self.Profile.FileLinesList = ["".join(list) for list in self.Profile.FileLinesList]
|
||||||
FileStringContents = ''
|
FileStringContents = ''
|
||||||
for fileLine in self.Profile.FileLinesList:
|
for fileLine in self.Profile.FileLinesList:
|
||||||
FileStringContents += fileLine
|
FileStringContents += fileLine
|
||||||
cStream = antlr4.InputStream(FileStringContents)
|
cStream = antlr3.StringStream(FileStringContents)
|
||||||
lexer = CLexer(cStream)
|
lexer = CLexer(cStream)
|
||||||
tStream = antlr4.CommonTokenStream(lexer)
|
tStream = antlr3.CommonTokenStream(lexer)
|
||||||
parser = CParser(tStream)
|
parser = CParser(tStream)
|
||||||
parser.translation_unit()
|
parser.translation_unit()
|
||||||
|
|
||||||
@ -517,9 +516,9 @@ class CodeFragmentCollector:
|
|||||||
FileStringContents = ''
|
FileStringContents = ''
|
||||||
for fileLine in self.Profile.FileLinesList:
|
for fileLine in self.Profile.FileLinesList:
|
||||||
FileStringContents += fileLine
|
FileStringContents += fileLine
|
||||||
cStream = antlr4.InputStream(FileStringContents)
|
cStream = antlr3.StringStream(FileStringContents)
|
||||||
lexer = CLexer(cStream)
|
lexer = CLexer(cStream)
|
||||||
tStream = antlr4.CommonTokenStream(lexer)
|
tStream = antlr3.CommonTokenStream(lexer)
|
||||||
parser = CParser(tStream)
|
parser = CParser(tStream)
|
||||||
parser.translation_unit()
|
parser.translation_unit()
|
||||||
|
|
||||||
|
@ -205,7 +205,7 @@ class Ecc(object):
|
|||||||
Op = open(EccGlobalData.gConfig.MetaDataFileCheckPathOfGenerateFileList, 'w+')
|
Op = open(EccGlobalData.gConfig.MetaDataFileCheckPathOfGenerateFileList, 'w+')
|
||||||
#SkipDirs = Read from config file
|
#SkipDirs = Read from config file
|
||||||
SkipDirs = EccGlobalData.gConfig.SkipDirList
|
SkipDirs = EccGlobalData.gConfig.SkipDirList
|
||||||
SkipDirString = '|'.join(SkipDirs)
|
SkipDirString = string.join(SkipDirs, '|')
|
||||||
# p = re.compile(r'.*[\\/](?:%s)[\\/]?.*' % SkipDirString)
|
# p = re.compile(r'.*[\\/](?:%s)[\\/]?.*' % SkipDirString)
|
||||||
p = re.compile(r'.*[\\/](?:%s^\S)[\\/]?.*' % SkipDirString)
|
p = re.compile(r'.*[\\/](?:%s^\S)[\\/]?.*' % SkipDirString)
|
||||||
for scanFolder in ScanFolders:
|
for scanFolder in ScanFolders:
|
||||||
|
@ -47,7 +47,7 @@ class FileProfile :
|
|||||||
self.FileLinesList = []
|
self.FileLinesList = []
|
||||||
self.FileLinesListFromFile = []
|
self.FileLinesListFromFile = []
|
||||||
try:
|
try:
|
||||||
fsock = open(FileName, "r")
|
fsock = open(FileName, "rb", 0)
|
||||||
try:
|
try:
|
||||||
self.FileLinesListFromFile = fsock.readlines()
|
self.FileLinesListFromFile = fsock.readlines()
|
||||||
finally:
|
finally:
|
||||||
|
@ -113,7 +113,7 @@ def ParseHeaderCommentSection(CommentList, FileName = None):
|
|||||||
#
|
#
|
||||||
Last = 0
|
Last = 0
|
||||||
HeaderCommentStage = HEADER_COMMENT_NOT_STARTED
|
HeaderCommentStage = HEADER_COMMENT_NOT_STARTED
|
||||||
for Index in range(len(CommentList) - 1, 0, -1):
|
for Index in xrange(len(CommentList)-1, 0, -1):
|
||||||
Line = CommentList[Index][0]
|
Line = CommentList[Index][0]
|
||||||
if _IsCopyrightLine(Line):
|
if _IsCopyrightLine(Line):
|
||||||
Last = Index
|
Last = Index
|
||||||
|
@ -35,7 +35,7 @@ IgnoredKeywordList = ['EFI_ERROR']
|
|||||||
|
|
||||||
def GetIgnoredDirListPattern():
|
def GetIgnoredDirListPattern():
|
||||||
skipList = list(EccGlobalData.gConfig.SkipDirList) + ['.svn']
|
skipList = list(EccGlobalData.gConfig.SkipDirList) + ['.svn']
|
||||||
DirString = '|'.join(skipList)
|
DirString = string.join(skipList, '|')
|
||||||
p = re.compile(r'.*[\\/](?:%s)[\\/]?.*' % DirString)
|
p = re.compile(r'.*[\\/](?:%s)[\\/]?.*' % DirString)
|
||||||
return p
|
return p
|
||||||
|
|
||||||
@ -963,7 +963,7 @@ def StripComments(Str):
|
|||||||
ListFromStr[Index] = ' '
|
ListFromStr[Index] = ' '
|
||||||
Index += 1
|
Index += 1
|
||||||
# check for // comment
|
# check for // comment
|
||||||
elif ListFromStr[Index] == '/' and ListFromStr[Index + 1] == '/':
|
elif ListFromStr[Index] == '/' and ListFromStr[Index + 1] == '/' and ListFromStr[Index + 2] != '\n':
|
||||||
InComment = True
|
InComment = True
|
||||||
DoubleSlashComment = True
|
DoubleSlashComment = True
|
||||||
|
|
||||||
@ -1297,7 +1297,7 @@ def CheckFuncLayoutReturnType(FullFileName):
|
|||||||
Result0 = Result[0]
|
Result0 = Result[0]
|
||||||
if Result0.upper().startswith('STATIC'):
|
if Result0.upper().startswith('STATIC'):
|
||||||
Result0 = Result0[6:].strip()
|
Result0 = Result0[6:].strip()
|
||||||
Index = Result0.find(TypeStart)
|
Index = Result0.find(ReturnType)
|
||||||
if Index != 0 or Result[3] != 0:
|
if Index != 0 or Result[3] != 0:
|
||||||
PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, '[%s] Return Type should appear at the start of line' % FuncName, 'Function', Result[1])
|
PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, '[%s] Return Type should appear at the start of line' % FuncName, 'Function', Result[1])
|
||||||
|
|
||||||
|
File diff suppressed because it is too large
Load Diff
@ -1,672 +0,0 @@
|
|||||||
# Generated from C.g4 by ANTLR 4.7.1
|
|
||||||
from antlr4 import *
|
|
||||||
if __name__ is not None and "." in __name__:
|
|
||||||
from .CParser import CParser
|
|
||||||
else:
|
|
||||||
from CParser import CParser
|
|
||||||
|
|
||||||
## @file
|
|
||||||
# The file defines the parser for C source files.
|
|
||||||
#
|
|
||||||
# THIS FILE IS AUTO-GENENERATED. PLEASE DON NOT MODIFY THIS FILE.
|
|
||||||
# This file is generated by running:
|
|
||||||
# java org.antlr.Tool C.g
|
|
||||||
#
|
|
||||||
# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
|
|
||||||
#
|
|
||||||
# This program and the accompanying materials are licensed and made available
|
|
||||||
# under the terms and conditions of the BSD License which accompanies this
|
|
||||||
# distribution. The full text of the license may be found at:
|
|
||||||
# http://opensource.org/licenses/bsd-license.php
|
|
||||||
#
|
|
||||||
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
|
|
||||||
#
|
|
||||||
##
|
|
||||||
|
|
||||||
import Ecc.CodeFragment as CodeFragment
|
|
||||||
import Ecc.FileProfile as FileProfile
|
|
||||||
|
|
||||||
|
|
||||||
# This class defines a complete listener for a parse tree produced by CParser.
|
|
||||||
class CListener(ParseTreeListener):
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#translation_unit.
|
|
||||||
def enterTranslation_unit(self, ctx:CParser.Translation_unitContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#translation_unit.
|
|
||||||
def exitTranslation_unit(self, ctx:CParser.Translation_unitContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#external_declaration.
|
|
||||||
def enterExternal_declaration(self, ctx:CParser.External_declarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#external_declaration.
|
|
||||||
def exitExternal_declaration(self, ctx:CParser.External_declarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#function_definition.
|
|
||||||
def enterFunction_definition(self, ctx:CParser.Function_definitionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#function_definition.
|
|
||||||
def exitFunction_definition(self, ctx:CParser.Function_definitionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#declaration_specifiers.
|
|
||||||
def enterDeclaration_specifiers(self, ctx:CParser.Declaration_specifiersContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#declaration_specifiers.
|
|
||||||
def exitDeclaration_specifiers(self, ctx:CParser.Declaration_specifiersContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#declaration.
|
|
||||||
def enterDeclaration(self, ctx:CParser.DeclarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#declaration.
|
|
||||||
def exitDeclaration(self, ctx:CParser.DeclarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#init_declarator_list.
|
|
||||||
def enterInit_declarator_list(self, ctx:CParser.Init_declarator_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#init_declarator_list.
|
|
||||||
def exitInit_declarator_list(self, ctx:CParser.Init_declarator_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#init_declarator.
|
|
||||||
def enterInit_declarator(self, ctx:CParser.Init_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#init_declarator.
|
|
||||||
def exitInit_declarator(self, ctx:CParser.Init_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#storage_class_specifier.
|
|
||||||
def enterStorage_class_specifier(self, ctx:CParser.Storage_class_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#storage_class_specifier.
|
|
||||||
def exitStorage_class_specifier(self, ctx:CParser.Storage_class_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#type_specifier.
|
|
||||||
def enterType_specifier(self, ctx:CParser.Type_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#type_specifier.
|
|
||||||
def exitType_specifier(self, ctx:CParser.Type_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#type_id.
|
|
||||||
def enterType_id(self, ctx:CParser.Type_idContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#type_id.
|
|
||||||
def exitType_id(self, ctx:CParser.Type_idContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#struct_or_union_specifier.
|
|
||||||
def enterStruct_or_union_specifier(self, ctx:CParser.Struct_or_union_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#struct_or_union_specifier.
|
|
||||||
def exitStruct_or_union_specifier(self, ctx:CParser.Struct_or_union_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#struct_or_union.
|
|
||||||
def enterStruct_or_union(self, ctx:CParser.Struct_or_unionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#struct_or_union.
|
|
||||||
def exitStruct_or_union(self, ctx:CParser.Struct_or_unionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#struct_declaration_list.
|
|
||||||
def enterStruct_declaration_list(self, ctx:CParser.Struct_declaration_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#struct_declaration_list.
|
|
||||||
def exitStruct_declaration_list(self, ctx:CParser.Struct_declaration_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#struct_declaration.
|
|
||||||
def enterStruct_declaration(self, ctx:CParser.Struct_declarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#struct_declaration.
|
|
||||||
def exitStruct_declaration(self, ctx:CParser.Struct_declarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#specifier_qualifier_list.
|
|
||||||
def enterSpecifier_qualifier_list(self, ctx:CParser.Specifier_qualifier_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#specifier_qualifier_list.
|
|
||||||
def exitSpecifier_qualifier_list(self, ctx:CParser.Specifier_qualifier_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#struct_declarator_list.
|
|
||||||
def enterStruct_declarator_list(self, ctx:CParser.Struct_declarator_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#struct_declarator_list.
|
|
||||||
def exitStruct_declarator_list(self, ctx:CParser.Struct_declarator_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#struct_declarator.
|
|
||||||
def enterStruct_declarator(self, ctx:CParser.Struct_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#struct_declarator.
|
|
||||||
def exitStruct_declarator(self, ctx:CParser.Struct_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#enum_specifier.
|
|
||||||
def enterEnum_specifier(self, ctx:CParser.Enum_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#enum_specifier.
|
|
||||||
def exitEnum_specifier(self, ctx:CParser.Enum_specifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#enumerator_list.
|
|
||||||
def enterEnumerator_list(self, ctx:CParser.Enumerator_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#enumerator_list.
|
|
||||||
def exitEnumerator_list(self, ctx:CParser.Enumerator_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#enumerator.
|
|
||||||
def enterEnumerator(self, ctx:CParser.EnumeratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#enumerator.
|
|
||||||
def exitEnumerator(self, ctx:CParser.EnumeratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#type_qualifier.
|
|
||||||
def enterType_qualifier(self, ctx:CParser.Type_qualifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#type_qualifier.
|
|
||||||
def exitType_qualifier(self, ctx:CParser.Type_qualifierContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#declarator.
|
|
||||||
def enterDeclarator(self, ctx:CParser.DeclaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#declarator.
|
|
||||||
def exitDeclarator(self, ctx:CParser.DeclaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#direct_declarator.
|
|
||||||
def enterDirect_declarator(self, ctx:CParser.Direct_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#direct_declarator.
|
|
||||||
def exitDirect_declarator(self, ctx:CParser.Direct_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#declarator_suffix.
|
|
||||||
def enterDeclarator_suffix(self, ctx:CParser.Declarator_suffixContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#declarator_suffix.
|
|
||||||
def exitDeclarator_suffix(self, ctx:CParser.Declarator_suffixContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#pointer.
|
|
||||||
def enterPointer(self, ctx:CParser.PointerContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#pointer.
|
|
||||||
def exitPointer(self, ctx:CParser.PointerContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#parameter_type_list.
|
|
||||||
def enterParameter_type_list(self, ctx:CParser.Parameter_type_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#parameter_type_list.
|
|
||||||
def exitParameter_type_list(self, ctx:CParser.Parameter_type_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#parameter_list.
|
|
||||||
def enterParameter_list(self, ctx:CParser.Parameter_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#parameter_list.
|
|
||||||
def exitParameter_list(self, ctx:CParser.Parameter_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#parameter_declaration.
|
|
||||||
def enterParameter_declaration(self, ctx:CParser.Parameter_declarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#parameter_declaration.
|
|
||||||
def exitParameter_declaration(self, ctx:CParser.Parameter_declarationContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#identifier_list.
|
|
||||||
def enterIdentifier_list(self, ctx:CParser.Identifier_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#identifier_list.
|
|
||||||
def exitIdentifier_list(self, ctx:CParser.Identifier_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#type_name.
|
|
||||||
def enterType_name(self, ctx:CParser.Type_nameContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#type_name.
|
|
||||||
def exitType_name(self, ctx:CParser.Type_nameContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#abstract_declarator.
|
|
||||||
def enterAbstract_declarator(self, ctx:CParser.Abstract_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#abstract_declarator.
|
|
||||||
def exitAbstract_declarator(self, ctx:CParser.Abstract_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#direct_abstract_declarator.
|
|
||||||
def enterDirect_abstract_declarator(self, ctx:CParser.Direct_abstract_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#direct_abstract_declarator.
|
|
||||||
def exitDirect_abstract_declarator(self, ctx:CParser.Direct_abstract_declaratorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#abstract_declarator_suffix.
|
|
||||||
def enterAbstract_declarator_suffix(self, ctx:CParser.Abstract_declarator_suffixContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#abstract_declarator_suffix.
|
|
||||||
def exitAbstract_declarator_suffix(self, ctx:CParser.Abstract_declarator_suffixContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#initializer.
|
|
||||||
def enterInitializer(self, ctx:CParser.InitializerContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#initializer.
|
|
||||||
def exitInitializer(self, ctx:CParser.InitializerContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#initializer_list.
|
|
||||||
def enterInitializer_list(self, ctx:CParser.Initializer_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#initializer_list.
|
|
||||||
def exitInitializer_list(self, ctx:CParser.Initializer_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#argument_expression_list.
|
|
||||||
def enterArgument_expression_list(self, ctx:CParser.Argument_expression_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#argument_expression_list.
|
|
||||||
def exitArgument_expression_list(self, ctx:CParser.Argument_expression_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#additive_expression.
|
|
||||||
def enterAdditive_expression(self, ctx:CParser.Additive_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#additive_expression.
|
|
||||||
def exitAdditive_expression(self, ctx:CParser.Additive_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#multiplicative_expression.
|
|
||||||
def enterMultiplicative_expression(self, ctx:CParser.Multiplicative_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#multiplicative_expression.
|
|
||||||
def exitMultiplicative_expression(self, ctx:CParser.Multiplicative_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#cast_expression.
|
|
||||||
def enterCast_expression(self, ctx:CParser.Cast_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#cast_expression.
|
|
||||||
def exitCast_expression(self, ctx:CParser.Cast_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#unary_expression.
|
|
||||||
def enterUnary_expression(self, ctx:CParser.Unary_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#unary_expression.
|
|
||||||
def exitUnary_expression(self, ctx:CParser.Unary_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#postfix_expression.
|
|
||||||
def enterPostfix_expression(self, ctx:CParser.Postfix_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#postfix_expression.
|
|
||||||
def exitPostfix_expression(self, ctx:CParser.Postfix_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#macro_parameter_list.
|
|
||||||
def enterMacro_parameter_list(self, ctx:CParser.Macro_parameter_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#macro_parameter_list.
|
|
||||||
def exitMacro_parameter_list(self, ctx:CParser.Macro_parameter_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#unary_operator.
|
|
||||||
def enterUnary_operator(self, ctx:CParser.Unary_operatorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#unary_operator.
|
|
||||||
def exitUnary_operator(self, ctx:CParser.Unary_operatorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#primary_expression.
|
|
||||||
def enterPrimary_expression(self, ctx:CParser.Primary_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#primary_expression.
|
|
||||||
def exitPrimary_expression(self, ctx:CParser.Primary_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#constant.
|
|
||||||
def enterConstant(self, ctx:CParser.ConstantContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#constant.
|
|
||||||
def exitConstant(self, ctx:CParser.ConstantContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#expression.
|
|
||||||
def enterExpression(self, ctx:CParser.ExpressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#expression.
|
|
||||||
def exitExpression(self, ctx:CParser.ExpressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#constant_expression.
|
|
||||||
def enterConstant_expression(self, ctx:CParser.Constant_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#constant_expression.
|
|
||||||
def exitConstant_expression(self, ctx:CParser.Constant_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#assignment_expression.
|
|
||||||
def enterAssignment_expression(self, ctx:CParser.Assignment_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#assignment_expression.
|
|
||||||
def exitAssignment_expression(self, ctx:CParser.Assignment_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#lvalue.
|
|
||||||
def enterLvalue(self, ctx:CParser.LvalueContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#lvalue.
|
|
||||||
def exitLvalue(self, ctx:CParser.LvalueContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#assignment_operator.
|
|
||||||
def enterAssignment_operator(self, ctx:CParser.Assignment_operatorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#assignment_operator.
|
|
||||||
def exitAssignment_operator(self, ctx:CParser.Assignment_operatorContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#conditional_expression.
|
|
||||||
def enterConditional_expression(self, ctx:CParser.Conditional_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#conditional_expression.
|
|
||||||
def exitConditional_expression(self, ctx:CParser.Conditional_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#logical_or_expression.
|
|
||||||
def enterLogical_or_expression(self, ctx:CParser.Logical_or_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#logical_or_expression.
|
|
||||||
def exitLogical_or_expression(self, ctx:CParser.Logical_or_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#logical_and_expression.
|
|
||||||
def enterLogical_and_expression(self, ctx:CParser.Logical_and_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#logical_and_expression.
|
|
||||||
def exitLogical_and_expression(self, ctx:CParser.Logical_and_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#inclusive_or_expression.
|
|
||||||
def enterInclusive_or_expression(self, ctx:CParser.Inclusive_or_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#inclusive_or_expression.
|
|
||||||
def exitInclusive_or_expression(self, ctx:CParser.Inclusive_or_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#exclusive_or_expression.
|
|
||||||
def enterExclusive_or_expression(self, ctx:CParser.Exclusive_or_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#exclusive_or_expression.
|
|
||||||
def exitExclusive_or_expression(self, ctx:CParser.Exclusive_or_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#and_expression.
|
|
||||||
def enterAnd_expression(self, ctx:CParser.And_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#and_expression.
|
|
||||||
def exitAnd_expression(self, ctx:CParser.And_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#equality_expression.
|
|
||||||
def enterEquality_expression(self, ctx:CParser.Equality_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#equality_expression.
|
|
||||||
def exitEquality_expression(self, ctx:CParser.Equality_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#relational_expression.
|
|
||||||
def enterRelational_expression(self, ctx:CParser.Relational_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#relational_expression.
|
|
||||||
def exitRelational_expression(self, ctx:CParser.Relational_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#shift_expression.
|
|
||||||
def enterShift_expression(self, ctx:CParser.Shift_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#shift_expression.
|
|
||||||
def exitShift_expression(self, ctx:CParser.Shift_expressionContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#statement.
|
|
||||||
def enterStatement(self, ctx:CParser.StatementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#statement.
|
|
||||||
def exitStatement(self, ctx:CParser.StatementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#asm2_statement.
|
|
||||||
def enterAsm2_statement(self, ctx:CParser.Asm2_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#asm2_statement.
|
|
||||||
def exitAsm2_statement(self, ctx:CParser.Asm2_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#asm1_statement.
|
|
||||||
def enterAsm1_statement(self, ctx:CParser.Asm1_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#asm1_statement.
|
|
||||||
def exitAsm1_statement(self, ctx:CParser.Asm1_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#asm_statement.
|
|
||||||
def enterAsm_statement(self, ctx:CParser.Asm_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#asm_statement.
|
|
||||||
def exitAsm_statement(self, ctx:CParser.Asm_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#macro_statement.
|
|
||||||
def enterMacro_statement(self, ctx:CParser.Macro_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#macro_statement.
|
|
||||||
def exitMacro_statement(self, ctx:CParser.Macro_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#labeled_statement.
|
|
||||||
def enterLabeled_statement(self, ctx:CParser.Labeled_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#labeled_statement.
|
|
||||||
def exitLabeled_statement(self, ctx:CParser.Labeled_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#compound_statement.
|
|
||||||
def enterCompound_statement(self, ctx:CParser.Compound_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#compound_statement.
|
|
||||||
def exitCompound_statement(self, ctx:CParser.Compound_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#statement_list.
|
|
||||||
def enterStatement_list(self, ctx:CParser.Statement_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#statement_list.
|
|
||||||
def exitStatement_list(self, ctx:CParser.Statement_listContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#expression_statement.
|
|
||||||
def enterExpression_statement(self, ctx:CParser.Expression_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#expression_statement.
|
|
||||||
def exitExpression_statement(self, ctx:CParser.Expression_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#selection_statement.
|
|
||||||
def enterSelection_statement(self, ctx:CParser.Selection_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#selection_statement.
|
|
||||||
def exitSelection_statement(self, ctx:CParser.Selection_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#iteration_statement.
|
|
||||||
def enterIteration_statement(self, ctx:CParser.Iteration_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#iteration_statement.
|
|
||||||
def exitIteration_statement(self, ctx:CParser.Iteration_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Enter a parse tree produced by CParser#jump_statement.
|
|
||||||
def enterJump_statement(self, ctx:CParser.Jump_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Exit a parse tree produced by CParser#jump_statement.
|
|
||||||
def exitJump_statement(self, ctx:CParser.Jump_statementContext):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
@ -21,7 +21,7 @@ import re
|
|||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
import antlr4
|
import antlr3
|
||||||
from .CLexer import CLexer
|
from .CLexer import CLexer
|
||||||
from .CParser import CParser
|
from .CParser import CParser
|
||||||
|
|
||||||
|
@ -17,20 +17,18 @@
|
|||||||
from __future__ import absolute_import
|
from __future__ import absolute_import
|
||||||
import Common.LongFilePathOs as os, time, glob
|
import Common.LongFilePathOs as os, time, glob
|
||||||
import Common.EdkLogger as EdkLogger
|
import Common.EdkLogger as EdkLogger
|
||||||
from Eot import EotGlobalData
|
from . import EotGlobalData
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
from Common.StringUtils import NormPath
|
from Common.StringUtils import NormPath
|
||||||
from Common import BuildToolError
|
from Common import BuildToolError
|
||||||
from Common.Misc import GuidStructureStringToGuidString, sdict
|
from Common.Misc import GuidStructureStringToGuidString, sdict
|
||||||
from Eot.Parser import *
|
from .InfParserLite import *
|
||||||
from Eot.InfParserLite import EdkInfParser
|
from . import c
|
||||||
from Common.StringUtils import GetSplitValueList
|
from . import Database
|
||||||
from Eot import c
|
|
||||||
from Eot import Database
|
|
||||||
from array import array
|
from array import array
|
||||||
from Eot.Report import Report
|
from .Report import Report
|
||||||
from Common.BuildVersion import gBUILD_VERSION
|
from Common.BuildVersion import gBUILD_VERSION
|
||||||
from Eot.Parser import ConvertGuid
|
from .Parser import ConvertGuid
|
||||||
from Common.LongFilePathSupport import OpenLongFilePath as open
|
from Common.LongFilePathSupport import OpenLongFilePath as open
|
||||||
import struct
|
import struct
|
||||||
import uuid
|
import uuid
|
||||||
@ -60,14 +58,14 @@ class Image(array):
|
|||||||
|
|
||||||
self._SubImages = sdict() # {offset: Image()}
|
self._SubImages = sdict() # {offset: Image()}
|
||||||
|
|
||||||
array.__init__(self)
|
array.__init__(self, 'B')
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return self._ID_
|
return self._ID_
|
||||||
|
|
||||||
def __len__(self):
|
def __len__(self):
|
||||||
Len = array.__len__(self)
|
Len = array.__len__(self)
|
||||||
for Offset in self._SubImages.keys():
|
for Offset in self._SubImages:
|
||||||
Len += len(self._SubImages[Offset])
|
Len += len(self._SubImages[Offset])
|
||||||
return Len
|
return Len
|
||||||
|
|
||||||
@ -156,11 +154,19 @@ class CompressedImage(Image):
|
|||||||
|
|
||||||
def _GetSections(self):
|
def _GetSections(self):
|
||||||
try:
|
try:
|
||||||
TmpData = DeCompress('Efi', self[self._HEADER_SIZE_:])
|
from . import EfiCompressor
|
||||||
|
TmpData = EfiCompressor.FrameworkDecompress(
|
||||||
|
self[self._HEADER_SIZE_:],
|
||||||
|
len(self) - self._HEADER_SIZE_
|
||||||
|
)
|
||||||
DecData = array('B')
|
DecData = array('B')
|
||||||
DecData.fromstring(TmpData)
|
DecData.fromstring(TmpData)
|
||||||
except:
|
except:
|
||||||
TmpData = DeCompress('Framework', self[self._HEADER_SIZE_:])
|
from . import EfiCompressor
|
||||||
|
TmpData = EfiCompressor.UefiDecompress(
|
||||||
|
self[self._HEADER_SIZE_:],
|
||||||
|
len(self) - self._HEADER_SIZE_
|
||||||
|
)
|
||||||
DecData = array('B')
|
DecData = array('B')
|
||||||
DecData.fromstring(TmpData)
|
DecData.fromstring(TmpData)
|
||||||
|
|
||||||
@ -291,7 +297,7 @@ class Depex(Image):
|
|||||||
|
|
||||||
Expression = property(_GetExpression)
|
Expression = property(_GetExpression)
|
||||||
|
|
||||||
# # FirmwareVolume() class
|
## FirmwareVolume() class
|
||||||
#
|
#
|
||||||
# A class for Firmware Volume
|
# A class for Firmware Volume
|
||||||
#
|
#
|
||||||
@ -381,7 +387,7 @@ class FirmwareVolume(Image):
|
|||||||
DepexString = DepexList[0].strip()
|
DepexString = DepexList[0].strip()
|
||||||
return (CouldBeLoaded, DepexString, FileDepex)
|
return (CouldBeLoaded, DepexString, FileDepex)
|
||||||
|
|
||||||
def Dispatch(self, Db=None):
|
def Dispatch(self, Db = None):
|
||||||
if Db is None:
|
if Db is None:
|
||||||
return False
|
return False
|
||||||
self.UnDispatchedFfsDict = copy.copy(self.FfsDict)
|
self.UnDispatchedFfsDict = copy.copy(self.FfsDict)
|
||||||
@ -391,7 +397,7 @@ class FirmwareVolume(Image):
|
|||||||
FfsDxeCoreGuid = None
|
FfsDxeCoreGuid = None
|
||||||
FfsPeiPrioriGuid = None
|
FfsPeiPrioriGuid = None
|
||||||
FfsDxePrioriGuid = None
|
FfsDxePrioriGuid = None
|
||||||
for FfsID in self.UnDispatchedFfsDict.keys():
|
for FfsID in self.UnDispatchedFfsDict:
|
||||||
Ffs = self.UnDispatchedFfsDict[FfsID]
|
Ffs = self.UnDispatchedFfsDict[FfsID]
|
||||||
if Ffs.Type == 0x03:
|
if Ffs.Type == 0x03:
|
||||||
FfsSecCoreGuid = FfsID
|
FfsSecCoreGuid = FfsID
|
||||||
@ -433,7 +439,6 @@ class FirmwareVolume(Image):
|
|||||||
if GuidString in self.UnDispatchedFfsDict:
|
if GuidString in self.UnDispatchedFfsDict:
|
||||||
self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
|
self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
|
||||||
self.LoadPpi(Db, GuidString)
|
self.LoadPpi(Db, GuidString)
|
||||||
|
|
||||||
self.DisPatchPei(Db)
|
self.DisPatchPei(Db)
|
||||||
|
|
||||||
# Parse DXE then
|
# Parse DXE then
|
||||||
@ -455,7 +460,6 @@ class FirmwareVolume(Image):
|
|||||||
if GuidString in self.UnDispatchedFfsDict:
|
if GuidString in self.UnDispatchedFfsDict:
|
||||||
self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
|
self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
|
||||||
self.LoadProtocol(Db, GuidString)
|
self.LoadProtocol(Db, GuidString)
|
||||||
|
|
||||||
self.DisPatchDxe(Db)
|
self.DisPatchDxe(Db)
|
||||||
|
|
||||||
def LoadProtocol(self, Db, ModuleGuid):
|
def LoadProtocol(self, Db, ModuleGuid):
|
||||||
@ -497,7 +501,7 @@ class FirmwareVolume(Image):
|
|||||||
def DisPatchDxe(self, Db):
|
def DisPatchDxe(self, Db):
|
||||||
IsInstalled = False
|
IsInstalled = False
|
||||||
ScheduleList = sdict()
|
ScheduleList = sdict()
|
||||||
for FfsID in self.UnDispatchedFfsDict.keys():
|
for FfsID in self.UnDispatchedFfsDict:
|
||||||
CouldBeLoaded = False
|
CouldBeLoaded = False
|
||||||
DepexString = ''
|
DepexString = ''
|
||||||
FileDepex = None
|
FileDepex = None
|
||||||
@ -544,7 +548,7 @@ class FirmwareVolume(Image):
|
|||||||
else:
|
else:
|
||||||
self.UnDispatchedFfsDict[FfsID].Depex = DepexString
|
self.UnDispatchedFfsDict[FfsID].Depex = DepexString
|
||||||
|
|
||||||
for FfsID in ScheduleList.keys():
|
for FfsID in ScheduleList:
|
||||||
NewFfs = ScheduleList.pop(FfsID)
|
NewFfs = ScheduleList.pop(FfsID)
|
||||||
FfsName = 'UnKnown'
|
FfsName = 'UnKnown'
|
||||||
self.OrderedFfsDict[FfsID] = NewFfs
|
self.OrderedFfsDict[FfsID] = NewFfs
|
||||||
@ -556,13 +560,12 @@ class FirmwareVolume(Image):
|
|||||||
RecordSet = Db.TblReport.Exec(SqlCommand)
|
RecordSet = Db.TblReport.Exec(SqlCommand)
|
||||||
if RecordSet != []:
|
if RecordSet != []:
|
||||||
FfsName = RecordSet[0][0]
|
FfsName = RecordSet[0][0]
|
||||||
|
|
||||||
if IsInstalled:
|
if IsInstalled:
|
||||||
self.DisPatchDxe(Db)
|
self.DisPatchDxe(Db)
|
||||||
|
|
||||||
def DisPatchPei(self, Db):
|
def DisPatchPei(self, Db):
|
||||||
IsInstalled = False
|
IsInstalled = False
|
||||||
for FfsID in self.UnDispatchedFfsDict.keys():
|
for FfsID in self.UnDispatchedFfsDict:
|
||||||
CouldBeLoaded = True
|
CouldBeLoaded = True
|
||||||
DepexString = ''
|
DepexString = ''
|
||||||
FileDepex = None
|
FileDepex = None
|
||||||
@ -573,6 +576,7 @@ class FirmwareVolume(Image):
|
|||||||
if Section.Type == 0x1B:
|
if Section.Type == 0x1B:
|
||||||
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Ppi')
|
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Ppi')
|
||||||
break
|
break
|
||||||
|
|
||||||
if Section.Type == 0x01:
|
if Section.Type == 0x01:
|
||||||
CompressSections = Section._SubImages[4]
|
CompressSections = Section._SubImages[4]
|
||||||
for CompressSection in CompressSections.Sections:
|
for CompressSection in CompressSections.Sections:
|
||||||
@ -599,7 +603,6 @@ class FirmwareVolume(Image):
|
|||||||
if IsInstalled:
|
if IsInstalled:
|
||||||
self.DisPatchPei(Db)
|
self.DisPatchPei(Db)
|
||||||
|
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
global gIndention
|
global gIndention
|
||||||
gIndention += 4
|
gIndention += 4
|
||||||
@ -612,7 +615,7 @@ class FirmwareVolume(Image):
|
|||||||
def _Unpack(self):
|
def _Unpack(self):
|
||||||
Size = self._LENGTH_.unpack_from(self._BUF_, self._OFF_)[0]
|
Size = self._LENGTH_.unpack_from(self._BUF_, self._OFF_)[0]
|
||||||
self.empty()
|
self.empty()
|
||||||
self.extend(self._BUF_[self._OFF_:self._OFF_ + Size])
|
self.extend(self._BUF_[self._OFF_:self._OFF_+Size])
|
||||||
|
|
||||||
# traverse the FFS
|
# traverse the FFS
|
||||||
EndOfFv = Size
|
EndOfFv = Size
|
||||||
@ -740,9 +743,10 @@ class GuidDefinedImage(Image):
|
|||||||
SectionList.append(Sec)
|
SectionList.append(Sec)
|
||||||
elif Guid == self.TIANO_COMPRESS_GUID:
|
elif Guid == self.TIANO_COMPRESS_GUID:
|
||||||
try:
|
try:
|
||||||
|
from . import EfiCompressor
|
||||||
# skip the header
|
# skip the header
|
||||||
Offset = self.DataOffset - 4
|
Offset = self.DataOffset - 4
|
||||||
TmpData = DeCompress('Framework', self[self.Offset:])
|
TmpData = EfiCompressor.FrameworkDecompress(self[Offset:], len(self)-Offset)
|
||||||
DecData = array('B')
|
DecData = array('B')
|
||||||
DecData.fromstring(TmpData)
|
DecData.fromstring(TmpData)
|
||||||
Offset = 0
|
Offset = 0
|
||||||
@ -760,10 +764,10 @@ class GuidDefinedImage(Image):
|
|||||||
pass
|
pass
|
||||||
elif Guid == self.LZMA_COMPRESS_GUID:
|
elif Guid == self.LZMA_COMPRESS_GUID:
|
||||||
try:
|
try:
|
||||||
|
from . import LzmaCompressor
|
||||||
# skip the header
|
# skip the header
|
||||||
Offset = self.DataOffset - 4
|
Offset = self.DataOffset - 4
|
||||||
|
TmpData = LzmaCompressor.LzmaDecompress(self[Offset:], len(self)-Offset)
|
||||||
TmpData = DeCompress('Lzma', self[self.Offset:])
|
|
||||||
DecData = array('B')
|
DecData = array('B')
|
||||||
DecData.fromstring(TmpData)
|
DecData.fromstring(TmpData)
|
||||||
Offset = 0
|
Offset = 0
|
||||||
@ -844,7 +848,7 @@ class Section(Image):
|
|||||||
SectionInfo += "[SECTION:%s] offset=%x size=%x" % (self._TypeName[self.Type], self._OFF_, self.Size)
|
SectionInfo += "[SECTION:%s] offset=%x size=%x" % (self._TypeName[self.Type], self._OFF_, self.Size)
|
||||||
else:
|
else:
|
||||||
SectionInfo += "[SECTION:%x<unknown>] offset=%x size=%x " % (self.Type, self._OFF_, self.Size)
|
SectionInfo += "[SECTION:%x<unknown>] offset=%x size=%x " % (self.Type, self._OFF_, self.Size)
|
||||||
for Offset in self._SubImages.keys():
|
for Offset in self._SubImages:
|
||||||
SectionInfo += ", " + str(self._SubImages[Offset])
|
SectionInfo += ", " + str(self._SubImages[Offset])
|
||||||
gIndention -= 4
|
gIndention -= 4
|
||||||
return SectionInfo
|
return SectionInfo
|
||||||
@ -978,7 +982,7 @@ class Ffs(Image):
|
|||||||
FfsInfo = Indention
|
FfsInfo = Indention
|
||||||
FfsInfo += "[FFS:%s] offset=%x size=%x guid=%s free_space=%x alignment=%s\n" % \
|
FfsInfo += "[FFS:%s] offset=%x size=%x guid=%s free_space=%x alignment=%s\n" % \
|
||||||
(Ffs._TypeName[self.Type], self._OFF_, self.Size, self.Guid, self.FreeSpace, self.Alignment)
|
(Ffs._TypeName[self.Type], self._OFF_, self.Size, self.Guid, self.FreeSpace, self.Alignment)
|
||||||
SectionInfo = '\n'.join([str(self.Sections[Offset]) for Offset in self.Sections.keys()])
|
SectionInfo = '\n'.join([str(self.Sections[Offset]) for Offset in self.Sections])
|
||||||
gIndention -= 4
|
gIndention -= 4
|
||||||
return FfsInfo + SectionInfo + "\n"
|
return FfsInfo + SectionInfo + "\n"
|
||||||
|
|
||||||
@ -1083,6 +1087,379 @@ class Ffs(Image):
|
|||||||
Alignment = property(_GetAlignment)
|
Alignment = property(_GetAlignment)
|
||||||
State = property(_GetState, _SetState)
|
State = property(_GetState, _SetState)
|
||||||
|
|
||||||
|
## FirmwareVolume() class
|
||||||
|
#
|
||||||
|
# A class for Firmware Volume
|
||||||
|
#
|
||||||
|
class FirmwareVolume(Image):
|
||||||
|
# Read FvLength, Attributes, HeaderLength, Checksum
|
||||||
|
_HEADER_ = struct.Struct("16x 1I2H8B 1Q 4x 1I 1H 1H")
|
||||||
|
_HEADER_SIZE_ = _HEADER_.size
|
||||||
|
|
||||||
|
_FfsGuid = "8C8CE578-8A3D-4F1C-9935-896185C32DD3"
|
||||||
|
|
||||||
|
_GUID_ = struct.Struct("16x 1I2H8B")
|
||||||
|
_LENGTH_ = struct.Struct("16x 16x 1Q")
|
||||||
|
_SIG_ = struct.Struct("16x 16x 8x 1I")
|
||||||
|
_ATTR_ = struct.Struct("16x 16x 8x 4x 1I")
|
||||||
|
_HLEN_ = struct.Struct("16x 16x 8x 4x 4x 1H")
|
||||||
|
_CHECKSUM_ = struct.Struct("16x 16x 8x 4x 4x 2x 1H")
|
||||||
|
|
||||||
|
def __init__(self, Name=''):
|
||||||
|
Image.__init__(self)
|
||||||
|
self.Name = Name
|
||||||
|
self.FfsDict = sdict()
|
||||||
|
self.OrderedFfsDict = sdict()
|
||||||
|
self.UnDispatchedFfsDict = sdict()
|
||||||
|
self.ProtocolList = sdict()
|
||||||
|
|
||||||
|
def CheckArchProtocol(self):
|
||||||
|
for Item in EotGlobalData.gArchProtocolGuids:
|
||||||
|
if Item.lower() not in EotGlobalData.gProtocolList:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def ParseDepex(self, Depex, Type):
|
||||||
|
List = None
|
||||||
|
if Type == 'Ppi':
|
||||||
|
List = EotGlobalData.gPpiList
|
||||||
|
if Type == 'Protocol':
|
||||||
|
List = EotGlobalData.gProtocolList
|
||||||
|
DepexStack = []
|
||||||
|
DepexList = []
|
||||||
|
DepexString = ''
|
||||||
|
FileDepex = None
|
||||||
|
CouldBeLoaded = True
|
||||||
|
for Index in range(0, len(Depex.Expression)):
|
||||||
|
Item = Depex.Expression[Index]
|
||||||
|
if Item == 0x00:
|
||||||
|
Index = Index + 1
|
||||||
|
Guid = gGuidStringFormat % Depex.Expression[Index]
|
||||||
|
if Guid in self.OrderedFfsDict and Depex.Expression[Index + 1] == 0x08:
|
||||||
|
return (True, 'BEFORE %s' % Guid, [Guid, 'BEFORE'])
|
||||||
|
elif Item == 0x01:
|
||||||
|
Index = Index + 1
|
||||||
|
Guid = gGuidStringFormat % Depex.Expression[Index]
|
||||||
|
if Guid in self.OrderedFfsDict and Depex.Expression[Index + 1] == 0x08:
|
||||||
|
return (True, 'AFTER %s' % Guid, [Guid, 'AFTER'])
|
||||||
|
elif Item == 0x02:
|
||||||
|
Index = Index + 1
|
||||||
|
Guid = gGuidStringFormat % Depex.Expression[Index]
|
||||||
|
if Guid.lower() in List:
|
||||||
|
DepexStack.append(True)
|
||||||
|
DepexList.append(Guid)
|
||||||
|
else:
|
||||||
|
DepexStack.append(False)
|
||||||
|
DepexList.append(Guid)
|
||||||
|
continue
|
||||||
|
elif Item == 0x03 or Item == 0x04:
|
||||||
|
DepexStack.append(eval(str(DepexStack.pop()) + ' ' + Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexStack.pop())))
|
||||||
|
DepexList.append(str(DepexList.pop()) + ' ' + Depex._OPCODE_STRING_[Item].upper() + ' ' + str(DepexList.pop()))
|
||||||
|
elif Item == 0x05:
|
||||||
|
DepexStack.append(eval(Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexStack.pop())))
|
||||||
|
DepexList.append(Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexList.pop()))
|
||||||
|
elif Item == 0x06:
|
||||||
|
DepexStack.append(True)
|
||||||
|
DepexList.append('TRUE')
|
||||||
|
DepexString = DepexString + 'TRUE' + ' '
|
||||||
|
elif Item == 0x07:
|
||||||
|
DepexStack.append(False)
|
||||||
|
DepexList.append('False')
|
||||||
|
DepexString = DepexString + 'FALSE' + ' '
|
||||||
|
elif Item == 0x08:
|
||||||
|
if Index != len(Depex.Expression) - 1:
|
||||||
|
CouldBeLoaded = False
|
||||||
|
else:
|
||||||
|
CouldBeLoaded = DepexStack.pop()
|
||||||
|
else:
|
||||||
|
CouldBeLoaded = False
|
||||||
|
if DepexList != []:
|
||||||
|
DepexString = DepexList[0].strip()
|
||||||
|
return (CouldBeLoaded, DepexString, FileDepex)
|
||||||
|
|
||||||
|
def Dispatch(self, Db = None):
|
||||||
|
if Db is None:
|
||||||
|
return False
|
||||||
|
self.UnDispatchedFfsDict = copy.copy(self.FfsDict)
|
||||||
|
# Find PeiCore, DexCore, PeiPriori, DxePriori first
|
||||||
|
FfsSecCoreGuid = None
|
||||||
|
FfsPeiCoreGuid = None
|
||||||
|
FfsDxeCoreGuid = None
|
||||||
|
FfsPeiPrioriGuid = None
|
||||||
|
FfsDxePrioriGuid = None
|
||||||
|
for FfsID in self.UnDispatchedFfsDict:
|
||||||
|
Ffs = self.UnDispatchedFfsDict[FfsID]
|
||||||
|
if Ffs.Type == 0x03:
|
||||||
|
FfsSecCoreGuid = FfsID
|
||||||
|
continue
|
||||||
|
if Ffs.Type == 0x04:
|
||||||
|
FfsPeiCoreGuid = FfsID
|
||||||
|
continue
|
||||||
|
if Ffs.Type == 0x05:
|
||||||
|
FfsDxeCoreGuid = FfsID
|
||||||
|
continue
|
||||||
|
if Ffs.Guid.lower() == gPeiAprioriFileNameGuid:
|
||||||
|
FfsPeiPrioriGuid = FfsID
|
||||||
|
continue
|
||||||
|
if Ffs.Guid.lower() == gAprioriGuid:
|
||||||
|
FfsDxePrioriGuid = FfsID
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Parse SEC_CORE first
|
||||||
|
if FfsSecCoreGuid is not None:
|
||||||
|
self.OrderedFfsDict[FfsSecCoreGuid] = self.UnDispatchedFfsDict.pop(FfsSecCoreGuid)
|
||||||
|
self.LoadPpi(Db, FfsSecCoreGuid)
|
||||||
|
|
||||||
|
# Parse PEI first
|
||||||
|
if FfsPeiCoreGuid is not None:
|
||||||
|
self.OrderedFfsDict[FfsPeiCoreGuid] = self.UnDispatchedFfsDict.pop(FfsPeiCoreGuid)
|
||||||
|
self.LoadPpi(Db, FfsPeiCoreGuid)
|
||||||
|
if FfsPeiPrioriGuid is not None:
|
||||||
|
# Load PEIM described in priori file
|
||||||
|
FfsPeiPriori = self.UnDispatchedFfsDict.pop(FfsPeiPrioriGuid)
|
||||||
|
if len(FfsPeiPriori.Sections) == 1:
|
||||||
|
Section = FfsPeiPriori.Sections.popitem()[1]
|
||||||
|
if Section.Type == 0x19:
|
||||||
|
GuidStruct = struct.Struct('1I2H8B')
|
||||||
|
Start = 4
|
||||||
|
while len(Section) > Start:
|
||||||
|
Guid = GuidStruct.unpack_from(Section[Start : Start + 16])
|
||||||
|
GuidString = gGuidStringFormat % Guid
|
||||||
|
Start = Start + 16
|
||||||
|
if GuidString in self.UnDispatchedFfsDict:
|
||||||
|
self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
|
||||||
|
self.LoadPpi(Db, GuidString)
|
||||||
|
|
||||||
|
self.DisPatchPei(Db)
|
||||||
|
|
||||||
|
# Parse DXE then
|
||||||
|
if FfsDxeCoreGuid is not None:
|
||||||
|
self.OrderedFfsDict[FfsDxeCoreGuid] = self.UnDispatchedFfsDict.pop(FfsDxeCoreGuid)
|
||||||
|
self.LoadProtocol(Db, FfsDxeCoreGuid)
|
||||||
|
if FfsDxePrioriGuid is not None:
|
||||||
|
# Load PEIM described in priori file
|
||||||
|
FfsDxePriori = self.UnDispatchedFfsDict.pop(FfsDxePrioriGuid)
|
||||||
|
if len(FfsDxePriori.Sections) == 1:
|
||||||
|
Section = FfsDxePriori.Sections.popitem()[1]
|
||||||
|
if Section.Type == 0x19:
|
||||||
|
GuidStruct = struct.Struct('1I2H8B')
|
||||||
|
Start = 4
|
||||||
|
while len(Section) > Start:
|
||||||
|
Guid = GuidStruct.unpack_from(Section[Start : Start + 16])
|
||||||
|
GuidString = gGuidStringFormat % Guid
|
||||||
|
Start = Start + 16
|
||||||
|
if GuidString in self.UnDispatchedFfsDict:
|
||||||
|
self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
|
||||||
|
self.LoadProtocol(Db, GuidString)
|
||||||
|
|
||||||
|
self.DisPatchDxe(Db)
|
||||||
|
|
||||||
|
def LoadProtocol(self, Db, ModuleGuid):
|
||||||
|
SqlCommand = """select GuidValue from Report
|
||||||
|
where SourceFileFullPath in
|
||||||
|
(select Value1 from Inf where BelongsToFile =
|
||||||
|
(select BelongsToFile from Inf
|
||||||
|
where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s)
|
||||||
|
and Model = %s)
|
||||||
|
and ItemType = 'Protocol' and ItemMode = 'Produced'""" \
|
||||||
|
% (ModuleGuid, 5001, 3007)
|
||||||
|
RecordSet = Db.TblReport.Exec(SqlCommand)
|
||||||
|
for Record in RecordSet:
|
||||||
|
SqlCommand = """select Value2 from Inf where BelongsToFile =
|
||||||
|
(select DISTINCT BelongsToFile from Inf
|
||||||
|
where Value1 =
|
||||||
|
(select SourceFileFullPath from Report
|
||||||
|
where GuidValue like '%s' and ItemMode = 'Callback'))
|
||||||
|
and Value1 = 'FILE_GUID'""" % Record[0]
|
||||||
|
CallBackSet = Db.TblReport.Exec(SqlCommand)
|
||||||
|
if CallBackSet != []:
|
||||||
|
EotGlobalData.gProtocolList[Record[0].lower()] = ModuleGuid
|
||||||
|
else:
|
||||||
|
EotGlobalData.gProtocolList[Record[0].lower()] = ModuleGuid
|
||||||
|
|
||||||
|
def LoadPpi(self, Db, ModuleGuid):
|
||||||
|
SqlCommand = """select GuidValue from Report
|
||||||
|
where SourceFileFullPath in
|
||||||
|
(select Value1 from Inf where BelongsToFile =
|
||||||
|
(select BelongsToFile from Inf
|
||||||
|
where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s)
|
||||||
|
and Model = %s)
|
||||||
|
and ItemType = 'Ppi' and ItemMode = 'Produced'""" \
|
||||||
|
% (ModuleGuid, 5001, 3007)
|
||||||
|
RecordSet = Db.TblReport.Exec(SqlCommand)
|
||||||
|
for Record in RecordSet:
|
||||||
|
EotGlobalData.gPpiList[Record[0].lower()] = ModuleGuid
|
||||||
|
|
||||||
|
def DisPatchDxe(self, Db):
|
||||||
|
IsInstalled = False
|
||||||
|
ScheduleList = sdict()
|
||||||
|
for FfsID in self.UnDispatchedFfsDict:
|
||||||
|
CouldBeLoaded = False
|
||||||
|
DepexString = ''
|
||||||
|
FileDepex = None
|
||||||
|
Ffs = self.UnDispatchedFfsDict[FfsID]
|
||||||
|
if Ffs.Type == 0x07:
|
||||||
|
# Get Depex
|
||||||
|
IsFoundDepex = False
|
||||||
|
for Section in Ffs.Sections.values():
|
||||||
|
# Find Depex
|
||||||
|
if Section.Type == 0x13:
|
||||||
|
IsFoundDepex = True
|
||||||
|
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Protocol')
|
||||||
|
break
|
||||||
|
if Section.Type == 0x01:
|
||||||
|
CompressSections = Section._SubImages[4]
|
||||||
|
for CompressSection in CompressSections.Sections:
|
||||||
|
if CompressSection.Type == 0x13:
|
||||||
|
IsFoundDepex = True
|
||||||
|
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(CompressSection._SubImages[4], 'Protocol')
|
||||||
|
break
|
||||||
|
if CompressSection.Type == 0x02:
|
||||||
|
NewSections = CompressSection._SubImages[4]
|
||||||
|
for NewSection in NewSections.Sections:
|
||||||
|
if NewSection.Type == 0x13:
|
||||||
|
IsFoundDepex = True
|
||||||
|
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(NewSection._SubImages[4], 'Protocol')
|
||||||
|
break
|
||||||
|
|
||||||
|
# Not find Depex
|
||||||
|
if not IsFoundDepex:
|
||||||
|
CouldBeLoaded = self.CheckArchProtocol()
|
||||||
|
DepexString = ''
|
||||||
|
FileDepex = None
|
||||||
|
|
||||||
|
# Append New Ffs
|
||||||
|
if CouldBeLoaded:
|
||||||
|
IsInstalled = True
|
||||||
|
NewFfs = self.UnDispatchedFfsDict.pop(FfsID)
|
||||||
|
NewFfs.Depex = DepexString
|
||||||
|
if FileDepex is not None:
|
||||||
|
ScheduleList.insert(FileDepex[1], FfsID, NewFfs, FileDepex[0])
|
||||||
|
else:
|
||||||
|
ScheduleList[FfsID] = NewFfs
|
||||||
|
else:
|
||||||
|
self.UnDispatchedFfsDict[FfsID].Depex = DepexString
|
||||||
|
|
||||||
|
for FfsID in ScheduleList:
|
||||||
|
NewFfs = ScheduleList.pop(FfsID)
|
||||||
|
FfsName = 'UnKnown'
|
||||||
|
self.OrderedFfsDict[FfsID] = NewFfs
|
||||||
|
self.LoadProtocol(Db, FfsID)
|
||||||
|
|
||||||
|
SqlCommand = """select Value2 from Inf
|
||||||
|
where BelongsToFile = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and lower(Value2) = lower('%s') and Model = %s)
|
||||||
|
and Model = %s and Value1='BASE_NAME'""" % (FfsID, 5001, 5001)
|
||||||
|
RecordSet = Db.TblReport.Exec(SqlCommand)
|
||||||
|
if RecordSet != []:
|
||||||
|
FfsName = RecordSet[0][0]
|
||||||
|
|
||||||
|
if IsInstalled:
|
||||||
|
self.DisPatchDxe(Db)
|
||||||
|
|
||||||
|
def DisPatchPei(self, Db):
|
||||||
|
IsInstalled = False
|
||||||
|
for FfsID in self.UnDispatchedFfsDict:
|
||||||
|
CouldBeLoaded = True
|
||||||
|
DepexString = ''
|
||||||
|
FileDepex = None
|
||||||
|
Ffs = self.UnDispatchedFfsDict[FfsID]
|
||||||
|
if Ffs.Type == 0x06 or Ffs.Type == 0x08:
|
||||||
|
# Get Depex
|
||||||
|
for Section in Ffs.Sections.values():
|
||||||
|
if Section.Type == 0x1B:
|
||||||
|
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Ppi')
|
||||||
|
break
|
||||||
|
if Section.Type == 0x01:
|
||||||
|
CompressSections = Section._SubImages[4]
|
||||||
|
for CompressSection in CompressSections.Sections:
|
||||||
|
if CompressSection.Type == 0x1B:
|
||||||
|
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(CompressSection._SubImages[4], 'Ppi')
|
||||||
|
break
|
||||||
|
if CompressSection.Type == 0x02:
|
||||||
|
NewSections = CompressSection._SubImages[4]
|
||||||
|
for NewSection in NewSections.Sections:
|
||||||
|
if NewSection.Type == 0x1B:
|
||||||
|
CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(NewSection._SubImages[4], 'Ppi')
|
||||||
|
break
|
||||||
|
|
||||||
|
# Append New Ffs
|
||||||
|
if CouldBeLoaded:
|
||||||
|
IsInstalled = True
|
||||||
|
NewFfs = self.UnDispatchedFfsDict.pop(FfsID)
|
||||||
|
NewFfs.Depex = DepexString
|
||||||
|
self.OrderedFfsDict[FfsID] = NewFfs
|
||||||
|
self.LoadPpi(Db, FfsID)
|
||||||
|
else:
|
||||||
|
self.UnDispatchedFfsDict[FfsID].Depex = DepexString
|
||||||
|
|
||||||
|
if IsInstalled:
|
||||||
|
self.DisPatchPei(Db)
|
||||||
|
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
global gIndention
|
||||||
|
gIndention += 4
|
||||||
|
FvInfo = '\n' + ' ' * gIndention
|
||||||
|
FvInfo += "[FV:%s] file_system=%s size=%x checksum=%s\n" % (self.Name, self.FileSystemGuid, self.Size, self.Checksum)
|
||||||
|
FfsInfo = "\n".join([str(self.FfsDict[FfsId]) for FfsId in self.FfsDict])
|
||||||
|
gIndention -= 4
|
||||||
|
return FvInfo + FfsInfo
|
||||||
|
|
||||||
|
def _Unpack(self):
|
||||||
|
Size = self._LENGTH_.unpack_from(self._BUF_, self._OFF_)[0]
|
||||||
|
self.empty()
|
||||||
|
self.extend(self._BUF_[self._OFF_:self._OFF_+Size])
|
||||||
|
|
||||||
|
# traverse the FFS
|
||||||
|
EndOfFv = Size
|
||||||
|
FfsStartAddress = self.HeaderSize
|
||||||
|
LastFfsObj = None
|
||||||
|
while FfsStartAddress < EndOfFv:
|
||||||
|
FfsObj = Ffs()
|
||||||
|
FfsObj.frombuffer(self, FfsStartAddress)
|
||||||
|
FfsId = repr(FfsObj)
|
||||||
|
if ((self.Attributes & 0x00000800) != 0 and len(FfsObj) == 0xFFFFFF) \
|
||||||
|
or ((self.Attributes & 0x00000800) == 0 and len(FfsObj) == 0):
|
||||||
|
if LastFfsObj is not None:
|
||||||
|
LastFfsObj.FreeSpace = EndOfFv - LastFfsObj._OFF_ - len(LastFfsObj)
|
||||||
|
else:
|
||||||
|
if FfsId in self.FfsDict:
|
||||||
|
EdkLogger.error("FV", 0, "Duplicate GUID in FFS",
|
||||||
|
ExtraData="\t%s @ %s\n\t%s @ %s" \
|
||||||
|
% (FfsObj.Guid, FfsObj.Offset,
|
||||||
|
self.FfsDict[FfsId].Guid, self.FfsDict[FfsId].Offset))
|
||||||
|
self.FfsDict[FfsId] = FfsObj
|
||||||
|
if LastFfsObj is not None:
|
||||||
|
LastFfsObj.FreeSpace = FfsStartAddress - LastFfsObj._OFF_ - len(LastFfsObj)
|
||||||
|
|
||||||
|
FfsStartAddress += len(FfsObj)
|
||||||
|
#
|
||||||
|
# align to next 8-byte aligned address: A = (A + 8 - 1) & (~(8 - 1))
|
||||||
|
# The next FFS must be at the latest next 8-byte aligned address
|
||||||
|
#
|
||||||
|
FfsStartAddress = (FfsStartAddress + 7) & (~7)
|
||||||
|
LastFfsObj = FfsObj
|
||||||
|
|
||||||
|
def _GetAttributes(self):
|
||||||
|
return self.GetField(self._ATTR_, 0)[0]
|
||||||
|
|
||||||
|
def _GetSize(self):
|
||||||
|
return self.GetField(self._LENGTH_, 0)[0]
|
||||||
|
|
||||||
|
def _GetChecksum(self):
|
||||||
|
return self.GetField(self._CHECKSUM_, 0)[0]
|
||||||
|
|
||||||
|
def _GetHeaderLength(self):
|
||||||
|
return self.GetField(self._HLEN_, 0)[0]
|
||||||
|
|
||||||
|
def _GetFileSystemGuid(self):
|
||||||
|
return gGuidStringFormat % self.GetField(self._GUID_, 0)
|
||||||
|
|
||||||
|
Attributes = property(_GetAttributes)
|
||||||
|
Size = property(_GetSize)
|
||||||
|
Checksum = property(_GetChecksum)
|
||||||
|
HeaderSize = property(_GetHeaderLength)
|
||||||
|
FileSystemGuid = property(_GetFileSystemGuid)
|
||||||
|
|
||||||
## MultipleFv() class
|
## MultipleFv() class
|
||||||
#
|
#
|
||||||
@ -1093,9 +1470,7 @@ class MultipleFv(FirmwareVolume):
|
|||||||
FirmwareVolume.__init__(self)
|
FirmwareVolume.__init__(self)
|
||||||
self.BasicInfo = []
|
self.BasicInfo = []
|
||||||
for FvPath in FvList:
|
for FvPath in FvList:
|
||||||
Fd = None
|
|
||||||
FvName = os.path.splitext(os.path.split(FvPath)[1])[0]
|
FvName = os.path.splitext(os.path.split(FvPath)[1])[0]
|
||||||
if FvPath.strip():
|
|
||||||
Fd = open(FvPath, 'rb')
|
Fd = open(FvPath, 'rb')
|
||||||
Buf = array('B')
|
Buf = array('B')
|
||||||
try:
|
try:
|
||||||
@ -1257,7 +1632,6 @@ class Eot(object):
|
|||||||
Path = os.path.join(EotGlobalData.gWORKSPACE, GuidList)
|
Path = os.path.join(EotGlobalData.gWORKSPACE, GuidList)
|
||||||
if os.path.isfile(Path):
|
if os.path.isfile(Path):
|
||||||
for Line in open(Path):
|
for Line in open(Path):
|
||||||
if Line.strip():
|
|
||||||
(GuidName, GuidValue) = Line.split()
|
(GuidName, GuidValue) = Line.split()
|
||||||
EotGlobalData.gGuidDict[GuidName] = GuidValue
|
EotGlobalData.gGuidDict[GuidName] = GuidValue
|
||||||
|
|
||||||
@ -1320,7 +1694,7 @@ class Eot(object):
|
|||||||
mCurrentSourceFileList = []
|
mCurrentSourceFileList = []
|
||||||
|
|
||||||
if SourceFileList:
|
if SourceFileList:
|
||||||
sfl = open(SourceFileList, 'r')
|
sfl = open(SourceFileList, 'rb')
|
||||||
for line in sfl:
|
for line in sfl:
|
||||||
line = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip()))
|
line = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip()))
|
||||||
if line[-2:].upper() == '.C' or line[-2:].upper() == '.H':
|
if line[-2:].upper() == '.C' or line[-2:].upper() == '.H':
|
||||||
@ -1596,8 +1970,6 @@ class Eot(object):
|
|||||||
def BuildMetaDataFileDatabase(self, Inf_Files):
|
def BuildMetaDataFileDatabase(self, Inf_Files):
|
||||||
EdkLogger.quiet("Building database for meta data files ...")
|
EdkLogger.quiet("Building database for meta data files ...")
|
||||||
for InfFile in Inf_Files:
|
for InfFile in Inf_Files:
|
||||||
if not InfFile:
|
|
||||||
continue
|
|
||||||
EdkLogger.quiet("Parsing %s ..." % str(InfFile))
|
EdkLogger.quiet("Parsing %s ..." % str(InfFile))
|
||||||
EdkInfParser(InfFile, EotGlobalData.gDb, Inf_Files[InfFile], '')
|
EdkInfParser(InfFile, EotGlobalData.gDb, Inf_Files[InfFile], '')
|
||||||
|
|
||||||
@ -1711,10 +2083,7 @@ if __name__ == '__main__':
|
|||||||
EdkLogger.quiet(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n")
|
EdkLogger.quiet(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n")
|
||||||
|
|
||||||
StartTime = time.clock()
|
StartTime = time.clock()
|
||||||
Eot = Eot(CommandLineOption=False,
|
Eot = Eot()
|
||||||
SourceFileList=r'C:\TestEot\Source.txt',
|
|
||||||
GuidList=r'C:\TestEot\Guid.txt',
|
|
||||||
FvFileList=r'C:\TestEot\FVRECOVERY.Fv')
|
|
||||||
FinishTime = time.clock()
|
FinishTime = time.clock()
|
||||||
|
|
||||||
BuildDuration = time.strftime("%M:%S", time.gmtime(int(round(FinishTime - StartTime))))
|
BuildDuration = time.strftime("%M:%S", time.gmtime(int(round(FinishTime - StartTime))))
|
@ -22,8 +22,8 @@ from Common.DataType import *
|
|||||||
from CommonDataClass.DataClass import *
|
from CommonDataClass.DataClass import *
|
||||||
from Common.Identification import *
|
from Common.Identification import *
|
||||||
from Common.StringUtils import *
|
from Common.StringUtils import *
|
||||||
from Eot.Parser import *
|
from .Parser import *
|
||||||
from Eot import Database
|
from . import Database
|
||||||
|
|
||||||
## EdkInfParser() class
|
## EdkInfParser() class
|
||||||
#
|
#
|
||||||
@ -153,3 +153,21 @@ class EdkInfParser(object):
|
|||||||
self.ParserSource(CurrentSection, SectionItemList, ArchList, ThirdList)
|
self.ParserSource(CurrentSection, SectionItemList, ArchList, ThirdList)
|
||||||
#End of For
|
#End of For
|
||||||
|
|
||||||
|
##
|
||||||
|
#
|
||||||
|
# This acts like the main() function for the script, unless it is 'import'ed into another
|
||||||
|
# script.
|
||||||
|
#
|
||||||
|
if __name__ == '__main__':
|
||||||
|
EdkLogger.Initialize()
|
||||||
|
EdkLogger.SetLevel(EdkLogger.QUIET)
|
||||||
|
|
||||||
|
Db = Database.Database('Inf.db')
|
||||||
|
Db.InitDatabase()
|
||||||
|
P = EdkInfParser(os.path.normpath("C:\Framework\Edk\Sample\Platform\Nt32\Dxe\PlatformBds\PlatformBds.inf"), Db, '', '')
|
||||||
|
for Inf in P.Sources:
|
||||||
|
print(Inf)
|
||||||
|
for Item in P.Macros:
|
||||||
|
print(Item, P.Macros[Item])
|
||||||
|
|
||||||
|
Db.Close()
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
# This file is used to define common parsing related functions used in parsing
|
# This file is used to define common parsing related functions used in parsing
|
||||||
# Inf/Dsc/Makefile process
|
# Inf/Dsc/Makefile process
|
||||||
#
|
#
|
||||||
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
|
# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
|
||||||
# This program and the accompanying materials
|
# This program and the accompanying materials
|
||||||
# are licensed and made available under the terms and conditions of the BSD License
|
# are licensed and made available under the terms and conditions of the BSD License
|
||||||
# which accompanies this distribution. The full text of the license may be found at
|
# which accompanies this distribution. The full text of the license may be found at
|
||||||
@ -25,32 +25,6 @@ from . import EotGlobalData
|
|||||||
from Common.StringUtils import GetSplitList
|
from Common.StringUtils import GetSplitList
|
||||||
from Common.LongFilePathSupport import OpenLongFilePath as open
|
from Common.LongFilePathSupport import OpenLongFilePath as open
|
||||||
|
|
||||||
import subprocess
|
|
||||||
|
|
||||||
## DeCompress
|
|
||||||
#
|
|
||||||
# Call external decompress tool to decompress the fv section
|
|
||||||
#
|
|
||||||
def DeCompress(Method, Input):
|
|
||||||
# Write the input to a temp file
|
|
||||||
open('_Temp.bin', 'wb').write(Input)
|
|
||||||
cmd = ''
|
|
||||||
if Method == 'Lzma':
|
|
||||||
cmd = r'LzmaCompress -o _New.bin -d _Temp.bin'
|
|
||||||
if Method == 'Efi':
|
|
||||||
cmd = r'TianoCompress -d --uefi -o _New.bin _Temp.bin'
|
|
||||||
if Method == 'Framework':
|
|
||||||
cmd = r'TianoCompress -d -o _New.bin _Temp.bin'
|
|
||||||
|
|
||||||
# Call tool to create the decompressed output file
|
|
||||||
Process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
|
||||||
Process.communicate()[0]
|
|
||||||
|
|
||||||
# Return the beffer of New.bin
|
|
||||||
if os.path.exists('New.bin'):
|
|
||||||
return open('New.bin', 'rb').read()
|
|
||||||
|
|
||||||
|
|
||||||
## PreProcess() method
|
## PreProcess() method
|
||||||
#
|
#
|
||||||
# Pre process a file
|
# Pre process a file
|
||||||
|
@ -77,7 +77,7 @@ class Report(object):
|
|||||||
def GenerateUnDispatchedList(self):
|
def GenerateUnDispatchedList(self):
|
||||||
FvObj = self.FvObj
|
FvObj = self.FvObj
|
||||||
EotGlobalData.gOP_UN_DISPATCHED.write('%s\n' % FvObj.Name)
|
EotGlobalData.gOP_UN_DISPATCHED.write('%s\n' % FvObj.Name)
|
||||||
for Item in FvObj.UnDispatchedFfsDict.keys():
|
for Item in FvObj.UnDispatchedFfsDict:
|
||||||
EotGlobalData.gOP_UN_DISPATCHED.write('%s\n' % FvObj.UnDispatchedFfsDict[Item])
|
EotGlobalData.gOP_UN_DISPATCHED.write('%s\n' % FvObj.UnDispatchedFfsDict[Item])
|
||||||
|
|
||||||
## GenerateFv() method
|
## GenerateFv() method
|
||||||
@ -112,7 +112,7 @@ class Report(object):
|
|||||||
self.WriteLn(Content)
|
self.WriteLn(Content)
|
||||||
|
|
||||||
EotGlobalData.gOP_DISPATCH_ORDER.write('Dispatched:\n')
|
EotGlobalData.gOP_DISPATCH_ORDER.write('Dispatched:\n')
|
||||||
for FfsId in FvObj.OrderedFfsDict.keys():
|
for FfsId in FvObj.OrderedFfsDict:
|
||||||
self.GenerateFfs(FvObj.OrderedFfsDict[FfsId])
|
self.GenerateFfs(FvObj.OrderedFfsDict[FfsId])
|
||||||
Content = """ </table></td>
|
Content = """ </table></td>
|
||||||
</tr>"""
|
</tr>"""
|
||||||
@ -125,7 +125,7 @@ class Report(object):
|
|||||||
self.WriteLn(Content)
|
self.WriteLn(Content)
|
||||||
|
|
||||||
EotGlobalData.gOP_DISPATCH_ORDER.write('\nUnDispatched:\n')
|
EotGlobalData.gOP_DISPATCH_ORDER.write('\nUnDispatched:\n')
|
||||||
for FfsId in FvObj.UnDispatchedFfsDict.keys():
|
for FfsId in FvObj.UnDispatchedFfsDict:
|
||||||
self.GenerateFfs(FvObj.UnDispatchedFfsDict[FfsId])
|
self.GenerateFfs(FvObj.UnDispatchedFfsDict[FfsId])
|
||||||
Content = """ </table></td>
|
Content = """ </table></td>
|
||||||
</tr>"""
|
</tr>"""
|
||||||
|
@ -15,6 +15,7 @@
|
|||||||
##
|
##
|
||||||
# Import Modules
|
# Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import absolute_import
|
||||||
from struct import *
|
from struct import *
|
||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
@ -51,7 +52,7 @@ class AprioriSection (AprioriSectionClassObject):
|
|||||||
def GenFfs (self, FvName, Dict = {}, IsMakefile = False):
|
def GenFfs (self, FvName, Dict = {}, IsMakefile = False):
|
||||||
DXE_GUID = "FC510EE7-FFDC-11D4-BD41-0080C73C8881"
|
DXE_GUID = "FC510EE7-FFDC-11D4-BD41-0080C73C8881"
|
||||||
PEI_GUID = "1B45CC0A-156A-428A-AF62-49864DA0E6E6"
|
PEI_GUID = "1B45CC0A-156A-428A-AF62-49864DA0E6E6"
|
||||||
Buffer = BytesIO()
|
Buffer = BytesIO('')
|
||||||
AprioriFileGuid = DXE_GUID
|
AprioriFileGuid = DXE_GUID
|
||||||
if self.AprioriType == "PEI":
|
if self.AprioriType == "PEI":
|
||||||
AprioriFileGuid = PEI_GUID
|
AprioriFileGuid = PEI_GUID
|
||||||
@ -96,7 +97,7 @@ class AprioriSection (AprioriSectionClassObject):
|
|||||||
|
|
||||||
|
|
||||||
GuidPart = Guid.split('-')
|
GuidPart = Guid.split('-')
|
||||||
Buffer.write(pack('I', int(GuidPart[0], 16)))
|
Buffer.write(pack('I', long(GuidPart[0], 16)))
|
||||||
Buffer.write(pack('H', int(GuidPart[1], 16)))
|
Buffer.write(pack('H', int(GuidPart[1], 16)))
|
||||||
Buffer.write(pack('H', int(GuidPart[2], 16)))
|
Buffer.write(pack('H', int(GuidPart[2], 16)))
|
||||||
|
|
||||||
|
@ -15,13 +15,13 @@
|
|||||||
##
|
##
|
||||||
# Import Modules
|
# Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import absolute_import
|
||||||
from .GenFdsGlobalVariable import GenFdsGlobalVariable
|
from .GenFdsGlobalVariable import GenFdsGlobalVariable
|
||||||
from .GenFdsGlobalVariable import FindExtendTool
|
from .GenFdsGlobalVariable import FindExtendTool
|
||||||
from CommonDataClass.FdfClass import CapsuleClassObject
|
from CommonDataClass.FdfClass import CapsuleClassObject
|
||||||
import Common.LongFilePathOs as os
|
import Common.LongFilePathOs as os
|
||||||
import subprocess
|
import subprocess
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
from io import StringIO
|
|
||||||
from Common.Misc import SaveFileOnChange
|
from Common.Misc import SaveFileOnChange
|
||||||
from Common.Misc import PackRegistryFormatGuid
|
from Common.Misc import PackRegistryFormatGuid
|
||||||
import uuid
|
import uuid
|
||||||
@ -185,7 +185,7 @@ class Capsule (CapsuleClassObject) :
|
|||||||
#
|
#
|
||||||
# The real capsule header structure is 28 bytes
|
# The real capsule header structure is 28 bytes
|
||||||
#
|
#
|
||||||
Header.write(b'\x00'*(HdrSize-28))
|
Header.write('\x00'*(HdrSize-28))
|
||||||
Header.write(FwMgrHdr.getvalue())
|
Header.write(FwMgrHdr.getvalue())
|
||||||
Header.write(Content.getvalue())
|
Header.write(Content.getvalue())
|
||||||
#
|
#
|
||||||
@ -247,7 +247,7 @@ class Capsule (CapsuleClassObject) :
|
|||||||
def GenCapInf(self):
|
def GenCapInf(self):
|
||||||
self.CapInfFileName = os.path.join(GenFdsGlobalVariable.FvDir,
|
self.CapInfFileName = os.path.join(GenFdsGlobalVariable.FvDir,
|
||||||
self.UiCapsuleName + "_Cap" + '.inf')
|
self.UiCapsuleName + "_Cap" + '.inf')
|
||||||
CapInfFile = StringIO() #open (self.CapInfFileName , 'w+')
|
CapInfFile = BytesIO() #open (self.CapInfFileName , 'w+')
|
||||||
|
|
||||||
CapInfFile.writelines("[options]" + T_CHAR_LF)
|
CapInfFile.writelines("[options]" + T_CHAR_LF)
|
||||||
|
|
||||||
|
@ -15,6 +15,7 @@
|
|||||||
##
|
##
|
||||||
# Import Modules
|
# Import Modules
|
||||||
#
|
#
|
||||||
|
from __future__ import absolute_import
|
||||||
from . import Ffs
|
from . import Ffs
|
||||||
from .GenFdsGlobalVariable import GenFdsGlobalVariable
|
from .GenFdsGlobalVariable import GenFdsGlobalVariable
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
@ -82,7 +83,7 @@ class CapsuleFv (CapsuleData):
|
|||||||
if self.FvName.find('.fv') == -1:
|
if self.FvName.find('.fv') == -1:
|
||||||
if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
|
if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
|
||||||
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[self.FvName.upper()]
|
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[self.FvName.upper()]
|
||||||
FdBuffer = BytesIO()
|
FdBuffer = BytesIO('')
|
||||||
FvObj.CapsuleName = self.CapsuleName
|
FvObj.CapsuleName = self.CapsuleName
|
||||||
FvFile = FvObj.AddToBuffer(FdBuffer)
|
FvFile = FvObj.AddToBuffer(FdBuffer)
|
||||||
FvObj.CapsuleName = None
|
FvObj.CapsuleName = None
|
||||||
@ -229,7 +230,7 @@ class CapsulePayload(CapsuleData):
|
|||||||
)
|
)
|
||||||
if AuthData:
|
if AuthData:
|
||||||
Buffer += pack('QIHH', AuthData[0], AuthData[1], AuthData[2], AuthData[3])
|
Buffer += pack('QIHH', AuthData[0], AuthData[1], AuthData[2], AuthData[3])
|
||||||
Buffer += uuid.UUID(AuthData[4]).bytes_le
|
Buffer += uuid.UUID(AuthData[4]).get_bytes_le()
|
||||||
|
|
||||||
#
|
#
|
||||||
# Append file content to the structure
|
# Append file content to the structure
|
||||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user