cpu/x86/64bit/mode_switch: Simplify assembly code

Drop the first argument, which specified the number of arguments pushed
to the stack. Instead, always push all three arguments onto the stack and
use the first argument as the function pointer to call while in protected mode.
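
For reference, the before/after prototypes (as they appear in the header
diff below):

	/* Before: caller passed the argument count explicitly. */
	int protected_mode_call_narg(uint32_t arg_count, uint32_t func_ptr,
				     uint32_t opt_arg1, uint32_t opt_arg2);

	/* After: fixed three argument slots; unused ones are passed as 0. */
	int protected_mode_call_3arg(uint32_t func_ptr, uint32_t opt_arg1,
				     uint32_t opt_arg2, uint32_t opt_arg3);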

While at it, add more comments and simplify the register restore code.

Tested:
- On QEMU: called an x86_32 function, passing an argument and receiving
  a return value.
- Booted Lenovo X220 in x86_64 mode using the x86_32 MRC.

Change-Id: I30809453a1800ba3c0df60acd7eca778841c520f
Signed-off-by: Patrick Rudolph <patrick.rudolph@9elements.com>
Reviewed-on: https://review.coreboot.org/c/coreboot/+/79752
Reviewed-by: Arthur Heymans <arthur@aheymans.xyz>
Reviewed-by: Jérémy Compostella <jeremy.compostella@intel.com>
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
Commit:    b4283a4fbb (parent b14b96d29a)
Author:    Patrick Rudolph, 2023-12-28 07:44:26 +01:00
Committer: Lean Sheng Tan

2 changed files with 45 additions and 49 deletions

@@ -3,10 +3,18 @@
 #include <stdint.h>
 
 #if ENV_X86_64
-int protected_mode_call_narg(uint32_t arg_count,
-			     uint32_t func_ptr,
+/*
+ * Assembly code that drops into protected mode and calls the function
+ * specified as first argument, which must have been compiled for x86_32.
+ * After the function returns it enters long mode again.
+ * The function pointer destination must be below 4GiB in physical memory.
+ *
+ * The called function has 0-3 arguments and returns an int.
+ */
+int protected_mode_call_3arg(uint32_t func_ptr,
 			     uint32_t opt_arg1,
-			     uint32_t opt_arg2);
+			     uint32_t opt_arg2,
+			     uint32_t opt_arg3);
 
 /*
  * Drops into protected mode and calls the function, which must have been compiled for x86_32.
@@ -17,7 +25,7 @@ int protected_mode_call_narg(uint32_t arg_count,
  */
 static inline int protected_mode_call(void *func)
 {
-	return protected_mode_call_narg(0, (uintptr_t)func, 0, 0);
+	return protected_mode_call_3arg((uintptr_t)func, 0, 0, 0);
 }
 
 /*
@@ -30,7 +38,7 @@ static inline int protected_mode_call(void *func)
  */
 static inline int protected_mode_call_1arg(void *func, uint32_t arg1)
 {
-	return protected_mode_call_narg(1, (uintptr_t)func, arg1, 0);
+	return protected_mode_call_3arg((uintptr_t)func, arg1, 0, 0);
 }
 
 /*
@@ -43,7 +51,7 @@ static inline int protected_mode_call_1arg(void *func, uint32_t arg1)
  */
 static inline int protected_mode_call_2arg(void *func, uint32_t arg1, uint32_t arg2)
 {
-	return protected_mode_call_narg(2, (uintptr_t)func, arg1, arg2);
+	return protected_mode_call_3arg((uintptr_t)func, arg1, arg2, 0);
 }
 #else
 static inline int protected_mode_call(void *func)
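
As a usage sketch: the inline wrappers now all funnel into the single
three-argument entry point. The callee below is hypothetical, for
illustration only:

	/* Hypothetical x86_32 entry point; must be located below 4GiB. */
	extern int legacy_blob_entry(uint32_t flags);

	static int run_legacy_blob(void)
	{
		/* Expands to protected_mode_call_3arg((uintptr_t)func, 1, 0, 0). */
		return protected_mode_call_1arg((void *)legacy_blob_entry, 1);
	}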


@@ -1,14 +1,13 @@
 /* SPDX-License-Identifier: GPL-2.0-only */
 
+/* Calls a x86_32 function from x86_64 context */
 .text
 .code64
 	.section ".text.protected_mode_call", "ax", @progbits
-	.globl protected_mode_call_narg
-protected_mode_call_narg:
-	push	%rbp
-	mov	%rsp, %rbp
-
+	.globl protected_mode_call_3arg
+protected_mode_call_3arg:
 	/* Preserve registers */
+	push	%rbp
 	push	%rbx
 	push	%r12
 	push	%r13
@@ -19,58 +18,47 @@ protected_mode_call_narg:
 	movl	%gs, %eax
 	push	%rax
 
-	/* Arguments to stack */
-	push	%rdi
-	push	%rsi
-	push	%rdx
-	push	%rcx
+	/* Store stack pointer */
+	mov	%rsp, %rbp
+
+	/* Align stack and make space for arguments */
+	movabs	$0xfffffffffffffff0, %rax
+	andq	%rax, %rsp
+	sub	$16, %rsp
+
+	/* Arguments to stack */
+	movl	%edi, 12(%rsp)
+	movl	%esi, 0(%rsp)
+	movl	%edx, 4(%rsp)
+	movl	%ecx, 8(%rsp)
 
+	/* Drop to protected mode */
 	#include <cpu/x86/64bit/exit32.inc>
 
-	movl	-56(%ebp), %eax	/* Argument count */
-	movl	-72(%ebp), %edx	/* Argument 0 */
-	movl	-80(%ebp), %ecx	/* Argument 1 */
-
-	/* Align the stack */
-	andl	$0xFFFFFFF0, %esp
-	test	%eax, %eax
-	je	1f	/* Zero arguments */
-
-	subl	$1, %eax
-	test	%eax, %eax
-	je	2f	/* One argument */
-
-	/* Two arguments */
-	subl	$8, %esp
-	pushl	%ecx	/* Argument 1 */
-	pushl	%edx	/* Argument 0 */
-	jmp	1f
-2:
-	subl	$12, %esp
-	pushl	%edx	/* Argument 0 */
-1:
-	movl	-64(%ebp), %ebx	/* Function to call */
+	/* Fetch function to call */
+	movl	12(%esp), %ebx
+
+	/* Call function */
 	call	*%ebx
 	movl	%eax, %ebx
 
-	/* Preserves ebx */
+	/* Jump to long mode. Preserves ebx */
 	#include <cpu/x86/64bit/entry64.inc>
 
 	/* Place return value in rax */
 	movl	%ebx, %eax
 
-	/* Restore registers */
-	mov	-48(%rbp), %rbx
-	movl	%ebx, %gs
-	mov	-40(%rbp), %r15
-	mov	-32(%rbp), %r14
-	mov	-24(%rbp), %r13
-	mov	-16(%rbp), %r12
-	mov	-8(%rbp), %rbx
-
 	/* Restore stack pointer */
 	mov	%rbp, %rsp
+
+	/* Restore registers */
+	pop	%rbx
+	movl	%ebx, %gs
+	pop	%r15
+	pop	%r14
+	pop	%r13
+	pop	%r12
+	pop	%rbx
 	pop	%rbp
 	ret
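
A note on the fixed offsets, derived from the hunk above: the arguments
are stored so that, once "call *%ebx" pushes its 4-byte return address,
the x86_32 callee sees a normal cdecl frame. A sketch (assuming
exit32.inc leaves the stack pointer unchanged):

	/*
	 * 16-byte scratch area after "sub $16, %rsp" (offsets from the
	 * aligned stack pointer):
	 *
	 *   12(%rsp)  func_ptr  (%edi) -- fetched into %ebx before the call
	 *    8(%rsp)  opt_arg3  (%ecx)
	 *    4(%rsp)  opt_arg2  (%edx)
	 *    0(%rsp)  opt_arg1  (%esi)
	 *
	 * "call *%ebx" pushes a 4-byte return address, so the callee sees
	 * opt_arg1 at 4(%esp), opt_arg2 at 8(%esp) and opt_arg3 at
	 * 12(%esp): exactly the cdecl layout for a three-argument
	 * x86_32 function.
	 */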