arch/x86: Use ENV_X86_64 instead of __x86_64__
Tested on Intel Sandybridge x86_64 and x86_32.

Change-Id: I152483d24af0512c0ee4fbbe8931b7312e487ac6
Signed-off-by: Patrick Rudolph <patrick.rudolph@9elements.com>
Reviewed-on: https://review.coreboot.org/c/coreboot/+/44867
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
Reviewed-by: Angel Pons <th3fanbus@gmail.com>
commit adcf7827bd
parent e85e7af6d0
committed by Patrick Georgi
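
The hunks below all make the same substitution: a toolchain-provided macro test (#if defined(__x86_64__) or #ifdef __x86_64__) becomes a test of coreboot's ENV_X86_64. A minimal sketch of the resulting pattern, mirroring the setup_secondary_gdt() hunk further down; it assumes ENV_X86_64 is always defined to either 0 or 1 for the stage being built, and uses stdint types so it stands alone (it is not code from this commit):

/*
 * Sketch only, not part of the commit. ENV_X86_64 is assumed to be
 * defined to 0 or 1 in every stage, so a plain #if works; __x86_64__
 * exists only when the compiler targets 64-bit code.
 */
#include <stdint.h>

static inline void setup_secondary_gdt_sketch(void)
{
	uint16_t *gdt_limit;
#if ENV_X86_64
	uint64_t *gdt_base;	/* long mode: GDT base is 64 bits wide */
#else
	uint32_t *gdt_base;	/* protected mode: GDT base is 32 bits wide */
#endif
	(void)gdt_limit;	/* silence unused-variable warnings in this stub */
	(void)gdt_base;
}

__x86_64__, by contrast, is simply absent on a 32-bit build, which is why the old code had to test it with defined()/#ifdef.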
@@ -9,7 +9,7 @@
  * Clobbers: eax, ecx, edx
  */
 
-#if defined(__x86_64__)
+#if ENV_X86_64
 .code32
 #if (CONFIG_ARCH_X86_64_PGTBL_LOC & 0xfff) > 0
 #error pagetables must be 4KiB aligned!
@@ -38,7 +38,7 @@ static int lowmem_backup_size;
 static inline void setup_secondary_gdt(void)
 {
 	u16 *gdt_limit;
-#ifdef __x86_64__
+#if ENV_X86_64
 	u64 *gdt_base;
 #else
 	u32 *gdt_base;
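
The width switch in the hunk above matches the x86 GDTR pseudo-descriptor that LGDT consumes: a 16-bit limit followed by a 32-bit base in protected mode or a 64-bit base in long mode. A hypothetical, stand-alone illustration of that layout (the struct name is invented for this sketch; it again assumes ENV_X86_64 expands to 0 or 1):

#include <stdint.h>

struct gdtr_sketch {
	uint16_t limit;		/* size of the GDT in bytes, minus one */
#if ENV_X86_64
	uint64_t base;		/* linear address of the GDT in long mode */
#else
	uint32_t base;		/* linear address of the GDT in protected mode */
#endif
} __attribute__((packed));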
@@ -214,7 +214,7 @@ load_msr:
 	mov %eax, %cr4
 #endif
 
-#ifdef __x86_64__
+#if ENV_X86_64
 	/* entry64.inc preserves ebx. */
 #include <cpu/x86/64bit/entry64.inc>
 
@@ -185,7 +185,7 @@ apicid_end:
 	/* Align stack to 16 bytes. Another 32 bytes are pushed below. */
 	andl $0xfffffff0, %esp
 
-#ifdef __x86_64__
+#if ENV_X86_64
 	mov %ecx, %edi
 	/* Backup IA32_EFER. Preserves ebx. */
 	movl $(IA32_EFER), %ecx
@@ -204,7 +204,7 @@ apicid_end:
 	 * struct arg = { c_handler_params, cpu_num, smm_runtime, canary };
 	 * c_handler(&arg)
 	 */
-#ifdef __x86_64__
+#if ENV_X86_64
 	push %rbx /* uintptr_t *canary */
 	push %rcx /* size_t cpu */
 
@@ -43,7 +43,7 @@
 
 #define SMM_HANDLER_OFFSET 0x0000
 
-#if defined(__x86_64__)
+#if ENV_X86_64
 .bss
 ia32efer_backup_eax:
 .long 0
@@ -166,7 +166,7 @@ untampered_lapic:
 	addl $SMM_STACK_SIZE, %ebx
 	movl %ebx, %esp
 
-#if defined(__x86_64__)
+#if ENV_X86_64
 	/* Backup IA32_EFER. Preserves ebx. */
 	movl $(IA32_EFER), %ecx
 	rdmsr
@@ -180,7 +180,7 @@ untampered_lapic:
 	/* Call C handler */
 	call smi_handler
 
-#if defined(__x86_64__)
+#if ENV_X86_64
 	/*
 	 * The only reason to go back to protected mode is that RSM doesn't restore
 	 * MSR registers and MSR IA32_EFER was modified by entering long mode.