Cosmetics, whitespace and coding-style fixes for Intel CAR (trivial).

This patch is abuild-tested.

Signed-off-by: Uwe Hermann <uwe@hermann-uwe.de>
Acked-by: Uwe Hermann <uwe@hermann-uwe.de>



git-svn-id: svn://svn.coreboot.org/coreboot/trunk@5901 2b7e53f0-3cfb-0310-b3e9-8179ed1497e1
Author: Uwe Hermann
Date:   2010-10-01 17:37:45 +00:00
Parent: d3f620299c
Commit: 2ba2b553b5
3 changed files with 203 additions and 204 deletions

Changed file 1 of 3:

@@ -18,140 +18,140 @@
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */
-#define CACHE_AS_RAM_SIZE CONFIG_DCACHE_RAM_SIZE
-#define CACHE_AS_RAM_BASE CONFIG_DCACHE_RAM_BASE
 #include <cpu/x86/stack.h>
 #include <cpu/x86/mtrr.h>
-/* Save the BIST result */
-movl %eax, %ebp
+#define CACHE_AS_RAM_SIZE CONFIG_DCACHE_RAM_SIZE
+#define CACHE_AS_RAM_BASE CONFIG_DCACHE_RAM_BASE
+/* Save the BIST result. */
+movl %eax, %ebp
 cache_as_ram:
 post_code(0x20)
-/* Send INIT IPI to all excluding ourself */
+/* Send INIT IPI to all excluding ourself. */
 movl $0x000C4500, %eax
 movl $0xFEE00300, %esi
 movl %eax, (%esi)
-/* Zero out all Fixed Range and Variable Range MTRRs */
+/* Zero out all fixed range and variable range MTRRs. */
 movl $mtrr_table, %esi
-movl $( (mtrr_table_end - mtrr_table) / 2), %edi
+movl $((mtrr_table_end - mtrr_table) / 2), %edi
 xorl %eax, %eax
 xorl %edx, %edx
 clear_mtrrs:
 movw (%esi), %bx
 movzx %bx, %ecx
 wrmsr
 add $2, %esi
 dec %edi
 jnz clear_mtrrs
-/* Configure the default memory type to uncacheable */
+/* Configure the default memory type to uncacheable. */
 movl $MTRRdefType_MSR, %ecx
 rdmsr
 andl $(~0x00000cff), %eax
 wrmsr
-/* Set cache as ram base address */
+/* Set Cache-as-RAM base address. */
 movl $(MTRRphysBase_MSR(0)), %ecx
 movl $(CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK), %eax
 xorl %edx, %edx
 wrmsr
-/* Set cache as ram mask */
+/* Set Cache-as-RAM mask. */
 movl $(MTRRphysMask_MSR(0)), %ecx
-movl $(~((CACHE_AS_RAM_SIZE-1)) | (1 << 11)), %eax
+movl $(~((CACHE_AS_RAM_SIZE - 1)) | (1 << 11)), %eax
 xorl %edx, %edx
 wrmsr
-/* Enable MTRR */
+/* Enable MTRR. */
 movl $MTRRdefType_MSR, %ecx
 rdmsr
 orl $(1 << 11), %eax
 wrmsr
-/* Enable L2 Cache */
+/* Enable L2 cache. */
 movl $0x11e, %ecx
 rdmsr
 orl $(1 << 8), %eax
 wrmsr
-/* CR0.CD = 0, CR0.NW = 0 */
+/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
 movl %cr0, %eax
-andl $( ~( (1 << 30) | (1 << 29) ) ), %eax
+andl $(~((1 << 30) | (1 << 29))), %eax
 invd
 movl %eax, %cr0
-/* Clear the cache memory reagion */
+/* Clear the cache memory reagion. */
 movl $CACHE_AS_RAM_BASE, %esi
 movl %esi, %edi
 movl $(CACHE_AS_RAM_SIZE / 4), %ecx
-//movl $0x23322332, %eax
+// movl $0x23322332, %eax
 xorl %eax, %eax
 rep stosl
-/* Enable Cache As RAM mode by disabling cache */
+/* Enable Cache-as-RAM mode by disabling cache. */
 movl %cr0, %eax
 orl $(1 << 30), %eax
 movl %eax, %cr0
 #if defined(CONFIG_XIP_ROM_SIZE) && defined(CONFIG_XIP_ROM_BASE)
 /* Enable cache for our code in Flash because we do XIP here */
 movl $MTRRphysBase_MSR(1), %ecx
 xorl %edx, %edx
 #if defined(CONFIG_TINY_BOOTBLOCK) && CONFIG_TINY_BOOTBLOCK
 #define REAL_XIP_ROM_BASE AUTO_XIP_ROM_BASE
 #else
 #define REAL_XIP_ROM_BASE CONFIG_XIP_ROM_BASE
 #endif
 movl $REAL_XIP_ROM_BASE, %eax
 orl $MTRR_TYPE_WRBACK, %eax
 wrmsr
 movl $MTRRphysMask_MSR(1), %ecx
 xorl %edx, %edx
 movl $(~(CONFIG_XIP_ROM_SIZE - 1) | 0x800), %eax
 wrmsr
 #endif /* CONFIG_XIP_ROM_SIZE && CONFIG_XIP_ROM_BASE */
-/* enable cache */
+/* Enable cache. */
 movl %cr0, %eax
-andl $( ~( (1 << 30) | (1 << 29) ) ), %eax
+andl $(~((1 << 30) | (1 << 29))), %eax
 movl %eax, %cr0
-/* Set up stack pointer */
+/* Set up the stack pointer. */
 #if defined(CONFIG_USBDEBUG) && (CONFIG_USBDEBUG == 1)
-/* leave some space for the struct ehci_debug_info */
+/* Leave some space for the struct ehci_debug_info. */
 movl $(CACHE_AS_RAM_BASE + CACHE_AS_RAM_SIZE - 4 - 128), %eax
 #else
 movl $(CACHE_AS_RAM_BASE + CACHE_AS_RAM_SIZE - 4), %eax
 #endif
 movl %eax, %esp
-/* Restore the BIST result */
+/* Restore the BIST result. */
 movl %ebp, %eax
 movl %esp, %ebp
 pushl %eax
 post_code(0x23)
-/* Call romstage.c main function */
+/* Call romstage.c main function. */
 call main
 post_code(0x2f)
 post_code(0x30)
-/* Disable Cache */
+/* Disable cache. */
 movl %cr0, %eax
 orl $(1 << 30), %eax
 movl %eax, %cr0
 post_code(0x31)
-/* Disable MTRR */
+/* Disable MTRR. */
 movl $MTRRdefType_MSR, %ecx
 rdmsr
 andl $(~(1 << 11)), %eax
@@ -175,40 +175,40 @@ clear_mtrrs:
 post_code(0x33)
-/* Enable Cache */
+/* Enable cache. */
 movl %cr0, %eax
-andl $~( (1 << 30) | (1 << 29) ), %eax
+andl $~((1 << 30) | (1 << 29)), %eax
 movl %eax, %cr0
 post_code(0x36)
-/* Disable Cache */
+/* Disable cache. */
 movl %cr0, %eax
 orl $(1 << 30), %eax
 movl %eax, %cr0
 post_code(0x38)
-/* Enable Write Back and Speculative Reads for the first 1MB */
+/* Enable Write Back and Speculative Reads for the first 1MB. */
 movl $MTRRphysBase_MSR(0), %ecx
 movl $(0x00000000 | MTRR_TYPE_WRBACK), %eax
 xorl %edx, %edx
 wrmsr
 movl $MTRRphysMask_MSR(0), %ecx
-movl $(~(1024*1024 -1) | (1 << 11)), %eax
+movl $(~(1024 * 1024 - 1) | (1 << 11)), %eax
 xorl %edx, %edx
 wrmsr
 post_code(0x39)
-/* And Enable Cache again after setting MTRRs */
+/* And enable cache again after setting MTRRs. */
 movl %cr0, %eax
-andl $~( (1 << 30) | (1 << 29) ), %eax
+andl $~((1 << 30) | (1 << 29)), %eax
 movl %eax, %cr0
 post_code(0x3a)
-/* Enable MTRR */
+/* Enable MTRR. */
 movl $MTRRdefType_MSR, %ecx
 rdmsr
 orl $(1 << 11), %eax
@@ -216,23 +216,23 @@ clear_mtrrs:
 post_code(0x3b)
-/* Invalidate the cache again */
+/* Invalidate the cache again. */
 invd
 post_code(0x3c)
-/* clear boot_complete flag */
+/* Clear boot_complete flag. */
 xorl %ebp, %ebp
 __main:
 post_code(0x11)
-cld /* clear direction flag */
+cld /* Clear direction flag. */
 movl %ebp, %esi
 movl $ROMSTAGE_STACK, %esp
 movl %esp, %ebp
 pushl %esi
 call copy_and_run
 .Lhlt:
 post_code(0xee)
@@ -241,14 +241,14 @@ __main:
 mtrr_table:
 /* Fixed MTRRs */
 .word 0x250, 0x258, 0x259
 .word 0x268, 0x269, 0x26A
 .word 0x26B, 0x26C, 0x26D
 .word 0x26E, 0x26F
 /* Variable MTRRs */
 .word 0x200, 0x201, 0x202, 0x203
 .word 0x204, 0x205, 0x206, 0x207
 .word 0x208, 0x209, 0x20A, 0x20B
 .word 0x20C, 0x20D, 0x20E, 0x20F
 mtrr_table_end:

Changed file 2 of 3:

@@ -18,140 +18,140 @@
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */
-#define CACHE_AS_RAM_SIZE CONFIG_DCACHE_RAM_SIZE
-#define CACHE_AS_RAM_BASE CONFIG_DCACHE_RAM_BASE
 #include <cpu/x86/stack.h>
 #include <cpu/x86/mtrr.h>
-/* Save the BIST result */
-movl %eax, %ebp
+#define CACHE_AS_RAM_SIZE CONFIG_DCACHE_RAM_SIZE
+#define CACHE_AS_RAM_BASE CONFIG_DCACHE_RAM_BASE
+/* Save the BIST result. */
+movl %eax, %ebp
 cache_as_ram:
 post_code(0x20)
-/* Send INIT IPI to all excluding ourself */
+/* Send INIT IPI to all excluding ourself. */
 movl $0x000C4500, %eax
 movl $0xFEE00300, %esi
 movl %eax, (%esi)
-/* Zero out all Fixed Range and Variable Range MTRRs */
+/* Zero out all fixed range and variable range MTRRs. */
 movl $mtrr_table, %esi
-movl $( (mtrr_table_end - mtrr_table) / 2), %edi
+movl $((mtrr_table_end - mtrr_table) / 2), %edi
 xorl %eax, %eax
 xorl %edx, %edx
 clear_mtrrs:
 movw (%esi), %bx
 movzx %bx, %ecx
 wrmsr
 add $2, %esi
 dec %edi
 jnz clear_mtrrs
-/* Configure the default memory type to uncacheable */
+/* Configure the default memory type to uncacheable. */
 movl $MTRRdefType_MSR, %ecx
 rdmsr
 andl $(~0x00000cff), %eax
 wrmsr
-/* Set cache as ram base address */
+/* Set Cache-as-RAM base address. */
 movl $(MTRRphysBase_MSR(0)), %ecx
 movl $(CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK), %eax
 xorl %edx, %edx
 wrmsr
-/* Set cache as ram mask */
+/* Set Cache-as-RAM mask. */
 movl $(MTRRphysMask_MSR(0)), %ecx
-movl $(~((CACHE_AS_RAM_SIZE-1)) | (1 << 11)), %eax
+movl $(~((CACHE_AS_RAM_SIZE - 1)) | (1 << 11)), %eax
 movl $0x0000000f, %edx
 wrmsr
-/* Enable MTRR */
+/* Enable MTRR. */
 movl $MTRRdefType_MSR, %ecx
 rdmsr
 orl $(1 << 11), %eax
 wrmsr
-/* Enable L2 Cache */
+/* Enable L2 cache. */
 movl $0x11e, %ecx
 rdmsr
 orl $(1 << 8), %eax
 wrmsr
-/* CR0.CD = 0, CR0.NW = 0 */
+/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
 movl %cr0, %eax
-andl $( ~( (1 << 30) | (1 << 29) ) ), %eax
+andl $(~((1 << 30) | (1 << 29))), %eax
 invd
 movl %eax, %cr0
-/* Clear the cache memory reagion */
+/* Clear the cache memory reagion. */
 movl $CACHE_AS_RAM_BASE, %esi
 movl %esi, %edi
 movl $(CACHE_AS_RAM_SIZE / 4), %ecx
-//movl $0x23322332, %eax
+// movl $0x23322332, %eax
 xorl %eax, %eax
 rep stosl
-/* Enable Cache As RAM mode by disabling cache */
+/* Enable Cache-as-RAM mode by disabling cache. */
 movl %cr0, %eax
 orl $(1 << 30), %eax
 movl %eax, %cr0
 #if defined(CONFIG_XIP_ROM_SIZE) && defined(CONFIG_XIP_ROM_BASE)
 /* Enable cache for our code in Flash because we do XIP here */
 movl $MTRRphysBase_MSR(1), %ecx
 xorl %edx, %edx
 #if defined(CONFIG_TINY_BOOTBLOCK) && CONFIG_TINY_BOOTBLOCK
 #define REAL_XIP_ROM_BASE AUTO_XIP_ROM_BASE
 #else
 #define REAL_XIP_ROM_BASE CONFIG_XIP_ROM_BASE
 #endif
 movl $REAL_XIP_ROM_BASE, %eax
 orl $MTRR_TYPE_WRBACK, %eax
 wrmsr
 movl $MTRRphysMask_MSR(1), %ecx
 movl $0x0000000f, %edx
 movl $(~(CONFIG_XIP_ROM_SIZE - 1) | 0x800), %eax
 wrmsr
 #endif /* CONFIG_XIP_ROM_SIZE && CONFIG_XIP_ROM_BASE */
-/* enable cache */
+/* Enable cache. */
 movl %cr0, %eax
-andl $( ~( (1 << 30) | (1 << 29) ) ), %eax
+andl $(~((1 << 30) | (1 << 29))), %eax
 movl %eax, %cr0
-/* Set up stack pointer */
+/* Set up the stack pointer. */
 #if defined(CONFIG_USBDEBUG) && (CONFIG_USBDEBUG == 1)
-/* leave some space for the struct ehci_debug_info */
+/* Leave some space for the struct ehci_debug_info. */
 movl $(CACHE_AS_RAM_BASE + CACHE_AS_RAM_SIZE - 4 - 128), %eax
 #else
 movl $(CACHE_AS_RAM_BASE + CACHE_AS_RAM_SIZE - 4), %eax
 #endif
 movl %eax, %esp
-/* Restore the BIST result */
+/* Restore the BIST result. */
 movl %ebp, %eax
 movl %esp, %ebp
 pushl %eax
 post_code(0x23)
-/* Call romstage.c main function */
+/* Call romstage.c main function. */
 call main
 post_code(0x2f)
 post_code(0x30)
-/* Disable Cache */
+/* Disable cache. */
 movl %cr0, %eax
 orl $(1 << 30), %eax
 movl %eax, %cr0
 post_code(0x31)
-/* Disable MTRR */
+/* Disable MTRR. */
 movl $MTRRdefType_MSR, %ecx
 rdmsr
 andl $(~(1 << 11)), %eax
@@ -175,41 +175,40 @@ clear_mtrrs:
 post_code(0x33)
-/* Enable Cache */
+/* Enable cache. */
 movl %cr0, %eax
-andl $~( (1 << 30) | (1 << 29) ), %eax
+andl $~((1 << 30) | (1 << 29)), %eax
 movl %eax, %cr0
 post_code(0x36)
-/* Disable Cache */
+/* Disable cache. */
 movl %cr0, %eax
 orl $(1 << 30), %eax
 movl %eax, %cr0
 post_code(0x38)
-/* Enable Write Back and Speculative Reads for the first 1MB */
+/* Enable Write Back and Speculative Reads for the first 1MB. */
 movl $MTRRphysBase_MSR(0), %ecx
 movl $(0x00000000 | MTRR_TYPE_WRBACK), %eax
 xorl %edx, %edx
 wrmsr
 movl $MTRRphysMask_MSR(0), %ecx
-movl $(~(1024*1024 -1) | (1 << 11)), %eax
+movl $(~(1024 * 1024 - 1) | (1 << 11)), %eax
 movl $0x0000000f, %edx // 36bit address space
 wrmsr
 post_code(0x39)
-/* And Enable Cache again after setting MTRRs */
+/* And enable cache again after setting MTRRs. */
 movl %cr0, %eax
-andl $~( (1 << 30) | (1 << 29) ), %eax
+andl $~((1 << 30) | (1 << 29)), %eax
 movl %eax, %cr0
 post_code(0x3a)
-/* Enable MTRR */
+/* Enable MTRR. */
 movl $MTRRdefType_MSR, %ecx
 rdmsr
 orl $(1 << 11), %eax
@@ -217,23 +216,23 @@ clear_mtrrs:
 post_code(0x3b)
-/* Invalidate the cache again */
+/* Invalidate the cache again. */
 invd
 post_code(0x3c)
-/* clear boot_complete flag */
+/* Clear boot_complete flag. */
 xorl %ebp, %ebp
 __main:
 post_code(0x11)
-cld /* clear direction flag */
+cld /* Clear direction flag. */
 movl %ebp, %esi
 movl $ROMSTAGE_STACK, %esp
 movl %esp, %ebp
 pushl %esi
 call copy_and_run
 .Lhlt:
 post_code(0xee)
@@ -242,14 +241,14 @@ __main:
 mtrr_table:
 /* Fixed MTRRs */
 .word 0x250, 0x258, 0x259
 .word 0x268, 0x269, 0x26A
 .word 0x26B, 0x26C, 0x26D
 .word 0x26E, 0x26F
 /* Variable MTRRs */
 .word 0x200, 0x201, 0x202, 0x203
 .word 0x204, 0x205, 0x206, 0x207
 .word 0x208, 0x209, 0x20A, 0x20B
 .word 0x20C, 0x20D, 0x20E, 0x20F
 mtrr_table_end:

Changed file 3 of 3:

@@ -18,22 +18,22 @@
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */
-#define CACHE_AS_RAM_SIZE CONFIG_DCACHE_RAM_SIZE
-#define CACHE_AS_RAM_BASE CONFIG_DCACHE_RAM_BASE
 #include <cpu/x86/stack.h>
 #include <cpu/x86/mtrr.h>
-/* Save the BIST result */
-movl %eax, %ebp
+#define CACHE_AS_RAM_SIZE CONFIG_DCACHE_RAM_SIZE
+#define CACHE_AS_RAM_BASE CONFIG_DCACHE_RAM_BASE
+/* Save the BIST result. */
+movl %eax, %ebp
 cache_as_ram:
 post_code(0x20)
-/* Send INIT IPI to all excluding ourself */
+/* Send INIT IPI to all excluding ourself. */
 movl $0x000C4500, %eax
 movl $0xFEE00300, %esi
 movl %eax, (%esi)
 /* Disable prefetchers */
 movl $0x01a0, %ecx
@@ -42,123 +42,123 @@ cache_as_ram:
 orl $((1 << 5) | (1 << 7)), %edx
 wrmsr
-/* Zero out all Fixed Range and Variable Range MTRRs */
+/* Zero out all fixed range and variable range MTRRs. */
 movl $mtrr_table, %esi
-movl $( (mtrr_table_end - mtrr_table) / 2), %edi
+movl $((mtrr_table_end - mtrr_table) / 2), %edi
 xorl %eax, %eax
 xorl %edx, %edx
 clear_mtrrs:
 movw (%esi), %bx
 movzx %bx, %ecx
 wrmsr
 add $2, %esi
 dec %edi
 jnz clear_mtrrs
-/* Configure the default memory type to uncacheable */
+/* Configure the default memory type to uncacheable. */
 movl $MTRRdefType_MSR, %ecx
 rdmsr
 andl $(~0x00000cff), %eax
 wrmsr
-/* Set cache as ram base address */
+/* Set Cache-as-RAM base address. */
 movl $(MTRRphysBase_MSR(0)), %ecx
 movl $(CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK), %eax
 xorl %edx, %edx
 wrmsr
-/* Set cache as ram mask */
+/* Set Cache-as-RAM mask. */
 movl $(MTRRphysMask_MSR(0)), %ecx
-movl $(~((CACHE_AS_RAM_SIZE-1)) | (1 << 11)), %eax
+movl $(~((CACHE_AS_RAM_SIZE - 1)) | (1 << 11)), %eax
 movl $0x0000000f, %edx
 wrmsr
-/* Enable MTRR */
+/* Enable MTRR. */
 movl $MTRRdefType_MSR, %ecx
 rdmsr
 orl $(1 << 11), %eax
 wrmsr
-/* Enable L2 Cache */
+/* Enable L2 cache. */
 movl $0x11e, %ecx
 rdmsr
 orl $(1 << 8), %eax
 wrmsr
-/* CR0.CD = 0, CR0.NW = 0 */
+/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
 movl %cr0, %eax
-andl $( ~( (1 << 30) | (1 << 29) ) ), %eax
+andl $(~((1 << 30) | (1 << 29))), %eax
 invd
 movl %eax, %cr0
-/* Clear the cache memory reagion */
+/* Clear the cache memory reagion. */
 movl $CACHE_AS_RAM_BASE, %esi
 movl %esi, %edi
 movl $(CACHE_AS_RAM_SIZE / 4), %ecx
-//movl $0x23322332, %eax
+// movl $0x23322332, %eax
 xorl %eax, %eax
 rep stosl
-/* Enable Cache As RAM mode by disabling cache */
+/* Enable Cache-as-RAM mode by disabling cache. */
 movl %cr0, %eax
 orl $(1 << 30), %eax
 movl %eax, %cr0
 #if defined(CONFIG_XIP_ROM_SIZE) && defined(CONFIG_XIP_ROM_BASE)
 /* Enable cache for our code in Flash because we do XIP here */
 movl $MTRRphysBase_MSR(1), %ecx
 xorl %edx, %edx
 #if defined(CONFIG_TINY_BOOTBLOCK) && CONFIG_TINY_BOOTBLOCK
 #define REAL_XIP_ROM_BASE AUTO_XIP_ROM_BASE
 #else
 #define REAL_XIP_ROM_BASE CONFIG_XIP_ROM_BASE
 #endif
 movl $REAL_XIP_ROM_BASE, %eax
 orl $MTRR_TYPE_WRBACK, %eax
 wrmsr
 movl $MTRRphysMask_MSR(1), %ecx
 movl $0x0000000f, %edx
 movl $(~(CONFIG_XIP_ROM_SIZE - 1) | 0x800), %eax
 wrmsr
 #endif /* CONFIG_XIP_ROM_SIZE && CONFIG_XIP_ROM_BASE */
-/* enable cache */
+/* Enable cache. */
 movl %cr0, %eax
-andl $( ~( (1 << 30) | (1 << 29) ) ), %eax
+andl $(~((1 << 30) | (1 << 29))), %eax
 movl %eax, %cr0
-/* Set up stack pointer */
+/* Set up the stack pointer. */
 #if defined(CONFIG_USBDEBUG) && (CONFIG_USBDEBUG == 1)
-/* leave some space for the struct ehci_debug_info */
+/* Leave some space for the struct ehci_debug_info. */
 movl $(CACHE_AS_RAM_BASE + CACHE_AS_RAM_SIZE - 4 - 128), %eax
 #else
 movl $(CACHE_AS_RAM_BASE + CACHE_AS_RAM_SIZE - 4), %eax
 #endif
 movl %eax, %esp
-/* Restore the BIST result */
+/* Restore the BIST result. */
 movl %ebp, %eax
 movl %esp, %ebp
 pushl %eax
 post_code(0x23)
-/* Call romstage.c main function */
+/* Call romstage.c main function. */
 call main
 post_code(0x2f)
 post_code(0x30)
-/* Disable Cache */
+/* Disable cache. */
 movl %cr0, %eax
 orl $(1 << 30), %eax
 movl %eax, %cr0
 post_code(0x31)
-/* Disable MTRR */
+/* Disable MTRR. */
 movl $MTRRdefType_MSR, %ecx
 rdmsr
 andl $(~(1 << 11)), %eax
@@ -182,40 +182,40 @@ clear_mtrrs:
 post_code(0x33)
-/* Enable Cache */
+/* Enable cache. */
 movl %cr0, %eax
-andl $~( (1 << 30) | (1 << 29) ), %eax
+andl $~((1 << 30) | (1 << 29)), %eax
 movl %eax, %cr0
 post_code(0x36)
-/* Disable Cache */
+/* Disable cache. */
 movl %cr0, %eax
 orl $(1 << 30), %eax
 movl %eax, %cr0
 post_code(0x38)
-/* Enable Write Back and Speculative Reads for the first 1MB */
+/* Enable Write Back and Speculative Reads for the first 1MB. */
 movl $MTRRphysBase_MSR(0), %ecx
 movl $(0x00000000 | MTRR_TYPE_WRBACK), %eax
 xorl %edx, %edx
 wrmsr
 movl $MTRRphysMask_MSR(0), %ecx
-movl $(~(1024*1024 -1) | (1 << 11)), %eax
+movl $(~(1024 * 1024 - 1) | (1 << 11)), %eax
 movl $0x0000000f, %edx // 36bit address space
 wrmsr
 post_code(0x39)
-/* And Enable Cache again after setting MTRRs */
+/* And enable cache again after setting MTRRs. */
 movl %cr0, %eax
-andl $~( (1 << 30) | (1 << 29) ), %eax
+andl $~((1 << 30) | (1 << 29)), %eax
 movl %eax, %cr0
 post_code(0x3a)
-/* Enable MTRR */
+/* Enable MTRR. */
 movl $MTRRdefType_MSR, %ecx
 rdmsr
 orl $(1 << 11), %eax
@@ -230,23 +230,23 @@ clear_mtrrs:
 andl $~((1 << 5) | (1 << 7)), %edx
 wrmsr
-/* Invalidate the cache again */
+/* Invalidate the cache again. */
 invd
 post_code(0x3c)
-/* clear boot_complete flag */
+/* Clear boot_complete flag. */
 xorl %ebp, %ebp
 __main:
 post_code(0x11)
-cld /* clear direction flag */
+cld /* Clear direction flag. */
 movl %ebp, %esi
 movl $ROMSTAGE_STACK, %esp
 movl %esp, %ebp
 pushl %esi
 call copy_and_run
 .Lhlt:
 post_code(0xee)
@@ -255,14 +255,14 @@ __main:
 mtrr_table:
 /* Fixed MTRRs */
 .word 0x250, 0x258, 0x259
 .word 0x268, 0x269, 0x26A
 .word 0x26B, 0x26C, 0x26D
 .word 0x26E, 0x26F
 /* Variable MTRRs */
 .word 0x200, 0x201, 0x202, 0x203
 .word 0x204, 0x205, 0x206, 0x207
 .word 0x208, 0x209, 0x20A, 0x20B
 .word 0x20C, 0x20D, 0x20E, 0x20F
 mtrr_table_end: