The following patch reworks car_disable into C. Tested, works here. I also
compared the GCC-generated code and it looks all right. Please test on some
multicore CPU.

I added the "memory" clobber to the read_cr0 / write_cr0 functions, as is done
in the Linux kernel. It seems that if this clobber is missing, GCC is too smart
and reorders the reads/writes to CR0 (not tested whether that is really a
problem here, but better safe for future users of these functions ;)

Signed-off-by: Rudolf Marek <r.marek@assembler.cz>
Acked-by: Stefan Reinauer <stepan@coresystems.de>



git-svn-id: svn://svn.coreboot.org/coreboot/trunk@5562 2b7e53f0-3cfb-0310-b3e9-8179ed1497e1
This commit is contained in:
Rudolf Marek
2010-05-16 21:51:34 +00:00
parent 4bb368cc73
commit beba99045c
2 changed files with 45 additions and 37 deletions

View File

@ -1,50 +1,55 @@
/* by yhlu 6.2005 */ /*
/* be warned, this file will be used other cores and core 0 / node 0 */ * This file is part of the coreboot project.
*
* original idea yhlu 6.2005 (assembler code)
*
* Copyright (C) 2010 Rudolf Marek <r.marek@assembler.cz>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 2 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* be warned, this file will be used other cores and core 0 / node 0
*/
static inline __attribute__((always_inline)) void disable_cache_as_ram(void) static inline __attribute__((always_inline)) void disable_cache_as_ram(void)
{ {
__asm__ __volatile__ ( msr_t msr;
/* We don't need cache as ram for now on */
/* disable cache */ /* disable cache */
"movl %%cr0, %%eax\n\t" write_cr0(read_cr0() | (1 << 30));
"orl $(0x1<<30),%%eax\n\t"
"movl %%eax, %%cr0\n\t"
/* clear sth */ msr.lo = 0;
"movl $0x269, %%ecx\n\t" /* fix4k_c8000*/ msr.hi = 0;
"xorl %%edx, %%edx\n\t" wrmsr(MTRRfix4K_C8000_MSR, msr);
"xorl %%eax, %%eax\n\t"
"wrmsr\n\t"
#if CONFIG_DCACHE_RAM_SIZE > 0x8000 #if CONFIG_DCACHE_RAM_SIZE > 0x8000
"movl $0x268, %%ecx\n\t" /* fix4k_c0000*/ wrmsr(MTRRfix4K_C0000_MSR, msr);
"wrmsr\n\t"
#endif #endif
/* disable fixed mtrr from now on, it will be enabled by coreboot_ram again*/ /* disable fixed mtrr from now on, it will be enabled by coreboot_ram again*/
"movl $0xC0010010, %%ecx\n\t"
// "movl $SYSCFG_MSR, %ecx\n\t" msr = rdmsr(SYSCFG_MSR);
"rdmsr\n\t" msr.lo &= ~(SYSCFG_MSR_MtrrFixDramEn | SYSCFG_MSR_MtrrFixDramModEn);
"andl $(~(3<<18)), %%eax\n\t" wrmsr(SYSCFG_MSR, msr);
// "andl $(~(SYSCFG_MSR_MtrrFixDramModEn | SYSCFG_MSR_MtrrFixDramEn)), %eax\n\t"
"wrmsr\n\t"
/* Set the default memory type and disable fixed and enable variable MTRRs */ /* Set the default memory type and disable fixed and enable variable MTRRs */
"movl $0x2ff, %%ecx\n\t" msr.hi = 0;
// "movl $MTRRdefType_MSR, %ecx\n\t" msr.lo = (1 << 11);
"xorl %%edx, %%edx\n\t"
/* Enable Variable and Disable Fixed MTRRs */
"movl $0x00000800, %%eax\n\t"
"wrmsr\n\t"
/* enable cache */ wrmsr(MTRRdefType_MSR, msr);
"movl %%cr0, %%eax\n\t"
"andl $0x9fffffff,%%eax\n\t" enable_cache();
"movl %%eax, %%cr0\n\t"
::: "memory", "eax", "ecx", "edx"
);
} }
static void disable_cache_as_ram_bsp(void) static void disable_cache_as_ram_bsp(void)
{ {
disable_cache_as_ram(); disable_cache_as_ram();
} }

View File

@ -20,16 +20,19 @@
#ifndef CPU_X86_CACHE #ifndef CPU_X86_CACHE
#define CPU_X86_CACHE #define CPU_X86_CACHE
/* the memory clobber prevents the GCC from reordering the read/write order
of CR0 */
static inline unsigned long read_cr0(void) static inline unsigned long read_cr0(void)
{ {
unsigned long cr0; unsigned long cr0;
asm volatile ("movl %%cr0, %0" : "=r" (cr0)); asm volatile ("movl %%cr0, %0" : "=r" (cr0) :: "memory");
return cr0; return cr0;
} }
static inline void write_cr0(unsigned long cr0) static inline void write_cr0(unsigned long cr0)
{ {
asm volatile ("movl %0, %%cr0" : : "r" (cr0)); asm volatile ("movl %0, %%cr0" : : "r" (cr0) : "memory");
} }
static inline void invd(void) static inline void invd(void)
@ -39,7 +42,7 @@ static inline void invd(void)
static inline void wbinvd(void) static inline void wbinvd(void)
{ {
asm volatile ("wbinvd"); asm volatile ("wbinvd" ::: "memory");
} }
static inline void enable_cache(void) static inline void enable_cache(void)