The following patch reworks car_disable into C. Tested; it works here. I also
compared the GCC-generated code and it looks all right. Please test on a
multicore CPU.

I added the "memory" clobber to the read_cr0 / write_cr0 functions, as is done
in the Linux kernel. It seems that if this is missing, GCC is too smart and
reorders the reads/writes to CR0 (not verified to be a real problem here, but
better to be safe for future users of these functions). ;)

Signed-off-by: Rudolf Marek <r.marek@assembler.cz>
Acked-by: Stefan Reinauer <stepan@coresystems.de>



git-svn-id: svn://svn.coreboot.org/coreboot/trunk@5562 2b7e53f0-3cfb-0310-b3e9-8179ed1497e1
This commit is contained in:
Rudolf Marek 2010-05-16 21:51:34 +00:00
parent 4bb368cc73
commit beba99045c
2 changed files with 45 additions and 37 deletions

View File

@ -1,50 +1,55 @@
/* by yhlu 6.2005 */
/* be warned, this file will be used by other cores and core 0 / node 0 */
/*
* This file is part of the coreboot project.
*
* original idea yhlu 6.2005 (assembler code)
*
* Copyright (C) 2010 Rudolf Marek <r.marek@assembler.cz>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 2 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* be warned, this file will be used by other cores and core 0 / node 0
*/
static inline __attribute__((always_inline)) void disable_cache_as_ram(void)
{
__asm__ __volatile__ (
/* We don't need cache as ram for now on */
msr_t msr;
/* disable cache */
"movl %%cr0, %%eax\n\t"
"orl $(0x1<<30),%%eax\n\t"
"movl %%eax, %%cr0\n\t"
write_cr0(read_cr0() | (1 << 30));
/* clear sth */
"movl $0x269, %%ecx\n\t" /* fix4k_c8000*/
"xorl %%edx, %%edx\n\t"
"xorl %%eax, %%eax\n\t"
"wrmsr\n\t"
msr.lo = 0;
msr.hi = 0;
wrmsr(MTRRfix4K_C8000_MSR, msr);
#if CONFIG_DCACHE_RAM_SIZE > 0x8000
"movl $0x268, %%ecx\n\t" /* fix4k_c0000*/
"wrmsr\n\t"
wrmsr(MTRRfix4K_C0000_MSR, msr);
#endif
/* disable fixed mtrr from now on, it will be enabled by coreboot_ram again*/
"movl $0xC0010010, %%ecx\n\t"
// "movl $SYSCFG_MSR, %ecx\n\t"
"rdmsr\n\t"
"andl $(~(3<<18)), %%eax\n\t"
// "andl $(~(SYSCFG_MSR_MtrrFixDramModEn | SYSCFG_MSR_MtrrFixDramEn)), %eax\n\t"
"wrmsr\n\t"
msr = rdmsr(SYSCFG_MSR);
msr.lo &= ~(SYSCFG_MSR_MtrrFixDramEn | SYSCFG_MSR_MtrrFixDramModEn);
wrmsr(SYSCFG_MSR, msr);
/* Set the default memory type and disable fixed and enable variable MTRRs */
"movl $0x2ff, %%ecx\n\t"
// "movl $MTRRdefType_MSR, %ecx\n\t"
"xorl %%edx, %%edx\n\t"
/* Enable Variable and Disable Fixed MTRRs */
"movl $0x00000800, %%eax\n\t"
"wrmsr\n\t"
msr.hi = 0;
msr.lo = (1 << 11);
/* enable cache */
"movl %%cr0, %%eax\n\t"
"andl $0x9fffffff,%%eax\n\t"
"movl %%eax, %%cr0\n\t"
::: "memory", "eax", "ecx", "edx"
);
wrmsr(MTRRdefType_MSR, msr);
enable_cache();
}
/* Disable CAR on the bootstrap processor; same sequence as any other core. */
static void disable_cache_as_ram_bsp(void)
{
	disable_cache_as_ram();
}

View File

@ -20,16 +20,19 @@
#ifndef CPU_X86_CACHE
#define CPU_X86_CACHE
/* the memory clobber prevents the GCC from reordering the read/write order
of CR0 */
/*
 * Read the CR0 control register.
 *
 * The "memory" clobber (added by this commit) prevents GCC from
 * reordering this read against surrounding memory accesses; the diff
 * had left both the old (no-clobber) and new asm lines in place, so
 * only the new one is kept here.
 */
static inline unsigned long read_cr0(void)
{
	unsigned long cr0;
	asm volatile ("movl %%cr0, %0" : "=r" (cr0) :: "memory");
	return cr0;
}
/*
 * Write the CR0 control register.
 *
 * The "memory" clobber keeps GCC from reordering this write relative
 * to other reads/writes of CR0 (see the comment above read_cr0); the
 * duplicated pre-commit asm line from the diff is removed.
 */
static inline void write_cr0(unsigned long cr0)
{
	asm volatile ("movl %0, %%cr0" : : "r" (cr0) : "memory");
}
static inline void invd(void)
@ -39,7 +42,7 @@ static inline void invd(void)
/*
 * Write back and invalidate the caches (WBINVD).
 *
 * The "memory" clobber tells GCC the instruction affects memory, so
 * cached values are not kept in registers across it; the duplicated
 * pre-commit asm line from the diff is removed.
 */
static inline void wbinvd(void)
{
	asm volatile ("wbinvd" ::: "memory");
}
static inline void enable_cache(void)