Update the Atom CAR (Cache-as-RAM) code in the same way that the 6ex/6fx code was updated.

Signed-off-by: Stefan Reinauer <stepan@coresystems.de>
Acked-by: Stefan Reinauer <stepan@coresystems.de>



git-svn-id: svn://svn.coreboot.org/coreboot/trunk@5415 2b7e53f0-3cfb-0310-b3e9-8179ed1497e1
This commit is contained in:
Stefan Reinauer 2010-04-13 00:11:59 +00:00 committed by Stefan Reinauer
parent 6d1b0d84f2
commit 170679b9dd
4 changed files with 136 additions and 235 deletions

View File

@ -35,8 +35,6 @@ cache_as_ram:
movl $0xFEE00300, %esi movl $0xFEE00300, %esi
movl %eax, (%esi) movl %eax, (%esi)
post_code(0x21)
/* Zero out all Fixed Range and Variable Range MTRRs */ /* Zero out all Fixed Range and Variable Range MTRRs */
movl $mtrr_table, %esi movl $mtrr_table, %esi
movl $( (mtrr_table_end - mtrr_table) / 2), %edi movl $( (mtrr_table_end - mtrr_table) / 2), %edi
@ -49,7 +47,6 @@ clear_mtrrs:
add $2, %esi add $2, %esi
dec %edi dec %edi
jnz clear_mtrrs jnz clear_mtrrs
post_code(0x22)
/* Configure the default memory type to uncacheable */ /* Configure the default memory type to uncacheable */
movl $MTRRdefType_MSR, %ecx movl $MTRRdefType_MSR, %ecx
@ -57,42 +54,36 @@ clear_mtrrs:
andl $(~0x00000cff), %eax andl $(~0x00000cff), %eax
wrmsr wrmsr
post_code(0x23)
/* Set cache as ram base address */ /* Set cache as ram base address */
movl $(MTRRphysBase_MSR(0)), %ecx movl $(MTRRphysBase_MSR(0)), %ecx
movl $(CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK), %eax movl $(CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK), %eax
xorl %edx, %edx xorl %edx, %edx
wrmsr wrmsr
post_code(0x24)
/* Set cache as ram mask */ /* Set cache as ram mask */
movl $(MTRRphysMask_MSR(0)), %ecx movl $(MTRRphysMask_MSR(0)), %ecx
movl $(~((CACHE_AS_RAM_SIZE-1)) | (1 << 11)), %eax movl $(~((CACHE_AS_RAM_SIZE-1)) | (1 << 11)), %eax
xorl %edx, %edx xorl %edx, %edx
wrmsr wrmsr
post_code(0x25)
/* Enable MTRR */ /* Enable MTRR */
movl $MTRRdefType_MSR, %ecx movl $MTRRdefType_MSR, %ecx
rdmsr rdmsr
orl $(1 << 11), %eax orl $(1 << 11), %eax
wrmsr wrmsr
post_code(0x26)
/* Enable L2 Cache */ /* Enable L2 Cache */
movl $0x11e, %ecx movl $0x11e, %ecx
rdmsr rdmsr
orl $(1 << 8), %eax orl $(1 << 8), %eax
wrmsr wrmsr
post_code(0x27)
/* CR0.CD = 0, CR0.NW = 0 */ /* CR0.CD = 0, CR0.NW = 0 */
movl %cr0, %eax movl %cr0, %eax
andl $( ~( (1 << 30) | (1 << 29) ) ), %eax andl $( ~( (1 << 30) | (1 << 29) ) ), %eax
invd invd
movl %eax, %cr0 movl %eax, %cr0
post_code(0x28)
/* Clear the cache memory region */ /* Clear the cache memory region */
movl $CACHE_AS_RAM_BASE, %esi movl $CACHE_AS_RAM_BASE, %esi
movl %esi, %edi movl %esi, %edi
@ -101,7 +92,6 @@ clear_mtrrs:
xorl %eax, %eax xorl %eax, %eax
rep stosl rep stosl
post_code(0x29)
/* Enable Cache As RAM mode by disabling cache */ /* Enable Cache As RAM mode by disabling cache */
movl %cr0, %eax movl %cr0, %eax
orl $(1 << 30), %eax orl $(1 << 30), %eax
@ -126,7 +116,6 @@ clear_mtrrs:
wrmsr wrmsr
#endif /* CONFIG_XIP_ROM_SIZE && CONFIG_XIP_ROM_BASE */ #endif /* CONFIG_XIP_ROM_SIZE && CONFIG_XIP_ROM_BASE */
post_code(0x2a)
/* enable cache */ /* enable cache */
movl %cr0, %eax movl %cr0, %eax
andl $( ~( (1 << 30) | (1 << 29) ) ), %eax andl $( ~( (1 << 30) | (1 << 29) ) ), %eax
@ -148,12 +137,143 @@ clear_mtrrs:
post_code(0x23) post_code(0x23)
call stage1_main /* Call romstage.c main function */
call main
post_code(0x2f) post_code(0x2f)
error:
post_code(0x30)
/* Disable Cache */
movl %cr0, %eax
orl $(1 << 30), %eax
movl %eax, %cr0
post_code(0x31)
/* Disable MTRR */
movl $MTRRdefType_MSR, %ecx
rdmsr
andl $(~(1 << 11)), %eax
wrmsr
post_code(0x31)
invd
#if 0
xorl %eax, %eax
xorl %edx, %edx
movl $MTRRphysBase_MSR(0), %ecx
wrmsr
movl $MTRRphysMask_MSR(0), %ecx
wrmsr
movl $MTRRphysBase_MSR(1), %ecx
wrmsr
movl $MTRRphysMask_MSR(1), %ecx
wrmsr
#endif
post_code(0x33)
#undef CLEAR_FIRST_1M_RAM
#ifdef CLEAR_FIRST_1M_RAM
post_code(0x34)
/* Enable Write Combining and Speculative Reads for the first 1MB */
movl $MTRRphysBase_MSR(0), %ecx
movl $(0x00000000 | MTRR_TYPE_WRCOMB), %eax
xorl %edx, %edx
wrmsr
movl $MTRRphysMask_MSR(0), %ecx
movl $(~(1024*1024 -1) | (1 << 11)), %eax
xorl %edx, %edx
wrmsr
post_code(0x35)
#endif
/* Enable Cache */
movl %cr0, %eax
andl $~( (1 << 30) | (1 << 29) ), %eax
movl %eax, %cr0
post_code(0x36)
#ifdef CLEAR_FIRST_1M_RAM
/* Clear first 1MB of RAM */
movl $0x00000000, %edi
cld
xorl %eax, %eax
movl $((1024*1024) / 4), %ecx
rep stosl
post_code(0x37)
#endif
/* Disable Cache */
movl %cr0, %eax
orl $(1 << 30), %eax
movl %eax, %cr0
post_code(0x38)
/* Enable Write Back and Speculative Reads for the first 1MB */
movl $MTRRphysBase_MSR(0), %ecx
movl $(0x00000000 | MTRR_TYPE_WRBACK), %eax
xorl %edx, %edx
wrmsr
movl $MTRRphysMask_MSR(0), %ecx
movl $(~(1024*1024 -1) | (1 << 11)), %eax
xorl %edx, %edx
wrmsr
post_code(0x39)
/* And Enable Cache again after setting MTRRs */
movl %cr0, %eax
andl $~( (1 << 30) | (1 << 29) ), %eax
movl %eax, %cr0
post_code(0x3a)
/* Enable MTRR */
movl $MTRRdefType_MSR, %ecx
rdmsr
orl $(1 << 11), %eax
wrmsr
post_code(0x3b)
/* Invalidate the cache again */
invd
post_code(0x3c)
/* clear boot_complete flag */
xorl %ebp, %ebp
__main:
post_code(0x11)
cld /* clear direction flag */
movl %ebp, %esi
/* For now: use CONFIG_RAMBASE + 1MB - 64K (counting downwards) as stack. This
* makes sure that we stay completely within the 1M-64K of memory that we
* preserve for suspend/resume.
*/
#ifndef HIGH_MEMORY_SAVE
#warning Need a central place for HIGH_MEMORY_SAVE
#define HIGH_MEMORY_SAVE ( (1024 - 64) * 1024 )
#endif
movl $(CONFIG_RAMBASE + HIGH_MEMORY_SAVE), %esp
movl %esp, %ebp
pushl %esi
call copy_and_run
.Lhlt:
post_code(0xee)
hlt hlt
jmp error jmp .Lhlt
mtrr_table: mtrr_table:
/* Fixed MTRRs */ /* Fixed MTRRs */

View File

@ -1,94 +0,0 @@
/*
* This file is part of the coreboot project.
*
* Copyright (C) 2007-2008 coresystems GmbH
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; version 2 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
/* called from assembler code */
void stage1_main(unsigned long bist);
/* from romstage.c */
void real_main(unsigned long bist);
/*
 * C-level Cache-as-RAM (CAR) stage entry point.
 *
 * Called from the assembly startup code with the BIST (built-in self
 * test) result in 'bist'.  Runs the board's real_main(), then tears
 * down CAR by textually including cache_as_ram_post.c (with or without
 * CLEAR_FIRST_1M_RAM defined), switches %esp/%ebp to a real RAM stack,
 * and finally jumps to the RAM stage via copy_and_run().  Does not
 * return.
 *
 * NOTE(review): cpu_reset is initialized to 0 and never reassigned in
 * this function, so the cpu_reset != 0 paths below appear dead as
 * written — presumably a remnant of an earlier resume/reset flow;
 * verify against the callers before relying on either branch.
 */
void stage1_main(unsigned long bist)
{
unsigned int cpu_reset = 0;
real_main(bist);
/* No serviceable parts below this line .. */
{
/* Check value of esp to verify if we have enough rom for stack in Cache as RAM */
unsigned v_esp;
/* Read the current stack pointer into a C variable (via %eax). */
__asm__ volatile (
"movl %%esp, %0\n\t"
: "=a" (v_esp)
);
printk(BIOS_SPEW, "v_esp=%08x\n", v_esp);
}
printk(BIOS_SPEW, "cpu_reset = %08x\n",cpu_reset);
if(cpu_reset == 0) {
print_spew("Clearing initial memory region: ");
}
print_spew("No cache as ram now - ");
/* store cpu_reset to ebx */
/* %ebx must survive the CAR teardown below, which clobbers the
 * C environment (stack contents become unreliable once caching
 * is reconfigured), so the value is parked in a register. */
__asm__ volatile (
"movl %0, %%ebx\n\t"
::"a" (cpu_reset)
);
/* Tear down CAR.  The included file is pure inline asm; the macro
 * controls whether the first 1MB of RAM is cleared during teardown. */
if(cpu_reset==0) {
#define CLEAR_FIRST_1M_RAM 1
#include "cache_as_ram_post.c"
} else {
#undef CLEAR_FIRST_1M_RAM
#include "cache_as_ram_post.c"
}
/* Switch to the real RAM stack at RAMBASE + 1MB - 64KB; the old
 * CAR stack is gone after the teardown above. */
__asm__ volatile (
/* set new esp */
"movl %0, %%ebp\n\t"
"movl %0, %%esp\n\t"
::"a"( CONFIG_RAMBASE + (1024-64)*1024 )
);
{
unsigned new_cpu_reset;
/* get back cpu_reset from ebx */
__asm__ volatile (
"movl %%ebx, %0\n\t"
:"=a" (new_cpu_reset)
);
#ifdef CONFIG_DEACTIVATE_CAR
print_debug("Deactivating CAR");
#include CONFIG_DEACTIVATE_CAR_FILE
print_debug(" - Done.\n");
#endif
/* Copy and execute coreboot_ram */
copy_and_run(new_cpu_reset);
/* We will not return */
}
print_debug("sorry. parachute did not open.\n");
}

View File

@ -1,123 +0,0 @@
/*
* This file is part of the coreboot project.
*
* Copyright (C) 2007-2008 coresystems GmbH
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; version 2 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
 * Cache-as-RAM teardown, emitted as one raw inline-asm block so no C
 * stack/register state is needed while caching is reconfigured.
 * Sequence: disable cache (CR0.CD) -> disable MTRRs -> invd ->
 * optionally clear the first 1MB (CLEAR_FIRST_1M_RAM) under a
 * write-combining MTRR -> re-program MTRR0 as write-back over the
 * first 1MB -> re-enable cache and MTRRs -> invd.  Progress is traced
 * via POST codes 0x30..0x3c on port 0x80.
 *
 * NOTE(review): the C preprocessor does not expand macros inside
 * string literals, so names like MTRRdefType_MSR / MTRR_TYPE_WRCOMB
 * here reach the assembler verbatim and must be resolvable as
 * assembler symbols at that point — confirm against the build setup.
 *
 * NOTE(review): each "movb $0xNN, %al / outb" POST-code pair clobbers
 * %al, which is harmless only because %eax is reloaded before every
 * use below.
 */
__asm__ volatile (
"movb $0x30, %al\noutb %al, $0x80\n"
/* Disable Cache */
"movl %cr0, %eax\n"
"orl $(1 << 30), %eax\n"
"movl %eax, %cr0\n"
"movb $0x31, %al\noutb %al, $0x80\n"
/* Disable MTRR */
"movl $MTRRdefType_MSR, %ecx\n"
"rdmsr\n"
"andl $(~(1 << 11)), %eax\n"
"wrmsr\n"
"movb $0x32, %al\noutb %al, $0x80\n"
"invd\n"
#if 0
"xorl %eax, %eax\n"
"xorl %edx, %edx\n"
"movl $MTRRphysBase_MSR(0), %ecx\n"
"wrmsr\n"
"movl $MTRRphysMask_MSR(0), %ecx\n"
"wrmsr\n"
"movl $MTRRphysBase_MSR(1), %ecx\n"
"wrmsr\n"
"movl $MTRRphysMask_MSR(1), %ecx\n"
"wrmsr\n"
#endif
"movb $0x33, %al\noutb %al, $0x80\n"
#ifdef CLEAR_FIRST_1M_RAM
"movb $0x34, %al\noutb %al, $0x80\n"
/* Enable Write Combining and Speculative Reads for the first 1MB */
"movl $MTRRphysBase_MSR(0), %ecx\n"
"movl $(0x00000000 | MTRR_TYPE_WRCOMB), %eax\n"
"xorl %edx, %edx\n"
"wrmsr\n"
"movl $MTRRphysMask_MSR(0), %ecx\n"
"movl $(~(1024*1024 -1) | (1 << 11)), %eax\n"
"xorl %edx, %edx\n"
"wrmsr\n"
"movb $0x35, %al\noutb %al, $0x80\n"
#endif
/* Enable Cache */
"movl %cr0, %eax\n"
"andl $~( (1 << 30) | (1 << 29) ), %eax\n"
"movl %eax, %cr0\n"
"movb $0x36, %al\noutb %al, $0x80\n"
#ifdef CLEAR_FIRST_1M_RAM
/* Clear first 1MB of RAM */
"movl $0x00000000, %edi\n"
"cld\n"
"xorl %eax, %eax\n"
"movl $((1024*1024) / 4), %ecx\n"
"rep stosl\n"
"movb $0x37, %al\noutb %al, $0x80\n"
#endif
/* Disable Cache */
"movl %cr0, %eax\n"
"orl $(1 << 30), %eax\n"
"movl %eax, %cr0\n"
"movb $0x38, %al\noutb %al, $0x80\n"
/* Enable Write Back and Speculative Reads for the first 1MB */
"movl $MTRRphysBase_MSR(0), %ecx\n"
"movl $(0x00000000 | MTRR_TYPE_WRBACK), %eax\n"
"xorl %edx, %edx\n"
"wrmsr\n"
"movl $MTRRphysMask_MSR(0), %ecx\n"
"movl $(~(1024*1024 -1) | (1 << 11)), %eax\n"
"xorl %edx, %edx\n"
"wrmsr\n"
"movb $0x39, %al\noutb %al, $0x80\n"
/* And Enable Cache again after setting MTRRs */
"movl %cr0, %eax\n"
"andl $~( (1 << 30) | (1 << 29) ), %eax\n"
"movl %eax, %cr0\n"
"movb $0x3a, %al\noutb %al, $0x80\n"
/* Enable MTRR */
"movl $MTRRdefType_MSR, %ecx\n"
"rdmsr\n"
"orl $(1 << 11), %eax\n"
"wrmsr\n"
"movb $0x3b, %al\noutb %al, $0x80\n"
/* Invalidate the cache again */
"invd\n"
"movb $0x3c, %al\noutb %al, $0x80\n"
);

View File

@ -223,9 +223,7 @@ static void early_ich7_init(void)
// //
#include "lib/cbmem.c" #include "lib/cbmem.c"
#include "cpu/intel/model_106cx/cache_as_ram_disable.c" void main(unsigned long bist)
void real_main(unsigned long bist)
{ {
u32 reg32; u32 reg32;
int boot_mode = 0; int boot_mode = 0;