Port the latest model 6ex CAR changes over to the model 6fx CAR code, which is almost identical and currently unused. Just keep it in sync; we might need it some day.

Signed-off-by: Stefan Reinauer <stepan@coresystems.de>
Acked-by: Stefan Reinauer <stepan@coresystems.de>

git-svn-id: svn://svn.coreboot.org/coreboot/trunk@5413 2b7e53f0-3cfb-0310-b3e9-8179ed1497e1
parent 3e1f524566
commit 1977b891c5
@@ -246,6 +246,7 @@ clear_mtrrs:
	/* Invalidate the cache again */
	invd

	post_code(0x3c)

	/* clear boot_complete flag */
	xorl %ebp, %ebp
@@ -144,12 +144,150 @@ clear_mtrrs:

	post_code(0x23)

	call stage1_main
	/* Call romstage.c main function */
	call main

	post_code(0x2f)
error:
	post_code(0x30)

	/* Disable Cache */
	movl %cr0, %eax
	orl $(1 << 30), %eax
	movl %eax, %cr0

	post_code(0x31)

	/* Disable MTRR */
	movl $MTRRdefType_MSR, %ecx
	rdmsr
	andl $(~(1 << 11)), %eax
	wrmsr

	post_code(0x31)

	invd
#if 0
	xorl %eax, %eax
	xorl %edx, %edx
	movl $MTRRphysBase_MSR(0), %ecx
	wrmsr
	movl $MTRRphysMask_MSR(0), %ecx
	wrmsr
	movl $MTRRphysBase_MSR(1), %ecx
	wrmsr
	movl $MTRRphysMask_MSR(1), %ecx
	wrmsr
#endif

	post_code(0x33)

#undef CLEAR_FIRST_1M_RAM
#ifdef CLEAR_FIRST_1M_RAM
	post_code(0x34)
	/* Enable Write Combining and Speculative Reads for the first 1MB */
	movl $MTRRphysBase_MSR(0), %ecx
	movl $(0x00000000 | MTRR_TYPE_WRCOMB), %eax
	xorl %edx, %edx
	wrmsr
	movl $MTRRphysMask_MSR(0), %ecx
	movl $(~(1024*1024 -1) | (1 << 11)), %eax
	movl $0x0000000f, %edx // 36bit address space
	wrmsr
	post_code(0x35)
#endif

	/* Enable Cache */
	movl %cr0, %eax
	andl $~( (1 << 30) | (1 << 29) ), %eax
	movl %eax, %cr0

	post_code(0x36)
#ifdef CLEAR_FIRST_1M_RAM

	/* Clear first 1MB of RAM */
	movl $0x00000000, %edi
	cld
	xorl %eax, %eax
	movl $((1024*1024) / 4), %ecx
	rep stosl

	post_code(0x37)
#endif

	/* Disable Cache */
	movl %cr0, %eax
	orl $(1 << 30), %eax
	movl %eax, %cr0

	post_code(0x38)

	/* Enable Write Back and Speculative Reads for the first 1MB */
	movl $MTRRphysBase_MSR(0), %ecx
	movl $(0x00000000 | MTRR_TYPE_WRBACK), %eax
	xorl %edx, %edx
	wrmsr
	movl $MTRRphysMask_MSR(0), %ecx
	movl $(~(1024*1024 -1) | (1 << 11)), %eax
	movl $0x0000000f, %edx // 36bit address space
	wrmsr

	post_code(0x39)

	/* And Enable Cache again after setting MTRRs */
	movl %cr0, %eax
	andl $~( (1 << 30) | (1 << 29) ), %eax
	movl %eax, %cr0

	post_code(0x3a)

	/* Enable MTRR */
	movl $MTRRdefType_MSR, %ecx
	rdmsr
	orl $(1 << 11), %eax
	wrmsr

	post_code(0x3b)

	/* Enable prefetchers */
	movl $0x01a0, %ecx
	rdmsr
	andl $~((1 << 9) | (1 << 19)), %eax
	andl $~((1 << 5) | (1 << 7)), %edx
	wrmsr

	/* Invalidate the cache again */
	invd

	post_code(0x3c)

	/* clear boot_complete flag */
	xorl %ebp, %ebp
__main:
	post_code(0x11)
	cld			/* clear direction flag */

	movl %ebp, %esi

	/* For now: use CONFIG_RAMBASE + 1MB - 64K (counting downwards) as stack. This
	 * makes sure that we stay completely within the 1M-64K of memory that we
	 * preserve for suspend/resume.
	 */

#ifndef HIGH_MEMORY_SAVE
#warning Need a central place for HIGH_MEMORY_SAVE
#define HIGH_MEMORY_SAVE ( (1024 - 64) * 1024 )
#endif
	movl $(CONFIG_RAMBASE + HIGH_MEMORY_SAVE), %esp
	movl %esp, %ebp
	pushl %esi
	call copy_and_run

.Lhlt:
	post_code(0xee)
	hlt
	jmp error
	jmp .Lhlt

mtrr_table:
	/* Fixed MTRRs */
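The stack-placement comment above involves a little arithmetic that is easy to check. Below is a minimal C sketch, assuming a hypothetical CONFIG_RAMBASE of 0x00100000 (1 MiB); the real value comes from the board's Kconfig and is not part of this commit.

#include <stdio.h>

/* Illustration only: CONFIG_RAMBASE is normally supplied by the build
 * system; 0x00100000 is an assumed value. */
#define CONFIG_RAMBASE   0x00100000
#define HIGH_MEMORY_SAVE ((1024 - 64) * 1024)	/* 1 MiB - 64 KiB = 0xF0000 */

int main(void)
{
	/* Same expression the asm loads into %esp before calling copy_and_run. */
	unsigned long stack_top = CONFIG_RAMBASE + HIGH_MEMORY_SAVE;

	/* With the assumed base this prints 0x1f0000: the stack grows down
	 * from there, staying out of the top 64 KiB of the preserved 1 MiB
	 * kept for suspend/resume. */
	printf("initial esp = %#lx\n", stack_top);
	return 0;
}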
@@ -1,88 +0,0 @@
/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2007-2009 coresystems GmbH
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; version 2 of
 * the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston,
 * MA 02110-1301 USA
 */

void real_main(unsigned long bist);

void stage1_main(unsigned long bist)
{
	unsigned int cpu_reset = 0;

	real_main(bist);

	/* No serviceable parts below this line ... */

#ifdef CAR_DEBUG
	/* Check value of esp to verify if we have enough rom for stack in Cache as RAM */
	unsigned v_esp;
	__asm__ volatile (
		"movl %%esp, %0\n"
		: "=a" (v_esp)
	);
	printk(BIOS_SPEW, "v_esp=%08x\n", v_esp);
#endif

cpu_reset_x:

	printk(BIOS_SPEW, "cpu_reset = %08x\n", cpu_reset);
	printk(BIOS_SPEW, "No cache as ram now - ");

	/* store cpu_reset to ebx */
	__asm__ volatile (
		"movl %0, %%ebx\n\t"
		::"a" (cpu_reset)
	);

#undef CLEAR_FIRST_1M_RAM
#include "cache_as_ram_post.c"

	/* For now: use rambase + 1MB - 64K (counting downwards) as stack. This
	 * makes sure that we stay completely within the 1M of memory we
	 * preserve with the memcpy above.
	 */

#ifndef HIGH_MEMORY_SAVE
#define HIGH_MEMORY_SAVE ( (1024 - 64) * 1024 )
#endif

	__asm__ volatile (
		"movl %0, %%ebp\n"
		"movl %0, %%esp\n"
		:: "a" (CONFIG_RAMBASE + HIGH_MEMORY_SAVE)
	);

	{
		unsigned new_cpu_reset;

		/* get back cpu_reset from ebx */
		__asm__ volatile (
			"movl %%ebx, %0\n"
			:"=a" (new_cpu_reset)
		);

		/* Copy and execute coreboot_ram */
		copy_and_run(new_cpu_reset);
	}

	/* We will not return */
	printk(BIOS_DEBUG, "sorry. parachute did not open.\n");
}
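The %ebx round-trip in stage1_main() above relies on the included CAR-teardown asm leaving %ebx untouched while it rewrites the other scratch registers and switches the stack. A minimal standalone sketch of the same pattern follows (hypothetical example, not coreboot code; it assumes a build where %ebx is free to clobber, i.e. not a 32-bit PIC build):

#include <stdio.h>

int main(void)
{
	unsigned int cpu_reset = 0x1234, restored = 0;

	/* Park the value in %ebx... */
	__asm__ volatile ("movl %0, %%ebx" :: "a" (cpu_reset) : "ebx");

	/* ...anything executed here must leave %ebx alone, just as the
	 * included cache_as_ram_post.c asm does in the original... */

	/* ...then fetch it back after the stack switch. */
	__asm__ volatile ("movl %%ebx, %0" : "=a" (restored));

	printf("restored = %#x\n", restored);	/* prints 0x1234 */
	return 0;
}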
@@ -1,132 +0,0 @@
/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2007-2008 coresystems GmbH
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; version 2 of
 * the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston,
 * MA 02110-1301 USA
 */

	__asm__ volatile (

	"movb $0x30, %al\noutb %al, $0x80\n"

	/* Disable Cache */
	"movl %cr0, %eax\n"
	"orl $(1 << 30), %eax\n"
	"movl %eax, %cr0\n"

	"movb $0x31, %al\noutb %al, $0x80\n"

	/* Disable MTRR */
	"movl $MTRRdefType_MSR, %ecx\n"
	"rdmsr\n"
	"andl $(~(1 << 11)), %eax\n"
	"wrmsr\n"

	"movb $0x32, %al\noutb %al, $0x80\n"

	"invd\n"
#if 0
	"xorl %eax, %eax\n"
	"xorl %edx, %edx\n"
	"movl $MTRRphysBase_MSR(0), %ecx\n"
	"wrmsr\n"
	"movl $MTRRphysMask_MSR(0), %ecx\n"
	"wrmsr\n"
	"movl $MTRRphysBase_MSR(1), %ecx\n"
	"wrmsr\n"
	"movl $MTRRphysMask_MSR(1), %ecx\n"
	"wrmsr\n"

	"movb $0x33, %al\noutb %al, $0x80\n"
#endif
#ifdef CLEAR_FIRST_1M_RAM
	"movb $0x34, %al\noutb %al, $0x80\n"
	/* Enable Write Combining and Speculative Reads for the first 1MB */
	"movl $MTRRphysBase_MSR(0), %ecx\n"
	"movl $(0x00000000 | MTRR_TYPE_WRCOMB), %eax\n"
	"xorl %edx, %edx\n"
	"wrmsr\n"
	"movl $MTRRphysMask_MSR(0), %ecx\n"
	"movl $(~(1024*1024 -1) | (1 << 11)), %eax\n"
	"movl $0x0000000f, %edx\n" // 36bit address space
	"wrmsr\n"
	"movb $0x35, %al\noutb %al, $0x80\n"
#endif

	/* Enable Cache */
	"movl %cr0, %eax\n"
	"andl $~( (1 << 30) | (1 << 29) ), %eax\n"
	"movl %eax, %cr0\n"

	"movb $0x36, %al\noutb %al, $0x80\n"
#ifdef CLEAR_FIRST_1M_RAM

	/* Clear first 1MB of RAM */
	"movl $0x00000000, %edi\n"
	"cld\n"
	"xorl %eax, %eax\n"
	"movl $((1024*1024) / 4), %ecx\n"
	"rep stosl\n"

	"movb $0x37, %al\noutb %al, $0x80\n"
#endif

	/* Disable Cache */
	"movl %cr0, %eax\n"
	"orl $(1 << 30), %eax\n"
	"movl %eax, %cr0\n"

	"movb $0x38, %al\noutb %al, $0x80\n"

	/* Enable Write Back and Speculative Reads for the first 1MB */
	"movl $MTRRphysBase_MSR(0), %ecx\n"
	"movl $(0x00000000 | MTRR_TYPE_WRBACK), %eax\n"
	"xorl %edx, %edx\n"
	"wrmsr\n"
	"movl $MTRRphysMask_MSR(0), %ecx\n"
	"movl $(~(1024*1024 -1) | (1 << 11)), %eax\n"
	"movl $0x0000000f, %edx // 36bit address space\n"
	"wrmsr\n"

	"movb $0x39, %al\noutb %al, $0x80\n"

	/* And Enable Cache again after setting MTRRs */
	"movl %cr0, %eax\n"
	"andl $~( (1 << 30) | (1 << 29) ), %eax\n"
	"movl %eax, %cr0\n"

	"movb $0x3a, %al\noutb %al, $0x80\n"

	/* Enable MTRR */
	"movl $MTRRdefType_MSR, %ecx\n"
	"rdmsr\n"
	"orl $(1 << 11), %eax\n"
	"wrmsr\n"

	"movb $0x3b, %al\noutb %al, $0x80\n"

	/* Enable prefetchers */
	"movl $0x01a0, %ecx\n"
	"rdmsr\n"
	"andl $~((1 << 9) | (1 << 19)), %eax\n"
	"andl $~((1 << 5) | (1 << 7)), %edx\n"
	"wrmsr\n"

	/* Invalidate the cache again */
	"invd\n"
	"movb $0x3c, %al\noutb %al, $0x80\n"
	);
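The repeated "movb $0xNN, %al\noutb %al, $0x80" pairs in this file are the inline-asm counterpart of the post_code() macro used in the .S version: each one writes a progress byte to I/O port 0x80 (the POST code port). A minimal sketch of such a helper in C is shown below; the name is hypothetical and the code only makes sense in a context with I/O privilege, such as firmware.

#include <stdint.h>

/* Hypothetical helper for illustration: write a POST/progress byte to
 * port 0x80, the same thing each movb/outb pair above does. */
static inline void post_code_sketch(uint8_t value)
{
	/* "a" places the byte in %al; "Nd" allows an immediate port (0-255)
	 * or %dx, so 0x80 is emitted as an immediate here. */
	__asm__ volatile ("outb %0, %1" :: "a" (value), "Nd" ((uint16_t)0x80));
}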