cpu/intel/car/non-evict/cache_as_ram.S: Add support for longmode

* Use heap for linker script calculated constant to fix relocation
  symbols in mixed assembly code.

Tested on HPZ220:
* Still boots in x86_32.

Tested on Lenovo T410:
* Doesn't need the MMX register fix in long mode.

Change-Id: I3e72a0bebf728fb678308006ea3a3aeb92910a84
Signed-off-by: Patrick Rudolph <patrick.rudolph@9elements.com>
Reviewed-on: https://review.coreboot.org/c/coreboot/+/44673
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
Reviewed-by: Angel Pons <th3fanbus@gmail.com>
Reviewed-by: Arthur Heymans <arthur@aheymans.xyz>
This commit is contained in:
Patrick Rudolph 2020-09-24 18:57:43 +02:00 committed by Patrick Georgi
parent f9bc5baee5
commit 49da0cfe46
2 changed files with 39 additions and 6 deletions

View File

@ -0,0 +1,20 @@
/* SPDX-License-Identifier: GPL-2.0-only */
/* Trick the linker into supporting x86_64 relocations in 32bit code */
#if ENV_X86_64
#define uintptr_t quad
#else
#define uintptr_t long
#endif
/*
 * Data cells holding linker-script-calculated constants (the leading
 * underscore names are provided by the linker script — see commit
 * message; confirm against memlayout). Keeping the values in memory
 * words instead of instruction immediates lets the same assembly build
 * for both x86_32 and x86_64: the C preprocessor expands ".uintptr_t"
 * to ".long" (32-bit) or ".quad" (64-bit) via the macro above, so each
 * cell always matches the target pointer size and relocation type.
 */
/* MTRR mask covering the XIP flash mapping */
rom_mtrr_mask:
.uintptr_t _rom_mtrr_mask
/* MTRR base of the XIP flash mapping */
rom_mtrr_base:
.uintptr_t _rom_mtrr_base
/* MTRR mask covering the Cache-as-RAM region */
car_mtrr_mask:
.uintptr_t _car_mtrr_mask
/* Start address of the Cache-as-RAM region */
car_mtrr_start:
.uintptr_t _car_mtrr_start

View File

@ -9,6 +9,8 @@
.global bootblock_pre_c_entry
#include <cpu/intel/car/cache_as_ram_symbols.inc>
.code32
_cache_as_ram_setup:
@ -83,11 +85,10 @@ addrsize_set_high:
movl $MTRR_PHYS_MASK(1), %ecx
wrmsr
post_code(0x23)
/* Set Cache-as-RAM base address. */
movl $(MTRR_PHYS_BASE(0)), %ecx
movl car_mtrr_start, %eax
orl $MTRR_TYPE_WRBACK, %eax
xorl %edx, %edx
wrmsr
@ -96,20 +97,20 @@ addrsize_set_high:
/* Set Cache-as-RAM mask. */
movl $(MTRR_PHYS_MASK(0)), %ecx
rdmsr
mov car_mtrr_mask, %eax
orl $MTRR_PHYS_MASK_VALID, %eax
wrmsr
/* Enable cache for our code in Flash because we do XIP here */
movl $MTRR_PHYS_BASE(1), %ecx
xorl %edx, %edx
mov rom_mtrr_base, %eax
orl $MTRR_TYPE_WRPROT, %eax
wrmsr
movl $MTRR_PHYS_MASK(1), %ecx
rdmsr
mov rom_mtrr_mask, %eax
orl $MTRR_PHYS_MASK_VALID, %eax
wrmsr
@ -207,8 +208,19 @@ end_microcode_update:
/* Need to align stack to 16 bytes at call instruction. Account for
   the pushes below. */
andl $0xfffffff0, %esp
subl $4, %esp
#if ENV_X86_64
#include <cpu/x86/64bit/entry64.inc>
movd %mm2, %rdi
shlq $32, %rdi
movd %mm1, %rsi
or %rsi, %rdi
movd %mm0, %rsi
#else
subl $4, %esp
/* push TSC and BIST to stack */
movd %mm0, %eax
pushl %eax /* BIST */
@ -216,6 +228,7 @@ end_microcode_update:
pushl %eax /* tsc[63:32] */
movd %mm1, %eax
pushl %eax /* tsc[31:0] */
#endif
before_c_entry:
post_code(0x29)