diff --git a/src/arch/x86/include/arch/cpu.h b/src/arch/x86/include/arch/cpu.h
index 3e50be4234..281565bef6 100644
--- a/src/arch/x86/include/arch/cpu.h
+++ b/src/arch/x86/include/arch/cpu.h
@@ -214,5 +214,6 @@ static inline void get_fms(struct cpuinfo_x86 *c, uint32_t tfms)
 #endif
 
 #define asmlinkage __attribute__((regparm(0)))
+#define alwaysinline inline __attribute__((always_inline))
 
 #endif /* ARCH_CPU_H */
diff --git a/src/cpu/x86/lapic/lapic_cpu_init.c b/src/cpu/x86/lapic/lapic_cpu_init.c
index 555187fd7e..2aef2a4bcd 100644
--- a/src/cpu/x86/lapic/lapic_cpu_init.c
+++ b/src/cpu/x86/lapic/lapic_cpu_init.c
@@ -20,6 +20,7 @@
  * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
  */
 
+#include <cpu/x86/cr.h>
 #include
 #include
 #include
@@ -401,26 +402,6 @@ void stop_this_cpu(void)
 }
 #endif
 
-#ifdef __SSE3__
-static __inline__ __attribute__((always_inline)) unsigned long readcr4(void)
-{
-        unsigned long value;
-        __asm__ __volatile__ (
-                "mov %%cr4, %[value]"
-                : [value] "=a" (value));
-        return value;
-}
-
-static __inline__ __attribute__((always_inline)) void writecr4(unsigned long Data)
-{
-        __asm__ __volatile__ (
-                "mov %%eax, %%cr4"
-                :
-                : "a" (Data)
-        );
-}
-#endif
-
 /* C entry point of secondary cpus */
 void asmlinkage secondary_cpu_init(unsigned int index)
 {
@@ -435,9 +416,9 @@ void asmlinkage secondary_cpu_init(unsigned int index)
         * Turn on CR4.OSFXSR and CR4.OSXMMEXCPT when SSE options enabled
         */
        u32 cr4_val;
-       cr4_val = readcr4();
-       cr4_val |= (1 << 9 | 1 << 10);
-       writecr4(cr4_val);
+       cr4_val = read_cr4();
+       cr4_val |= (CR4_OSFXSR | CR4_OSXMMEXCPT);
+       write_cr4(cr4_val);
 #endif
        cpu_initialize(index);
 #if CONFIG_SERIAL_CPU_INIT
diff --git a/src/include/cpu/x86/cache.h b/src/include/cpu/x86/cache.h
index a448228776..a4d976f156 100644
--- a/src/include/cpu/x86/cache.h
+++ b/src/include/cpu/x86/cache.h
@@ -20,8 +20,10 @@
 #ifndef CPU_X86_CACHE
 #define CPU_X86_CACHE
 
-#define CR0_CacheDisable (1 << 30)
-#define CR0_NoWriteThrough (1 << 29)
+#include <cpu/x86/cr.h>
+
+#define CR0_CacheDisable (CR0_CD)
+#define CR0_NoWriteThrough (CR0_NW)
 
 #if !defined(__ASSEMBLER__)
 
@@ -33,21 +35,6 @@
 
 #if defined(__GNUC__)
 
-/* The memory clobber prevents the GCC from reordering the read/write order
- * of CR0
- */
-static inline unsigned long read_cr0(void)
-{
-        unsigned long cr0;
-        asm volatile ("movl %%cr0, %0" : "=r" (cr0) :: "memory");
-        return cr0;
-}
-
-static inline void write_cr0(unsigned long cr0)
-{
-        asm volatile ("movl %0, %%cr0" : : "r" (cr0) : "memory");
-}
-
 static inline void wbinvd(void)
 {
         asm volatile ("wbinvd" ::: "memory");
@@ -55,18 +42,6 @@ static inline void wbinvd(void)
 
 #else
 
-static inline unsigned long read_cr0(void)
-{
-        unsigned long cr0;
-        asm volatile ("movl %%cr0, %0" : "=r" (cr0));
-        return cr0;
-}
-
-static inline void write_cr0(unsigned long cr0)
-{
-        asm volatile ("movl %0, %%cr0" : : "r" (cr0));
-}
-
 static inline void wbinvd(void)
 {
         asm volatile ("wbinvd");
@@ -93,7 +68,7 @@ static inline __attribute__((always_inline)) void enable_cache(void)
 {
         unsigned long cr0;
         cr0 = read_cr0();
-        cr0 &= 0x9fffffff;
+        cr0 &= ~(CR0_CD | CR0_NW);
         write_cr0(cr0);
 }
 
@@ -102,7 +77,7 @@ static inline __attribute__((always_inline)) void disable_cache(void)
         /* Disable and write back the cache */
         unsigned long cr0;
         cr0 = read_cr0();
-        cr0 |= 0x40000000;
+        cr0 |= CR0_CD;
         wbinvd();
         write_cr0(cr0);
         wbinvd();
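For reference, the cache.h conversion is purely mechanical: ~(CR0_CD | CR0_NW) is the old 0x9fffffff mask and CR0_CD is the old 0x40000000. A compile-time check along these lines (not part of the patch; it assumes a toolchain that supports C11 _Static_assert) documents the equivalence:

/* Sketch only, not from the patch: the named bits match the old magic
 * numbers used by enable_cache()/disable_cache(). */
#include <cpu/x86/cr.h>

_Static_assert((CR0_CD | CR0_NW) == 0x60000000,
               "CR0_CD|CR0_NW is the complement of the old 0x9fffffff mask");
_Static_assert(CR0_CD == 0x40000000,
               "CR0_CD matches the old disable_cache() constant");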
diff --git a/src/include/cpu/x86/cr.h b/src/include/cpu/x86/cr.h
new file mode 100644
index 0000000000..1d9db8807d
--- /dev/null
+++ b/src/include/cpu/x86/cr.h
@@ -0,0 +1,113 @@
+
+/*
+ * This file is part of the coreboot project.
+ *
+ * Copyright (C) 2013 Google Inc.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+#ifndef CPU_X86_CR_H
+#define CPU_X86_CR_H
+
+#if !defined(__ASSEMBLER__)
+
+#include <stdint.h>
+#include <arch/cpu.h>
+
+/* ROMCC apparently chokes on certain clobber registers. */
+#if defined(__ROMCC__)
+#define COMPILER_BARRIER
+#else
+#define COMPILER_BARRIER "memory"
+#endif
+
+static alwaysinline uint32_t read_cr0(void)
+{
+        uint32_t value;
+        __asm__ __volatile__ (
+                "mov %%cr0, %0"
+                : "=r" (value)
+                :
+                : COMPILER_BARRIER
+        );
+        return value;
+}
+
+static alwaysinline void write_cr0(uint32_t data)
+{
+        __asm__ __volatile__ (
+                "mov %0, %%cr0"
+                :
+                : "r" (data)
+                : COMPILER_BARRIER
+        );
+}
+
+static alwaysinline uint32_t read_cr4(void)
+{
+        uint32_t value;
+        __asm__ __volatile__ (
+                "mov %%cr4, %0"
+                : "=r" (value)
+                :
+                : COMPILER_BARRIER
+        );
+        return value;
+}
+
+static alwaysinline void write_cr4(uint32_t data)
+{
+        __asm__ __volatile__ (
+                "mov %0, %%cr4"
+                :
+                : "r" (data)
+                : COMPILER_BARRIER
+        );
+}
+
+#endif /* !defined(__ASSEMBLER__) */
+
+/* CR0 flags */
+#define CR0_PE (1 << 0)
+#define CR0_MP (1 << 1)
+#define CR0_EM (1 << 2)
+#define CR0_TS (1 << 3)
+#define CR0_ET (1 << 4)
+#define CR0_NE (1 << 5)
+#define CR0_WP (1 << 16)
+#define CR0_AM (1 << 18)
+#define CR0_NW (1 << 29)
+#define CR0_CD (1 << 30)
+#define CR0_PG (1 << 31)
+
+/* CR4 flags */
+#define CR4_VME (1 << 0)
+#define CR4_PVI (1 << 1)
+#define CR4_TSD (1 << 2)
+#define CR4_DE (1 << 3)
+#define CR4_PSE (1 << 4)
+#define CR4_PAE (1 << 5)
+#define CR4_MCE (1 << 6)
+#define CR4_PGE (1 << 7)
+#define CR4_PCE (1 << 8)
+#define CR4_OSFXSR (1 << 9)
+#define CR4_OSXMMEXCPT (1 << 10)
+#define CR4_VMXE (1 << 13)
+#define CR4_SMXE (1 << 14)
+#define CR4_FSGSBASE (1 << 16)
+#define CR4_PCIDE (1 << 17)
+#define CR4_OSXSAVE (1 << 18)
+#define CR4_SMEP (1 << 20)
+
+#endif /* CPU_X86_CR_H */
diff --git a/src/northbridge/amd/amdk8/raminit_f_dqs.c b/src/northbridge/amd/amdk8/raminit_f_dqs.c
index 0781c7939b..8ab1b47fe8 100644
--- a/src/northbridge/amd/amdk8/raminit_f_dqs.c
+++ b/src/northbridge/amd/amdk8/raminit_f_dqs.c
@@ -19,6 +19,7 @@
  */
 
 #include
+#include <cpu/x86/cr.h>
 
 //0: mean no debug info
 #define DQS_TRAIN_DEBUG 0
@@ -114,19 +115,6 @@ static unsigned Get_RcvrSysAddr(const struct mem_controller * ctrl, unsigned cha
 
 }
 
-static inline unsigned long read_cr4(void)
-{
-        unsigned long cr4;
-        asm volatile ("movl %%cr4, %0" : "=r" (cr4));
-        return cr4;
-}
-
-static inline void write_cr4(unsigned long cr4)
-{
-        asm volatile ("movl %0, %%cr4" : : "r" (cr4));
-}
-
-
 static inline void enable_sse2(void)
 {
         unsigned long cr4;
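raminit_f_dqs.c drops its private read_cr4()/write_cr4() and picks up <cpu/x86/cr.h>; its local enable_sse2() keeps compiling because the shared accessors carry the same names. A follow-up sketch (not part of this patch) shows how that helper could also adopt the named CR4 bits, mirroring the secondary_cpu_init() hunk above:

/* Sketch only, assuming enable_sse2() stays local to raminit_f_dqs.c:
 * the shared accessors and named bits replace the open-coded asm. */
#include <cpu/x86/cr.h>

static inline void enable_sse2(void)
{
        uint32_t cr4 = read_cr4();

        /* Same bits as the old (1 << 9) | (1 << 10). */
        cr4 |= CR4_OSFXSR | CR4_OSXMMEXCPT;
        write_cr4(cr4);
}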
diff --git a/src/northbridge/amd/amdmct/mct/mct_d_gcc.h b/src/northbridge/amd/amdmct/mct/mct_d_gcc.h
index 2090e0840b..e989ae37c4 100644
--- a/src/northbridge/amd/amdmct/mct/mct_d_gcc.h
+++ b/src/northbridge/amd/amdmct/mct/mct_d_gcc.h
@@ -99,19 +99,7 @@ static u32 bsf(u32 x)
 /* prevent speculative execution of following instructions */
 #define _EXECFENCE asm volatile ("outb %al, $0xed")
 
-static inline u32 read_cr4(void)
-{
-        u32 cr4;
-        __asm__ volatile ("movl %%cr4, %0" : "=r" (cr4));
-        return cr4;
-}
-
-
-static inline void write_cr4(u32 cr4)
-{
-        __asm__ volatile ("movl %0, %%cr4" : : "r" (cr4));
-}
-
+#include <cpu/x86/cr.h>
 
 u32 SetUpperFSbase(u32 addr_hi);
diff --git a/src/northbridge/amd/amdmct/mct_ddr3/mct_d_gcc.h b/src/northbridge/amd/amdmct/mct_ddr3/mct_d_gcc.h
index 4e5bca278a..abe6e7bd3f 100644
--- a/src/northbridge/amd/amdmct/mct_ddr3/mct_d_gcc.h
+++ b/src/northbridge/amd/amdmct/mct_ddr3/mct_d_gcc.h
@@ -93,17 +93,7 @@ static u32 bsf(u32 x)
 /* prevent speculative execution of following instructions */
 #define _EXECFENCE asm volatile ("outb %al, $0xed")
 
-static inline u32 read_cr4(void)
-{
-        u32 cr4;
-        __asm__ volatile ("movl %%cr4, %0" : "=r" (cr4));
-        return cr4;
-}
-
-static inline void write_cr4(u32 cr4)
-{
-        __asm__ volatile ("movl %0, %%cr4" : : "r" (cr4));
-}
+#include <cpu/x86/cr.h>
 
 u32 SetUpperFSbase(u32 addr_hi);
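Taken together, the duplicated CR0/CR4 helpers spread across five files collapse into the single <cpu/x86/cr.h> header. A minimal caller-side sketch (the function name and printk message are illustrative, not from the patch) of what the consolidated API looks like to any stage that can use printk():

/* Sketch only: read the control registers through the shared header
 * instead of carrying per-file inline-assembly helpers. */
#include <console/console.h>
#include <cpu/x86/cr.h>

static void report_cr_state(void)
{
        uint32_t cr0 = read_cr0();
        uint32_t cr4 = read_cr4();

        printk(BIOS_DEBUG, "CR0=0x%08x (cache %sabled), CR4=0x%08x\n",
               cr0, (cr0 & CR0_CD) ? "dis" : "en", cr4);
}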