x86: add common definitions for control registers

Access to the control registers was scattered about, with several
files carrying their own inline-assembly helpers. Provide a single
header file with the correct access functions and register
definitions.

BUG=chrome-os-partner:22991
BRANCH=None
TEST=Built and booted using this infrastructure. Also objdump'd the
     assembly to ensure consistency (objdump -d -r -S | grep xmm).

Change-Id: Iff7a043e4e5ba930a6a77f968f1fcc14784214e9
Signed-off-by: Aaron Durbin <adurbin@chromium.org>
Reviewed-on: https://chromium-review.googlesource.com/172641
Reviewed-by: Stefan Reinauer <reinauer@google.com>
Reviewed-on: http://review.coreboot.org/4873
Tested-by: build bot (Jenkins)
Reviewed-by: Alexandru Gagniuc <mr.nuke.me@gmail.com>
Author:    Aaron Durbin <adurbin@chromium.org>
Committed: Aaron Durbin, 2013-10-10 12:41:49 -05:00
Commit:    029aaf627c (parent: f545abfd22)

7 changed files with 127 additions and 91 deletions
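For orientation, the consumer-side pattern the new header enables looks
like the following; a minimal sketch mirroring the secondary_cpu_init()
change below (the helper name is hypothetical, not part of the commit):

#include <cpu/x86/cr.h>

/* Hypothetical helper: enable FXSAVE/FXRSTOR and unmasked SIMD FP
 * exceptions, as secondary_cpu_init() does in the hunk below. */
static void enable_sse_bits(void)
{
	uint32_t cr4 = read_cr4();
	cr4 |= CR4_OSFXSR | CR4_OSXMMEXCPT;
	write_cr4(cr4);
}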


@@ -214,5 +214,6 @@ static inline void get_fms(struct cpuinfo_x86 *c, uint32_t tfms)
 #endif
 
 #define asmlinkage __attribute__((regparm(0)))
+#define alwaysinline inline __attribute__((always_inline))
 
 #endif /* ARCH_CPU_H */


@@ -20,6 +20,7 @@
  * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
  */
 
+#include <cpu/x86/cr.h>
 #include <cpu/x86/lapic.h>
 #include <delay.h>
 #include <lib.h>
@@ -401,26 +402,6 @@ void stop_this_cpu(void)
 }
 #endif
 
-#ifdef __SSE3__
-static __inline__ __attribute__((always_inline)) unsigned long readcr4(void)
-{
-	unsigned long value;
-	__asm__ __volatile__ (
-			"mov %%cr4, %[value]"
-			: [value] "=a" (value));
-	return value;
-}
-
-static __inline__ __attribute__((always_inline)) void writecr4(unsigned long Data)
-{
-	__asm__ __volatile__ (
-			"mov %%eax, %%cr4"
-			:
-			: "a" (Data)
-	);
-}
-#endif
-
 /* C entry point of secondary cpus */
 void asmlinkage secondary_cpu_init(unsigned int index)
 {
@@ -435,9 +416,9 @@ void asmlinkage secondary_cpu_init(unsigned int index)
 	 * Turn on CR4.OSFXSR and CR4.OSXMMEXCPT when SSE options enabled
 	 */
 	u32 cr4_val;
-	cr4_val = readcr4();
-	cr4_val |= (1 << 9 | 1 << 10);
-	writecr4(cr4_val);
+	cr4_val = read_cr4();
+	cr4_val |= (CR4_OSFXSR | CR4_OSXMMEXCPT);
+	write_cr4(cr4_val);
 #endif
 	cpu_initialize(index);
 #if CONFIG_SERIAL_CPU_INIT
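Besides the rename, note the constraint change: the removed readcr4()
pinned its result to %eax with an "=a" constraint and writecr4()
hard-coded "mov %%eax, %%cr4", while the shared accessors use a plain
"r" constraint and let the compiler pick the register. The magic
(1 << 9 | 1 << 10) also becomes named bits; a quick preprocessor
equivalence check (a sketch for illustration, not part of the commit):

#include <cpu/x86/cr.h>

/* CR4_OSFXSR (bit 9) | CR4_OSXMMEXCPT (bit 10) == 0x600 */
#if (CR4_OSFXSR | CR4_OSXMMEXCPT) != (1 << 9 | 1 << 10)
#error "CR4 SSE masks diverged from the old literal"
#endif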


@@ -20,8 +20,10 @@
 #ifndef CPU_X86_CACHE
 #define CPU_X86_CACHE
 
-#define CR0_CacheDisable (1 << 30)
-#define CR0_NoWriteThrough (1 << 29)
+#include <cpu/x86/cr.h>
+
+#define CR0_CacheDisable (CR0_CD)
+#define CR0_NoWriteThrough (CR0_NW)
 
 #if !defined(__ASSEMBLER__)
@@ -33,21 +35,6 @@
 #if defined(__GNUC__)
 
-/* The memory clobber prevents the GCC from reordering the read/write order
- * of CR0
- */
-static inline unsigned long read_cr0(void)
-{
-	unsigned long cr0;
-	asm volatile ("movl %%cr0, %0" : "=r" (cr0) :: "memory");
-	return cr0;
-}
-
-static inline void write_cr0(unsigned long cr0)
-{
-	asm volatile ("movl %0, %%cr0" : : "r" (cr0) : "memory");
-}
-
 static inline void wbinvd(void)
 {
 	asm volatile ("wbinvd" ::: "memory");
@@ -55,18 +42,6 @@ static inline void wbinvd(void)
 #else
 
-static inline unsigned long read_cr0(void)
-{
-	unsigned long cr0;
-	asm volatile ("movl %%cr0, %0" : "=r" (cr0));
-	return cr0;
-}
-
-static inline void write_cr0(unsigned long cr0)
-{
-	asm volatile ("movl %0, %%cr0" : : "r" (cr0));
-}
-
 static inline void wbinvd(void)
 {
 	asm volatile ("wbinvd");
@@ -93,7 +68,7 @@ static inline __attribute__((always_inline)) void enable_cache(void)
 {
 	unsigned long cr0;
 	cr0 = read_cr0();
-	cr0 &= 0x9fffffff;
+	cr0 &= ~(CR0_CD | CR0_NW);
 	write_cr0(cr0);
 }
@@ -102,7 +77,7 @@ static inline __attribute__((always_inline)) void disable_cache(void)
 	/* Disable and write back the cache */
 	unsigned long cr0;
 	cr0 = read_cr0();
-	cr0 |= 0x40000000;
+	cr0 |= CR0_CD;
 	wbinvd();
 	write_cr0(cr0);
 	wbinvd();
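The named masks are behavior-preserving; working the arithmetic through
(a check sketch for illustration, not part of the commit):

#include <cpu/x86/cr.h>

/* enable_cache():  ~(CR0_CD | CR0_NW) = ~((1 << 30) | (1 << 29))
 *                = ~0x60000000 = 0x9fffffff, the old literal.
 * disable_cache(): CR0_CD = (1 << 30) = 0x40000000, the old literal. */
#if (CR0_CD | CR0_NW) != 0x60000000 || CR0_CD != 0x40000000
#error "CR0 cache masks diverged from the old literals"
#endif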

src/include/cpu/x86/cr.h (new file, 113 lines)

@@ -0,0 +1,113 @@
/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2013 Google Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */

#ifndef CPU_X86_CR_H
#define CPU_X86_CR_H

#if !defined(__ASSEMBLER__)

#include <stdint.h>
#include <arch/cpu.h>

/* ROMCC apparently chokes certain clobber registers. */
#if defined(__ROMCC__)
#define COMPILER_BARRIER
#else
#define COMPILER_BARRIER "memory"
#endif

static alwaysinline uint32_t read_cr0(void)
{
	uint32_t value;
	__asm__ __volatile__ (
		"mov %%cr0, %0"
		: "=r" (value)
		:
		: COMPILER_BARRIER
	);
	return value;
}

static alwaysinline void write_cr0(uint32_t data)
{
	__asm__ __volatile__ (
		"mov %0, %%cr0"
		:
		: "r" (data)
		: COMPILER_BARRIER
	);
}

static alwaysinline uint32_t read_cr4(void)
{
	uint32_t value;
	__asm__ __volatile__ (
		"mov %%cr4, %0"
		: "=r" (value)
		:
		: COMPILER_BARRIER
	);
	return value;
}

static alwaysinline void write_cr4(uint32_t data)
{
	__asm__ __volatile__ (
		"mov %0, %%cr4"
		:
		: "r" (data)
		: COMPILER_BARRIER
	);
}

#endif /* !defined(__ASSEMBLER__) */
/* CR0 flags */
#define CR0_PE (1 << 0)
#define CR0_MP (1 << 1)
#define CR0_EM (1 << 2)
#define CR0_TS (1 << 3)
#define CR0_ET (1 << 4)
#define CR0_NE (1 << 5)
#define CR0_WP (1 << 16)
#define CR0_AM (1 << 18)
#define CR0_NW (1 << 29)
#define CR0_CD (1 << 30)
#define CR0_PG (1 << 31)
/* CR4 flags */
#define CR4_VME (1 << 0)
#define CR4_PVI (1 << 1)
#define CR4_TSD (1 << 2)
#define CR4_DE (1 << 3)
#define CR4_PSE (1 << 4)
#define CR4_PAE (1 << 5)
#define CR4_MCE (1 << 6)
#define CR4_PGE (1 << 7)
#define CR4_PCE (1 << 8)
#define CR4_OSFXSR (1 << 9)
#define CR4_OSXMMEXCPT (1 << 10)
#define CR4_VMXE (1 << 13)
#define CR4_SMXE (1 << 14)
#define CR4_FSGSBASE (1 << 16)
#define CR4_PCIDE (1 << 17)
#define CR4_OSXSAVE (1 << 18)
#define CR4_SMEP (1 << 20)
#endif /* CPU_X86_CR_H */
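Note the header's layout: the accessors live inside the
!defined(__ASSEMBLER__) guard while the CR0_*/CR4_* masks sit outside
it, so assembly code can include the header for the bit definitions
alone. A minimal sketch of the resulting C idiom (the function is
hypothetical, not part of the commit):

#include <cpu/x86/cr.h>

/* Hypothetical: enable ring-0 write protection with a named CR0 bit
 * instead of a magic number. */
static void enable_write_protect(void)
{
	write_cr0(read_cr0() | CR0_WP);
}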


@@ -19,6 +19,7 @@
  */
 
 #include <arch/stages.h>
+#include <cpu/x86/cr.h>
 
 //0: mean no debug info
 #define DQS_TRAIN_DEBUG 0
@@ -114,19 +115,6 @@ static unsigned Get_RcvrSysAddr(const struct mem_controller * ctrl, unsigned cha
 }
 
-static inline unsigned long read_cr4(void)
-{
-	unsigned long cr4;
-	asm volatile ("movl %%cr4, %0" : "=r" (cr4));
-	return cr4;
-}
-
-static inline void write_cr4(unsigned long cr4)
-{
-	asm volatile ("movl %0, %%cr4" : : "r" (cr4));
-}
-
 static inline void enable_sse2(void)
 {
 	unsigned long cr4;


@@ -99,19 +99,7 @@ static u32 bsf(u32 x)
 /* prevent speculative execution of following instructions */
 #define _EXECFENCE asm volatile ("outb %al, $0xed")
 
-static inline u32 read_cr4(void)
-{
-	u32 cr4;
-	__asm__ volatile ("movl %%cr4, %0" : "=r" (cr4));
-	return cr4;
-}
-
-static inline void write_cr4(u32 cr4)
-{
-	__asm__ volatile ("movl %0, %%cr4" : : "r" (cr4));
-}
+#include <cpu/x86/cr.h>
 
 u32 SetUpperFSbase(u32 addr_hi);


@@ -93,17 +93,7 @@ static u32 bsf(u32 x)
 /* prevent speculative execution of following instructions */
 #define _EXECFENCE asm volatile ("outb %al, $0xed")
 
-static inline u32 read_cr4(void)
-{
-	u32 cr4;
-	__asm__ volatile ("movl %%cr4, %0" : "=r" (cr4));
-	return cr4;
-}
-
-static inline void write_cr4(u32 cr4)
-{
-	__asm__ volatile ("movl %0, %%cr4" : : "r" (cr4));
-}
+#include <cpu/x86/cr.h>
 
 u32 SetUpperFSbase(u32 addr_hi);