arm64: refactor stage entry

Provide a common entry point for arm64 cores coming out of reset. Also,
take CONFIG_ARM64_CPUS_START_IN_ELx into account so that the correct
SCTLR_ELx register is programmed. The SCR_EL3 initialization was removed
since it can be done later in C policy code. Part of this refactor also
allows for greater code reuse for the secure monitor.

BUG=chrome-os-partner:31545
BRANCH=None
TEST=built and booted to linux on ryu

Change-Id: I429f8fd0cdae78318ac171722fa1377924665401
Signed-off-by: Patrick Georgi <pgeorgi@chromium.org>
Original-Commit-Id: f92a5a01f07bc370735d75d695aedd8e2ab25608
Original-Change-Id: If16b3f979923ec8add59854db6bad4aaed35e3aa
Original-Signed-off-by: Aaron Durbin <adurbin@chromium.org>
Original-Reviewed-on: https://chromium-review.googlesource.com/214668
Original-Reviewed-by: Furquan Shaikh <furquan@chromium.org>
Reviewed-on: http://review.coreboot.org/9012
Tested-by: build bot (Jenkins)
Reviewed-by: Stefan Reinauer <stefan.reinauer@coreboot.org>
Commit 1c6512962a (parent ce513c9732)
Author: Aaron Durbin, 2014-08-27 12:50:26 -05:00; committed by Patrick Georgi
2 changed files with 39 additions and 32 deletions
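
The commit message defers SCR_EL3 programming to later C policy code. As a
rough illustration of the register write that such policy code eventually
performs (the RW and NS bit choices below are examples only, not this
change's policy, and real code must also preserve the SCR_EL3 RES1 bits):

	mov	x0, #((1 << 10) | (1 << 0))	/* RW: lower ELs AArch64; NS: non-secure */
	msr	scr_el3, x0
	isb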

arch/asm.h

@@ -20,19 +20,25 @@
 #ifndef __ARM_ARM64_ASM_H
 #define __ARM_ARM64_ASM_H
 
-#define ALIGN	.align 0
-
 #define ENDPROC(name) \
 	.type name, %function; \
 	END(name)
 
-#define ENTRY(name) \
+#define ENTRY_WITH_ALIGN(name, bits) \
 	.section .text.name, "ax", %progbits; \
 	.global name; \
-	ALIGN; \
+	.align bits; \
 	name:
 
+#define ENTRY(name) ENTRY_WITH_ALIGN(name, 0)
+
 #define END(name) \
 	.size name, .-name
 
+/*
+ * Certain SoCs have an alignment requirement for the CPU reset vector.
+ * Align to a 64 byte typical cacheline for now.
+ */
+#define CPU_RESET_ENTRY(name) ENTRY_WITH_ALIGN(name, 6)
+
 #endif /* __ARM_ARM64_ASM_H */
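
A sketch of how an SoC reset vector might use the new macro; the entry name
below is hypothetical, only CPU_RESET_ENTRY, ENDPROC and arm64_cpu_startup
come from this change:

	#include <arch/asm.h>

	/* Hypothetical SoC reset vector; CPU_RESET_ENTRY places it on a
	 * 64-byte (2^6) boundary. */
	CPU_RESET_ENTRY(soc_cpu_reset)
		b	arm64_cpu_startup
	ENDPROC(soc_cpu_reset)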

stage_entry.S

@ -20,6 +20,16 @@
#include <arch/asm.h> #include <arch/asm.h>
#if CONFIG_ARM64_CPUS_START_IN_EL3
#define SCTLR_ELx sctlr_el3
#elif CONFIG_ARM64_CPUS_START_IN_EL2
#define SCTLR_ELx sctlr_el2
#elif CONFIG_ARM64_CPUS_START_IN_EL1
#define SCTLR_ELx sctlr_el1
#else
#error Need to know what ELx processor starts up in.
#endif
ENTRY(seed_stack) ENTRY(seed_stack)
/* /*
* Initialize the stack to a known value. This is used to check for * Initialize the stack to a known value. This is used to check for
@@ -43,19 +53,12 @@ load_stack:
 	.quad _stack
 ENDPROC(seed_stack)
 
-ENTRY(arm64_el3_startup)
-	/* Set all bits in SCTLR_EL3 to 0 except RES1 and RES0 */
-	mrs	x0, SCTLR_EL3
-	ldr	x1, .SCTLR_MASK
-	and	x0, x0, x1
-	msr	SCTLR_EL3, x0
-	/* Set all bits in SCR_EL3 to 0 except RES1 and RES0 */
-	mrs	x0, SCR_EL3
-	ldr	x1, .SCR_MASK
-	and	x0, x0, x1
-	msr	SCR_EL3, x0
-
-	/* Initialize SP_EL3 as exception stack */
+/*
+ * Bootstrap the processor into a C environment. That consists of providing
+ * a 16-byte aligned stack. The programming environment uses SP_EL0 as its
+ * main stack while keeping SP_ELx reserved for exception entry.
+ */
+ENTRY(arm64_c_environment)
 	ldr	x0, .exception_stack_top
 	cmp	x0, #0
 	b.eq	2f
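
The new comment describes arm64_c_environment running C code on SP_EL0 while
keeping SP_ELx for exceptions. A minimal sketch of that stack-pointer
selection (the stack address is a made-up example, not a value from this
change):

	msr	SPSel, #0		/* use SP_EL0 at the current EL */
	ldr	x0, =0x88000000		/* hypothetical 16-byte aligned stack top */
	mov	sp, x0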
@@ -80,27 +83,25 @@ ENTRY(arm64_el3_startup)
 	br	x1
 
 	.align 4
-.SCTLR_MASK:
-	.quad 0x0FFFFEFF0
-.SCR_MASK:
-	.quad 0x0FFFFC070
-	.align 4
-/*
- * By default branch to main() and initialize the stack according
- * to the Kconfig option for cpu0. However, this code can be relocated
- * and reused to start up secondary cpus.
- */
 .exception_stack_top:
 	.quad CONFIG_EXCEPTION_STACK_TOP
 .stack_top:
 	.quad _estack
 .entry:
 	.quad seed_stack
-ENDPROC(arm64_el3_startup)
+ENDPROC(arm64_c_environment)
 
-.global arm64_el3_startup_end
-arm64_el3_startup_end:
+CPU_RESET_ENTRY(arm64_cpu_startup)
+	mrs	x0, SCTLR_ELx
+	bic	x0, x0, #(1 << 25)	/* Little Endian */
+	bic	x0, x0, #(1 << 19)	/* XN not enforced */
+	bic	x0, x0, #(1 << 12)	/* Disable Instruction Cache */
+	bic	x0, x0, #0xf		/* Clear SA, C, A, and M */
+	msr	SCTLR_ELx, x0
+	isb
+	b	arm64_c_environment
+ENDPROC(arm64_cpu_startup)
 
 ENTRY(stage_entry)
-	b	arm64_el3_startup
+	b	arm64_cpu_startup
 ENDPROC(stage_entry)