libpayload: arm64: Conform to new coreboot lib_helpers.h and assume EL2

This patch adds the new, faster architectural register accessors to
libpayload that were already added to coreboot in CB:27881. It also
hardcodes the assumption that coreboot payloads run at EL2, which has
already been hardcoded in coreboot with CB:27880 (see rationale there).
This means we can drop all the read_current/write_current stuff which
added a lot of unnecessary helpers to check the current exception level.

This patch breaks payloads that used read_current/write_current
accessors, but it seems unlikely that many payloads deal with this stuff
anyway, and it should be a trivial fix (just replace them with the
respective _el2 versions).

Also add accessors for a couple more registers that are required to
enable debug mode while I'm here.
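
As a rough sketch of what that enables (hypothetical payload code; the
MDSCR_EL1.MDE bit position is taken from the ARMv8-A reference manual and is
an assumption here, not something this header defines):

    raw_write_oslar_el1(0);                  /* clear the OS Lock */
    raw_write_mdscr_el1(raw_read_mdscr_el1() | (1ULL << 15)); /* MDE, bit 15 (assumed) */
    enable_debug_exceptions();               /* unmask debug exceptions in DAIF */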

Change-Id: Ic9dfa48411f3805747613f03611f8a134a51cc46
Signed-off-by: Julius Werner <jwerner@chromium.org>
Reviewed-on: https://review.coreboot.org/29017
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
Reviewed-by: Aaron Durbin <adurbin@chromium.org>
Reviewed-by: Patrick Rudolph <patrick.rudolph@9elements.com>
Author: Julius Werner
Date:   2018-10-10 15:31:36 -07:00
parent e1b1ec7154
commit ca52a25882
13 changed files with 244 additions and 2155 deletions


@@ -30,8 +30,6 @@
 CFLAGS += -march=armv8-a
 arm64_asm_flags =
-subdirs-y += lib/
 head.o-y += head.S
 libc-y += main.c sysinfo.c
 libc-y += timer.c coreboot.c util.S


@@ -39,7 +39,7 @@
 void tlb_invalidate_all(void)
 {
 	/* TLBIALL includes dTLB and iTLB on systems that have them. */
-	tlbiall_current();
+	tlbiall_el2();
 	dsb();
 	isb();
 }
@@ -119,7 +119,7 @@ void dcache_invalidate_by_mva(void const *addr, size_t len)
 void cache_sync_instructions(void)
 {
-	uint32_t sctlr = raw_read_sctlr_current();
+	uint32_t sctlr = raw_read_sctlr_el2();
 	if (sctlr & SCTLR_C)
 		dcache_clean_all();	/* includes trailing DSB (assembly) */
 	else if (sctlr & SCTLR_I)


@@ -81,8 +81,8 @@ static void print_regs(struct exception_state *state)
 	printf("ELR = 0x%016llx ESR = 0x%08llx\n",
 	       state->elr, state->esr);
-	printf("FAR = 0x%016llx SPSR = 0x%08x\n",
-	       raw_read_far_current(), raw_read_spsr_current());
+	printf("FAR = 0x%016llx SPSR = 0x%08llx\n",
+	       raw_read_far_el2(), raw_read_spsr_el2());
 	for (i = 0; i < 30; i += 2) {
 		printf("X%02d = 0x%016llx X%02d = 0x%016llx\n",
 		       i, state->regs[i], i + 1, state->regs[i + 1]);


@@ -27,11 +27,6 @@
  * SUCH DAMAGE.
  */
-#define __ASSEMBLY__
-#include <arch/lib_helpers.h>
-	.text
 /* Macro for exception entry
  * Store x30 before any branch
  * Branch to exception_prologue to save rest of the registers
@@ -92,10 +87,10 @@ exception_prologue:
 	stp x0, x1, [sp, #-16]!
 	/* Save the exception reason on stack */
-	read_current x1, esr
+	mrs x1, esr_el2
 	/* Save the return address on stack */
-	read_current x0, elr
+	mrs x0, elr_el2
 	stp x0, x1, [sp, #-16]!
 	ret
@@ -109,8 +104,8 @@ exception_handler:
 	/* Pop return address saved on stack */
 	ldp x0, x1, [sp], #16
-	write_current elr, x0, x2
-	write_current esr, x1, x2
+	msr elr_el2, x0
+	msr esr_el2, x1
 	/* Pop exception reason saved on stack, followed by regs x0-x30 */
 	ldp x0, x1, [sp], #16
 	ldp x2, x3, [sp], #16
@@ -132,5 +127,5 @@
 .global set_vbar
 set_vbar:
-	write_current vbar, x0, x1
+	msr vbar_el2, x0
 	ret


@@ -1,33 +0,0 @@
#####################################################################################
## This file is part of the coreboot project.
##
## Copyright 2014 Google Inc.
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions
## are met:
## 1. Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## 2. Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## 3. The name of the author may not be used to endorse or promote products
## derived from this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
## ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
## IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
## ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
## FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
## DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
## OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
## LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
## OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
## SUCH DAMAGE.
##
#####################################################################################
lib_access = pstate.c sysctrl.c cache.c tlb.c clock.c
libc-y += $(lib_access)


@@ -1,90 +0,0 @@
/*
* This file is part of the coreboot project.
*
* Copyright 2014 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* cache.c: Cache Maintenance Instructions
* Reference: ARM Architecture Reference Manual, ARMv8-A edition
*/
#include <stdint.h>
#include <arch/lib_helpers.h>
void dccisw(uint64_t cisw)
{
__asm__ __volatile__("dc cisw, %0\n\t" : : "r" (cisw) :"memory");
}
void dccivac(uint64_t civac)
{
__asm__ __volatile__("dc civac, %0\n\t" : : "r" (civac) :"memory");
}
void dccsw(uint64_t csw)
{
__asm__ __volatile__("dc csw, %0\n\t" : : "r" (csw) :"memory");
}
void dccvac(uint64_t cvac)
{
__asm__ __volatile__("dc cvac, %0\n\t" : : "r" (cvac) :"memory");
}
void dccvau(uint64_t cvau)
{
__asm__ __volatile__("dc cvau, %0\n\t" : : "r" (cvau) :"memory");
}
void dcisw(uint64_t isw)
{
__asm__ __volatile__("dc isw, %0\n\t" : : "r" (isw) :"memory");
}
void dcivac(uint64_t ivac)
{
__asm__ __volatile__("dc ivac, %0\n\t" : : "r" (ivac) :"memory");
}
void dczva(uint64_t zva)
{
__asm__ __volatile__("dc zva, %0\n\t" : : "r" (zva) :"memory");
}
void iciallu(void)
{
__asm__ __volatile__("ic iallu\n\t" : : :"memory");
}
void icialluis(void)
{
__asm__ __volatile__("ic ialluis\n\t" : : :"memory");
}
void icivau(uint64_t ivau)
{
__asm__ __volatile__("ic ivau, %0\n\t" : : "r" (ivau) :"memory");
}


@@ -1,40 +0,0 @@
/*
* This file is part of the coreboot project.
*
* Copyright 2014 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* clock.c: Functions for accessing clock and timer related registers
* Reference: ARM Architecture Reference Manual, ARMv8-A edition
*/
#include <stdint.h>
#include <arch/lib_helpers.h>
void set_cntfrq(uint32_t freq)
{
__asm__ __volatile__("msr cntfrq_el0, %0" :: "r"(freq));
}


@@ -1,455 +0,0 @@
/*
* This file is part of the coreboot project.
*
* Copyright 2014 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* Reference: ARM Architecture Reference Manual, ARMv8-A edition
* pstate.c: This file defines all the library functions for accessing
* PSTATE and special purpose registers
*/
#include <stdint.h>
#include <arch/lib_helpers.h>
/* CurrentEL */
uint32_t raw_read_current_el(void)
{
uint32_t current_el;
__asm__ __volatile__("mrs %0, CurrentEL\n\t" : "=r" (current_el) : : "memory");
return current_el;
}
uint32_t get_current_el(void)
{
uint32_t current_el = raw_read_current_el();
return ((current_el >> CURRENT_EL_SHIFT) & CURRENT_EL_MASK);
}
/* DAIF */
uint32_t raw_read_daif(void)
{
uint32_t daif;
__asm__ __volatile__("mrs %0, DAIF\n\t" : "=r" (daif) : : "memory");
return daif;
}
void raw_write_daif(uint32_t daif)
{
__asm__ __volatile__("msr DAIF, %0\n\t" : : "r" (daif) : "memory");
}
void enable_debug_exceptions(void)
{
__asm__ __volatile__("msr DAIFClr, %0\n\t" : : "i" (DAIF_DBG_BIT) : "memory");
}
void enable_serror_exceptions(void)
{
__asm__ __volatile__("msr DAIFClr, %0\n\t" : : "i" (DAIF_ABT_BIT) : "memory");
}
void enable_irq(void)
{
__asm__ __volatile__("msr DAIFClr, %0\n\t" : : "i" (DAIF_IRQ_BIT) : "memory");
}
void enable_fiq(void)
{
__asm__ __volatile__("msr DAIFClr, %0\n\t" : : "i" (DAIF_FIQ_BIT) : "memory");
}
void disable_debug_exceptions(void)
{
__asm__ __volatile__("msr DAIFSet, %0\n\t" : : "i" (DAIF_DBG_BIT) : "memory");
}
void disable_serror_exceptions(void)
{
__asm__ __volatile__("msr DAIFSet, %0\n\t" : : "i" (DAIF_ABT_BIT) : "memory");
}
void disable_irq(void)
{
__asm__ __volatile__("msr DAIFSet, %0\n\t" : : "i" (DAIF_IRQ_BIT) : "memory");
}
void disable_fiq(void)
{
__asm__ __volatile__("msr DAIFSet, %0\n\t" : : "i" (DAIF_FIQ_BIT) : "memory");
}
/* DLR_EL0 */
uint64_t raw_read_dlr_el0(void)
{
uint64_t dlr_el0;
__asm__ __volatile__("mrs %0, DLR_EL0\n\t" : "=r" (dlr_el0) : : "memory");
return dlr_el0;
}
void raw_write_dlr_el0(uint64_t dlr_el0)
{
__asm__ __volatile__("msr DLR_EL0, %0\n\t" : : "r" (dlr_el0) : "memory");
}
/* DSPSR_EL0 */
uint64_t raw_read_dspsr_el0(void)
{
uint64_t dspsr_el0;
__asm__ __volatile__("mrs %0, DSPSR_EL0\n\t" : "=r" (dspsr_el0) : : "memory");
return dspsr_el0;
}
void raw_write_dspsr_el0(uint64_t dspsr_el0)
{
__asm__ __volatile__("msr DSPSR_EL0, %0\n\t" : : "r" (dspsr_el0) : "memory");
}
/* ELR */
uint64_t raw_read_elr_el1(void)
{
uint64_t elr_el1;
__asm__ __volatile__("mrs %0, ELR_EL1\n\t" : "=r" (elr_el1) : : "memory");
return elr_el1;
}
void raw_write_elr_el1(uint64_t elr_el1)
{
__asm__ __volatile__("msr ELR_EL1, %0\n\t" : : "r" (elr_el1) : "memory");
}
uint64_t raw_read_elr_el2(void)
{
uint64_t elr_el2;
__asm__ __volatile__("mrs %0, ELR_EL2\n\t" : "=r" (elr_el2) : : "memory");
return elr_el2;
}
void raw_write_elr_el2(uint64_t elr_el2)
{
__asm__ __volatile__("msr ELR_EL2, %0\n\t" : : "r" (elr_el2) : "memory");
}
uint64_t raw_read_elr_el3(void)
{
uint64_t elr_el3;
__asm__ __volatile__("mrs %0, ELR_EL3\n\t" : "=r" (elr_el3) : : "memory");
return elr_el3;
}
void raw_write_elr_el3(uint64_t elr_el3)
{
__asm__ __volatile__("msr ELR_EL3, %0\n\t" : : "r" (elr_el3) : "memory");
}
uint64_t raw_read_elr_current(void)
{
uint32_t el = get_current_el();
return raw_read_elr(el);
}
void raw_write_elr_current(uint64_t elr)
{
uint32_t el = get_current_el();
raw_write_elr(elr, el);
}
uint64_t raw_read_elr(uint32_t el)
{
SWITCH_CASE_READ(raw_read_elr, elr, uint64_t, el);
}
void raw_write_elr(uint64_t elr, uint32_t el)
{
SWITCH_CASE_WRITE(raw_write_elr, elr, el);
}
/* FPCR */
uint32_t raw_read_fpcr(void)
{
uint32_t fpcr;
__asm__ __volatile__("mrs %0, FPCR\n\t" : "=r" (fpcr) : : "memory");
return fpcr;
}
void raw_write_fpcr(uint32_t fpcr)
{
__asm__ __volatile__("msr FPCR, %0\n\t" : : "r" (fpcr) : "memory");
}
/* FPSR */
uint32_t raw_read_fpsr(void)
{
uint32_t fpsr;
__asm__ __volatile__("mrs %0, FPSR\n\t" : "=r" (fpsr) : : "memory");
return fpsr;
}
void raw_write_fpsr(uint32_t fpsr)
{
__asm__ __volatile__("msr FPSR, %0\n\t" : : "r" (fpsr) : "memory");
}
/* NZCV */
uint32_t raw_read_nzcv(void)
{
uint32_t nzcv;
__asm__ __volatile__("mrs %0, NZCV\n\t" : "=r" (nzcv) : : "memory");
return nzcv;
}
void raw_write_nzcv(uint32_t nzcv)
{
__asm__ __volatile__("msr NZCV, %0\n\t" : : "r" (nzcv) : "memory");
}
/* SP */
uint64_t raw_read_sp_el0(void)
{
uint64_t sp_el0;
__asm__ __volatile__("mrs %0, SP_EL0\n\t" : "=r" (sp_el0) : : "memory");
return sp_el0;
}
void raw_write_sp_el0(uint64_t sp_el0)
{
__asm__ __volatile__("msr SP_EL0, %0\n\t" : : "r" (sp_el0) : "memory");
}
uint64_t raw_read_sp_el1(void)
{
uint64_t sp_el1;
__asm__ __volatile__("mrs %0, SP_EL1\n\t" : "=r" (sp_el1) : : "memory");
return sp_el1;
}
void raw_write_sp_el1(uint64_t sp_el1)
{
__asm__ __volatile__("msr SP_EL1, %0\n\t" : : "r" (sp_el1) : "memory");
}
uint64_t raw_read_sp_el2(void)
{
uint64_t sp_el2;
__asm__ __volatile__("mrs %0, SP_EL2\n\t" : "=r" (sp_el2) : : "memory");
return sp_el2;
}
void raw_write_sp_el2(uint64_t sp_el2)
{
__asm__ __volatile__("msr SP_EL2, %0\n\t" : : "r" (sp_el2) : "memory");
}
/* SPSel */
uint32_t raw_read_spsel(void)
{
uint32_t spsel;
__asm__ __volatile__("mrs %0, SPSel\n\t" : "=r" (spsel) : : "memory");
return spsel;
}
void raw_write_spsel(uint32_t spsel)
{
__asm__ __volatile__("msr SPSel, %0\n\t" : : "r" (spsel) : "memory");
}
uint64_t raw_read_sp_el3(void)
{
uint64_t sp_el3;
uint32_t spsel;
spsel = raw_read_spsel();
if (!spsel)
raw_write_spsel(1);
__asm__ __volatile__("mov %0, sp\n\t" : "=r" (sp_el3) : : "memory");
if (!spsel)
raw_write_spsel(spsel);
return sp_el3;
}
void raw_write_sp_el3(uint64_t sp_el3)
{
uint32_t spsel;
spsel = raw_read_spsel();
if (!spsel)
raw_write_spsel(1);
__asm__ __volatile__("mov sp, %0\n\t" : "=r" (sp_el3) : : "memory");
if (!spsel)
raw_write_spsel(spsel);
}
/* SPSR */
uint32_t raw_read_spsr_abt(void)
{
uint32_t spsr_abt;
__asm__ __volatile__("mrs %0, SPSR_abt\n\t" : "=r" (spsr_abt) : : "memory");
return spsr_abt;
}
void raw_write_spsr_abt(uint32_t spsr_abt)
{
__asm__ __volatile__("msr SPSR_abt, %0\n\t" : : "r" (spsr_abt) : "memory");
}
uint32_t raw_read_spsr_el1(void)
{
uint32_t spsr_el1;
__asm__ __volatile__("mrs %0, SPSR_EL1\n\t" : "=r" (spsr_el1) : : "memory");
return spsr_el1;
}
void raw_write_spsr_el1(uint32_t spsr_el1)
{
__asm__ __volatile__("msr SPSR_EL1, %0\n\t" : : "r" (spsr_el1) : "memory");
}
uint32_t raw_read_spsr_el2(void)
{
uint32_t spsr_el2;
__asm__ __volatile__("mrs %0, SPSR_EL2\n\t" : "=r" (spsr_el2) : : "memory");
return spsr_el2;
}
void raw_write_spsr_el2(uint32_t spsr_el2)
{
__asm__ __volatile__("msr SPSR_EL2, %0\n\t" : : "r" (spsr_el2) : "memory");
}
uint32_t raw_read_spsr_el3(void)
{
uint32_t spsr_el3;
__asm__ __volatile__("mrs %0, SPSR_EL3\n\t" : "=r" (spsr_el3) : : "memory");
return spsr_el3;
}
void raw_write_spsr_el3(uint32_t spsr_el3)
{
__asm__ __volatile__("msr SPSR_EL3, %0\n\t" : : "r" (spsr_el3) : "memory");
}
uint32_t raw_read_spsr_current(void)
{
uint32_t el = get_current_el();
return raw_read_spsr(el);
}
void raw_write_spsr_current(uint32_t spsr)
{
uint32_t el = get_current_el();
raw_write_spsr(spsr, el);
}
uint32_t raw_read_spsr(uint32_t el)
{
SWITCH_CASE_READ(raw_read_spsr, spsr, uint32_t, el);
}
void raw_write_spsr(uint32_t spsr, uint32_t el)
{
SWITCH_CASE_WRITE(raw_write_spsr, spsr, el);
}
uint32_t raw_read_spsr_fiq(void)
{
uint32_t spsr_fiq;
__asm__ __volatile__("mrs %0, SPSR_fiq\n\t" : "=r" (spsr_fiq) : : "memory");
return spsr_fiq;
}
void raw_write_spsr_fiq(uint32_t spsr_fiq)
{
__asm__ __volatile__("msr SPSR_fiq, %0\n\t" : : "r" (spsr_fiq) : "memory");
}
uint32_t raw_read_spsr_irq(void)
{
uint32_t spsr_irq;
__asm__ __volatile__("mrs %0, SPSR_irq\n\t" : "=r" (spsr_irq) : : "memory");
return spsr_irq;
}
void raw_write_spsr_irq(uint32_t spsr_irq)
{
__asm__ __volatile__("msr SPSR_irq, %0\n\t" : : "r" (spsr_irq) : "memory");
}
uint32_t raw_read_spsr_und(void)
{
uint32_t spsr_und;
__asm__ __volatile__("mrs %0, SPSR_und\n\t" : "=r" (spsr_und) : : "memory");
return spsr_und;
}
void raw_write_spsr_und(uint32_t spsr_und)
{
__asm__ __volatile__("msr SPSR_und, %0\n\t" : : "r" (spsr_und) : "memory");
}

File diff suppressed because it is too large.


@@ -1,95 +0,0 @@
/*
* This file is part of the coreboot project.
*
* Copyright 2014 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* tlb.c: System intructions for TLB maintenance.
* Reference: ARM Architecture Reference Manual, ARMv8-A edition
*/
#include <stdint.h>
#include <arch/lib_helpers.h>
/* TLBIALL */
void tlbiall_el1(void)
{
__asm__ __volatile__("tlbi alle1\n\t" : : : "memory");
}
void tlbiall_el2(void)
{
__asm__ __volatile__("tlbi alle2\n\t" : : : "memory");
}
void tlbiall_el3(void)
{
__asm__ __volatile__("tlbi alle3\n\t" : : : "memory");
}
void tlbiall_current(void)
{
uint32_t el = get_current_el();
tlbiall(el);
}
void tlbiall(uint32_t el)
{
SWITCH_CASE_TLBI(tlbiall, el);
}
/* TLBIALLIS */
void tlbiallis_el1(void)
{
__asm__ __volatile__("tlbi alle1is\n\t" : : : "memory");
}
void tlbiallis_el2(void)
{
__asm__ __volatile__("tlbi alle2is\n\t" : : : "memory");
}
void tlbiallis_el3(void)
{
__asm__ __volatile__("tlbi alle3is\n\t" : : : "memory");
}
void tlbiallis_current(void)
{
uint32_t el = get_current_el();
tlbiallis(el);
}
void tlbiallis(uint32_t el)
{
SWITCH_CASE_TLBI(tlbiallis, el);
}
/* TLBIVAA */
void tlbivaa_el1(uint64_t va)
{
__asm__ __volatile__("tlbi vaae1, %0\n\t" : : "r" (va) : "memory");
}


@@ -252,7 +252,7 @@ void mmu_config_range(void *start, size_t size, uint64_t tag)
 	/* ARMv8 MMUs snoop L1 data cache, no need to flush it. */
 	dsb();
-	tlbiall_current();
+	tlbiall_el2();
 	dsb();
 	isb();
 }
@@ -298,7 +298,7 @@ static uint32_t is_mmu_enabled(void)
 {
 	uint32_t sctlr;
-	sctlr = raw_read_sctlr_current();
+	sctlr = raw_read_sctlr_el2();
 	return (sctlr & SCTLR_M);
 }
@@ -309,19 +309,18 @@ static uint32_t is_mmu_enabled(void)
  */
 void mmu_disable(void)
 {
-	uint32_t el = get_current_el();
 	uint32_t sctlr;
-	sctlr = raw_read_sctlr(el);
+	sctlr = raw_read_sctlr_el2();
 	sctlr &= ~(SCTLR_C | SCTLR_M | SCTLR_I);
-	tlbiall_current();
+	tlbiall_el2();
 	dcache_clean_invalidate_all();
 	dsb();
 	isb();
-	raw_write_sctlr(sctlr, el);
+	raw_write_sctlr_el2(sctlr);
 	dcache_clean_invalidate_all();
 	dsb();
@@ -338,26 +337,26 @@ void mmu_enable(void)
 	uint32_t sctlr;
 	/* Initialize MAIR indices */
-	raw_write_mair_current(MAIR_ATTRIBUTES);
+	raw_write_mair_el2(MAIR_ATTRIBUTES);
 	/* Invalidate TLBs */
-	tlbiall_current();
+	tlbiall_el2();
 	/* Initialize TCR flags */
-	raw_write_tcr_current(TCR_TOSZ | TCR_IRGN0_NM_WBWAC | TCR_ORGN0_NM_WBWAC |
+	raw_write_tcr_el2(TCR_TOSZ | TCR_IRGN0_NM_WBWAC | TCR_ORGN0_NM_WBWAC |
 			  TCR_SH0_IS | TCR_TG0_4KB | TCR_PS_256TB |
 			  TCR_TBI_USED);
 	/* Initialize TTBR */
-	raw_write_ttbr0_current((uintptr_t)xlat_addr);
+	raw_write_ttbr0_el2((uintptr_t)xlat_addr);
 	/* Ensure system register writes are committed before enabling MMU */
 	isb();
 	/* Enable MMU */
-	sctlr = raw_read_sctlr_current();
+	sctlr = raw_read_sctlr_el2();
 	sctlr |= SCTLR_C | SCTLR_M | SCTLR_I;
-	raw_write_sctlr_current(sctlr);
+	raw_write_sctlr_el2(sctlr);
 	isb();


@@ -1,7 +1,7 @@
 /*
- * This file is part of the coreboot project.
+ * This file is part of the libpayload project.
  *
- * Copyright 2014 Google Inc.
+ * Copyright 2018 Google Inc
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -25,369 +25,241 @@
  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
  * SUCH DAMAGE.
- *
- * lib_helpers.h: All library function prototypes and macros are defined in this
- * file.
  */
 #ifndef __ARCH_LIB_HELPERS_H__
 #define __ARCH_LIB_HELPERS_H__
#ifdef __ASSEMBLY__
/* Macro to switch to label based on current el */
.macro switch_el xreg label1 label2 label3
mrs \xreg, CurrentEL
/* Currently at EL1 */
cmp \xreg, 0x4
b.eq \label1
/* Currently at EL2 */
cmp \xreg, 0x8
b.eq \label2
/* Currently at EL3 */
cmp \xreg, 0xc
b.eq \label3
.endm
/* Macro to read sysreg at current EL
xreg - reg in which read value needs to be stored
sysreg - system reg that is to be read
*/
.macro read_current xreg sysreg
switch_el \xreg, 101f, 102f, 103f
101:
mrs \xreg, \sysreg\()_el1
b 104f
102:
mrs \xreg, \sysreg\()_el2
b 104f
103:
mrs \xreg, \sysreg\()_el3
b 104f
104:
.endm
/* Macro to write sysreg at current EL
xreg - reg from which value needs to be written
sysreg - system reg that is to be written
temp - temp reg that can be used to read current EL
*/
.macro write_current sysreg xreg temp
switch_el \temp, 101f, 102f, 103f
101:
msr \sysreg\()_el1, \xreg
b 104f
102:
msr \sysreg\()_el2, \xreg
b 104f
103:
msr \sysreg\()_el3, \xreg
b 104f
104:
.endm
#else
#define EL0 0
#define EL1 1
#define EL2 2
#define EL3 3
#define CURRENT_EL_MASK 0x3
#define CURRENT_EL_SHIFT 2
#include <stdint.h>
 #define DAIF_DBG_BIT (1 << 3)
 #define DAIF_ABT_BIT (1 << 2)
 #define DAIF_IRQ_BIT (1 << 1)
 #define DAIF_FIQ_BIT (1 << 0)
-#define SWITCH_CASE_READ(func, var, type, el) do { \
-	type var = -1; \
-	switch (el) { \
-	case EL1: \
-		var = func##_el1(); \
-		break; \
-	case EL2: \
-		var = func##_el2(); \
-		break; \
-	case EL3: \
-		var = func##_el3(); \
-		break; \
-	} \
-	return var; \
-} while (0)
-#define SWITCH_CASE_WRITE(func, var, el) do { \
-	switch (el) { \
-	case EL1: \
-		func##_el1(var); \
-		break; \
-	case EL2: \
-		func##_el2(var); \
-		break; \
-	case EL3: \
-		func##_el3(var); \
-		break; \
-	} \
-} while (0)
-#define SWITCH_CASE_TLBI(func, el) do { \
-	switch (el) { \
-	case EL1: \
-		func##_el1(); \
-		break; \
-	case EL2: \
-		func##_el2(); \
-		break; \
-	case EL3: \
-		func##_el3(); \
-		break; \
-	} \
-} while (0)
+#include <stdint.h>
+#define MAKE_REGISTER_ACCESSORS(reg) \
+	static inline uint64_t raw_read_##reg(void) \
+	{ \
+		uint64_t value; \
+		__asm__ __volatile__("mrs %0, " #reg "\n\t" \
+				     : "=r" (value) : : "memory"); \
+		return value; \
+	} \
+	static inline void raw_write_##reg(uint64_t value) \
+	{ \
+		__asm__ __volatile__("msr " #reg ", %0\n\t" \
+				     : : "r" (value) : "memory"); \
+	}
+#define MAKE_REGISTER_ACCESSORS_EL12(reg) \
+	MAKE_REGISTER_ACCESSORS(reg##_el1) \
+	MAKE_REGISTER_ACCESSORS(reg##_el2)
+/* Architectural register accessors */
+MAKE_REGISTER_ACCESSORS_EL12(actlr)
+MAKE_REGISTER_ACCESSORS_EL12(afsr0)
+MAKE_REGISTER_ACCESSORS_EL12(afsr1)
+MAKE_REGISTER_ACCESSORS(aidr_el1)
+MAKE_REGISTER_ACCESSORS_EL12(amair)
+MAKE_REGISTER_ACCESSORS(ccsidr_el1)
+MAKE_REGISTER_ACCESSORS(clidr_el1)
+MAKE_REGISTER_ACCESSORS(cntfrq_el0)
+MAKE_REGISTER_ACCESSORS(cnthctl_el2)
+MAKE_REGISTER_ACCESSORS(cnthp_ctl_el2)
+MAKE_REGISTER_ACCESSORS(cnthp_cval_el2)
+MAKE_REGISTER_ACCESSORS(cnthp_tval_el2)
MAKE_REGISTER_ACCESSORS(cntkctl_el1)
MAKE_REGISTER_ACCESSORS(cntp_ctl_el0)
MAKE_REGISTER_ACCESSORS(cntp_cval_el0)
MAKE_REGISTER_ACCESSORS(cntp_tval_el0)
MAKE_REGISTER_ACCESSORS(cntpct_el0)
MAKE_REGISTER_ACCESSORS(cntps_ctl_el1)
MAKE_REGISTER_ACCESSORS(cntps_cval_el1)
MAKE_REGISTER_ACCESSORS(cntps_tval_el1)
MAKE_REGISTER_ACCESSORS(cntv_ctl_el0)
MAKE_REGISTER_ACCESSORS(cntv_cval_el0)
MAKE_REGISTER_ACCESSORS(cntv_tval_el0)
MAKE_REGISTER_ACCESSORS(cntvct_el0)
MAKE_REGISTER_ACCESSORS(cntvoff_el2)
MAKE_REGISTER_ACCESSORS(contextidr_el1)
MAKE_REGISTER_ACCESSORS(cpacr_el1)
MAKE_REGISTER_ACCESSORS(cptr_el2)
MAKE_REGISTER_ACCESSORS(csselr_el1)
MAKE_REGISTER_ACCESSORS(ctr_el0)
MAKE_REGISTER_ACCESSORS(currentel)
MAKE_REGISTER_ACCESSORS(daif)
MAKE_REGISTER_ACCESSORS(dczid_el0)
MAKE_REGISTER_ACCESSORS_EL12(elr)
MAKE_REGISTER_ACCESSORS_EL12(esr)
MAKE_REGISTER_ACCESSORS_EL12(far)
MAKE_REGISTER_ACCESSORS(fpcr)
MAKE_REGISTER_ACCESSORS(fpsr)
MAKE_REGISTER_ACCESSORS(hacr_el2)
MAKE_REGISTER_ACCESSORS(hcr_el2)
MAKE_REGISTER_ACCESSORS(hpfar_el2)
MAKE_REGISTER_ACCESSORS(hstr_el2)
MAKE_REGISTER_ACCESSORS(isr_el1)
MAKE_REGISTER_ACCESSORS_EL12(mair)
MAKE_REGISTER_ACCESSORS_EL12(mdcr)
MAKE_REGISTER_ACCESSORS(mdscr_el1)
MAKE_REGISTER_ACCESSORS(midr_el1)
MAKE_REGISTER_ACCESSORS(mpidr_el1)
MAKE_REGISTER_ACCESSORS(nzcv)
MAKE_REGISTER_ACCESSORS(oslar_el1)
MAKE_REGISTER_ACCESSORS(oslsr_el1)
MAKE_REGISTER_ACCESSORS(par_el1)
MAKE_REGISTER_ACCESSORS(revdir_el1)
MAKE_REGISTER_ACCESSORS_EL12(rmr)
MAKE_REGISTER_ACCESSORS_EL12(rvbar)
MAKE_REGISTER_ACCESSORS_EL12(sctlr)
MAKE_REGISTER_ACCESSORS(sp_el0)
MAKE_REGISTER_ACCESSORS(sp_el1)
MAKE_REGISTER_ACCESSORS(spsel)
MAKE_REGISTER_ACCESSORS_EL12(spsr)
MAKE_REGISTER_ACCESSORS(spsr_abt)
MAKE_REGISTER_ACCESSORS(spsr_fiq)
MAKE_REGISTER_ACCESSORS(spsr_irq)
MAKE_REGISTER_ACCESSORS(spsr_und)
MAKE_REGISTER_ACCESSORS_EL12(tcr)
MAKE_REGISTER_ACCESSORS_EL12(tpidr)
MAKE_REGISTER_ACCESSORS_EL12(ttbr0)
MAKE_REGISTER_ACCESSORS(ttbr1_el1)
MAKE_REGISTER_ACCESSORS_EL12(vbar)
MAKE_REGISTER_ACCESSORS(vmpidr_el2)
MAKE_REGISTER_ACCESSORS(vpidr_el2)
MAKE_REGISTER_ACCESSORS(vtcr_el2)
MAKE_REGISTER_ACCESSORS(vttbr_el2)
+/* Special DAIF accessor functions */
+static inline void enable_debug_exceptions(void)
+{
+	__asm__ __volatile__("msr DAIFClr, %0\n\t"
+			     : : "i" (DAIF_DBG_BIT) : "memory");
+}
+static inline void enable_serror_exceptions(void)
+{
+	__asm__ __volatile__("msr DAIFClr, %0\n\t"
+			     : : "i" (DAIF_ABT_BIT) : "memory");
+}
+static inline void enable_irq(void)
+{
+	__asm__ __volatile__("msr DAIFClr, %0\n\t"
+			     : : "i" (DAIF_IRQ_BIT) : "memory");
+}
+static inline void enable_fiq(void)
+{
+	__asm__ __volatile__("msr DAIFClr, %0\n\t"
+			     : : "i" (DAIF_FIQ_BIT) : "memory");
+}
+static inline void disable_debug_exceptions(void)
+{
+	__asm__ __volatile__("msr DAIFSet, %0\n\t"
+			     : : "i" (DAIF_DBG_BIT) : "memory");
+}
+static inline void disable_serror_exceptions(void)
+{
+	__asm__ __volatile__("msr DAIFSet, %0\n\t"
+			     : : "i" (DAIF_ABT_BIT) : "memory");
+}
+static inline void disable_irq(void)
+{
+	__asm__ __volatile__("msr DAIFSet, %0\n\t"
+			     : : "i" (DAIF_IRQ_BIT) : "memory");
+}
+static inline void disable_fiq(void)
+{
+	__asm__ __volatile__("msr DAIFSet, %0\n\t"
+			     : : "i" (DAIF_FIQ_BIT) : "memory");
+}
-/* PSTATE and special purpose register access functions */
-uint32_t raw_read_current_el(void);
-uint32_t get_current_el(void);
-uint32_t raw_read_daif(void);
-void raw_write_daif(uint32_t daif);
-void enable_debug_exceptions(void);
-void enable_serror_exceptions(void);
-void enable_irq(void);
-void enable_fiq(void);
-void disable_debug_exceptions(void);
-void disable_serror_exceptions(void);
-void disable_irq(void);
-void disable_fiq(void);
-uint64_t raw_read_dlr_el0(void);
-void raw_write_dlr_el0(uint64_t dlr_el0);
-uint64_t raw_read_dspsr_el0(void);
-void raw_write_dspsr_el0(uint64_t dspsr_el0);
-uint64_t raw_read_elr_el1(void);
-void raw_write_elr_el1(uint64_t elr_el1);
-uint64_t raw_read_elr_el2(void);
-void raw_write_elr_el2(uint64_t elr_el2);
-uint64_t raw_read_elr_el3(void);
-void raw_write_elr_el3(uint64_t elr_el3);
-uint64_t raw_read_elr_current(void);
-void raw_write_elr_current(uint64_t elr);
-uint64_t raw_read_elr(uint32_t el);
-void raw_write_elr(uint64_t elr, uint32_t el);
-uint32_t raw_read_fpcr(void);
-void raw_write_fpcr(uint32_t fpcr);
-uint32_t raw_read_fpsr(void);
-void raw_write_fpsr(uint32_t fpsr);
-uint32_t raw_read_nzcv(void);
-void raw_write_nzcv(uint32_t nzcv);
-uint64_t raw_read_sp_el0(void);
-void raw_write_sp_el0(uint64_t sp_el0);
-uint64_t raw_read_sp_el1(void);
-void raw_write_sp_el1(uint64_t sp_el1);
-uint64_t raw_read_sp_el2(void);
-void raw_write_sp_el2(uint64_t sp_el2);
-uint32_t raw_read_spsel(void);
-void raw_write_spsel(uint32_t spsel);
-uint64_t raw_read_sp_el3(void);
-void raw_write_sp_el3(uint64_t sp_el3);
-uint32_t raw_read_spsr_abt(void);
-void raw_write_spsr_abt(uint32_t spsr_abt);
-uint32_t raw_read_spsr_el1(void);
-void raw_write_spsr_el1(uint32_t spsr_el1);
-uint32_t raw_read_spsr_el2(void);
-void raw_write_spsr_el2(uint32_t spsr_el2);
-uint32_t raw_read_spsr_el3(void);
-void raw_write_spsr_el3(uint32_t spsr_el3);
-uint32_t raw_read_spsr_current(void);
-void raw_write_spsr_current(uint32_t spsr);
-uint32_t raw_read_spsr(uint32_t el);
-void raw_write_spsr(uint32_t spsr, uint32_t el);
-uint32_t raw_read_spsr_fiq(void);
-void raw_write_spsr_fiq(uint32_t spsr_fiq);
-uint32_t raw_read_spsr_irq(void);
-void raw_write_spsr_irq(uint32_t spsr_irq);
-uint32_t raw_read_spsr_und(void);
-void raw_write_spsr_und(uint32_t spsr_und);
-/* System control register access */
-uint32_t raw_read_actlr_el1(void);
-void raw_write_actlr_el1(uint32_t actlr_el1);
-uint32_t raw_read_actlr_el2(void);
-void raw_write_actlr_el2(uint32_t actlr_el2);
-uint32_t raw_read_actlr_el3(void);
-void raw_write_actlr_el3(uint32_t actlr_el3);
-uint32_t raw_read_actlr_current(void);
-void raw_write_actlr_current(uint32_t actlr);
-uint32_t raw_read_actlr(uint32_t el);
-void raw_write_actlr(uint32_t actlr, uint32_t el);
-uint32_t raw_read_afsr0_el1(void);
-void raw_write_afsr0_el1(uint32_t afsr0_el1);
-uint32_t raw_read_afsr0_el2(void);
-void raw_write_afsr0_el2(uint32_t afsr0_el2);
-uint32_t raw_read_afsr0_el3(void);
-void raw_write_afsr0_el3(uint32_t afsr0_el3);
-uint32_t raw_read_afsr0_current(void);
-void raw_write_afsr0_current(uint32_t afsr0);
-uint32_t raw_read_afsr0(uint32_t el);
-void raw_write_afsr0(uint32_t afsr0, uint32_t el);
-uint32_t raw_read_afsr1_el1(void);
-void raw_write_afsr1_el1(uint32_t afsr1_el1);
-uint32_t raw_read_afsr1_el2(void);
-void raw_write_afsr1_el2(uint32_t afsr1_el2);
-uint32_t raw_read_afsr1_el3(void);
-void raw_write_afsr1_el3(uint32_t afsr1_el3);
-uint32_t raw_read_afsr1_current(void);
-void raw_write_afsr1_current(uint32_t afsr1);
-uint32_t raw_read_afsr1(uint32_t el);
-void raw_write_afsr1(uint32_t afsr1, uint32_t el);
-uint32_t raw_read_aidr_el1(void);
-uint64_t raw_read_amair_el1(void);
-void raw_write_amair_el1(uint64_t amair_el1);
-uint64_t raw_read_amair_el2(void);
-void raw_write_amair_el2(uint64_t amair_el2);
-uint64_t raw_read_amair_el3(void);
-void raw_write_amair_el3(uint64_t amair_el3);
-uint64_t raw_read_amair_current(void);
-void raw_write_amair_current(uint64_t amair);
-uint64_t raw_read_amair(uint32_t el);
void raw_write_amair(uint64_t amair, uint32_t el);
uint32_t raw_read_ccsidr_el1(void);
uint32_t raw_read_clidr_el1(void);
uint32_t raw_read_cpacr_el1(void);
void raw_write_cpacr_el1(uint32_t cpacr_el1);
uint32_t raw_read_cptr_el2(void);
void raw_write_cptr_el2(uint32_t cptr_el2);
uint32_t raw_read_cptr_el3(void);
void raw_write_cptr_el3(uint32_t cptr_el3);
uint32_t raw_read_csselr_el1(void);
void raw_write_csselr_el1(uint32_t csselr_el1);
uint32_t raw_read_ctr_el0(void);
uint32_t raw_read_esr_el1(void);
void raw_write_esr_el1(uint32_t esr_el1);
uint32_t raw_read_esr_el2(void);
void raw_write_esr_el2(uint32_t esr_el2);
uint32_t raw_read_esr_el3(void);
void raw_write_esr_el3(uint32_t esr_el3);
uint32_t raw_read_esr_current(void);
void raw_write_esr_current(uint32_t esr);
uint32_t raw_read_esr(uint32_t el);
void raw_write_esr(uint32_t esr, uint32_t el);
uint64_t raw_read_far_el1(void);
void raw_write_far_el1(uint64_t far_el1);
uint64_t raw_read_far_el2(void);
void raw_write_far_el2(uint64_t far_el2);
uint64_t raw_read_far_el3(void);
void raw_write_far_el3(uint64_t far_el3);
uint64_t raw_read_far_current(void);
void raw_write_far_current(uint64_t far);
uint64_t raw_read_far(uint32_t el);
void raw_write_far(uint64_t far, uint32_t el);
uint64_t raw_read_hcr_el2(void);
void raw_write_hcr_el2(uint64_t hcr_el2);
uint64_t raw_read_aa64pfr0_el1(void);
uint64_t raw_read_mair_el1(void);
void raw_write_mair_el1(uint64_t mair_el1);
uint64_t raw_read_mair_el2(void);
void raw_write_mair_el2(uint64_t mair_el2);
uint64_t raw_read_mair_el3(void);
void raw_write_mair_el3(uint64_t mair_el3);
uint64_t raw_read_mair_current(void);
void raw_write_mair_current(uint64_t mair);
uint64_t raw_read_mair(uint32_t el);
void raw_write_mair(uint64_t mair, uint32_t el);
uint64_t raw_read_mpidr_el1(void);
uint32_t raw_read_rmr_el1(void);
void raw_write_rmr_el1(uint32_t rmr_el1);
uint32_t raw_read_rmr_el2(void);
void raw_write_rmr_el2(uint32_t rmr_el2);
uint32_t raw_read_rmr_el3(void);
void raw_write_rmr_el3(uint32_t rmr_el3);
uint32_t raw_read_rmr_current(void);
void raw_write_rmr_current(uint32_t rmr);
uint32_t raw_read_rmr(uint32_t el);
void raw_write_rmr(uint32_t rmr, uint32_t el);
uint64_t raw_read_rvbar_el1(void);
void raw_write_rvbar_el1(uint64_t rvbar_el1);
uint64_t raw_read_rvbar_el2(void);
void raw_write_rvbar_el2(uint64_t rvbar_el2);
uint64_t raw_read_rvbar_el3(void);
void raw_write_rvbar_el3(uint64_t rvbar_el3);
uint64_t raw_read_rvbar_current(void);
void raw_write_rvbar_current(uint64_t rvbar);
uint64_t raw_read_rvbar(uint32_t el);
void raw_write_rvbar(uint64_t rvbar, uint32_t el);
uint32_t raw_read_scr_el3(void);
void raw_write_scr_el3(uint32_t scr_el3);
uint32_t raw_read_sctlr_el1(void);
void raw_write_sctlr_el1(uint32_t sctlr_el1);
uint32_t raw_read_sctlr_el2(void);
void raw_write_sctlr_el2(uint32_t sctlr_el2);
uint32_t raw_read_sctlr_el3(void);
void raw_write_sctlr_el3(uint32_t sctlr_el3);
uint32_t raw_read_sctlr_current(void);
void raw_write_sctlr_current(uint32_t sctlr);
uint32_t raw_read_sctlr(uint32_t el);
void raw_write_sctlr(uint32_t sctlr, uint32_t el);
uint64_t raw_read_tcr_el1(void);
void raw_write_tcr_el1(uint64_t tcr_el1);
uint32_t raw_read_tcr_el2(void);
void raw_write_tcr_el2(uint32_t tcr_el2);
uint32_t raw_read_tcr_el3(void);
void raw_write_tcr_el3(uint32_t tcr_el3);
uint64_t raw_read_tcr_current(void);
void raw_write_tcr_current(uint64_t tcr);
uint64_t raw_read_tcr(uint32_t el);
void raw_write_tcr(uint64_t tcr, uint32_t el);
uint64_t raw_read_ttbr0_el1(void);
void raw_write_ttbr0_el1(uint64_t ttbr0_el1);
uint64_t raw_read_ttbr0_el2(void);
void raw_write_ttbr0_el2(uint64_t ttbr0_el2);
uint64_t raw_read_ttbr0_el3(void);
void raw_write_ttbr0_el3(uint64_t ttbr0_el3);
uint64_t raw_read_ttbr0_current(void);
void raw_write_ttbr0_current(uint64_t ttbr0);
uint64_t raw_read_ttbr0(uint32_t el);
void raw_write_ttbr0(uint64_t ttbr0, uint32_t el);
uint64_t raw_read_ttbr1_el1(void);
void raw_write_ttbr1_el1(uint64_t ttbr1_el1);
uint64_t raw_read_vbar_el1(void);
void raw_write_vbar_el1(uint64_t vbar_el1);
uint64_t raw_read_vbar_el2(void);
void raw_write_vbar_el2(uint64_t vbar_el2);
uint64_t raw_read_vbar_el3(void);
void raw_write_vbar_el3(uint64_t vbar_el3);
uint64_t raw_read_vbar_current(void);
void raw_write_vbar_current(uint64_t vbar);
uint64_t raw_read_vbar(uint32_t el);
void raw_write_vbar(uint64_t vbar, uint32_t el);
uint64_t raw_read_cntpct_el0(void);
uint32_t raw_read_cntfrq_el0(void);
 /* Cache maintenance system instructions */
-void dccisw(uint64_t cisw);
-void dccivac(uint64_t civac);
-void dccsw(uint64_t csw);
-void dccvac(uint64_t cvac);
-void dccvau(uint64_t cvau);
-void dcisw(uint64_t isw);
-void dcivac(uint64_t ivac);
-void dczva(uint64_t zva);
-void iciallu(void);
-void icialluis(void);
-void icivau(uint64_t ivau);
+static inline void dccisw(uint64_t cisw)
+{
+	__asm__ __volatile__("dc cisw, %0\n\t" : : "r" (cisw) : "memory");
+}
+static inline void dccivac(uint64_t civac)
+{
+	__asm__ __volatile__("dc civac, %0\n\t" : : "r" (civac) : "memory");
+}
+static inline void dccsw(uint64_t csw)
{
__asm__ __volatile__("dc csw, %0\n\t" : : "r" (csw) : "memory");
}
static inline void dccvac(uint64_t cvac)
{
__asm__ __volatile__("dc cvac, %0\n\t" : : "r" (cvac) : "memory");
}
static inline void dccvau(uint64_t cvau)
{
__asm__ __volatile__("dc cvau, %0\n\t" : : "r" (cvau) : "memory");
}
static inline void dcisw(uint64_t isw)
{
__asm__ __volatile__("dc isw, %0\n\t" : : "r" (isw) : "memory");
}
static inline void dcivac(uint64_t ivac)
{
__asm__ __volatile__("dc ivac, %0\n\t" : : "r" (ivac) : "memory");
}
static inline void dczva(uint64_t zva)
{
__asm__ __volatile__("dc zva, %0\n\t" : : "r" (zva) : "memory");
}
static inline void iciallu(void)
{
__asm__ __volatile__("ic iallu\n\t" : : : "memory");
}
static inline void icialluis(void)
{
__asm__ __volatile__("ic ialluis\n\t" : : : "memory");
}
static inline void icivau(uint64_t ivau)
{
__asm__ __volatile__("ic ivau, %0\n\t" : : "r" (ivau) : "memory");
}
 /* TLB maintenance instructions */
-void tlbiall_el1(void);
-void tlbiall_el2(void);
-void tlbiall_el3(void);
-void tlbiall_current(void);
-void tlbiall(uint32_t el);
-void tlbiallis_el1(void);
-void tlbiallis_el2(void);
-void tlbiallis_el3(void);
-void tlbiallis_current(void);
-void tlbiallis(uint32_t el);
-void tlbivaa_el1(uint64_t va);
+static inline void tlbiall_el1(void)
+{
+	__asm__ __volatile__("tlbi alle1\n\t" : : : "memory");
+}
+static inline void tlbiall_el2(void)
+{
+	__asm__ __volatile__("tlbi alle2\n\t" : : : "memory");
+}
+static inline void tlbiallis_el1(void)
{
__asm__ __volatile__("tlbi alle1is\n\t" : : : "memory");
}
static inline void tlbiallis_el2(void)
{
__asm__ __volatile__("tlbi alle2is\n\t" : : : "memory");
}
static inline void tlbivaa_el1(uint64_t va)
{
__asm__ __volatile__("tlbi vaae1, %0\n\t" : : "r" (va) : "memory");
}
 /* Memory barrier */
 /* data memory barrier */
@@ -401,9 +273,4 @@ void tlbivaa_el1(uint64_t va);
 #define dsb() dsb_opt(sy)
 #define isb() isb_opt()
-/* Clock */
-void set_cntfrq(uint32_t freq);
-#endif // __ASSEMBLY__
-#endif //__ARCH_LIB_HELPERS_H__
+#endif /* __ARCH_LIB_HELPERS_H__ */


@@ -210,9 +210,13 @@ MAKE_REGISTER_ACCESSORS(hpfar_el2)
 MAKE_REGISTER_ACCESSORS(hstr_el2)
 MAKE_REGISTER_ACCESSORS(isr_el1)
 MAKE_REGISTER_ACCESSORS_EL123(mair)
+MAKE_REGISTER_ACCESSORS_EL123(mdcr)
+MAKE_REGISTER_ACCESSORS(mdscr)
 MAKE_REGISTER_ACCESSORS(midr_el1)
 MAKE_REGISTER_ACCESSORS(mpidr_el1)
 MAKE_REGISTER_ACCESSORS(nzcv)
+MAKE_REGISTER_ACCESSORS(oslar_el1)
+MAKE_REGISTER_ACCESSORS(oslsr_el1)
 MAKE_REGISTER_ACCESSORS(par_el1)
 MAKE_REGISTER_ACCESSORS(revdir_el1)
 MAKE_REGISTER_ACCESSORS_EL123(rmr)