arm: Thumb ALL the things!

This patch switches every last part of Coreboot on ARM over to Thumb
mode: libpayload, the internal libgcc, and assorted assembly files. In
combination with the respective depthcharge patch, this will switch to
Thumb mode right after the entry point of the bootblock and not switch
back to ARM until the final assembly stub that jumps to the kernel.

The required changes to make this work include some new headers and
Makefile flags to handle assembly files (using the unified syntax and
the same helper macros as Linux), modifying our custom-written libgcc
code for 64-bit division to support Thumb (removing, for clarity, some
stale old files that were never really used), and flipping the general
CFLAGS to Thumb (some more cleanup there as well while I'm at it).

BUG=None
TEST=Snow and Nyan still boot.

Original-Change-Id: I80c04281e3adbf74f9f477486a96b9fafeb455b3
Original-Signed-off-by: Julius Werner <jwerner@chromium.org>
Original-Reviewed-on: https://chromium-review.googlesource.com/182212
Original-Reviewed-by: Gabe Black <gabeblack@chromium.org>
(cherry picked from commit 5f65c17cbfae165a95354146ae79e06c512c2c5a)

Conflicts:
	payloads/libpayload/include/arm/arch/asm.h
	src/arch/arm/Makefile.inc
	src/arch/arm/armv7/Makefile.inc

*** There is an open question about what to do with ramstage-S-ccopts;
*** it will need to be covered in additional ARM cleanup patches.

Change-Id: I80c04281e3adbf74f9f477486a96b9fafeb455b3
Signed-off-by: Marc Jones <marc.jones@se-eng.com>
Reviewed-on: http://review.coreboot.org/6930
Tested-by: build bot (Jenkins)
Reviewed-by: Stefan Reinauer <stefan.reinauer@coreboot.org>
This commit is contained in:
Julius Werner 2014-01-13 13:24:30 -08:00 committed by Marc Jones
parent a38ccfdee1
commit 25a282dabc
10 changed files with 77 additions and 393 deletions

View File

@ -27,7 +27,8 @@
## SUCH DAMAGE.
##
CFLAGS += -mfloat-abi=hard -marm -mabi=aapcs-linux -march=armv7-a
CFLAGS += -mthumb -march=armv7-a
arm_asm_flags = -Wa,-mthumb -Wa,-mimplicit-it=always -Wa,-mno-warn-deprecated
head.o-y += head.S
libc-y += main.c sysinfo.c
@ -37,3 +38,7 @@ libc-y += memcpy.S memset.S memmove.S
libc-y += exception_asm.S exception.c
libc-y += cache.c cpu.S
libcbfs-$(CONFIG_LP_CBFS) += dummy_media.c
# Add other classes here when you put assembly files into them!
head.o-S-ccopts += $(arm_asm_flags)
libc-S-ccopts += $(arm_asm_flags)

View File

@ -20,16 +20,17 @@
#ifndef __ARM_ASM_H
#define __ARM_ASM_H
#if defined __arm__
# define ARM(x...) x
# define THUMB(x...)
# define W(instr) instr
#elif defined __thumb__
/* __arm__ is defined regardless of Thumb mode, so need to order this right */
#if defined __thumb2__
# define ARM(x...)
# define THUMB(x...) x
# define W(instr) instr.w
#elif defined __thumb__
# error You are not compiling Thumb2, this won't work!
#else
# error Not in ARM or thumb mode!
# define ARM(x...) x
# define THUMB(x...)
# define W(instr) instr
#endif
#define ALIGN .align 0
@ -46,4 +47,10 @@
#define END(name) \
.size name, .-name
/* Everything should go into the text section by default. */
.text
/* Thumb code uses the (new) unified assembly syntax. */
THUMB( .syntax unified )
#endif /* __ARM_ASM_H */

View File

@ -66,7 +66,7 @@ $(objcbfs)/bootblock.debug: $(src)/arch/arm/bootblock.ld $(obj)/ldoptions $$(boo
ifeq ($(CONFIG_COMPILER_LLVM_CLANG),y)
$(LD_bootblock) -m armelf_linux_eabi --gc-sections -static -o $@ -L$(obj) $< -T $(src)/arch/arm/bootblock.ld
else
$(CC_bootblock) $(CFLAGS_bootblock) -nostartfiles -Wl,--gc-sections -static -o $@ -L$(obj) -T $(src)/arch/arm/bootblock.ld -Wl,--start-group $(bootblock-objs) -Wl,--end-group
$(CC_bootblock) $(CFLAGS_bootblock) -nostdlib -Wl,--gc-sections -static -o $@ -L$(obj) -T $(src)/arch/arm/bootblock.ld -Wl,--start-group $(bootblock-objs) -Wl,--end-group
endif
endif # CONFIG_ARCH_BOOTBLOCK_ARM
@ -89,9 +89,9 @@ VBOOT_STUB_DEPS += $(obj)/arch/arm/eabi_compat.rmodules_arm.o
$(objcbfs)/romstage.debug: $$(romstage-objs) $(src)/arch/arm/romstage.ld $(obj)/ldoptions
@printf " LINK $(subst $(obj)/,,$(@))\n"
ifeq ($(CONFIG_COMPILER_LLVM_CLANG),y)
$(LD_romstage) -nostdlib -nostartfiles --gc-sections -static -o $@ -L$(obj) $(romstage-objs) -T $(src)/arch/arm/romstage.ld
$(LD_romstage) -nostdlib --gc-sections -static -o $@ -L$(obj) $(romstage-objs) -T $(src)/arch/arm/romstage.ld
else
$(CC_romstage) $(CFLAGS_romstage) -nostartfiles -Wl,--gc-sections -static -o $@ -L$(obj) -T $(src)/arch/arm/romstage.ld -Wl,--start-group $(romstage-objs) -Wl,--end-group
$(CC_romstage) $(CFLAGS_romstage) -nostdlib -Wl,--gc-sections -static -o $@ -L$(obj) -T $(src)/arch/arm/romstage.ld -Wl,--start-group $(romstage-objs) -Wl,--end-group
endif
endif # CONFIG_ARCH_ROMSTAGE_ARM
@ -116,7 +116,7 @@ ramstage-srcs += $(wildcard src/mainboard/$(MAINBOARDDIR)/mainboard.c)
$(objcbfs)/ramstage.debug: $$(ramstage-objs) $(src)/arch/arm/ramstage.ld $(obj)/ldoptions
@printf " CC $(subst $(obj)/,,$(@))\n"
ifeq ($(CONFIG_COMPILER_LLVM_CLANG),y)
$(LD_ramstage) -m armelf_linux_eabi --gc-sections -o $@ -L$(obj) $< -T $(src)/arch/arm/ramstage.ld
$(LD_ramstage) -nostdlib -m armelf_linux_eabi --gc-sections -o $@ -L$(obj) $< -T $(src)/arch/arm/ramstage.ld
else
$(CC_ramstage) $(CFLAGS_ramstage) -nostdlib -Wl,--gc-sections -static -o $@ -L$(obj) -Wl,--start-group $(ramstage-objs) -Wl,--end-group -T $(src)/arch/arm/ramstage.ld
endif
@ -124,7 +124,7 @@ endif
$(objgenerated)/ramstage.o: $(stages_o) $$(ramstage-objs)
@printf " CC $(subst $(obj)/,,$(@))\n"
ifeq ($(CONFIG_COMPILER_LLVM_CLANG),y)
$(LD_ramstage) -m -m armelf_linux_eabi --gc-sections -r -o $@ --wrap __divdi3 --wrap __udivdi3 --wrap __moddi3 --wrap __umoddi3 --wrap __uidiv --start-group $(ramstage-objs) --end-group
$(LD_ramstage) -nostdlib -m armelf_linux_eabi --gc-sections -r -o $@ --wrap __divdi3 --wrap __udivdi3 --wrap __moddi3 --wrap __umoddi3 --wrap __uidiv --start-group $(ramstage-objs) --end-group
else
$(CC_ramstage) $(CFLAGS_ramstage) $(CPPFLAGS_ramstage) -nostdlib -Wl,--gc-sections -r -o $@ -Wl,--start-group $(stages_o) $(ramstage-objs) -Wl,--end-group
endif

View File

@ -19,8 +19,10 @@
##
###############################################################################
armv7_flags = -march=armv7-a -mthumb -mthumb-interwork \
armv7_flags = -march=armv7-a -mthumb \
-I$(src)/arch/arm/include/armv7/ -D__COREBOOT_ARM_ARCH__=7
armv7_asm_flags = $(armv7_flags) -Wa,-mthumb -Wa,-mimplicit-it=always \
-Wa,-mno-warn-deprecated
###############################################################################
# bootblock
@ -40,7 +42,7 @@ bootblock-$(CONFIG_BOOTBLOCK_CONSOLE) += exception_asm.S
bootblock-y += mmu.c
bootblock-c-ccopts += $(armv7_flags)
bootblock-S-ccopts += $(armv7_flags)
bootblock-S-ccopts += $(armv7_asm_flags)
endif # CONFIG_ARCH_BOOTBLOCK_ARMV7
@ -57,7 +59,7 @@ romstage-y += exception_asm.S
romstage-y += mmu.c
romstage-c-ccopts += $(armv7_flags)
romstage-S-ccopts += $(armv7_flags)
romstage-S-ccopts += $(armv7_asm_flags)
endif # CONFIG_ARCH_ROMSTAGE_ARMV7
@ -74,6 +76,6 @@ ramstage-y += exception_asm.S
ramstage-y += mmu.c
ramstage-c-ccopts += $(armv7_flags)
ramstage-S-ccopts += $(armv7_flags)
ramstage-S-ccopts += $(armv7_asm_flags)
endif # CONFIG_ARCH_RAMSTAGE_ARMV7

View File

@ -32,6 +32,7 @@
#include <arch/asm.h>
.section ".start", "a", %progbits
.arm
ENTRY(_start)
/*
* Set the cpu to System mode with IRQ and FIQ disabled. Prefetch/Data
@ -40,7 +41,11 @@ ENTRY(_start)
* causes it.
*/
msr cpsr_cxf, #0xdf
bl _thumb_start
ENDPROC(_start)
.thumb
ENTRY(_thumb_start)
/*
* From Cortex-A Series Programmer's Guide:
* Only CPU 0 performs initialization. Other CPUs go into WFI
@ -72,25 +77,17 @@ call_bootblock:
ldr sp, .Stack /* Set up stack pointer */
ldr r0,=0x00000000
/*
* The current design of cpu_info places the
* struct at the top of the stack. The number of
* words pushed must be at least as large as that
* struct.
* The current design of cpu_info places the struct at the top of the
* stack. Free enough space to accomodate for that, but make sure it's
* 8-byte aligned for ABI compliance.
*/
push {r0-r2}
bic sp, sp, #7 /* 8-byte alignment for ABI compliance */
/*
* Use "bl" instead of "b" even though we do not intend to return.
* "bl" gets compiled to "blx" if we're transitioning from ARM to
* Thumb. However, "b" will not and GCC may attempt to create a
* wrapper which is currently broken.
*/
sub sp, sp, #16
bl main
wait_for_interrupt:
wfi
mov pc, lr @ back to my caller
ENDPROC(_start)
ENDPROC(_thumb_start)
/* we do it this way because it's a 32-bit constant and
* in some cases too far away to be loaded as just an offset

View File

@ -20,19 +20,20 @@
#ifndef __ARM_ASM_H
#define __ARM_ASM_H
#if defined __arm__
# define ARM(x...) x
# define THUMB(x...)
# define W(instr) instr
#elif defined __thumb__
/* __arm__ is defined regardless of Thumb mode, so need to order this right */
#if defined __thumb2__
# define ARM(x...)
# define THUMB(x...) x
# define W(instr) instr.w
# if __COREBOOT_ARM_ARCH__ < 7
# error thumb mode has not been tested with ARM < v7!
# endif
#elif defined __thumb__
# error You are not compiling Thumb2, this won't work!
#else
# error Not in ARM or thumb mode!
# define ARM(x...) x
# define THUMB(x...)
# define W(instr) instr
#endif
#define ALIGN .align 0
@ -49,4 +50,10 @@
#define END(name) \
.size name, .-name
/* Everything should go into the text section by default. */
.text
/* Thumb code uses the (new) unified assembly syntax. */
THUMB( .syntax unified )
#endif /* __ARM_ASM_H */

View File

@ -1,214 +0,0 @@
/* Miscellaneous BPABI functions.
Copyright (C) 2003, 2004 Free Software Foundation, Inc.
Contributed by CodeSourcery, LLC.
This file is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.
In addition to the permissions in the GNU General Public License, the
Free Software Foundation gives you unlimited permission to link the
compiled version of this file into combinations with other programs,
and to distribute those combinations without any restriction coming
from the use of this file. (The General Public License restrictions
do apply in other respects; for example, they cover modification of
the file, and distribution when not linked into a combine
executable.)
This file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; see the file COPYING. If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA. */
#if defined __GNUC__
#include <stdint.h>
uint64_t __udivmoddi4(uint64_t n, uint64_t d, uint64_t *rp);
extern int64_t __divdi3(int64_t, int64_t);
extern uint64_t __udivdi3(uint64_t, uint64_t);
extern int64_t __gnu_ldivmod_helper(int64_t, int64_t, int64_t *);
extern uint64_t __gnu_uldivmod_helper(uint64_t, uint64_t, uint64_t *);
typedef union
{
struct {
int32_t low;
int32_t high;
} s;
int64_t ll;
} DWunion;
uint64_t
__udivmoddi4(uint64_t n, uint64_t d, uint64_t *rp)
{
const DWunion nn = {.ll = n};
const DWunion dd = {.ll = d};
DWunion rr;
uint32_t d0, d1, n0, n1, n2;
uint32_t q0, q1;
uint32_t b, bm;
d0 = dd.s.low;
d1 = dd.s.high;
n0 = nn.s.low;
n1 = nn.s.high;
if (d1 == 0) {
if (d0 > n1) {
/* 0q = nn / 0D */
udiv_qrnnd(q0, n0, n1, n0, d0);
q1 = 0;
/* Remainder in n0. */
} else {
/* qq = NN / 0d */
if (d0 == 0)
d0 = 1 / d0; /* Divide intentionally by zero. */
udiv_qrnnd(q1, n1, 0, n1, d0);
udiv_qrnnd(q0, n0, n1, n0, d0);
/* Remainder in n0. */
}
if (rp != 0) {
rr.s.low = n0;
rr.s.high = 0;
*rp = rr.ll;
}
} else {
if (d1 > n1) {
/* 00 = nn / DD */
q0 = 0;
q1 = 0;
/* Remainder in n1n0. */
if (rp != 0) {
rr.s.low = n0;
rr.s.high = n1;
*rp = rr.ll;
}
} else {
/* 0q = NN / dd */
count_leading_zeros(bm, d1);
if (bm == 0) {
/* From (n1 >= d1) /\ (the most significant
bit of d1 is set), conclude (the most
significant bit of n1 is set) /\ (the
quotient digit q0 = 0 or 1).
This special case is necessary, not an
optimization. */
/* The condition on the next line takes
advantage of that n1 >= d1 (true due to
program flow). */
if (n1 > d1 || n0 >= d0) {
q0 = 1;
sub_ddmmss(n1, n0, n1, n0, d1, d0);
} else
q0 = 0;
q1 = 0;
if (rp != 0) {
rr.s.low = n0;
rr.s.high = n1;
*rp = rr.ll;
}
} else {
uint32_t m1, m0;
/* Normalize. */
b = 32 - bm;
d1 = (d1 << bm) | (d0 >> b);
d0 = d0 << bm;
n2 = n1 >> b;
n1 = (n1 << bm) | (n0 >> b);
n0 = n0 << bm;
udiv_qrnnd(q0, n1, n2, n1, d1);
umul_ppmm(m1, m0, q0, d0);
if (m1 > n1 || (m1 == n1 && m0 > n0)) {
q0--;
sub_ddmmss(m1, m0, m1, m0, d1, d0);
}
q1 = 0;
/* Remainder in (n1n0 - m1m0) >> bm. */
if (rp != 0) {
sub_ddmmss(n1, n0, n1, n0, m1, m0);
rr.s.low = (n1 << b) | (n0 >> bm);
rr.s.high = n1 >> bm;
*rp = rr.ll;
}
}
}
}
const DWunion ww = {{.low = q0, .high = q1}};
return ww.ll;
}
int64_t
__divdi3(int64_t u, int64_t v)
{
int32_t c = 0;
DWunion uu = {.ll = u};
DWunion vv = {.ll = v};
int64_t w;
if (uu.s.high < 0) {
c = ~c;
uu.ll = -uu.ll;
}
if (vv.s.high < 0) {
c = ~c;
vv.ll = -vv.ll;
}
w = __udivmoddi4(uu.ll, vv.ll, (uint64_t *)0);
if (c)
w = -w;
return w;
}
int64_t
__gnu_ldivmod_helper (int64_t a, int64_t b, int64_t *remainder)
{
int64_t quotient;
quotient = __divdi3(a, b);
*remainder = a - b * quotient;
return quotient;
}
uint64_t
__udivdi3(uint64_t n, uint64_t d)
{
return __udivmoddi4(n, d, (uint64_t *)0);
}
uint64_t
__gnu_uldivmod_helper(uint64_t a, uint64_t b, uint64_t *remainder)
{
uint64_t quotient;
quotient = __udivdi3(a, b);
*remainder = a - b * quotient;
return quotient;
}
#endif

View File

@ -1,130 +0,0 @@
/* Miscellaneous BPABI functions.
Copyright (C) 2003, 2004, 2007, 2008 Free Software Foundation, Inc.
Contributed by CodeSourcery, LLC.
This file is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.
In addition to the permissions in the GNU General Public License, the
Free Software Foundation gives you unlimited permission to link the
compiled version of this file into combinations with other programs,
and to distribute those combinations without any restriction coming
from the use of this file. (The General Public License restrictions
do apply in other respects; for example, they cover modification of
the file, and distribution when not linked into a combine
executable.)
This file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; see the file COPYING. If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA. */
#include <arch/asm.h>
#ifdef __ARMEB__
#define xxh r0
#define xxl r1
#define yyh r2
#define yyl r3
#else
#define xxh r1
#define xxl r0
#define yyh r3
#define yyl r2
#endif
#if defined __thumb2__
.macro do_it cond
it \cond
.endm
#define do_push push
#define do_pop pop
#else
.macro do_it cond
.endm
#define do_push stmfd sp!,
#define do_pop ldmfd sp!,
#endif
ENTRY(__aeabi_lcmp)
cmp xxh, yyh
do_it lt
movlt r0, #-1
do_it gt
movgt r0, #1
do_it ne
movne pc, lr
subs r0, xxl, yyl
do_it lo
movlo r0, #-1
do_it hi
movhi r0, #1
mov pc, lr
ENDPROC(__aeabi_lcmp)
ENTRY(__aeabi_ulcmp)
cmp xxh, yyh
do_it lo
movlo r0, #-1
do_it hi
movhi r0, #1
do_it ne
movne pc, lr
cmp xxl, yyl
do_it lo
movlo r0, #-1
do_it hi
movhi r0, #1
do_it eq
moveq r0, #0
mov pc, lr
ENDPROC(__aeabi_ulcmp)
ENTRY(__aeabi_ldivmod)
sub sp, sp, #8
#if defined(__thumb2__)
mov ip, sp
push {ip, lr}
#else
do_push {sp, lr}
#endif
bl __gnu_ldivmod_helper
ldr lr, [sp, #4]
add sp, sp, #8
do_pop {r2, r3}
mov pc, lr
ENDPROC(__aeabi_ldivmod)
ENTRY(__aeabi_uldivmod)
sub sp, sp, #8
#if defined(__thumb2__)
mov ip, sp
push {ip, lr}
#else
do_push {sp, lr}
#endif
bl __gnu_uldivmod_helper
ldr lr, [sp, #4]
add sp, sp, #8
do_pop {r2, r3}
mov pc, lr
ENDPROC(__aeabi_uldivmod)

View File

@ -55,8 +55,12 @@ Q_1 .req r1
R_0 .req r2
R_1 .req r3
THUMB(
TMP .req r8
)
ENTRY(__aeabi_uldivmod)
stmfd sp!, {r4, r5, r6, r7, lr}
stmfd sp!, {r4, r5, r6, r7, THUMB(TMP,) lr}
@ Test if B == 0
orrs ip, B_0, B_1 @ Z set -> B == 0
beq L_div_by_0
@ -92,12 +96,16 @@ L_div_64_64:
subs D_1, D_0, #32
rsb ip, D_0, #32
movmi B_1, B_1, lsl D_0
orrmi B_1, B_1, B_0, lsr ip
ARM( orrmi B_1, B_1, B_0, lsr ip )
THUMB( lsrmi TMP, B_0, ip )
THUMB( orrmi B_1, B_1, TMP )
movpl B_1, B_0, lsl D_1
mov B_0, B_0, lsl D_0
@ C = 1 << (clz B - clz A)
movmi C_1, C_1, lsl D_0
orrmi C_1, C_1, C_0, lsr ip
ARM( orrmi C_1, C_1, C_0, lsr ip )
THUMB( lsrmi TMP, C_0, ip )
THUMB( orrmi C_1, C_1, TMP )
movpl C_1, C_0, lsl D_1
mov C_0, C_0, lsl D_0
L_done_shift:
@ -170,7 +178,7 @@ L_exit:
mov R_1, A_1
mov Q_0, D_0
mov Q_1, D_1
ldmfd sp!, {r4, r5, r6, r7, pc}
ldmfd sp!, {r4, r5, r6, r7, THUMB(TMP,) pc}
L_div_32_32:
@ Note: A_0 & r0 are aliases
@ -180,7 +188,7 @@ L_div_32_32:
mov R_0, r1
mov R_1, #0
mov Q_1, #0
ldmfd sp!, {r4, r5, r6, r7, pc}
ldmfd sp!, {r4, r5, r6, r7, THUMB(TMP,) pc}
L_pow2:
/* CLZ only exists in ARM architecture version 5 and above. */
@ -201,12 +209,14 @@ L_pow2:
add D_0, D_0, #32
L_1:
movpl A_0, A_0, lsr D_0
orrpl A_0, A_0, A_1, lsl D_1
ARM( orrpl A_0, A_0, A_1, lsl D_1 )
THUMB( lslpl TMP, A_1, D_1 )
THUMB( orrpl A_0, A_0, TMP )
mov A_1, A_1, lsr D_0
@ Mov back C to R
mov R_0, C_0
mov R_1, C_1
ldmfd sp!, {r4, r5, r6, r7, pc}
ldmfd sp!, {r4, r5, r6, r7, THUMB(TMP,) pc}
#else
@ Note: A, B and Q, R are aliases
@ R = A & (B - 1)
@ -244,7 +254,7 @@ L_1:
@ Move C to R
mov R_0, C_0
mov R_1, C_1
ldmfd sp!, {r4, r5, r6, r7, pc}
ldmfd sp!, {r4, r5, r6, r7, THUMB(TMP,) pc}
#endif
L_div_by_0:
@ -254,5 +264,5 @@ L_div_by_0:
mov Q_1, #0
mov R_0, #0
mov R_1, #0
ldmfd sp!, {r4, r5, r6, r7, pc}
ldmfd sp!, {r4, r5, r6, r7, THUMB(TMP,) pc}
ENDPROC(__aeabi_uldivmod)

View File

@ -27,8 +27,9 @@
* SUCH DAMAGE.
*/
#include <arch/asm.h>
.align 2
.arm
.global maincpu_stack_pointer
maincpu_stack_pointer:
@ -38,10 +39,8 @@ maincpu_stack_pointer:
maincpu_entry_point:
.word 0
.global maincpu_setup
.type maincpu_setup, function
maincpu_setup:
.arm
ENTRY(maincpu_setup)
/*
* Set the cpu to System mode with IRQ and FIQ disabled. Prefetch/Data
* aborts may happen early and crash before the abort handlers are
@ -54,3 +53,4 @@ maincpu_entry_point:
eor lr, lr
ldr r0, maincpu_entry_point
bx r0
ENDPROC(maincpu_setup)