Revert "arm64: remove assembly code string functions"

This reverts commit 00263d0d8e ("arm64: remove assembly code string functions")
to reintroduce the optimized assembly implementations of memset, memcpy and memmove.

BUG=chrome-os-partner:41185
BRANCH=None
TEST=Compiles successfully and boots to kernel prompt on Smaug

Change-Id: I47f8d8afa5c9ff3fca67d4d0f393336fef03402b
Signed-off-by: Patrick Georgi <pgeorgi@chromium.org>
Original-Commit-Id: eeb38afea828a2727d815e4fb5762cfdd09a2b3a
Original-Change-Id: Id053cbcea8b5e7ae29bdd6bb8b6f5e5011c42b00
Original-Signed-off-by: Furquan Shaikh <furquan@google.com>
Original-Reviewed-on: https://chromium-review.googlesource.com/275865
Original-Trybot-Ready: Furquan Shaikh <furquan@chromium.org>
Original-Tested-by: Furquan Shaikh <furquan@chromium.org>
Original-Reviewed-by: Aaron Durbin <adurbin@chromium.org>
Original-Commit-Queue: Furquan Shaikh <furquan@chromium.org>
Reviewed-on: http://review.coreboot.org/10564
Tested-by: build bot (Jenkins)
Reviewed-by: Marc Jones <marc.jones@se-eng.com>
Commit 464f5ca6d6 (parent 3cec871eaa)
Author: Furquan Shaikh, 2015-06-08 10:31:55 -07:00
Committed by: Patrick Georgi
4 changed files with 177 additions and 18 deletions

src/arch/arm64/Makefile.inc

@@ -66,9 +66,9 @@ bootblock-y += eabi_compat.c
 bootblock-y += transition.c transition_asm.S
 bootblock-$(CONFIG_COLLECT_TIMESTAMPS) += timestamp.c
-bootblock-y += ../../lib/memset.c
-bootblock-y += ../../lib/memcpy.c
-bootblock-y += ../../lib/memmove.c
+bootblock-y += memset.S
+bootblock-y += memcpy.S
+bootblock-y += memmove.S
 # Build the bootblock
@@ -112,15 +112,15 @@ romstage-y += cpu-stubs.c
 romstage-y += stages.c
 romstage-y += div0.c
 romstage-y += eabi_compat.c
-romstage-y += ../../lib/memset.c
-romstage-y += ../../lib/memcpy.c
-romstage-y += ../../lib/memmove.c
+romstage-y += memset.S
+romstage-y += memcpy.S
+romstage-y += memmove.S
 romstage-$(CONFIG_COLLECT_TIMESTAMPS) += timestamp.c
 romstage-y += transition.c transition_asm.S
-rmodules_arm64-y += ../../lib/memset.c
-rmodules_arm64-y += ../../lib/memcpy.c
-rmodules_arm64-y += ../../lib/memmove.c
+rmodules_arm64-y += memset.S
+rmodules_arm64-y += memcpy.S
+rmodules_arm64-y += memmove.S
 rmodules_arm64-y += eabi_compat.c
 # Build the romstage
@@ -147,27 +147,27 @@ ramstage-y += cpu_ramstage.c
 ramstage-y += eabi_compat.c
 ramstage-y += boot.c
 ramstage-y += tables.c
-ramstage-y += ../../lib/memset.c
-ramstage-y += ../../lib/memcpy.c
-ramstage-y += ../../lib/memmove.c
+ramstage-y += memset.S
+ramstage-y += memcpy.S
+ramstage-y += memmove.S
 ramstage-y += stage_entry.S
 ramstage-y += cpu-stubs.c
 ramstage-$(CONFIG_ARM64_USE_SPINTABLE) += spintable.c spintable_asm.S
 ramstage-$(CONFIG_ARM64_USE_ARM_TRUSTED_FIRMWARE) += arm_tf.c
 ramstage-y += transition.c transition_asm.S
-rmodules_arm64-y += ../../lib/memset.c
-rmodules_arm64-y += ../../lib/memcpy.c
-rmodules_arm64-y += ../../lib/memmove.c
+rmodules_arm64-y += memset.S
+rmodules_arm64-y += memcpy.S
+rmodules_arm64-y += memmove.S
 rmodules_arm64-y += eabi_compat.c
 secmon-y += stage_entry.S
 secmon-y += cpu-stubs.c
 secmon-y += startup.c
 secmon-y += ../../lib/malloc.c
-secmon-y += ../../lib/memset.c
-secmon-y += ../../lib/memcmp.c
-secmon-y += ../../lib/memcpy.c
+secmon-y += memset.S
+secmon-y += memmove.S
+secmon-y += memcpy.S
 ramstage-$(CONFIG_COLLECT_TIMESTAMPS) += timestamp.c

src/arch/arm64/memcpy.S (new file, 52 lines)

@@ -0,0 +1,52 @@
/*
* Copyright (C) 2013 ARM Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <arch/asm.h>
/*
* Copy a buffer from src to dest (alignment handled by the hardware)
*
* Parameters:
* x0 - dest
* x1 - src
* x2 - n
* Returns:
* x0 - dest
*/
ENTRY(memcpy)
mov x4, x0
subs x2, x2, #8
b.mi 2f
1: ldr x3, [x1], #8
subs x2, x2, #8
str x3, [x4], #8
b.pl 1b
2: adds x2, x2, #4
b.mi 3f
ldr w3, [x1], #4
sub x2, x2, #4
str w3, [x4], #4
3: adds x2, x2, #2
b.mi 4f
ldrh w3, [x1], #2
sub x2, x2, #2
strh w3, [x4], #2
4: adds x2, x2, #1
b.mi 5f
ldrb w3, [x1]
strb w3, [x4]
5: ret
ENDPROC(memcpy)
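
For reference only, a minimal C sketch of what the routine above does; memcpy_model is a hypothetical name and is not part of this commit. The copy runs in 8-byte chunks while at least 8 bytes remain, then drains at most one 4-byte, one 2-byte and one 1-byte tail transfer, mirroring labels 1 through 4.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Hypothetical C model of the assembly memcpy above; memcpy() on a
 * fixed-size chunk stands in for the ldr/str pair and keeps the
 * accesses alignment-safe in portable C. */
static void *memcpy_model(void *dest, const void *src, size_t n)
{
	uint8_t *d = dest;
	const uint8_t *s = src;

	while (n >= 8) {		/* label 1: 8-byte chunks */
		memcpy(d, s, 8);
		d += 8; s += 8; n -= 8;
	}
	if (n >= 4) {			/* label 2: at most one word */
		memcpy(d, s, 4);
		d += 4; s += 4; n -= 4;
	}
	if (n >= 2) {			/* label 3: at most one halfword */
		memcpy(d, s, 2);
		d += 2; s += 2; n -= 2;
	}
	if (n >= 1)			/* label 4: last byte */
		*d = *s;
	return dest;
}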

src/arch/arm64/memmove.S (new file, 55 lines)

@@ -0,0 +1,55 @@
/*
* Copyright (C) 2013 ARM Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <arch/asm.h>
/*
* Move a buffer from src to dest (alignment handled by the hardware).
* If dest <= src, call memcpy, otherwise copy in reverse order.
*
* Parameters:
* x0 - dest
* x1 - src
* x2 - n
* Returns:
* x0 - dest
*/
ENTRY(memmove)
cmp x0, x1
b.ls memcpy
add x4, x0, x2
add x1, x1, x2
subs x2, x2, #8
b.mi 2f
1: ldr x3, [x1, #-8]!
subs x2, x2, #8
str x3, [x4, #-8]!
b.pl 1b
2: adds x2, x2, #4
b.mi 3f
ldr w3, [x1, #-4]!
sub x2, x2, #4
str w3, [x4, #-4]!
3: adds x2, x2, #2
b.mi 4f
ldrh w3, [x1, #-2]!
sub x2, x2, #2
strh w3, [x4, #-2]!
4: adds x2, x2, #1
b.mi 5f
ldrb w3, [x1, #-1]
strb w3, [x4, #-1]
5: ret
ENDPROC(memmove)
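
Likewise, a hedged C sketch of the overlap handling described in the header comment; memmove_model is a hypothetical name, not code from this change. When dest is at or below src the forward copy is safe (the b.ls memcpy tail call), otherwise the copy walks backwards from the end of both buffers in the same 8/4/2/1-byte stages.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Hypothetical C model of the assembly memmove above. */
static void *memmove_model(void *dest, const void *src, size_t n)
{
	if ((uintptr_t)dest <= (uintptr_t)src)
		return memcpy(dest, src, n);	/* forward copy is safe; the asm tail-calls memcpy */

	/* dest > src: copy backwards so overlapping bytes are read before
	 * they are overwritten, again in 8/4/2/1-byte stages. */
	uint8_t *d = (uint8_t *)dest + n;
	const uint8_t *s = (const uint8_t *)src + n;

	while (n >= 8) { d -= 8; s -= 8; memcpy(d, s, 8); n -= 8; }
	if (n >= 4) { d -= 4; s -= 4; memcpy(d, s, 4); n -= 4; }
	if (n >= 2) { d -= 2; s -= 2; memcpy(d, s, 2); n -= 2; }
	if (n >= 1) { d -= 1; s -= 1; *d = *s; }
	return dest;
}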

src/arch/arm64/memset.S (new file, 52 lines)

@@ -0,0 +1,52 @@
/*
* Copyright (C) 2013 ARM Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <arch/asm.h>
/*
* Fill in the buffer with character c (alignment handled by the hardware)
*
* Parameters:
* x0 - buf
* x1 - c
* x2 - n
* Returns:
* x0 - buf
*/
ENTRY(memset)
mov x4, x0
and w1, w1, #0xff
orr w1, w1, w1, lsl #8
orr w1, w1, w1, lsl #16
orr x1, x1, x1, lsl #32
subs x2, x2, #8
b.mi 2f
1: str x1, [x4], #8
subs x2, x2, #8
b.pl 1b
2: adds x2, x2, #4
b.mi 3f
sub x2, x2, #4
str w1, [x4], #4
3: adds x2, x2, #2
b.mi 4f
sub x2, x2, #2
strh w1, [x4], #2
4: adds x2, x2, #1
b.mi 5f
strb w1, [x4]
5: ret
ENDPROC(memset)
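
And a hedged C sketch of the fill above (memset_model is a hypothetical name, not part of the commit): the byte is replicated across a 64-bit pattern, as the and/orr sequence does, and then stored in the same 8/4/2/1-byte stages.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Hypothetical C model of the assembly memset above. */
static void *memset_model(void *buf, int c, size_t n)
{
	uint8_t *p = buf;
	uint64_t pat = (uint8_t)c;	/* and w1, w1, #0xff */

	/* Replicate the byte into all eight lanes of a 64-bit pattern,
	 * as the orr ... lsl sequence does. */
	pat |= pat << 8;
	pat |= pat << 16;
	pat |= pat << 32;

	while (n >= 8) {		/* label 1: 8-byte stores */
		memcpy(p, &pat, 8);
		p += 8; n -= 8;
	}
	if (n >= 4) { memcpy(p, &pat, 4); p += 4; n -= 4; }	/* label 2 */
	if (n >= 2) { memcpy(p, &pat, 2); p += 2; n -= 2; }	/* label 3 */
	if (n >= 1) *p = (uint8_t)pat;				/* label 4 */
	return buf;
}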