arm64: add more barrier support
The load-acquire/store-release operations (including the exclusive
variants) form a basis for atomic operations. Also remove the dmb, dsb,
and isb functions from lib_helpers, since barrier.h already provides
them. Lastly, utilize barrier.h where needed.

BUG=chrome-os-partner:31761
BRANCH=None
TEST=Built and ran SMP bringup using barriers.

Change-Id: I6304a478d769dc2626443005b4eec4325d8a06f4
Signed-off-by: Patrick Georgi <pgeorgi@chromium.org>
Original-Commit-Id: 8fac8d46b09d449d59f1b4f492d363392dcc4118
Original-Change-Id: I77ff160c635297a2c7cab71cb0d3f49f2536f6ff
Original-Signed-off-by: Aaron Durbin <adurbin@chromium.org>
Original-Reviewed-on: https://chromium-review.googlesource.com/216921
Original-Reviewed-by: Furquan Shaikh <furquan@chromium.org>
Reviewed-on: http://review.coreboot.org/9038
Tested-by: build bot (Jenkins)
Reviewed-by: Stefan Reinauer <stefan.reinauer@coreboot.org>
commit 9ebddf29b3
parent 8ff6c215a1
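As a sketch of what the new exclusive accessors enable (not part of this
commit; atomic_add32 is a hypothetical name), an atomic read-modify-write
simply retries until the store-release-exclusive reports success:

    #include <stdint.h>
    #include <arch/barrier.h>

    static inline uint32_t atomic_add32(uint32_t *p, uint32_t val)
    {
            uint32_t old;
            do {
                    old = load_acquire_exclusive(p);  /* ldaxr */
            } while (!store_release_exclusive(p, old + val)); /* stlxr; 1 on success */
            return old + val;
    }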
@@ -20,7 +20,7 @@
 ##
 ################################################################################
 
-lib_access = pstate.c sysctrl.c cache.c tlb.c misc.c clock.c
+lib_access = pstate.c sysctrl.c cache.c tlb.c clock.c
 
 ifeq ($(CONFIG_ARCH_BOOTBLOCK_ARMV8_64),y)
 bootblock-y += $(lib_access)
@@ -1,58 +0,0 @@
-/*
- * This file is part of the coreboot project.
- *
- * Copyright 2014 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- *    notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- *    notice, this list of conditions and the following disclaimer in the
- *    documentation and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- *    derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
- * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
- * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
- * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
- * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
- * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
- * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
- * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
- * SUCH DAMAGE.
- *
- * Reference: ARM Architecture Reference Manual, ARMv8-A edition
- * misc.c: Memory barrier functions
- */
-
-#include <stdint.h>
-
-#include <arch/lib_helpers.h>
-
-
-/*
- * Sync primitives
- */
-
-/* data memory barrier */
-void dmb(void)
-{
-	asm volatile ("dmb sy" : : : "memory");
-}
-
-/* data sync barrier */
-void dsb(void)
-{
-	asm volatile ("dsb sy" : : : "memory");
-}
-
-/* instruction sync barrier */
-void isb(void)
-{
-	asm volatile ("isb sy" : : : "memory");
-}
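Callers keep the same spelling: the deleted functions are superseded by
the single-instruction inline macros in barrier.h (see the barrier.h hunk
below). A minimal illustrative sketch (example_sync_point is a
hypothetical name, not from this commit):

    #include <arch/barrier.h>

    static void example_sync_point(void)
    {
            dmb();  /* now expands inline to "dmb sy" instead of a call into misc.c */
            dsb();  /* "dsb sy" */
            isb();  /* "isb" */
    }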
@@ -24,6 +24,7 @@
 #define __ASM_ARM64_ARCH_IO_H
 
 #include <stdint.h>
+#include <arch/barrier.h>
 #include <arch/lib_helpers.h>
 
 static inline uint8_t read8(const void *addr)
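Pulling <arch/barrier.h> into io.h makes the barrier macros available
alongside the MMIO accessors. A hypothetical sketch of the kind of
ordering this enables (the status/data registers, ready bit, and
wait_then_read name are invented for illustration):

    #include <stdint.h>
    #include <arch/io.h>

    static uint8_t wait_then_read(const void *status, const void *data)
    {
            while (!(read8(status) & 0x01))
                    ;       /* poll a hypothetical ready bit */
            dmb();          /* order the status read before the data read */
            return read8(data);
    }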
@@ -20,33 +20,92 @@
 
 #ifndef __ASSEMBLY__
 
 #define sevl()	asm volatile("sevl" : : : "memory")
 #define sev()	asm volatile("sev" : : : "memory")
 #define wfe()	asm volatile("wfe" : : : "memory")
 #define wfi()	asm volatile("wfi" : : : "memory")
 
 #define isb()	asm volatile("isb" : : : "memory")
 #define dsb()	asm volatile("dsb sy" : : : "memory")
 #define dmb()	asm volatile("dmb sy" : : : "memory")
 
 #define mb()	dsb()
 #define rmb()	asm volatile("dsb ld" : : : "memory")
 #define wmb()	asm volatile("dsb st" : : : "memory")
 
-#ifndef CONFIG_SMP
-#define smp_mb()	barrier()
-#define smp_rmb()	barrier()
-#define smp_wmb()	barrier()
-#else
-#define smp_mb()	asm volatile("dmb ish" : : : "memory")
-#define smp_rmb()	asm volatile("dmb ishld" : : : "memory")
-#define smp_wmb()	asm volatile("dmb ishst" : : : "memory")
+#if IS_ENABLED(CONFIG_SMP)
+#define barrier() __asm__ __volatile__("": : :"memory")
 #endif
 
-#define read_barrier_depends()		do { } while(0)
-#define smp_read_barrier_depends()	do { } while(0)
-
-#define set_mb(var, value)	do { var = value; smp_mb(); } while (0)
 #define nop()	asm volatile("nop");
 
+#define force_read(x) (*(volatile typeof(x) *)&(x))
+
+#define load_acquire(p)						\
+({								\
+	typeof(*p) ___p1;					\
+	switch (sizeof(*p)) {					\
+	case 4:							\
+		asm volatile ("ldar %w0, %1"			\
+			: "=r" (___p1) : "Q" (*p) : "memory");	\
+		break;						\
+	case 8:							\
+		asm volatile ("ldar %0, %1"			\
+			: "=r" (___p1) : "Q" (*p) : "memory");	\
+		break;						\
+	}							\
+	___p1;							\
+})
+
+#define store_release(p, v)					\
+do {								\
+	switch (sizeof(*p)) {					\
+	case 4:							\
+		asm volatile ("stlr %w1, %0"			\
+			: "=Q" (*p) : "r" (v) : "memory");	\
+		break;						\
+	case 8:							\
+		asm volatile ("stlr %1, %0"			\
+			: "=Q" (*p) : "r" (v) : "memory");	\
+		break;						\
+	}							\
+} while (0)
+
+#define load_acquire_exclusive(p)				\
+({								\
+	typeof(*p) ___p1;					\
+	switch (sizeof(*p)) {					\
+	case 4:							\
+		asm volatile ("ldaxr %w0, %1"			\
+			: "=r" (___p1) : "Q" (*p) : "memory");	\
+		break;						\
+	case 8:							\
+		asm volatile ("ldaxr %0, %1"			\
+			: "=r" (___p1) : "Q" (*p) : "memory");	\
+		break;						\
+	}							\
+	___p1;							\
+})
+
+/* Returns 1 on success. */
+#define store_release_exclusive(p, v)				\
+({								\
+	int ret;						\
+	switch (sizeof(*p)) {					\
+	case 4:							\
+		asm volatile ("stlxr %w0, %w2, %1"		\
+			: "=&r" (ret), "=Q" (*p) : "r" (v)	\
+			: "memory");				\
+		break;						\
+	case 8:							\
+		asm volatile ("stlxr %w0, %2, %1"		\
+			: "=&r" (ret), "=Q" (*p) : "r" (v)	\
+			: "memory");				\
+		break;						\
+	}							\
+	!ret;							\
+})
+
 #endif /* __ASSEMBLY__ */
 
 #endif /* __ASM_ARM_BARRIER_H */
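One way to read the new macros (a hedged sketch, not from the commit;
struct mailbox, publish, and consume are hypothetical names): the
producer's store_release orders the payload write before the flag write,
and the consumer's load_acquire orders the flag read before the payload
read, so a full dsb is unnecessary for this hand-off:

    #include <stdint.h>
    #include <arch/barrier.h>

    struct mailbox {
            uint64_t payload;
            uint32_t ready;
    };

    static void publish(struct mailbox *mb, uint64_t v)
    {
            mb->payload = v;                /* plain store */
            store_release(&mb->ready, 1);   /* stlr: payload visible before flag */
    }

    static uint64_t consume(struct mailbox *mb)
    {
            while (load_acquire(&mb->ready) == 0)   /* ldar: acquire semantics */
                    ;       /* busy-wait; a real loop might pair wfe() with sev() */
            return mb->payload;
    }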
@@ -35,6 +35,7 @@
 #include <config.h>
 #include <stddef.h>
 #include <stdint.h>
+#include <arch/barrier.h>
 
 /* SCTLR_ELx common bits */
 #define SCTLR_M (1 << 0) /* MMU enable */
@@ -290,8 +290,3 @@ void tlbiallis_el2(void);
 void tlbiallis_el3(void);
 void tlbiallis_current(void);
 void tlbivaa_el1(uint64_t va);
-
-/* Memory barrier */
-void dmb(void);
-void dsb(void);
-void isb(void);