diff options
Diffstat (limited to 'src/arch')
 src/arch/arm64/armv8/lib/Makefile.inc           |  2 +-
 src/arch/arm64/armv8/lib/misc.c                 | 58 -----------------
 src/arch/arm64/include/armv8/arch/arch_io.h     |  1 +
 src/arch/arm64/include/armv8/arch/barrier.h     | 83 ++++++++++++++-----
 src/arch/arm64/include/armv8/arch/cache.h       |  1 +
 src/arch/arm64/include/armv8/arch/lib_helpers.h |  5 ---
6 files changed, 74 insertions(+), 76 deletions(-)
diff --git a/src/arch/arm64/armv8/lib/Makefile.inc b/src/arch/arm64/armv8/lib/Makefile.inc index 3e393bbe23..fe08662841 100644 --- a/src/arch/arm64/armv8/lib/Makefile.inc +++ b/src/arch/arm64/armv8/lib/Makefile.inc @@ -20,7 +20,7 @@ ## ################################################################################ -lib_access = pstate.c sysctrl.c cache.c tlb.c misc.c clock.c +lib_access = pstate.c sysctrl.c cache.c tlb.c clock.c ifeq ($(CONFIG_ARCH_BOOTBLOCK_ARMV8_64),y) bootblock-y += $(lib_access) diff --git a/src/arch/arm64/armv8/lib/misc.c b/src/arch/arm64/armv8/lib/misc.c deleted file mode 100644 index da02a331b0..0000000000 --- a/src/arch/arm64/armv8/lib/misc.c +++ /dev/null @@ -1,58 +0,0 @@ -/* - * This file is part of the coreboot project. - * - * Copyright 2014 Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS - * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY - * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF - * SUCH DAMAGE. - * - * Reference: ARM Architecture Reference Manual, ARMv8-A edition - * misc.c: Memory barrier functions - */ - -#include <stdint.h> - -#include <arch/lib_helpers.h> - - -/* - * Sync primitives - */ - -/* data memory barrier */ -void dmb(void) -{ - asm volatile ("dmb sy" : : : "memory"); -} - -/* data sync barrier */ -void dsb(void) -{ - asm volatile ("dsb sy" : : : "memory"); -} - -/* instruction sync barrier */ -void isb(void) -{ - asm volatile ("isb sy" : : : "memory"); -} diff --git a/src/arch/arm64/include/armv8/arch/arch_io.h b/src/arch/arm64/include/armv8/arch/arch_io.h index 2876c8b78c..7d97c31dff 100644 --- a/src/arch/arm64/include/armv8/arch/arch_io.h +++ b/src/arch/arm64/include/armv8/arch/arch_io.h @@ -24,6 +24,7 @@ #define __ASM_ARM64_ARCH_IO_H #include <stdint.h> +#include <arch/barrier.h> #include <arch/lib_helpers.h> static inline uint8_t read8(const void *addr) diff --git a/src/arch/arm64/include/armv8/arch/barrier.h b/src/arch/arm64/include/armv8/arch/barrier.h index dfcf5a5268..313900ab22 100644 --- a/src/arch/arm64/include/armv8/arch/barrier.h +++ b/src/arch/arm64/include/armv8/arch/barrier.h @@ -20,33 +20,92 @@ #ifndef __ASSEMBLY__ +#define sevl() asm volatile("sevl" : : : "memory") #define sev() asm volatile("sev" : : : "memory") #define wfe() asm volatile("wfe" : : : "memory") #define wfi() asm volatile("wfi" : : : "memory") #define isb() asm volatile("isb" : : : "memory") #define dsb() asm volatile("dsb sy" : : : 
"memory") +#define dmb() asm volatile("dmb sy" : : : "memory") #define mb() dsb() #define rmb() asm volatile("dsb ld" : : : "memory") #define wmb() asm volatile("dsb st" : : : "memory") -#ifndef CONFIG_SMP -#define smp_mb() barrier() -#define smp_rmb() barrier() -#define smp_wmb() barrier() -#else -#define smp_mb() asm volatile("dmb ish" : : : "memory") -#define smp_rmb() asm volatile("dmb ishld" : : : "memory") -#define smp_wmb() asm volatile("dmb ishst" : : : "memory") +#if IS_ENABLED(CONFIG_SMP) +#define barrier() __asm__ __volatile__("": : :"memory") #endif -#define read_barrier_depends() do { } while(0) -#define smp_read_barrier_depends() do { } while(0) - -#define set_mb(var, value) do { var = value; smp_mb(); } while (0) #define nop() asm volatile("nop"); +#define force_read(x) (*(volatile typeof(x) *)&(x)) + +#define load_acquire(p) \ +({ \ + typeof(*p) ___p1; \ + switch (sizeof(*p)) { \ + case 4: \ + asm volatile ("ldar %w0, %1" \ + : "=r" (___p1) : "Q" (*p) : "memory"); \ + break; \ + case 8: \ + asm volatile ("ldar %0, %1" \ + : "=r" (___p1) : "Q" (*p) : "memory"); \ + break; \ + } \ + ___p1; \ +}) + +#define store_release(p, v) \ +do { \ + switch (sizeof(*p)) { \ + case 4: \ + asm volatile ("stlr %w1, %0" \ + : "=Q" (*p) : "r" (v) : "memory"); \ + break; \ + case 8: \ + asm volatile ("stlr %1, %0" \ + : "=Q" (*p) : "r" (v) : "memory"); \ + break; \ + } \ +} while (0) + +#define load_acquire_exclusive(p) \ +({ \ + typeof(*p) ___p1; \ + switch (sizeof(*p)) { \ + case 4: \ + asm volatile ("ldaxr %w0, %1" \ + : "=r" (___p1) : "Q" (*p) : "memory"); \ + break; \ + case 8: \ + asm volatile ("ldaxr %0, %1" \ + : "=r" (___p1) : "Q" (*p) : "memory"); \ + break; \ + } \ + ___p1; \ +}) + +/* Returns 1 on success. 
*/ +#define store_release_exclusive(p, v) \ +({ \ + int ret; \ + switch (sizeof(*p)) { \ + case 4: \ + asm volatile ("stlxr %w0, %w2, %1" \ + : "=&r" (ret), "=Q" (*p) : "r" (v) \ + : "memory"); \ + break; \ + case 8: \ + asm volatile ("stlxr %w0, %2, %1" \ + : "=&r" (ret), "=Q" (*p) : "r" (v) \ + : "memory"); \ + break; \ + } \ + !ret; \ +}) + #endif /* __ASSEMBLY__ */ #endif /* __ASM_ARM_BARRIER_H */ diff --git a/src/arch/arm64/include/armv8/arch/cache.h b/src/arch/arm64/include/armv8/arch/cache.h index 2d4201198a..6ee6101256 100644 --- a/src/arch/arm64/include/armv8/arch/cache.h +++ b/src/arch/arm64/include/armv8/arch/cache.h @@ -35,6 +35,7 @@ #include <config.h> #include <stddef.h> #include <stdint.h> +#include <arch/barrier.h> /* SCTLR_ELx common bits */ #define SCTLR_M (1 << 0) /* MMU enable */ diff --git a/src/arch/arm64/include/armv8/arch/lib_helpers.h b/src/arch/arm64/include/armv8/arch/lib_helpers.h index 723fc15831..57b63d8684 100644 --- a/src/arch/arm64/include/armv8/arch/lib_helpers.h +++ b/src/arch/arm64/include/armv8/arch/lib_helpers.h @@ -290,8 +290,3 @@ void tlbiallis_el2(void); void tlbiallis_el3(void); void tlbiallis_current(void); void tlbivaa_el1(uint64_t va); - -/* Memory barrier */ -void dmb(void); -void dsb(void); -void isb(void); |