author    Aaron Durbin <adurbin@chromium.org>    2014-09-06 01:10:02 -0500
committer Patrick Georgi <pgeorgi@google.com>    2015-03-27 08:04:45 +0100
commit    9ebddf29b37459d65ebb8fb07830d79ce4f61bef (patch)
tree      2d606917fcc202dfcc4e2c011c1af8f34a23be1d /src/arch/arm64/include/armv8
parent    8ff6c215a1c759ed4506a0bdbe4aa6a2fe9bd74f (diff)
arm64: add more barrier support
The load-acquire/store-release operations (including the exclusive
variants) form a basis for atomic operations. Also remove the dmb, dsb,
and isb functions from lib_helpers, as barrier.h already provides them.
Lastly, utilize barrier.h in the headers that need it.

BUG=chrome-os-partner:31761
BRANCH=None
TEST=Built and ran SMP bringup using barriers.

Change-Id: I6304a478d769dc2626443005b4eec4325d8a06f4
Signed-off-by: Patrick Georgi <pgeorgi@chromium.org>
Original-Commit-Id: 8fac8d46b09d449d59f1b4f492d363392dcc4118
Original-Change-Id: I77ff160c635297a2c7cab71cb0d3f49f2536f6ff
Original-Signed-off-by: Aaron Durbin <adurbin@chromium.org>
Original-Reviewed-on: https://chromium-review.googlesource.com/216921
Original-Reviewed-by: Furquan Shaikh <furquan@chromium.org>
Reviewed-on: http://review.coreboot.org/9038
Tested-by: build bot (Jenkins)
Reviewed-by: Stefan Reinauer <stefan.reinauer@coreboot.org>
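For illustration only (not part of this patch): a minimal message-passing
sketch built on the load_acquire()/store_release() macros added to
barrier.h below. The msg/ready variables and the publish()/consume()
helpers are hypothetical names, not interfaces this commit adds.

#include <stdint.h>
#include <arch/barrier.h>

static uint32_t msg;
static uint32_t ready;

/* Producer CPU: make msg visible before the flag is observed set. */
static void publish(uint32_t value)
{
	msg = value;
	/* stlr orders all prior writes before this store. */
	store_release(&ready, 1);
}

/* Consumer CPU: the acquire load orders the flag check before the
   read of msg, so an observed flag guarantees msg is current. */
static uint32_t consume(void)
{
	while (!load_acquire(&ready))
		;	/* spin until the producer releases the flag */
	return msg;
}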
Diffstat (limited to 'src/arch/arm64/include/armv8')
-rw-r--r--  src/arch/arm64/include/armv8/arch/arch_io.h      |  1
-rw-r--r--  src/arch/arm64/include/armv8/arch/barrier.h      | 83
-rw-r--r--  src/arch/arm64/include/armv8/arch/cache.h        |  1
-rw-r--r--  src/arch/arm64/include/armv8/arch/lib_helpers.h  |  5
4 files changed, 73 insertions, 17 deletions
diff --git a/src/arch/arm64/include/armv8/arch/arch_io.h b/src/arch/arm64/include/armv8/arch/arch_io.h
index 2876c8b78c..7d97c31dff 100644
--- a/src/arch/arm64/include/armv8/arch/arch_io.h
+++ b/src/arch/arm64/include/armv8/arch/arch_io.h
@@ -24,6 +24,7 @@
#define __ASM_ARM64_ARCH_IO_H
#include <stdint.h>
+#include <arch/barrier.h>
#include <arch/lib_helpers.h>
static inline uint8_t read8(const void *addr)
diff --git a/src/arch/arm64/include/armv8/arch/barrier.h b/src/arch/arm64/include/armv8/arch/barrier.h
index dfcf5a5268..313900ab22 100644
--- a/src/arch/arm64/include/armv8/arch/barrier.h
+++ b/src/arch/arm64/include/armv8/arch/barrier.h
@@ -20,33 +20,92 @@
#ifndef __ASSEMBLY__
+#define sevl() asm volatile("sevl" : : : "memory")
#define sev() asm volatile("sev" : : : "memory")
#define wfe() asm volatile("wfe" : : : "memory")
#define wfi() asm volatile("wfi" : : : "memory")
#define isb() asm volatile("isb" : : : "memory")
#define dsb() asm volatile("dsb sy" : : : "memory")
+#define dmb() asm volatile("dmb sy" : : : "memory")
#define mb() dsb()
#define rmb() asm volatile("dsb ld" : : : "memory")
#define wmb() asm volatile("dsb st" : : : "memory")
-#ifndef CONFIG_SMP
-#define smp_mb() barrier()
-#define smp_rmb() barrier()
-#define smp_wmb() barrier()
-#else
-#define smp_mb() asm volatile("dmb ish" : : : "memory")
-#define smp_rmb() asm volatile("dmb ishld" : : : "memory")
-#define smp_wmb() asm volatile("dmb ishst" : : : "memory")
+#if IS_ENABLED(CONFIG_SMP)
+#define barrier() __asm__ __volatile__("": : :"memory")
#endif
-#define read_barrier_depends() do { } while(0)
-#define smp_read_barrier_depends() do { } while(0)
-
-#define set_mb(var, value) do { var = value; smp_mb(); } while (0)
#define nop() asm volatile("nop");
+#define force_read(x) (*(volatile typeof(x) *)&(x))
+
+#define load_acquire(p) \
+({ \
+ typeof(*p) ___p1; \
+ switch (sizeof(*p)) { \
+ case 4: \
+ asm volatile ("ldar %w0, %1" \
+ : "=r" (___p1) : "Q" (*p) : "memory"); \
+ break; \
+ case 8: \
+ asm volatile ("ldar %0, %1" \
+ : "=r" (___p1) : "Q" (*p) : "memory"); \
+ break; \
+ } \
+ ___p1; \
+})
+
+#define store_release(p, v) \
+do { \
+ switch (sizeof(*p)) { \
+ case 4: \
+ asm volatile ("stlr %w1, %0" \
+ : "=Q" (*p) : "r" (v) : "memory"); \
+ break; \
+ case 8: \
+ asm volatile ("stlr %1, %0" \
+ : "=Q" (*p) : "r" (v) : "memory"); \
+ break; \
+ } \
+} while (0)
+
+#define load_acquire_exclusive(p) \
+({ \
+ typeof(*p) ___p1; \
+ switch (sizeof(*p)) { \
+ case 4: \
+ asm volatile ("ldaxr %w0, %1" \
+ : "=r" (___p1) : "Q" (*p) : "memory"); \
+ break; \
+ case 8: \
+ asm volatile ("ldaxr %0, %1" \
+ : "=r" (___p1) : "Q" (*p) : "memory"); \
+ break; \
+ } \
+ ___p1; \
+})
+
+/* Returns 1 on success. */
+#define store_release_exclusive(p, v) \
+({ \
+ int ret; \
+ switch (sizeof(*p)) { \
+ case 4: \
+ asm volatile ("stlxr %w0, %w2, %1" \
+ : "=&r" (ret), "=Q" (*p) : "r" (v) \
+ : "memory"); \
+ break; \
+ case 8: \
+ asm volatile ("stlxr %w0, %2, %1" \
+ : "=&r" (ret), "=Q" (*p) : "r" (v) \
+ : "memory"); \
+ break; \
+ } \
+ !ret; \
+})
+
#endif /* __ASSEMBLY__ */
#endif /* __ASM_ARM_BARRIER_H */
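
For illustration only (not part of this patch): the exclusive pair above
composes into a lock-free read-modify-write. A minimal sketch, assuming a
uint32_t counter; atomic_inc() is a hypothetical helper, not an interface
this commit adds.

#include <stdint.h>
#include <arch/barrier.h>

static inline uint32_t atomic_inc(uint32_t *counter)
{
	uint32_t val;

	/* ldaxr/stlxr retry loop: store_release_exclusive() yields 1
	   only if no other observer wrote *counter in between. */
	do {
		val = load_acquire_exclusive(counter);
	} while (!store_release_exclusive(counter, val + 1));

	return val + 1;
}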
diff --git a/src/arch/arm64/include/armv8/arch/cache.h b/src/arch/arm64/include/armv8/arch/cache.h
index 2d4201198a..6ee6101256 100644
--- a/src/arch/arm64/include/armv8/arch/cache.h
+++ b/src/arch/arm64/include/armv8/arch/cache.h
@@ -35,6 +35,7 @@
#include <config.h>
#include <stddef.h>
#include <stdint.h>
+#include <arch/barrier.h>
/* SCTLR_ELx common bits */
#define SCTLR_M (1 << 0) /* MMU enable */
diff --git a/src/arch/arm64/include/armv8/arch/lib_helpers.h b/src/arch/arm64/include/armv8/arch/lib_helpers.h
index 723fc15831..57b63d8684 100644
--- a/src/arch/arm64/include/armv8/arch/lib_helpers.h
+++ b/src/arch/arm64/include/armv8/arch/lib_helpers.h
@@ -290,8 +290,3 @@ void tlbiallis_el2(void);
void tlbiallis_el3(void);
void tlbiallis_current(void);
void tlbivaa_el1(uint64_t va);
-
-/* Memory barrier */
-void dmb(void);
-void dsb(void);
-void isb(void);