Diffstat (limited to 'src/arch/arm64/include/armv8')
-rw-r--r--  src/arch/arm64/include/armv8/arch/arch_io.h      1
-rw-r--r--  src/arch/arm64/include/armv8/arch/barrier.h     83
-rw-r--r--  src/arch/arm64/include/armv8/arch/cache.h        1
-rw-r--r--  src/arch/arm64/include/armv8/arch/lib_helpers.h  5
4 files changed, 73 insertions(+), 17 deletions(-)
diff --git a/src/arch/arm64/include/armv8/arch/arch_io.h b/src/arch/arm64/include/armv8/arch/arch_io.h
index 2876c8b78c..7d97c31dff 100644
--- a/src/arch/arm64/include/armv8/arch/arch_io.h
+++ b/src/arch/arm64/include/armv8/arch/arch_io.h
@@ -24,6 +24,7 @@
#define __ASM_ARM64_ARCH_IO_H
#include <stdint.h>
+#include <arch/barrier.h>
#include <arch/lib_helpers.h>
static inline uint8_t read8(const void *addr)
diff --git a/src/arch/arm64/include/armv8/arch/barrier.h b/src/arch/arm64/include/armv8/arch/barrier.h
index dfcf5a5268..313900ab22 100644
--- a/src/arch/arm64/include/armv8/arch/barrier.h
+++ b/src/arch/arm64/include/armv8/arch/barrier.h
@@ -20,33 +20,92 @@
#ifndef __ASSEMBLY__
+#define sevl() asm volatile("sevl" : : : "memory")
#define sev() asm volatile("sev" : : : "memory")
#define wfe() asm volatile("wfe" : : : "memory")
#define wfi() asm volatile("wfi" : : : "memory")
#define isb() asm volatile("isb" : : : "memory")
#define dsb() asm volatile("dsb sy" : : : "memory")
+#define dmb() asm volatile("dmb sy" : : : "memory")
#define mb() dsb()
#define rmb() asm volatile("dsb ld" : : : "memory")
#define wmb() asm volatile("dsb st" : : : "memory")
-#ifndef CONFIG_SMP
-#define smp_mb() barrier()
-#define smp_rmb() barrier()
-#define smp_wmb() barrier()
-#else
-#define smp_mb() asm volatile("dmb ish" : : : "memory")
-#define smp_rmb() asm volatile("dmb ishld" : : : "memory")
-#define smp_wmb() asm volatile("dmb ishst" : : : "memory")
+#if IS_ENABLED(CONFIG_SMP)
+#define barrier() __asm__ __volatile__("": : :"memory")
#endif
-#define read_barrier_depends() do { } while(0)
-#define smp_read_barrier_depends() do { } while(0)
-
-#define set_mb(var, value) do { var = value; smp_mb(); } while (0)
#define nop() asm volatile("nop");
+#define force_read(x) (*(volatile typeof(x) *)&(x))
+
+#define load_acquire(p) \
+({ \
+ typeof(*p) ___p1; \
+ switch (sizeof(*p)) { \
+ case 4: \
+ asm volatile ("ldar %w0, %1" \
+ : "=r" (___p1) : "Q" (*p) : "memory"); \
+ break; \
+ case 8: \
+ asm volatile ("ldar %0, %1" \
+ : "=r" (___p1) : "Q" (*p) : "memory"); \
+ break; \
+ } \
+ ___p1; \
+})
+
+#define store_release(p, v) \
+do { \
+ switch (sizeof(*p)) { \
+ case 4: \
+ asm volatile ("stlr %w1, %0" \
+ : "=Q" (*p) : "r" (v) : "memory"); \
+ break; \
+ case 8: \
+ asm volatile ("stlr %1, %0" \
+ : "=Q" (*p) : "r" (v) : "memory"); \
+ break; \
+ } \
+} while (0)
+
+#define load_acquire_exclusive(p) \
+({ \
+ typeof(*p) ___p1; \
+ switch (sizeof(*p)) { \
+ case 4: \
+ asm volatile ("ldaxr %w0, %1" \
+ : "=r" (___p1) : "Q" (*p) : "memory"); \
+ break; \
+ case 8: \
+ asm volatile ("ldaxr %0, %1" \
+ : "=r" (___p1) : "Q" (*p) : "memory"); \
+ break; \
+ } \
+ ___p1; \
+})
+
+/* Returns 1 on success. */
+#define store_release_exclusive(p, v) \
+({ \
+ int ret; \
+ switch (sizeof(*p)) { \
+ case 4: \
+ asm volatile ("stlxr %w0, %w2, %1" \
+ : "=&r" (ret), "=Q" (*p) : "r" (v) \
+ : "memory"); \
+ break; \
+ case 8: \
+ asm volatile ("stlxr %w0, %2, %1" \
+ : "=&r" (ret), "=Q" (*p) : "r" (v) \
+ : "memory"); \
+ break; \
+ } \
+ !ret; \
+})
+
#endif /* __ASSEMBLY__ */
#endif /* __ASM_ARM_BARRIER_H */
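
Usage sketch (not part of this patch): one way the new exclusive acquire/release
macros could be combined is a minimal ticket-free spinlock, following the usual
AArch64 sevl/wfe idiom. The struct and function names below are illustrative
only; only the macros themselves come from barrier.h.

/*
 * Hypothetical spinlock built on load_acquire_exclusive() /
 * store_release_exclusive(). Assumes CONFIG_SMP and the barrier.h above.
 */
#include <stdint.h>
#include <arch/barrier.h>

struct spinlock {
	uint32_t locked;	/* 0 = free, 1 = held */
};

static inline void spin_lock(struct spinlock *lk)
{
	sevl();			/* prime the event register so the first wfe falls through */
	do {
		wfe();		/* sleep until another CPU's store clears our monitor */
		/* ldaxr: read the lock word with acquire semantics and mark it exclusive */
	} while (load_acquire_exclusive(&lk->locked) != 0 ||
		 /* stlxr: store_release_exclusive() returns 1 only if the store succeeded */
		 !store_release_exclusive(&lk->locked, 1));
}

static inline void spin_unlock(struct spinlock *lk)
{
	/* stlr: release the lock; clearing the waiter's monitor also wakes its wfe */
	store_release(&lk->locked, 0);
}
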
diff --git a/src/arch/arm64/include/armv8/arch/cache.h b/src/arch/arm64/include/armv8/arch/cache.h
index 2d4201198a..6ee6101256 100644
--- a/src/arch/arm64/include/armv8/arch/cache.h
+++ b/src/arch/arm64/include/armv8/arch/cache.h
@@ -35,6 +35,7 @@
#include <config.h>
#include <stddef.h>
#include <stdint.h>
+#include <arch/barrier.h>
/* SCTLR_ELx common bits */
#define SCTLR_M (1 << 0) /* MMU enable */
diff --git a/src/arch/arm64/include/armv8/arch/lib_helpers.h b/src/arch/arm64/include/armv8/arch/lib_helpers.h
index 723fc15831..57b63d8684 100644
--- a/src/arch/arm64/include/armv8/arch/lib_helpers.h
+++ b/src/arch/arm64/include/armv8/arch/lib_helpers.h
@@ -290,8 +290,3 @@ void tlbiallis_el2(void);
void tlbiallis_el3(void);
void tlbiallis_current(void);
void tlbivaa_el1(uint64_t va);
-
-/* Memory barrier */
-void dmb(void);
-void dsb(void);
-void isb(void);
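
A second sketch (again not from this patch): the plain load_acquire() /
store_release() pair is enough for one-way message passing between CPUs,
which is the kind of pattern the barrier prototypes removed from
lib_helpers.h used to back with full dsb()/dmb() calls. The variable and
function names here are hypothetical.

/*
 * Hypothetical producer/consumer handoff using the non-exclusive
 * acquire/release macros from barrier.h.
 */
#include <stdint.h>
#include <arch/barrier.h>

static uint64_t shared_data;
static uint32_t data_ready;	/* 0 = not published, 1 = published */

/* Producer: write the payload, then release the flag. */
static void publish(uint64_t value)
{
	shared_data = value;
	/* stlr orders the payload store before the flag becomes visible */
	store_release(&data_ready, 1);
}

/* Consumer: acquire the flag; the payload read is ordered after it. */
static uint64_t consume(void)
{
	while (load_acquire(&data_ready) == 0)
		nop();	/* busy-wait; the asm memory clobber forces a re-read */
	return shared_data;
}
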