Diffstat (limited to 'src')
-rw-r--r--  src/arch/arm64/armv8/cpu.S                 15
-rw-r--r--  src/arch/arm64/armv8/mmu.c                 14
-rw-r--r--  src/arch/arm64/include/armv8/arch/cache.h  11
3 files changed, 22 insertions, 18 deletions
diff --git a/src/arch/arm64/armv8/cpu.S b/src/arch/arm64/armv8/cpu.S
index 1bb8c838ad..935f1fe7bd 100644
--- a/src/arch/arm64/armv8/cpu.S
+++ b/src/arch/arm64/armv8/cpu.S
@@ -15,6 +15,7 @@
*/
#include <arch/asm.h>
+#include <arch/cache.h>
.macro dcache_apply_all crm
dsb sy
@@ -83,6 +84,20 @@ ENTRY(dcache_clean_invalidate_all)
dcache_apply_all crm=cisw
ENDPROC(dcache_clean_invalidate_all)
+/* This must be implemented in assembly to ensure there are no accesses to
+ memory (e.g. the stack) in between disabling and flushing the cache. */
+ENTRY(mmu_disable)
+ str x30, [sp, #-0x8]
+ mrs x0, sctlr_el3
+ mov x1, #~(SCTLR_C | SCTLR_M)
+ and x0, x0, x1
+ msr sctlr_el3, x0
+ isb
+ bl dcache_clean_invalidate_all
+ ldr x30, [sp, #-0x8]
+ ret
+ENDPROC(mmu_disable)
+
/*
* Bring an ARMv8 processor we just gained control of (e.g. from IROM) into a
* known state regarding caches/SCTLR/PSTATE. Completely invalidates
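Note: the new routine clears SCTLR_EL3.C and SCTLR_EL3.M first, issues an isb, and only then runs the clean+invalidate, saving x30 just below the stack pointer so the nested call needs no stack frame. Callers are unaffected since the C prototype stays the same. A minimal, hypothetical call-site sketch follows (the helper name and the <arch/mmu.h> location of the mmu_disable() prototype are assumptions, not part of this change):

#include <arch/cache.h>   /* icache_invalidate_all() */
#include <arch/mmu.h>     /* assumed home of the mmu_disable() prototype */

/* Hypothetical teardown helper: caches and MMU go off before control
 * leaves coreboot, so no dirty lines can linger behind the next stage. */
static void example_handoff(void (*entry)(void))
{
	mmu_disable();              /* now implemented in cpu.S; flushes after disabling */
	icache_invalidate_all();    /* from <arch/cache.h> */
	entry();
}
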
diff --git a/src/arch/arm64/armv8/mmu.c b/src/arch/arm64/armv8/mmu.c
index 48f77ace83..606a9b30ad 100644
--- a/src/arch/arm64/armv8/mmu.c
+++ b/src/arch/arm64/armv8/mmu.c
@@ -321,17 +321,3 @@ void mmu_enable(void)
raw_write_sctlr_el3(sctlr);
isb();
}
-
-/*
- * CAUTION: This implementation assumes that coreboot never uses non-identity
- * page tables for pages containing executed code. If you ever want to violate
- * this assumption, have fun figuring out the associated problems on your own.
- */
-void mmu_disable(void)
-{
- dcache_clean_invalidate_all();
- uint32_t sctlr = raw_read_sctlr_el3();
- sctlr &= ~(SCTLR_C | SCTLR_M);
- raw_write_sctlr_el3(sctlr);
- isb();
-}
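Note: the removed C version ran the clean+invalidate while the data cache was still enabled and only then cleared SCTLR_EL3, so any compiler-generated stack traffic in between could re-dirty lines that had just been flushed; the assembly replacement inverts the order and keeps memory untouched across that window. The bit manipulation itself is unchanged and can be sanity-checked on a host; in the illustrative sketch below the register value is made up and SCTLR_C/SCTLR_M mirror the cache.h definitions:

#include <assert.h>
#include <stdint.h>

#define SCTLR_M (1 << 0)   /* MMU enable, as defined in cache.h */
#define SCTLR_C (1 << 2)   /* data/unified cache enable */

/* Mirror of the SCTLR bit clearing performed by mmu_disable(), for illustration only. */
static uint32_t sctlr_caches_off(uint32_t sctlr)
{
	return sctlr & ~(uint32_t)(SCTLR_C | SCTLR_M);
}

int main(void)
{
	uint32_t example = 0x00C5183Du;  /* arbitrary example value, not a real register dump */
	uint32_t off = sctlr_caches_off(example);

	assert((off & (SCTLR_C | SCTLR_M)) == 0);                           /* C and M cleared */
	assert((off | SCTLR_C | SCTLR_M) == (example | SCTLR_C | SCTLR_M)); /* other bits untouched */
	return 0;
}
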
diff --git a/src/arch/arm64/include/armv8/arch/cache.h b/src/arch/arm64/include/armv8/arch/cache.h
index 3647290866..b31c3b0153 100644
--- a/src/arch/arm64/include/armv8/arch/cache.h
+++ b/src/arch/arm64/include/armv8/arch/cache.h
@@ -32,10 +32,6 @@
#ifndef ARM_ARM64_CACHE_H
#define ARM_ARM64_CACHE_H
-#include <stddef.h>
-#include <stdint.h>
-#include <arch/barrier.h>
-
/* SCTLR_ELx common bits */
#define SCTLR_M (1 << 0) /* MMU enable */
#define SCTLR_A (1 << 1) /* Alignment check enable */
@@ -57,6 +53,11 @@
#define SCTLR_EL1_E0E (1 << 24) /* Exception endianness at EL0 */
#define SCTLR_EL1_UCI (1 << 26) /* EL0 access to cache instructions */
+#ifndef __ASSEMBLER__
+
+#include <stddef.h>
+#include <stdint.h>
+#include <arch/barrier.h>
/* dcache clean by virtual address to PoC */
void dcache_clean_by_mva(void const *addr, size_t len);
@@ -92,4 +93,6 @@ static inline void icache_invalidate_all(void)
: : : "memory");
}
+#endif /* __ASSEMBLER__ */
+
#endif /* ARM_ARM64_CACHE_H */
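
Note: the cache.h change is purely structural. The SCTLR bit macros remain visible to every includer, while the C prototypes, inline-asm helpers, and C-only includes move behind an __ASSEMBLER__ guard so that cpu.S can now include the header. A stand-alone sketch of the same pattern, with hypothetical file and macro names (only the guard idiom is taken from this change):

/* example_bits.h -- hypothetical header shared between C and .S files */
#ifndef EXAMPLE_BITS_H
#define EXAMPLE_BITS_H

#define EXAMPLE_ENABLE (1 << 0)   /* bare macros are fine for the assembler */

#ifndef __ASSEMBLER__   /* predefined by GCC/Clang when preprocessing assembly */
#include <stdint.h>

/* C-only helpers stay invisible to .S files that include this header. */
static inline uint32_t example_clear_enable(uint32_t reg)
{
	return reg & ~(uint32_t)EXAMPLE_ENABLE;
}
#endif /* __ASSEMBLER__ */

#endif /* EXAMPLE_BITS_H */

An .S file can then #include the header and use EXAMPLE_ENABLE in mov/and immediates, exactly as cpu.S now uses SCTLR_C and SCTLR_M via <arch/cache.h>.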