path: root/src/arch/arm64
author    Aaron Durbin <adurbin@chromium.org>    2014-08-27 12:50:26 -0500
committer Patrick Georgi <pgeorgi@google.com>    2015-03-27 08:04:05 +0100
commit    1c6512962aeaf07dfcb200231cc73bcc64859c7f (patch)
tree      0a113e6c8cd658c4ae9785f0693bdf7cc68a2a63 /src/arch/arm64
parent    ce513c9732e23b4ad522ed937876f19912a89a91 (diff)
arm64: refactor stage entry
Provide a common entry point for arm64 cores coming out of reset. Also,
take into account CONFIG_ARM64_CPUS_START_IN_ELx to set the correct
SCTLR_ELx register. The SCR_EL3 initialization was removed, as that can
be done later in C policy code. Part of this refactor allows for greater
code reuse in the secure monitor.

BUG=chrome-os-partner:31545
BRANCH=None
TEST=Built and booted to Linux on ryu.

Change-Id: I429f8fd0cdae78318ac171722fa1377924665401
Signed-off-by: Patrick Georgi <pgeorgi@chromium.org>
Original-Commit-Id: f92a5a01f07bc370735d75d695aedd8e2ab25608
Original-Change-Id: If16b3f979923ec8add59854db6bad4aaed35e3aa
Original-Signed-off-by: Aaron Durbin <adurbin@chromium.org>
Original-Reviewed-on: https://chromium-review.googlesource.com/214668
Original-Reviewed-by: Furquan Shaikh <furquan@chromium.org>
Reviewed-on: http://review.coreboot.org/9012
Tested-by: build bot (Jenkins)
Reviewed-by: Stefan Reinauer <stefan.reinauer@coreboot.org>
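The commit message moves SCR_EL3 setup out of the reset path into C
policy code. A minimal sketch of what such policy code could look like
follows, reusing the 0x0FFFFC070 value from the removed .SCR_MASK
literal; the helper name scr_el3_policy_init() is an assumption for
illustration, not coreboot's actual API.

#include <stdint.h>

/* Hypothetical C policy hook; coreboot's real secure-monitor code may
 * differ. Clears all SCR_EL3 bits except RES1/RES0, mirroring the
 * removed assembly sequence. Must run at EL3. */
static void scr_el3_policy_init(void)
{
	uint64_t scr;

	asm volatile("mrs %0, scr_el3" : "=r"(scr));
	scr &= 0x0FFFFC070ULL;	/* mask from the removed .SCR_MASK */
	asm volatile("msr scr_el3, %0" : : "r"(scr));
	asm volatile("isb");	/* make the register write take effect */
}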
Diffstat (limited to 'src/arch/arm64')
-rw-r--r--  src/arch/arm64/include/arch/asm.h  14
-rw-r--r--  src/arch/arm64/stage_entry.S       57
2 files changed, 39 insertions(+), 32 deletions(-)
diff --git a/src/arch/arm64/include/arch/asm.h b/src/arch/arm64/include/arch/asm.h
index 7760bad850..96c9d13957 100644
--- a/src/arch/arm64/include/arch/asm.h
+++ b/src/arch/arm64/include/arch/asm.h
@@ -20,19 +20,25 @@
#ifndef __ARM_ARM64_ASM_H
#define __ARM_ARM64_ASM_H
-#define ALIGN .align 0
-
#define ENDPROC(name) \
.type name, %function; \
END(name)
-#define ENTRY(name) \
+#define ENTRY_WITH_ALIGN(name, bits) \
.section .text.name, "ax", %progbits; \
.global name; \
- ALIGN; \
+ .align bits; \
name:
+#define ENTRY(name) ENTRY_WITH_ALIGN(name, 0)
+
#define END(name) \
.size name, .-name
+/*
+ * Certain SoCs have an alignment requirement for the CPU reset vector.
+ * Align to a typical 64-byte cache line for now.
+ */
+#define CPU_RESET_ENTRY(name) ENTRY_WITH_ALIGN(name, 6)
+
#endif /* __ARM_ARM64_ASM_H */
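As a usage sketch, here is how the reworked macros could be consumed in
an .S file; the function names are purely illustrative.

/* Hypothetical consumer of the new <arch/asm.h> macros. */
#include <arch/asm.h>

/* ENTRY(name) is now ENTRY_WITH_ALIGN(name, 0): the symbol gets its own
 * .text.name section with no extra alignment. */
ENTRY(my_func)
	ret
ENDPROC(my_func)

/* CPU_RESET_ENTRY(name) expands to ENTRY_WITH_ALIGN(name, 6); with GAS
 * on arm64, .align 6 means 2^6 = 64-byte alignment, matching the cache
 * line comment above. */
CPU_RESET_ENTRY(my_reset_vector)
	b my_func
ENDPROC(my_reset_vector)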
diff --git a/src/arch/arm64/stage_entry.S b/src/arch/arm64/stage_entry.S
index 4c26f65707..2d4b26dac9 100644
--- a/src/arch/arm64/stage_entry.S
+++ b/src/arch/arm64/stage_entry.S
@@ -20,6 +20,16 @@
#include <arch/asm.h>
+#if CONFIG_ARM64_CPUS_START_IN_EL3
+#define SCTLR_ELx sctlr_el3
+#elif CONFIG_ARM64_CPUS_START_IN_EL2
+#define SCTLR_ELx sctlr_el2
+#elif CONFIG_ARM64_CPUS_START_IN_EL1
+#define SCTLR_ELx sctlr_el1
+#else
+#error Need to know what ELx processor starts up in.
+#endif
+
ENTRY(seed_stack)
/*
* Initialize the stack to a known value. This is used to check for
@@ -43,19 +53,12 @@ load_stack:
.quad _stack
ENDPROC(seed_stack)
-ENTRY(arm64_el3_startup)
- /* Set all bits in SCTLR_EL3 to 0 except RES1 and RES0 */
- mrs x0, SCTLR_EL3
- ldr x1, .SCTLR_MASK
- and x0, x0, x1
- msr SCTLR_EL3, x0
- /* Set all bits in SCR_EL3 to 0 except RES1 and RES0 */
- mrs x0, SCR_EL3
- ldr x1, .SCR_MASK
- and x0, x0, x1
- msr SCR_EL3, x0
-
- /* Initialize SP_EL3 as exception stack */
+/*
+ * Bootstrap the processor into a C environment. That consists of providing
+ * a 16-byte-aligned stack. The programming environment uses SP_EL0 as its main
+ * stack while keeping SP_ELx reserved for exception entry.
+ */
+ENTRY(arm64_c_environment)
ldr x0, .exception_stack_top
cmp x0, #0
b.eq 2f
@@ -80,27 +83,25 @@ ENTRY(arm64_el3_startup)
br x1
.align 4
- .SCTLR_MASK:
- .quad 0x0FFFFEFF0
-
- .SCR_MASK:
- .quad 0x0FFFFC070
- .align 4
- /*
- * By default branch to main() and initialize the stack according
- * to the Kconfig option for cpu0. However, this code can be relocated
- * and reused to start up secondary cpus.
- */
.exception_stack_top:
.quad CONFIG_EXCEPTION_STACK_TOP
.stack_top:
.quad _estack
.entry:
.quad seed_stack
-ENDPROC(arm64_el3_startup)
-.global arm64_el3_startup_end
-arm64_el3_startup_end:
+ENDPROC(arm64_c_environment)
+
+CPU_RESET_ENTRY(arm64_cpu_startup)
+ mrs x0, SCTLR_ELx
+ bic x0, x0, #(1 << 25) /* Little Endian */
+ bic x0, x0, #(1 << 19) /* XN not enforced */
+ bic x0, x0, #(1 << 12) /* Disable Instruction Cache */
+ bic x0, x0, #0xf /* Clear SA, C, A, and M */
+ msr SCTLR_ELx, x0
+ isb
+ b arm64_c_environment
+ENDPROC(arm64_cpu_startup)
ENTRY(stage_entry)
- b arm64_el3_startup
+ b arm64_cpu_startup
ENDPROC(stage_entry)
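The bic sequence in arm64_cpu_startup clears specific SCTLR_ELx fields.
A C-style rendering of the same mask, with bit positions per the ARMv8
ARM (the macro names here are my own, not coreboot's):

/* SCTLR_ELx bit positions (ARMv8 ARM); names are illustrative. */
#define SCTLR_EE	(1UL << 25)	/* exception endianness: 0 = little */
#define SCTLR_WXN	(1UL << 19)	/* write permission implies XN */
#define SCTLR_I		(1UL << 12)	/* instruction cache enable */
#define SCTLR_SA	(1UL << 3)	/* SP alignment check */
#define SCTLR_C		(1UL << 2)	/* data cache enable */
#define SCTLR_A		(1UL << 1)	/* alignment check enable */
#define SCTLR_M		(1UL << 0)	/* MMU enable */

/* The startup code is equivalent to:
 *   sctlr &= ~(SCTLR_EE | SCTLR_WXN | SCTLR_I |
 *              SCTLR_SA | SCTLR_C | SCTLR_A | SCTLR_M);
 * leaving the core little-endian with the MMU and caches off.
 */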