summaryrefslogtreecommitdiff
path: root/src/arch/arm64/include
diff options
context:
space:
mode:
authorAaron Durbin <adurbin@chromium.org>2014-08-27 12:50:26 -0500
committerPatrick Georgi <pgeorgi@google.com>2015-03-27 08:04:05 +0100
commit1c6512962aeaf07dfcb200231cc73bcc64859c7f (patch)
tree0a113e6c8cd658c4ae9785f0693bdf7cc68a2a63 /src/arch/arm64/include
parentce513c9732e23b4ad522ed937876f19912a89a91 (diff)
arm64: refactor stage entry
Provide a common entry point arm64 cores coming out of reset. Also, take into account CONFIG_ARM64_CPUS_START_IN_ELx to set the correct SCTLR_ELx register. The SCR_EL3 initialization was removed as that can be done in policy code in C later. Part of this refactor allows for greater code reuse for the secure monitor. BUG=chrome-os-partner:31545 BRANCH=None TEST=built and booted to linux on ryu Change-Id: I429f8fd0cdae78318ac171722fa1377924665401 Signed-off-by: Patrick Georgi <pgeorgi@chromium.org> Original-Commit-Id: f92a5a01f07bc370735d75d695aedd8e2ab25608 Original-Change-Id: If16b3f979923ec8add59854db6bad4aaed35e3aa Original-Signed-off-by: Aaron Durbin <adurbin@chromium.org> Original-Reviewed-on: https://chromium-review.googlesource.com/214668 Original-Reviewed-by: Furquan Shaikh <furquan@chromium.org> Reviewed-on: http://review.coreboot.org/9012 Tested-by: build bot (Jenkins) Reviewed-by: Stefan Reinauer <stefan.reinauer@coreboot.org>
Diffstat (limited to 'src/arch/arm64/include')
-rw-r--r--src/arch/arm64/include/arch/asm.h14
1 file changed, 10 insertions, 4 deletions
diff --git a/src/arch/arm64/include/arch/asm.h b/src/arch/arm64/include/arch/asm.h
index 7760bad850..96c9d13957 100644
--- a/src/arch/arm64/include/arch/asm.h
+++ b/src/arch/arm64/include/arch/asm.h
@@ -20,19 +20,25 @@
#ifndef __ARM_ARM64_ASM_H
#define __ARM_ARM64_ASM_H
-#define ALIGN .align 0
-
#define ENDPROC(name) \
.type name, %function; \
END(name)
-#define ENTRY(name) \
+#define ENTRY_WITH_ALIGN(name, bits) \
.section .text.name, "ax", %progbits; \
.global name; \
- ALIGN; \
+ .align bits; \
name:
+#define ENTRY(name) ENTRY_WITH_ALIGN(name, 0)
+
#define END(name) \
.size name, .-name
+/*
+ * Certain SoCs have an alignment requirement for the CPU reset vector.
+ * Align to a 64 byte typical cacheline for now.
+ */
+#define CPU_RESET_ENTRY(name) ENTRY_WITH_ALIGN(name, 6)
+
#endif /* __ARM_ARM64_ASM_H */