From 1c6512962aeaf07dfcb200231cc73bcc64859c7f Mon Sep 17 00:00:00 2001
From: Aaron Durbin
Date: Wed, 27 Aug 2014 12:50:26 -0500
Subject: arm64: refactor stage entry

Provide a common entry point for arm64 cores coming out of reset. Also,
take into account CONFIG_ARM64_CPUS_START_IN_ELx to set the correct
SCTLR_ELx register. The SCR_EL3 initialization was removed as that can
be done in policy code in C later. Part of this refactor allows for
greater code reuse for the secure monitor.

BUG=chrome-os-partner:31545
BRANCH=None
TEST=built and booted to linux on ryu

Change-Id: I429f8fd0cdae78318ac171722fa1377924665401
Signed-off-by: Patrick Georgi
Original-Commit-Id: f92a5a01f07bc370735d75d695aedd8e2ab25608
Original-Change-Id: If16b3f979923ec8add59854db6bad4aaed35e3aa
Original-Signed-off-by: Aaron Durbin
Original-Reviewed-on: https://chromium-review.googlesource.com/214668
Original-Reviewed-by: Furquan Shaikh
Reviewed-on: http://review.coreboot.org/9012
Tested-by: build bot (Jenkins)
Reviewed-by: Stefan Reinauer
---
 src/arch/arm64/include/arch/asm.h | 14 +++++++---
 src/arch/arm64/stage_entry.S      | 57 ++++++++++++++++++++-------------------
 2 files changed, 39 insertions(+), 32 deletions(-)

diff --git a/src/arch/arm64/include/arch/asm.h b/src/arch/arm64/include/arch/asm.h
index 7760bad850..96c9d13957 100644
--- a/src/arch/arm64/include/arch/asm.h
+++ b/src/arch/arm64/include/arch/asm.h
@@ -20,19 +20,25 @@
 #ifndef __ARM_ARM64_ASM_H
 #define __ARM_ARM64_ASM_H
 
-#define ALIGN .align 0
-
 #define ENDPROC(name) \
 	.type name, %function; \
 	END(name)
 
-#define ENTRY(name) \
+#define ENTRY_WITH_ALIGN(name, bits) \
 	.section .text.name, "ax", %progbits; \
 	.global name; \
-	ALIGN; \
+	.align bits; \
 name:
 
+#define ENTRY(name) ENTRY_WITH_ALIGN(name, 0)
+
 #define END(name) \
 	.size name, .-name
 
+/*
+ * Certain SoCs have an alignment requirement for the CPU reset vector.
+ * Align to a 64 byte typical cacheline for now.
+ */
+#define CPU_RESET_ENTRY(name) ENTRY_WITH_ALIGN(name, 6)
+
 #endif /* __ARM_ARM64_ASM_H */

diff --git a/src/arch/arm64/stage_entry.S b/src/arch/arm64/stage_entry.S
index 4c26f65707..2d4b26dac9 100644
--- a/src/arch/arm64/stage_entry.S
+++ b/src/arch/arm64/stage_entry.S
@@ -20,6 +20,16 @@
 
 #include <arch/asm.h>
 
+#if CONFIG_ARM64_CPUS_START_IN_EL3
+#define SCTLR_ELx sctlr_el3
+#elif CONFIG_ARM64_CPUS_START_IN_EL2
+#define SCTLR_ELx sctlr_el2
+#elif CONFIG_ARM64_CPUS_START_IN_EL1
+#define SCTLR_ELx sctlr_el1
+#else
+#error Need to know what ELx processor starts up in.
+#endif
+
 ENTRY(seed_stack)
 	/*
 	 * Initialize the stack to a known value. This is used to check for
@@ -43,19 +53,12 @@ load_stack:
 	.quad _stack
 ENDPROC(seed_stack)
 
-ENTRY(arm64_el3_startup)
-	/* Set all bits in SCTLR_EL3 to 0 except RES1 and RES0 */
-	mrs x0, SCTLR_EL3
-	ldr x1, .SCTLR_MASK
-	and x0, x0, x1
-	msr SCTLR_EL3, x0
-	/* Set all bits in SCR_EL3 to 0 except RES1 and RES0 */
-	mrs x0, SCR_EL3
-	ldr x1, .SCR_MASK
-	and x0, x0, x1
-	msr SCR_EL3, x0
-
-	/* Initialize SP_EL3 as exception stack */
+/*
+ * Boot strap the processor into a C environment. That consists of providing
+ * 16-byte aligned stack. The programming environment uses SP_EL0 as its main
+ * stack while keeping SP_ELx reserved for exception entry.
+ */
+ENTRY(arm64_c_environment)
 	ldr x0, .exception_stack_top
 	cmp x0, #0
 	b.eq 2f
@@ -80,27 +83,25 @@ ENTRY(arm64_el3_startup)
 	br x1
 
 	.align 4
-	.SCTLR_MASK:
-	.quad 0x0FFFFEFF0
-
-	.SCR_MASK:
-	.quad 0x0FFFFC070
-	.align 4
-	/*
-	 * By default branch to main() and initialize the stack according
-	 * to the Kconfig option for cpu0. However, this code can be relocated
-	 * and reused to start up secondary cpus.
-	 */
 .exception_stack_top:
 	.quad CONFIG_EXCEPTION_STACK_TOP
 .stack_top:
 	.quad _estack
 .entry:
 	.quad seed_stack
-ENDPROC(arm64_el3_startup)
-.global arm64_el3_startup_end
-arm64_el3_startup_end:
+ENDPROC(arm64_c_environment)
+
+CPU_RESET_ENTRY(arm64_cpu_startup)
+	mrs x0, SCTLR_ELx
+	bic x0, x0, #(1 << 25)	/* Little Endian */
+	bic x0, x0, #(1 << 19)	/* XN not enforced */
+	bic x0, x0, #(1 << 12)	/* Disable Instruction Cache */
+	bic x0, x0, #0xf	/* Clear SA, C, A, and M */
+	msr SCTLR_ELx, x0
+	isb
+	b arm64_c_environment
+ENDPROC(arm64_cpu_startup)
 
 ENTRY(stage_entry)
-	b arm64_el3_startup
+	b arm64_cpu_startup
 ENDPROC(stage_entry)
-- 
cgit v1.2.3