/*
 * This file is part of the coreboot project.
 *
 * Copyright 2014 Google Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

/*
 * ======================== stage_entry.S =====================================
 * This file provides the entry points for the different arm64 stages. All
 * stages share the same process of setting up the stacks and jumping to C
 * code. It is important to keep x25 from being corrupted, as it holds the
 * argument passed to an rmodule.
 * =============================================================================
 */
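
/*
 * Rough flow through this file (all symbols referenced here are defined
 * below):
 *
 *   _start               generic stage entry; stashes the rmodule
 *                        argument (x0) in x25
 *   stage_entry          weak power-on entry; defaults to arm64_cpu_startup
 *   arm64_cpu_startup    early CPU setup plus SCTLR configuration
 *   arm64_c_environment  sets up the exception and main stacks, then
 *                        branches to c_entry with x25 restored to x0
 */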

#include <arch/asm.h>
#define __ASSEMBLY__
#include <arch/lib_helpers.h>

#define STACK_SZ CONFIG_STACK_SIZE
#define EXCEPTION_STACK_SZ CONFIG_STACK_SIZE

/*
 * The stack for the armv8 CPU grows down from _estack. Additionally, provide
 * a separate exception stack for the CPU.
 */
.section .bss, "aw", @nobits

.global _stack
.global _estack
.balign STACK_SZ
_stack:
.space STACK_SZ
_estack:

.global _stack_exceptions
.global _estack_exceptions
.balign EXCEPTION_STACK_SZ
_stack_exceptions:
.space EXCEPTION_STACK_SZ
_estack_exceptions:
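
/*
 * Resulting .bss layout, from low to high addresses (both stacks grow toward
 * lower addresses; alignment padding may separate the two regions):
 *
 *   _stack              base of the regular stack region
 *   ...                 STACK_SZ bytes
 *   _estack             initial main stack pointer (SP_EL0)
 *   _stack_exceptions   base of the exception stack region
 *   ...                 EXCEPTION_STACK_SZ bytes
 *   _estack_exceptions  initial exception stack pointer (SP_ELx)
 */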

ENTRY(cpu_get_stack)
	ldr	x0, 1f
	ret
.align 3
1:
	.quad	_estack
ENDPROC(cpu_get_stack)

ENTRY(cpu_get_exception_stack)
	ldr	x0, 1f
	ret
.align 3
1:
	.quad	_estack_exceptions
ENDPROC(cpu_get_exception_stack)
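
/*
 * Note: both accessors above load the stack top from a local literal pool
 * (ldr x0, 1f over an 8-byte-aligned .quad) rather than computing it
 * PC-relatively with adrp/add, so the value read is the absolute link-time
 * address even if the code happens to be executing from a different location.
 */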

/*
 * Bootstrap the processor into a C environment. That consists of providing a
 * 16-byte aligned stack. The programming environment uses SP_EL0 as its main
 * stack while keeping SP_ELx reserved for exception entry.
 */
/*
 * IMPORTANT: Ensure that x25 is not corrupted, because it holds the argument
 * passed to any rmodule.
 */
ENTRY(arm64_c_environment)
	/* Set the exception stack for the CPU. */
	bl	cpu_get_exception_stack
	msr	SPSel, #1
	isb
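	/* With SPSel=1, sp accesses SP_ELx of the current EL, so the write
	   below sets the exception stack pointer. */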
	mov	sp, x0

	/* Have stack pointer use SP_EL0. */
	msr	SPSel, #0
	isb

	/* Set the non-exception stack for the CPU. */
	bl	cpu_get_stack
	mov	sp, x0

	/* Load the address of c_entry from the literal pool below. */
	ldr	x1, 1f
	/* Move the argument back from x25 to x0. */
	mov	x0, x25
	br	x1
.align 3
1:
	.quad	c_entry
ENDPROC(arm64_c_environment)
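
/*
 * c_entry is supplied by the stage's C code and is entered with the rmodule
 * argument in x0; a plausible prototype would be, e.g.,
 * void c_entry(void *arg) (a hypothetical signature, for illustration only).
 */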

ENTRY(_start)
	/* Save the argument to the current rmodule in x25. */
	mov	x25, x0
	b	arm64_c_environment
ENDPROC(_start)

/*
 * Set up SCTLR so that:
 * little-endian mode is selected, WXN is not enforced, the MMU and caches are
 * disabled, and the alignment and stack alignment checks are disabled.
 */
.macro setup_sctlr
	read_current x0, sctlr
	bic	x0, x0, #(1 << 25)	/* Clear EE: little-endian */
	bic	x0, x0, #(1 << 19)	/* Clear WXN: no write-implies-XN */
	bic	x0, x0, #(1 << 12)	/* Clear I: disable instruction cache */
	bic	x0, x0, #0xf		/* Clear SA, C, A and M */
	write_current sctlr, x0, x1
.endm
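
/*
 * After setup_sctlr, the relevant SCTLR bits read as:
 *   EE(25)=0   little-endian data accesses
 *   WXN(19)=0  writable regions remain executable
 *   I(12)=0    instruction cache disabled
 *   SA(3)=0    SP alignment check disabled
 *   C(2)=0     data cache disabled
 *   A(1)=0     alignment check disabled
 *   M(0)=0     MMU disabled
 */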

CPU_RESET_ENTRY(arm64_cpu_startup)
	bl	arm64_cpu_early_setup
	setup_sctlr
	b	arm64_c_environment
ENDPROC(arm64_cpu_startup)

/*
 * stage_entry is defined as a weak symbol so that SoCs/CPUs can provide a
 * custom entry point to perform any fixups that need to be done immediately
 * after power-on reset. If an SoC/CPU does not need a custom entry point,
 * this weak symbol jumps directly to arm64_cpu_startup.
 */
ENTRY_WEAK(stage_entry)
	b	arm64_cpu_startup
ENDPROC(stage_entry)
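
/*
 * A minimal sketch of an SoC-specific override, which would live in the SoC's
 * own source file (my_soc_fixup is a hypothetical helper; a strong
 * stage_entry definition takes precedence over the weak one above):
 *
 *	ENTRY(stage_entry)
 *		bl	my_soc_fixup
 *		b	arm64_cpu_startup
 *	ENDPROC(stage_entry)
 */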