Diffstat (limited to 'src/arch/x86')
-rw-r--r--  src/arch/x86/coreboot_ram.ld  | 14
-rw-r--r--  src/arch/x86/lib/c_start.S    | 36
2 files changed, 24 insertions(+), 26 deletions(-)
diff --git a/src/arch/x86/coreboot_ram.ld b/src/arch/x86/coreboot_ram.ld
index 7ce0367cf1..d9f09090d9 100644
--- a/src/arch/x86/coreboot_ram.ld
+++ b/src/arch/x86/coreboot_ram.ld
@@ -92,20 +92,6 @@ SECTIONS
*(COMMON)
}
_ebss = .;
- _end = .;
-
- /* coreboot really "ends" here. Only heap and stack are placed after
- * this line.
- */
-
- . = ALIGN(CONFIG_STACK_SIZE);
-
- _stack = .;
- .stack . : {
- /* Reserve a stack for each possible cpu */
- . += CONFIG_MAX_CPUS*CONFIG_STACK_SIZE;
- }
- _estack = .;
_heap = .;
.heap . : {
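
The block removed above reserved one CONFIG_STACK_SIZE slot per possible CPU in a single contiguous region between _stack and _estack. As a minimal C sketch of how such a block is typically indexed (the cpu_stack_top() helper is hypothetical and not part of this patch; _stack, CONFIG_STACK_SIZE and CONFIG_MAX_CPUS are the symbols and Kconfig options used in the diff):

extern unsigned char _stack[];	/* bottom of the per-CPU stack block */

/* Stacks grow down, so the usable top of CPU n's slot is the start of slot n+1. */
static inline void *cpu_stack_top(unsigned int cpu)
{
	return _stack + (cpu + 1) * CONFIG_STACK_SIZE;
}
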
diff --git a/src/arch/x86/lib/c_start.S b/src/arch/x86/lib/c_start.S
index 9a8b4ac3c2..295283b24a 100644
--- a/src/arch/x86/lib/c_start.S
+++ b/src/arch/x86/lib/c_start.S
@@ -1,5 +1,16 @@
#include <cpu/x86/post_code.h>
+/* Place the stack in the bss section. It's not necessary to define it in
+ * the linker script. */
+ .section .bss, "aw", @nobits
+.global _stack
+.global _estack
+
+.align CONFIG_STACK_SIZE
+_stack:
+.space CONFIG_MAX_CPUS*CONFIG_STACK_SIZE
+_estack:
+
.section ".textfirst"
.code32
.globl _start
@@ -16,19 +27,7 @@ _start:
post_code(POST_ENTRY_C_START) /* post 13 */
- /** poison the stack. Code should not count on the
- * stack being full of zeros. This stack poisoning
- * recently uncovered a bug in the broadcast SIPI
- * code.
- */
cld
- leal _stack, %edi
- movl $_estack, %ecx
- subl %edi, %ecx
- shrl $2, %ecx /* it is 32 bit aligned, right? */
- movl $0xDEADBEEF, %eax
- rep
- stosl
/** clear bss, which unlike the stack is zero by definition */
leal _bss, %edi
@@ -41,6 +40,19 @@ _start:
stosl
.Lnobss:
+ /** poison the stack. Code should not count on the
+ * stack being full of zeros. This stack poisoning
+ * recently uncovered a bug in the broadcast SIPI
+ * code.
+ */
+ leal _stack, %edi
+ movl $_estack, %ecx
+ subl %edi, %ecx
+ shrl $2, %ecx /* it is 32 bit aligned, right? */
+ movl $0xDEADBEEF, %eax
+ rep
+ stosl
+
/* set new stack */
movl $_estack, %esp
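
Note that the poison fill now runs after the .bss clear: the stack itself lives in .bss after this patch, so poisoning it earlier would just get zeroed again by the clear loop. A minimal sketch, assuming the _stack symbol and CONFIG_STACK_SIZE option from the diff, of how the 0xDEADBEEF pattern can later be used to estimate how deep a CPU's stack actually grew (stack_unused_bytes() is a hypothetical helper, not an API from this patch):

extern unsigned char _stack[];	/* bottom of the per-CPU stack block */

static unsigned long stack_unused_bytes(unsigned int cpu)
{
	unsigned long *slot = (unsigned long *)(_stack + cpu * CONFIG_STACK_SIZE);
	unsigned long words = CONFIG_STACK_SIZE / sizeof(unsigned long);
	unsigned long i;

	/* Scan up from the bottom of the slot; the first word that no longer
	 * holds the poison value marks the deepest point the stack reached. */
	for (i = 0; i < words; i++)
		if (slot[i] != 0xDEADBEEF)
			break;

	return i * sizeof(unsigned long);
}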