-rw-r--r--  src/arch/x86/Makefile.inc                                            2
-rw-r--r--  src/cpu/x86/32bit/entry32.inc                                       33
-rw-r--r--  src/cpu/x86/Makefile.inc                                             1
-rw-r--r--  src/cpu/x86/entry32.S (renamed from src/arch/x86/bootblock_crt0.S)  32
4 files changed, 30 insertions, 38 deletions
diff --git a/src/arch/x86/Makefile.inc b/src/arch/x86/Makefile.inc
index cfefec605f..7dea2ce906 100644
--- a/src/arch/x86/Makefile.inc
+++ b/src/arch/x86/Makefile.inc
@@ -85,8 +85,6 @@ bootblock-$(CONFIG_HAVE_CF9_RESET) += cf9_reset.c
$(call src-to-obj,bootblock,$(dir)/id.S): $(obj)/build.h
-bootblock-y += bootblock_crt0.S
-
ifeq ($(CONFIG_ARCH_BOOTBLOCK_X86_32),y)
$(eval $(call early_x86_stage,bootblock,elf32-i386))
else
diff --git a/src/cpu/x86/32bit/entry32.inc b/src/cpu/x86/32bit/entry32.inc
deleted file mode 100644
index b28fa2f37e..0000000000
--- a/src/cpu/x86/32bit/entry32.inc
+++ /dev/null
@@ -1,33 +0,0 @@
-/* SPDX-License-Identifier: GPL-2.0-only */
-
-/* For starting coreboot in protected mode */
-
-#include <arch/rom_segs.h>
-#include <cpu/x86/post_code.h>
-
- .code32
-/*
- * When we come here we are in protected mode.
- * NOTE aligned to 4 so that we are sure that the prefetch
- * cache will be reloaded.
- */
- .align 4
-
-.globl __protected_start
-__protected_start:
- /* Save the BIST value */
- movl %eax, %ebp
-
-#if !CONFIG(NO_EARLY_BOOTBLOCK_POSTCODES)
- post_code(POST_ENTER_PROTECTED_MODE)
-#endif
-
- movw $ROM_DATA_SEG, %ax
- movw %ax, %ds
- movw %ax, %es
- movw %ax, %ss
- movw %ax, %fs
- movw %ax, %gs
-
- /* Restore the BIST value to %eax */
- movl %ebp, %eax
diff --git a/src/cpu/x86/Makefile.inc b/src/cpu/x86/Makefile.inc
index 9112ddbe1b..f1d41bd1b4 100644
--- a/src/cpu/x86/Makefile.inc
+++ b/src/cpu/x86/Makefile.inc
@@ -8,6 +8,7 @@ ramstage-y += backup_default_smm.c
subdirs-$(CONFIG_CPU_INTEL_COMMON_SMM) += ../intel/smm
+bootblock-y += entry32.S
bootblock-y += entry16.S
bootblock-y += reset16.S
diff --git a/src/arch/x86/bootblock_crt0.S b/src/cpu/x86/entry32.S
index 16daed1b54..32f61ad261 100644
--- a/src/arch/x86/bootblock_crt0.S
+++ b/src/cpu/x86/entry32.S
@@ -1,4 +1,7 @@
/* SPDX-License-Identifier: GPL-2.0-or-later */
+
+/* For starting coreboot in protected mode */
+
/*
* This is the modern bootblock. It prepares the system for C environment runtime
* setup. The actual setup is done by hardware-specific code.
@@ -8,15 +11,38 @@
*
*/
+#include <arch/rom_segs.h>
#include <cpu/x86/cr.h>
+#include <cpu/x86/post_code.h>
.section .init, "ax", @progbits
+ .code32
/*
- * Include the old code for reset vector and protected mode entry. That code has
- * withstood the test of time.
+ * When we come here we are in protected mode.
+ * NOTE aligned to 4 so that we are sure that the prefetch
+ * cache will be reloaded.
*/
-#include <cpu/x86/32bit/entry32.inc>
+ .align 4
+
+.globl __protected_start
+__protected_start:
+ /* Save the BIST value */
+ movl %eax, %ebp
+
+#if !CONFIG(NO_EARLY_BOOTBLOCK_POSTCODES)
+ post_code(POST_ENTER_PROTECTED_MODE)
+#endif
+
+ movw $ROM_DATA_SEG, %ax
+ movw %ax, %ds
+ movw %ax, %es
+ movw %ax, %ss
+ movw %ax, %fs
+ movw %ax, %gs
+
+ /* Restore the BIST value to %eax */
+ movl %ebp, %eax
#if CONFIG(BOOTBLOCK_DEBUG_SPINLOOP)