From 47d58e5df0a5d1a96e64408c3c9384024d5ddaa3 Mon Sep 17 00:00:00 2001
From: Kyösti Mälkki
Date: Sun, 22 Nov 2020 00:55:31 +0200
Subject: cpu/x86: Link entry32.inc
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Change-Id: Ib475f40f950f8cc54f0e3c50a80970ba3d2b628f
Signed-off-by: Kyösti Mälkki
Reviewed-on: https://review.coreboot.org/c/coreboot/+/47969
Tested-by: build bot (Jenkins)
Reviewed-by: Raul Rangel
Reviewed-by: Arthur Heymans
---
 src/cpu/x86/32bit/entry32.inc | 33 ------------------
 src/cpu/x86/Makefile.inc      |  1 +
 src/cpu/x86/entry32.S         | 79 +++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 80 insertions(+), 33 deletions(-)
 delete mode 100644 src/cpu/x86/32bit/entry32.inc
 create mode 100644 src/cpu/x86/entry32.S

diff --git a/src/cpu/x86/32bit/entry32.inc b/src/cpu/x86/32bit/entry32.inc
deleted file mode 100644
index b28fa2f37e..0000000000
--- a/src/cpu/x86/32bit/entry32.inc
+++ /dev/null
@@ -1,33 +0,0 @@
-/* SPDX-License-Identifier: GPL-2.0-only */
-
-/* For starting coreboot in protected mode */
-
-#include <arch/rom_segs.h>
-#include <cpu/x86/post_code.h>
-
-	.code32
-/*
- *	When we come here we are in protected mode.
- *	NOTE aligned to 4 so that we are sure that the prefetch
- *	cache will be reloaded.
- */
-	.align	4
-
-.globl __protected_start
-__protected_start:
-	/* Save the BIST value */
-	movl	%eax, %ebp
-
-#if !CONFIG(NO_EARLY_BOOTBLOCK_POSTCODES)
-	post_code(POST_ENTER_PROTECTED_MODE)
-#endif
-
-	movw	$ROM_DATA_SEG, %ax
-	movw	%ax, %ds
-	movw	%ax, %es
-	movw	%ax, %ss
-	movw	%ax, %fs
-	movw	%ax, %gs
-
-	/* Restore the BIST value to %eax */
-	movl	%ebp, %eax
diff --git a/src/cpu/x86/Makefile.inc b/src/cpu/x86/Makefile.inc
index 9112ddbe1b..f1d41bd1b4 100644
--- a/src/cpu/x86/Makefile.inc
+++ b/src/cpu/x86/Makefile.inc
@@ -8,6 +8,7 @@ ramstage-y += backup_default_smm.c
 
 subdirs-$(CONFIG_CPU_INTEL_COMMON_SMM) += ../intel/smm
 
+bootblock-y += entry32.S
 bootblock-y += entry16.S
 bootblock-y += reset16.S
 
diff --git a/src/cpu/x86/entry32.S b/src/cpu/x86/entry32.S
new file mode 100644
index 0000000000..32f61ad261
--- /dev/null
+++ b/src/cpu/x86/entry32.S
@@ -0,0 +1,79 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+
+/* For starting coreboot in protected mode */
+
+/*
+ * This is the modern bootblock. It prepares the system for C environment runtime
+ * setup. The actual setup is done by hardware-specific code.
+ *
+ * It provides a bootflow similar to other architectures, and thus is considered
+ * to be the modern approach.
+ *
+ */
+
+#include <arch/rom_segs.h>
+#include <cpu/x86/cr.h>
+#include <cpu/x86/post_code.h>
+
+.section .init, "ax", @progbits
+
+	.code32
+/*
+ *	When we come here we are in protected mode.
+ *	NOTE aligned to 4 so that we are sure that the prefetch
+ *	cache will be reloaded.
+ */
+	.align	4
+
+.globl __protected_start
+__protected_start:
+	/* Save the BIST value */
+	movl	%eax, %ebp
+
+#if !CONFIG(NO_EARLY_BOOTBLOCK_POSTCODES)
+	post_code(POST_ENTER_PROTECTED_MODE)
+#endif
+
+	movw	$ROM_DATA_SEG, %ax
+	movw	%ax, %ds
+	movw	%ax, %es
+	movw	%ax, %ss
+	movw	%ax, %fs
+	movw	%ax, %gs
+
+	/* Restore the BIST value to %eax */
+	movl	%ebp, %eax
+
+#if CONFIG(BOOTBLOCK_DEBUG_SPINLOOP)
+
+	/* Wait for a JTAG debugger to break in and set EBX non-zero */
+	xor	%ebx, %ebx
+
+debug_spinloop:
+	cmp	$0, %ebx
+	jz	debug_spinloop
+#endif
+
+bootblock_protected_mode_entry:
+
+#if !CONFIG(USE_MARCH_586)
+	/* MMX registers required here */
+
+	/* BIST result in eax */
+	movd	%eax, %mm0
+
+	/* Get an early timestamp */
+	rdtsc
+	movd	%eax, %mm1
+	movd	%edx, %mm2
+#endif
+
+#if CONFIG(SSE)
+enable_sse:
+	mov	%cr4, %eax
+	or	$CR4_OSFXSR, %ax
+	mov	%eax, %cr4
+#endif /* CONFIG(SSE) */
+
+	/* We're done. Now it's up to platform-specific code */
+	jmp	bootblock_pre_c_entry
--
cgit v1.2.3
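
For context on the segment reloads in __protected_start: by the time this code runs, entry16.S has already loaded a GDT and switched the CPU into protected mode, so ROM_DATA_SEG only has to name a flat data descriptor in that table. The fragment below is a generic sketch of such a table, not coreboot's actual gdt_init.S contents; the label, selector layout and access bytes are illustrative assumptions.

	/* Sketch of a flat protected-mode GDT (illustrative, not coreboot's real table) */
gdt_sketch:
	.word	0x0000, 0x0000, 0x0000, 0x0000	/* 0x00: mandatory null descriptor */
	.word	0xffff, 0x0000, 0x9b00, 0x00cf	/* 0x08: code, base 0, 4 GiB limit */
	.word	0xffff, 0x0000, 0x9300, 0x00cf	/* 0x10: data, base 0, 4 GiB limit */

With a layout like this, a data selector such as 0x10 can be loaded into %ds, %es, %ss, %fs and %gs exactly as the patch does with $ROM_DATA_SEG, giving every segment a zero base and a 4 GiB limit.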
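
For context on the MMX stashing near the end of the new file: before cache-as-RAM is set up there is no usable memory or stack, so %mm0-%mm2 are the only scratch storage that survives until the platform code reached through bootblock_pre_c_entry can hand the values to C (this is also why the path is guarded by !CONFIG(USE_MARCH_586), since a plain 586 build lacks MMX). The fragment below is a minimal sketch of how such platform code might recover the values once a stack exists; it assumes the cdecl bootblock_c_entry_bist(uint64_t base_timestamp, uint32_t bist) entry point used elsewhere in coreboot, and the register choices are illustrative rather than taken from this change.

	/* Hypothetical tail of a platform's bootblock_pre_c_entry, run after
	 * cache-as-RAM and a usable %esp have been set up. */
	movd	%mm0, %ebx		/* BIST result saved at __protected_start */
	movd	%mm1, %eax		/* early TSC, low 32 bits */
	movd	%mm2, %edx		/* early TSC, high 32 bits */

	/* cdecl: push arguments right to left for
	 * bootblock_c_entry_bist(uint64_t base_timestamp, uint32_t bist) */
	pushl	%ebx			/* bist */
	pushl	%edx			/* base_timestamp, high half */
	pushl	%eax			/* base_timestamp, low half */
	call	bootblock_c_entry_bist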