Diffstat (limited to 'src/cpu/x86')
-rw-r--r--  src/cpu/x86/64bit/entry64.inc    | 12
-rw-r--r--  src/cpu/x86/64bit/mode_switch.S  |  5
-rw-r--r--  src/cpu/x86/64bit/mode_switch2.S |  4
-rw-r--r--  src/cpu/x86/sipi_vector.S        |  4
-rw-r--r--  src/cpu/x86/smm/smm_stub.S       |  3
5 files changed, 17 insertions(+), 11 deletions(-)
diff --git a/src/cpu/x86/64bit/entry64.inc b/src/cpu/x86/64bit/entry64.inc
index 7da68b47f9..878f310843 100644
--- a/src/cpu/x86/64bit/entry64.inc
+++ b/src/cpu/x86/64bit/entry64.inc
@@ -22,10 +22,9 @@
 #include <arch/rom_segs.h>
 #endif
 
-
-setup_longmode:
+.macro setup_longmode page_table
 	/* Get page table address */
-	movl	$(CONFIG_ARCH_X86_64_PGTBL_LOC), %eax
+	movl	\page_table, %eax
 
 	/* load identity mapped page tables */
 	movl	%eax, %cr3
@@ -48,12 +47,13 @@ setup_longmode:
 
 	/* use long jump to switch to 64-bit code segment */
 #if defined(__RAMSTAGE__)
-	ljmp	$RAM_CODE_SEG64, $__longmode_start
+	ljmp	$RAM_CODE_SEG64, $jmp_addr\@
 #else
-	ljmp	$ROM_CODE_SEG64, $__longmode_start
+	ljmp	$ROM_CODE_SEG64, $jmp_addr\@
 #endif
 
 .code64
-__longmode_start:
+jmp_addr\@:
+.endm
 
 #endif
diff --git a/src/cpu/x86/64bit/mode_switch.S b/src/cpu/x86/64bit/mode_switch.S
index c4198f3dad..01fe003cb1 100644
--- a/src/cpu/x86/64bit/mode_switch.S
+++ b/src/cpu/x86/64bit/mode_switch.S
@@ -1,4 +1,5 @@
 /* SPDX-License-Identifier: GPL-2.0-only */
+#include <cpu/x86/64bit/entry64.inc>
 
 /* Calls a x86_32 function from x86_64 context */
 .text
@@ -42,8 +43,8 @@ protected_mode_call_wrapper:
 	call	*%ebx
 	movl	%eax, %ebx
 
-	/* Jump to long mode. Preserves ebx */
-	#include <cpu/x86/64bit/entry64.inc>
+	/* Preserves ebx */
+	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
 
 	/* Place return value in rax */
 	movl	%ebx, %eax
diff --git a/src/cpu/x86/64bit/mode_switch2.S b/src/cpu/x86/64bit/mode_switch2.S
index 65e9d94a98..1807d2e404 100644
--- a/src/cpu/x86/64bit/mode_switch2.S
+++ b/src/cpu/x86/64bit/mode_switch2.S
@@ -4,6 +4,8 @@
  * Must not be directly invoked from C code!
  */
 
+#include <cpu/x86/64bit/entry64.inc>
+
 .text
 .code32
 .section ".text.long_mode_call_3arg", "ax", @progbits
@@ -19,7 +21,7 @@ long_mode_call_3arg:
 	mov	%esp, %ebp
 
 	/* Enter long mode, preserves ebx */
-	#include <cpu/x86/64bit/entry64.inc>
+	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
 
 	/* Align stack */
 	movabs	$0xfffffffffffffff0, %rax
diff --git a/src/cpu/x86/sipi_vector.S b/src/cpu/x86/sipi_vector.S
index 39973dbc8b..923e3987d0 100644
--- a/src/cpu/x86/sipi_vector.S
+++ b/src/cpu/x86/sipi_vector.S
@@ -6,6 +6,7 @@
 #include <arch/ram_segs.h>
 
 #define __RAMSTAGE__
+#include <cpu/x86/64bit/entry64.inc>
 
 /* The SIPI vector is responsible for initializing the APs in the system. It
  * loads microcode, sets up MSRs, and enables caching before calling into
@@ -223,7 +224,8 @@ load_msr:
 
 #if ENV_X86_64
 	/* entry64.inc preserves ebx, esi, edi, ebp */
-#include <cpu/x86/64bit/entry64.inc>
+	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
+
 	movabs	c_handler, %eax
 	call	*%rax
 #else
diff --git a/src/cpu/x86/smm/smm_stub.S b/src/cpu/x86/smm/smm_stub.S
index f97ab59cd9..9f1f21d974 100644
--- a/src/cpu/x86/smm/smm_stub.S
+++ b/src/cpu/x86/smm/smm_stub.S
@@ -12,6 +12,7 @@
 #include <cpu/x86/cr.h>
 #include <cpu/x86/msr.h>
 #include <cpu/x86/lapic_def.h>
+#include <cpu/x86/64bit/entry64.inc>
 
 .code32
 .section ".module_parameters", "aw", @progbits
@@ -195,7 +196,7 @@ align_stack:
 
 #if ENV_X86_64
 	mov	%ecx, %edi
 	/* entry64.inc preserves ebx, esi, edi, ebp */
-#include <cpu/x86/64bit/entry64.inc>
+	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
 	mov	%edi, %ecx
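The conversion relies on two GNU as features: macro parameters (\page_table) and the \@ pseudo-symbol, which expands to the assembler's macro-invocation count, so a label such as jmp_addr\@ is unique in every expansion of setup_longmode and each call site can pass its own page table address. A minimal standalone sketch of the same pattern follows, assuming only standard GNU as syntax; the macro name, label, and operands are illustrative and not taken from the tree.

/* Hypothetical example: a parameterized macro that needs a fresh label
 * on every expansion, the same trick setup_longmode uses. */
.macro load_and_skip page_table
	movl	\page_table, %eax	/* parameter substitution, like \page_table above */
	jmp	over\@			/* jump to the label generated for this expansion */
over\@:					/* \@ makes the label unique, so no duplicate symbols */
.endm

.text
	load_and_skip $0x1000		/* first expansion gets its own over label */
	load_and_skip $0x2000		/* second expansion gets a different one */

Because each expansion now carries its own jmp_addr label, the single global __longmode_start symbol goes away, and the code no longer has to be pulled in via #include at exactly one place per object file.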