author	Patrick Rudolph <patrick.rudolph@9elements.com>	2023-12-28 07:44:26 +0100
committer	Lean Sheng Tan <sheng.tan@9elements.com>	2024-01-03 00:38:27 +0000
commit	b4283a4fbbcc8702afa4d992789ca458a7df923a (patch)
tree	c41be09ed596948596bfeda123554321c5c3fd3f /src/cpu
parent	b14b96d29a5ed196b2205a1bbd239c23d684fa47 (diff)
cpu/x86/64bit/mode_switch: Simplify assembly code
Drop the first argument specifying the number of arguments pushed to the
stack. Instead, always push the 3 arguments to the stack and use the first
one as the function pointer to call while in protected mode.

While at it, add more comments and simplify the register restore code.

Tested:
- On qemu, can call an x86_32 function, passing arguments and receiving a return value.
- Booted a Lenovo X220 in x86_64 mode using the x86_32 MRC.

Change-Id: I30809453a1800ba3c0df60acd7eca778841c520f
Signed-off-by: Patrick Rudolph <patrick.rudolph@9elements.com>
Reviewed-on: https://review.coreboot.org/c/coreboot/+/79752
Reviewed-by: Arthur Heymans <arthur@aheymans.xyz>
Reviewed-by: Jérémy Compostella <jeremy.compostella@intel.com>
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
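The commit message describes replacing the variable-argument entry point with a fixed three-argument one. As a rough illustration of what callers see after this change, here is a minimal C sketch, assuming the companion header (not part of this diff) declares the routine roughly as below; the prototype, the wrapper name call_mrc, and its parameters are illustrative assumptions, not code from this patch:

```c
#include <stdint.h>

/*
 * Assumed declaration of the new entry point: the first argument is the
 * 32-bit function to call, followed by up to three 32-bit arguments that
 * the assembly places on the protected-mode stack.
 */
int protected_mode_call_3arg(uint32_t func_ptr, uint32_t arg1,
			     uint32_t arg2, uint32_t arg3);

/*
 * Hypothetical caller: unused argument slots are simply passed as zero,
 * instead of passing an explicit argument count as the old
 * protected_mode_call_narg required.
 */
static int call_mrc(uint32_t mrc_entry, uint32_t params_ptr)
{
	return protected_mode_call_3arg(mrc_entry, params_ptr, 0, 0);
}
```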
Diffstat (limited to 'src/cpu')
-rw-r--r--	src/cpu/x86/64bit/mode_switch.S	72
1 file changed, 30 insertions(+), 42 deletions(-)
diff --git a/src/cpu/x86/64bit/mode_switch.S b/src/cpu/x86/64bit/mode_switch.S
index c27f540ba3..f9f784e8c4 100644
--- a/src/cpu/x86/64bit/mode_switch.S
+++ b/src/cpu/x86/64bit/mode_switch.S
@@ -1,14 +1,13 @@
/* SPDX-License-Identifier: GPL-2.0-only */
+/* Calls a x86_32 function from x86_64 context */
.text
.code64
.section ".text.protected_mode_call", "ax", @progbits
- .globl protected_mode_call_narg
-protected_mode_call_narg:
-
- push %rbp
- mov %rsp, %rbp
+ .globl protected_mode_call_3arg
+protected_mode_call_3arg:
/* Preserve registers */
+ push %rbp
push %rbx
push %r12
push %r13
@@ -19,58 +18,47 @@ protected_mode_call_narg:
movl %gs, %eax
push %rax
- /* Arguments to stack */
- push %rdi
- push %rsi
- push %rdx
- push %rcx
-
- #include <cpu/x86/64bit/exit32.inc>
+ /* Store stack pointer */
+ mov %rsp, %rbp
- movl -56(%ebp), %eax /* Argument count */
- movl -72(%ebp), %edx /* Argument 0 */
- movl -80(%ebp), %ecx /* Argument 1 */
+ /* Align stack and make space for arguments */
+ movabs $0xfffffffffffffff0, %rax
+ andq %rax, %rsp
+ sub $16, %rsp
- /* Align the stack */
- andl $0xFFFFFFF0, %esp
- test %eax, %eax
- je 1f /* Zero arguments */
+ /* Arguments to stack */
+ movl %edi, 12(%rsp)
+ movl %esi, 0(%rsp)
+ movl %edx, 4(%rsp)
+ movl %ecx, 8(%rsp)
- subl $1, %eax
- test %eax, %eax
- je 2f /* One argument */
+ /* Drop to protected mode */
+ #include <cpu/x86/64bit/exit32.inc>
- /* Two arguments */
- subl $8, %esp
- pushl %ecx /* Argument 1 */
- pushl %edx /* Argument 0 */
- jmp 1f
-2:
- subl $12, %esp
- pushl %edx /* Argument 0 */
+ /* Fetch function to call */
+ movl 12(%esp), %ebx
-1:
- movl -64(%ebp), %ebx /* Function to call */
+ /* Call function */
call *%ebx
movl %eax, %ebx
- /* Preserves ebx */
+ /* Jump to long mode. Preserves ebx */
#include <cpu/x86/64bit/entry64.inc>
/* Place return value in rax */
movl %ebx, %eax
- /* Restore registers */
- mov -48(%rbp), %rbx
- movl %ebx, %gs
- mov -40(%rbp), %r15
- mov -32(%rbp), %r14
- mov -24(%rbp), %r13
- mov -16(%rbp), %r12
- mov -8(%rbp), %rbx
-
/* Restore stack pointer */
mov %rbp, %rsp
+
+ /* Restore registers */
+ pop %rbx
+ movl %ebx, %gs
+ pop %r15
+ pop %r14
+ pop %r13
+ pop %r12
+ pop %rbx
pop %rbp
ret
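For context on the argument layout set up by the new assembly: the three values stored at 0/4/8(%rsp) before dropping to protected mode land exactly where a cdecl callee expects its stack arguments, and the function pointer saved at 12(%rsp) is what gets called. A minimal sketch of a hypothetical x86_32 callee, under that assumption (the function name and body are illustrative only):

```c
#include <stdint.h>

/*
 * A callee compiled for x86_32 simply receives the three values as
 * ordinary stack arguments. Its return value comes back in %eax, which
 * the assembly copies via %ebx and widens into %rax for the 64-bit caller.
 */
int do_something_32bit(uint32_t arg1, uint32_t arg2, uint32_t arg3)
{
	return (int)(arg1 + arg2 + arg3);
}
```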