diff options
author | Patrick Rudolph <patrick.rudolph@9elements.com> | 2020-11-30 15:56:59 +0100 |
---|---|---|
committer | Patrick Georgi <pgeorgi@google.com> | 2020-12-05 08:19:17 +0000 |
commit | 7a359497cd83babdc5d3244a390cb775412105e6 (patch) | |
tree | ab346dffaa621ca95713f70b671126989aebc62e /src/cpu/x86/64bit | |
parent | 22b42a87de7cbc680857e512bcd0651a47b9fbdb (diff) |
cpu/x86/64bit: Add code to call function in protected mode
This adds a helper function for long mode to call some code in protected
mode and return back to long mode.
The primary use case is to run binaries that have been compiled for
protected mode, like the FSP or MRC binaries.
Tested on Intel Skylake. The FSP-M runs and returns without error while
coreboot runs in long mode.
Change-Id: I22af2d224b546c0be9e7295330b4b6602df106d6
Signed-off-by: Patrick Rudolph <patrick.rudolph@9elements.com>
Reviewed-on: https://review.coreboot.org/c/coreboot/+/48175
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
Reviewed-by: Arthur Heymans <arthur@aheymans.xyz>
Diffstat (limited to 'src/cpu/x86/64bit')
-rw-r--r-- | src/cpu/x86/64bit/mode_switch.S | 70 |
1 file changed, 70 insertions(+), 0 deletions(-)
diff --git a/src/cpu/x86/64bit/mode_switch.S b/src/cpu/x86/64bit/mode_switch.S
new file mode 100644
index 0000000000..eea104bcf3
--- /dev/null
+++ b/src/cpu/x86/64bit/mode_switch.S
@@ -0,0 +1,70 @@
/* SPDX-License-Identifier: GPL-2.0-only */

.text
.code64
	.section ".text.protected_mode_call", "ax", @progbits
	.globl protected_mode_call_narg

/*
 * protected_mode_call_narg — call a 32-bit protected-mode function from
 * long mode and return to long mode with its result.
 *
 * C-equivalent (caller side, SysV AMD64):
 *   uint32_t protected_mode_call_narg(uint32_t arg_count,  // rdi
 *                                     uint32_t func_ptr,   // rsi
 *                                     uint32_t opt_arg1,   // rdx
 *                                     uint32_t opt_arg2);  // rcx
 *
 * In:    rdi = number of arguments to pass (0, 1 or 2)
 *        rsi = 32-bit address of the protected-mode function to call
 *        rdx = argument 0 (if arg_count >= 1)
 *        rcx = argument 1 (if arg_count == 2)
 * Out:   eax = the called function's return value (zero-extended into rax)
 * Saved: rbx, r12-r15 (callee-saved regs pushed/restored below); rsp/rbp
 *        restored via the frame pointer.
 *
 * NOTE(review): the mode transition itself is done by the textually
 * included stubs <cpu/x86/64bit/exit32.inc> (drop to protected mode) and
 * <cpu/x86/64bit/entry64.inc> (re-enter long mode) — their register
 * clobbers are not visible here; the code only relies on entry64.inc
 * preserving %ebx (see comment at its include site). Confirm against the
 * .inc files if changing register usage.
 *
 * NOTE(review): after exit32.inc the frame is addressed as -N(%ebp),
 * which truncates the frame pointer to 32 bits — this assumes the stack
 * (and this code) live below 4 GiB, as they do in identity-mapped
 * coreboot; verify for any new platform.
 */
protected_mode_call_narg:

	push	%rbp
	mov	%rsp, %rbp
	/* Preserve registers */
	push	%rbx			/* -8(%rbp)  */
	push	%r12			/* -16(%rbp) */
	push	%r13			/* -24(%rbp) */
	push	%r14			/* -32(%rbp) */
	push	%r15			/* -40(%rbp) */

	/*
	 * Arguments to stack: registers are not preserved across the mode
	 * switch, so park the four incoming argument registers in the frame
	 * where the 32-bit code below can reload them by fixed offset.
	 */
	push	%rdi			/* -48(%rbp): argument count   */
	push	%rsi			/* -56(%rbp): function pointer */
	push	%rdx			/* -64(%rbp): argument 0       */
	push	%rcx			/* -72(%rbp): argument 1       */

	/* Drop from long mode to 32-bit protected mode (textual include). */
	#include <cpu/x86/64bit/exit32.inc>

	/* Now running .code32; reload the saved incoming arguments. */
	movl	-48(%ebp), %eax	/* Argument count */
	movl	-64(%ebp), %edx	/* Argument 0 */
	movl	-72(%ebp), %ecx	/* Argument 1 */

	/*
	 * Align the stack to 16 bytes; each arg-count path below then pushes
	 * a full 16-byte slot (pad + args) so the callee is called aligned.
	 */
	andl	$0xFFFFFFF0, %esp
	test	%eax, %eax
	je	1f	/* Zero arguments */

	subl	$1, %eax
	test	%eax, %eax
	je	2f	/* One argument */

	/* Two arguments: 8 pad bytes + two 4-byte args = 16 bytes */
	subl	$8, %esp
	pushl	%ecx	/* Argument 1 */
	pushl	%edx	/* Argument 0 */
	jmp	1f
2:
	/* One argument: 12 pad bytes + one 4-byte arg = 16 bytes */
	subl	$12, %esp
	pushl	%edx	/* Argument 0 */

1:
	/* Indirect cdecl call; args (if any) are on the stack. */
	movl	-56(%ebp), %ebx	/* Function to call */
	call	*%ebx
	/* Stash the 32-bit return value across the mode switch. */
	movl	%eax, %ebx

	/* Re-enter long mode (textual include). Preserves ebx */
	#include <cpu/x86/64bit/entry64.inc>

	/* Place return value in rax */
	movl	%ebx, %eax

	/* Restore registers saved in the prologue (offsets mirror pushes). */
	mov	-40(%rbp), %r15
	mov	-32(%rbp), %r14
	mov	-24(%rbp), %r13
	mov	-16(%rbp), %r12
	mov	-8(%rbp), %rbx

	/* Restore stack pointer (discards the spilled argument slots). */
	mov	%rbp, %rsp
	pop	%rbp

	ret