/* SPDX-License-Identifier: GPL-2.0-only */
#include <cpu/x86/64bit/entry64.inc>
/* Calls an x86_32 function from x86_64 context */
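/*
 * Register interface, as implemented below:
 *   %rdi              - 32-bit pointer to the function to call
 *   %rsi, %rdx, %rcx  - up to three 32-bit arguments, copied onto the
 *                       32-bit stack in cdecl order
 * The called function's 32-bit return value is handed back in %rax.
 */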
.text
.code64
.section ".text.protected_mode_call", "ax", @progbits
.globl protected_mode_call_wrapper
protected_mode_call_wrapper:
	/* Preserve registers */
	push	%rbp
	push	%rbx
	push	%r12
	push	%r13
	push	%r14
	push	%r15

	/* Backup gs to stack */
	movl	%gs, %eax
	push	%rax

	/* Store stack pointer */
	mov	%rsp, %rbp

	/* Align stack and make space for arguments */
	movabs	$0xfffffffffffffff0, %rax
	andq	%rax, %rsp
	sub	$16, %rsp

	/* Arguments to stack */
	movl	%edi, 12(%rsp)	/* function to call */
	movl	%esi, 0(%rsp)	/* first argument */
	movl	%edx, 4(%rsp)	/* second argument */
	movl	%ecx, 8(%rsp)	/* third argument */

	/* Drop to protected mode */
	#include <cpu/x86/64bit/exit32.inc>

	/* Fetch function to call */
	movl	12(%esp), %ebx

	/* Call function */
	call	*%ebx
	movl	%eax, %ebx	/* stash return value; ebx survives the mode switch */

	/* Return to long mode; setup_longmode preserves ebx */
	setup_longmode	$PM4LE

	/* Place return value in rax */
	movl	%ebx, %eax

	/* Restore stack pointer */
	mov	%rbp, %rsp

	/* Restore registers */
	pop	%rbx
	movl	%ebx, %gs	/* restore gs saved on entry */
	pop	%r15
	pop	%r14
	pop	%r13
	pop	%r12
	pop	%rbx
	pop	%rbp

	ret
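
/*
 * Usage sketch (hypothetical caller; symbol and argument names are
 * illustrative, not taken from this file):
 *
 *	movl	$some_x86_32_function, %edi
 *	movl	$arg1, %esi
 *	movl	$arg2, %edx
 *	movl	$arg3, %ecx
 *	call	protected_mode_call_wrapper
 *
 * On return, the 32-bit result of some_x86_32_function is in %eax.
 */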