author     Patrick Rudolph <patrick.rudolph@9elements.com>   2020-08-27 20:50:18 +0200
committer  Patrick Georgi <pgeorgi@google.com>                2021-07-06 06:09:13 +0000
commit     adcf7827bd00757cd52e87693c8bbfbe08ed6b13 (patch)
tree       30d823ebe33966e2367e3f4c7aab9a4dae05fffe /src/cpu/x86
parent     e85e7af6d02cb7d5acc51a10d0a1abbd7980ce12 (diff)
arch/x86: Use ENV_X86_64 instead of __x86_64__
Tested on Intel Sandybridge x86_64 and x86_32.

Change-Id: I152483d24af0512c0ee4fbbe8931b7312e487ac6
Signed-off-by: Patrick Rudolph <patrick.rudolph@9elements.com>
Reviewed-on: https://review.coreboot.org/c/coreboot/+/44867
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
Reviewed-by: Angel Pons <th3fanbus@gmail.com>
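The distinction the change relies on: the compiler-defined __x86_64__ only exists when 64-bit code is actually being built, so it has to be tested with #ifdef or #if defined(), whereas coreboot's ENV_X86_64 is always defined, to 1 in 64-bit stages and 0 otherwise, so it is tested with a plain #if like the other ENV_* macros. A minimal C sketch of that pattern follows; the fallback definition and the gdt_base_width() helper are hypothetical, added here purely for illustration and not taken from the tree.

/*
 * Illustrative sketch only. In-tree, ENV_X86_64 comes from coreboot's
 * build rules headers; the fallback below is a hypothetical stand-in
 * so the sketch compiles on its own.
 */
#include <stddef.h>
#include <stdint.h>

#ifndef ENV_X86_64
#if defined(__x86_64__)
#define ENV_X86_64 1
#else
#define ENV_X86_64 0
#endif
#endif

/* Same shape as the setup_secondary_gdt() hunk below: pick the GDT base
 * pointer width for the stage being built. */
static inline size_t gdt_base_width(void)
{
#if ENV_X86_64
	return sizeof(uint64_t);	/* 64-bit stage: 8-byte base pointer */
#else
	return sizeof(uint32_t);	/* 32-bit stage: 4-byte base pointer */
#endif
}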
Diffstat (limited to 'src/cpu/x86')
-rw-r--r--  src/cpu/x86/64bit/entry64.inc       2
-rw-r--r--  src/cpu/x86/lapic/lapic_cpu_init.c  2
-rw-r--r--  src/cpu/x86/sipi_vector.S           2
-rw-r--r--  src/cpu/x86/smm/smm_stub.S          4
-rw-r--r--  src/cpu/x86/smm/smmhandler.S        6
5 files changed, 8 insertions, 8 deletions
diff --git a/src/cpu/x86/64bit/entry64.inc b/src/cpu/x86/64bit/entry64.inc
index 70255173f1..7da68b47f9 100644
--- a/src/cpu/x86/64bit/entry64.inc
+++ b/src/cpu/x86/64bit/entry64.inc
@@ -9,7 +9,7 @@
* Clobbers: eax, ecx, edx
*/
-#if defined(__x86_64__)
+#if ENV_X86_64
.code32
#if (CONFIG_ARCH_X86_64_PGTBL_LOC & 0xfff) > 0
#error pagetables must be 4KiB aligned!
diff --git a/src/cpu/x86/lapic/lapic_cpu_init.c b/src/cpu/x86/lapic/lapic_cpu_init.c
index 295f583b27..e141ad8ec9 100644
--- a/src/cpu/x86/lapic/lapic_cpu_init.c
+++ b/src/cpu/x86/lapic/lapic_cpu_init.c
@@ -38,7 +38,7 @@ static int lowmem_backup_size;
static inline void setup_secondary_gdt(void)
{
u16 *gdt_limit;
-#ifdef __x86_64__
+#if ENV_X86_64
u64 *gdt_base;
#else
u32 *gdt_base;
diff --git a/src/cpu/x86/sipi_vector.S b/src/cpu/x86/sipi_vector.S
index d8156b88a8..aa95461ae8 100644
--- a/src/cpu/x86/sipi_vector.S
+++ b/src/cpu/x86/sipi_vector.S
@@ -214,7 +214,7 @@ load_msr:
mov %eax, %cr4
#endif
-#ifdef __x86_64__
+#if ENV_X86_64
/* entry64.inc preserves ebx. */
#include <cpu/x86/64bit/entry64.inc>
diff --git a/src/cpu/x86/smm/smm_stub.S b/src/cpu/x86/smm/smm_stub.S
index 07be047a36..44ee7cb327 100644
--- a/src/cpu/x86/smm/smm_stub.S
+++ b/src/cpu/x86/smm/smm_stub.S
@@ -185,7 +185,7 @@ apicid_end:
/* Align stack to 16 bytes. Another 32 bytes are pushed below. */
andl $0xfffffff0, %esp
-#ifdef __x86_64__
+#if ENV_X86_64
mov %ecx, %edi
/* Backup IA32_EFER. Preserves ebx. */
movl $(IA32_EFER), %ecx
@@ -204,7 +204,7 @@ apicid_end:
* struct arg = { c_handler_params, cpu_num, smm_runtime, canary };
* c_handler(&arg)
*/
-#ifdef __x86_64__
+#if ENV_X86_64
push %rbx /* uintptr_t *canary */
push %rcx /* size_t cpu */
diff --git a/src/cpu/x86/smm/smmhandler.S b/src/cpu/x86/smm/smmhandler.S
index 3750e5224a..b7805d06ab 100644
--- a/src/cpu/x86/smm/smmhandler.S
+++ b/src/cpu/x86/smm/smmhandler.S
@@ -43,7 +43,7 @@
#define SMM_HANDLER_OFFSET 0x0000
-#if defined(__x86_64__)
+#if ENV_X86_64
.bss
ia32efer_backup_eax:
.long 0
@@ -166,7 +166,7 @@ untampered_lapic:
addl $SMM_STACK_SIZE, %ebx
movl %ebx, %esp
-#if defined(__x86_64__)
+#if ENV_X86_64
/* Backup IA32_EFER. Preserves ebx. */
movl $(IA32_EFER), %ecx
rdmsr
@@ -180,7 +180,7 @@ untampered_lapic:
/* Call C handler */
call smi_handler
-#if defined(__x86_64__)
+#if ENV_X86_64
/*
* The only reason to go back to protected mode is that RSM doesn't restore
* MSR registers and MSR IA32_EFER was modified by entering long mode.