Diffstat (limited to 'src')
-rw-r--r--  src/arch/x86/bootblock.ld                              2
-rw-r--r--  src/cpu/x86/16bit/entry16.inc                         13
-rw-r--r--  src/cpu/x86/32bit/entry32.inc                         20
-rw-r--r--  src/soc/amd/common/block/cpu/noncar/memlayout_x86.ld   2
4 files changed, 6 insertions(+), 31 deletions(-)
diff --git a/src/arch/x86/bootblock.ld b/src/arch/x86/bootblock.ld
index 12f932c2dc..849addd2e2 100644
--- a/src/arch/x86/bootblock.ld
+++ b/src/arch/x86/bootblock.ld
@@ -1,6 +1,6 @@
 /* SPDX-License-Identifier: GPL-2.0-only */
 
-gdtptr16_offset = gdtptr16 & 0xffff;
+gdtptr_offset = gdtptr & 0xffff;
 nullidt_offset = nullidt & 0xffff;
 
 /* Symbol _start16bit must be aligned to 4kB to start AP CPUs with
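The `& 0xffff` mask is what makes a link-time symbol usable from 16-bit code: real mode addresses memory as segment base plus a 16-bit offset, and with the bootblock linked directly below 4 GiB against a code segment based at 0xffff0000, the low 16 bits of a symbol's link address are exactly its offset within that segment. A sketch of the arithmetic, using a made-up link address for illustration:

	/* Illustration only: suppose gdtptr were linked at 0xffffd123.
	 * With the real-mode code segment based at 0xffff0000:
	 *   0xffffd123 = 0xffff0000 + 0xd123
	 * so gdtptr & 0xffff (= 0xd123) is the %cs-relative offset,
	 * small enough to fit a 16-bit immediate:
	 */
	movw	$gdtptr_offset, %bx
	lgdtl	%cs:(%bx)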
diff --git a/src/cpu/x86/16bit/entry16.inc b/src/cpu/x86/16bit/entry16.inc
index 13d12beb66..2665cc69ae 100644
--- a/src/cpu/x86/16bit/entry16.inc
+++ b/src/cpu/x86/16bit/entry16.inc
@@ -108,7 +108,7 @@ _start16bit:
 	movw	$nullidt_offset, %bx
 	subw	%ax, %bx
 	lidt	%cs:(%bx)
 
-	movw	$gdtptr16_offset, %bx
+	movw	$gdtptr_offset, %bx
 	subw	%ax, %bx
 	lgdtl	%cs:(%bx)
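The `subw %ax, %bx` in this hunk is a relocation fixup: subtracting the low 16 bits of the running segment's linear base (mod 2^16) turns the link-time offset into one valid for whatever %cs the CPU started in, whether the BSP's reset segment or a 4 KiB-aligned SIPI segment on an AP. A sketch of where %ax comes from, quoting the idiom used earlier in _start16bit from memory rather than from this patch, so treat it as context only:

	movw	%cs, %ax	/* segment the CPU is actually executing in */
	shlw	$4, %ax		/* low 16 bits of that segment's linear base */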
@@ -127,11 +127,8 @@ _start16bit:
  * The gdt is defined in entry32.inc, it has a 4 Gb code segment
  * at 0x08, and a 4 GB data segment at 0x10;
  */
-.align	4
-.globl gdtptr16
-gdtptr16:
-	.word	gdt_end - gdt -1	/* compute the table limit */
-	.long	gdt			/* we know the offset */
+__gdtptr:
+	.long	gdtptr
 
 .align	4
 .globl nullidt
@@ -139,7 +136,3 @@ nullidt:
 	.word	0	/* limit */
 	.long	0
 	.word	0
-
-.globl _estart16bit
-_estart16bit:
-	.code32
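What this hunk removes is a second, 16-bit-reachable copy of the GDT pseudo-descriptor: after the patch, the `lgdtl` in _start16bit reaches the `gdtptr` that entry32.inc already provides (hence the linker-script hunks), and this file only keeps `__gdtptr`, a 32-bit word holding that symbol's address. For reference, the 6-byte pseudo-descriptor layout that `lgdtl` consumes, exactly as the removed `gdtptr16` spelled it (the label here is hypothetical, for illustration only):

	.align	4
example_gdtptr:
	.word	gdt_end - gdt - 1	/* limit: size of the GDT in bytes, minus one */
	.long	gdt			/* base: 32-bit linear address of the table */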
diff --git a/src/cpu/x86/32bit/entry32.inc b/src/cpu/x86/32bit/entry32.inc
index be67b534cb..85094483e5 100644
--- a/src/cpu/x86/32bit/entry32.inc
+++ b/src/cpu/x86/32bit/entry32.inc
@@ -8,29 +8,11 @@
 .code32
 
 /*
- * When we come here we are in protected mode. We expand
- * the stack and copies the data segment from ROM to the
- * memory.
- *
- * After that, we call the chipset bootstrap routine that
- * does what is left of the chipset initialization.
- *
+ * When we come here we are in protected mode.
  * NOTE aligned to 4 so that we are sure that the prefetch
  * cache will be reloaded.
- *
- * In the bootblock there is already a ljmp to __protected_start and
- * the reset vector jumps to symbol _start16bit in entry16.inc from
- * the reset vectors's symbol which is _start. Therefore, don't
- * expose the _start symbol for bootblock.
  */
 	.align	4
 
-#if !ENV_BOOTBLOCK
-.globl _start
-_start:
-#endif
-
-	lgdt	%cs:gdtptr
-	ljmp	$ROM_CODE_SEG, $__protected_start
 __protected_start:
 	/* Save the BIST value */
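The deleted `lgdt`/`ljmp` pair was the entry path for stages whose `_start` begins already in 32-bit mode; in the bootblock, the far jump that lands on `__protected_start` is issued from the 16-bit side instead. For orientation, the textbook mode-switch sequence that precedes a jump like this (the standard x86 recipe, not copied from this tree; `ROM_CODE_SEG` and `__protected_start` are the names used above):

	movl	%cr0, %eax
	orl	$1, %eax	/* set CR0.PE to enable protected mode */
	movl	%eax, %cr0
	ljmpl	$ROM_CODE_SEG, $__protected_start	/* reload %cs from the new
							   GDT; the far jump also
							   flushes the prefetch queue */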
diff --git a/src/soc/amd/common/block/cpu/noncar/memlayout_x86.ld b/src/soc/amd/common/block/cpu/noncar/memlayout_x86.ld
index f702b4b201..352472e18b 100644
--- a/src/soc/amd/common/block/cpu/noncar/memlayout_x86.ld
+++ b/src/soc/amd/common/block/cpu/noncar/memlayout_x86.ld
@@ -102,7 +102,7 @@ SECTIONS
 
 #if ENV_BOOTBLOCK
-gdtptr16_offset = gdtptr16 & 0xffff;
+gdtptr_offset = gdtptr & 0xffff;
 nullidt_offset = nullidt & 0xffff;
 #endif
 
 SECTIONS {