From 9839cbd53fdcfcee52c406d9f52af924192e618d Mon Sep 17 00:00:00 2001
From: Stefan Reinauer
Date: Wed, 21 Apr 2010 20:06:10 +0000
Subject: * clean up all but two warnings on artecgroup dbe61

* integrate vsm init into normal x86.c code (so it can run above 1M)
* call void main(unsigned long bist) instead of void cache_as_ram_main(void)
  on Geode LX (as we do on almost all other platforms now)
* Unify Geode LX MSR setup (will bring most non-working LX targets back to life)

Signed-off-by: Stefan Reinauer
Acked-by: Patrick Georgi

git-svn-id: svn://svn.coreboot.org/coreboot/trunk@5471 2b7e53f0-3cfb-0310-b3e9-8179ed1497e1
---
 src/devices/oprom/x86_asm.S | 99 +++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 99 insertions(+)

diff --git a/src/devices/oprom/x86_asm.S b/src/devices/oprom/x86_asm.S
index 616aa8675f..194b4cb64c 100644
--- a/src/devices/oprom/x86_asm.S
+++ b/src/devices/oprom/x86_asm.S
@@ -155,6 +155,105 @@ __run_optionrom = RELOCATED(.)
 	popal
 	ret
 
+#if defined(CONFIG_GEODE_VSA) && CONFIG_GEODE_VSA
+#define VSA2_ENTRY_POINT 0x60020
+
+	.globl __run_vsa
+__run_vsa = RELOCATED(.)
+	/* save all registers to the stack */
+	pushal
+
+	/* Move the protected mode stack to a safe place */
+	mov %esp, __stack
+
+	movl %esp, %ebp
+	/* This function is called with regparm=0 and we have
+	 * to skip the 32 bytes from pushal:
+	 */
+	movl 36(%ebp), %ecx
+	movl 40(%ebp), %edx
+
+	/* Activate the right segment descriptor for real mode. */
+	ljmp $0x28, $RELOCATED(1f)
+1:
+.code16
+	/* 16 bit code from here on... */
+
+	/* Load the segment registers w/ properly configured
+	 * segment descriptors. They will retain these
+	 * configurations (limits, writability, etc.) once
+	 * protected mode is turned off.
+	 */
+	mov $0x30, %ax
+	mov %ax, %ds
+	mov %ax, %es
+	mov %ax, %fs
+	mov %ax, %gs
+	mov %ax, %ss
+
+	/* Turn off protection */
+	movl %cr0, %eax
+	andl $~PE, %eax
+	movl %eax, %cr0
+
+	/* Now really going into real mode */
+	ljmp $0, $RELOCATED(1f)
+1:
+	/* Set up a stack: Put the stack at the end of page zero.
+	 * That way we can easily share it between real and
+	 * protected mode, since the 16-bit ESP at segment 0 will
+	 * work for either case. */
+	mov $0x0, %ax
+	mov %ax, %ss
+	movl $0x1000, %eax
+	movl %eax, %esp
+
+	/* Load our 16 bit idt */
+	xor %ax, %ax
+	mov %ax, %ds
+	lidt __realmode_idt
+
+	/* Set all segments to 0x0000, ds to 0x0040 */
+	mov %ax, %es
+	mov %ax, %fs
+	mov %ax, %gs
+	mov $0x40, %ax
+	mov %ax, %ds
+	mov %cx, %ax	// restore ax
+
+	/* ************************************ */
+	lcall $((VSA2_ENTRY_POINT & 0xffff0000) >> 4), $(VSA2_ENTRY_POINT & 0xffff)
+	/* ************************************ */
+
+	/* If we got here, we are just about done.
+	 * Need to get back to protected mode.
+	 */
+	movl %cr0, %eax
+	orl $PE, %eax
+	movl %eax, %cr0
+
+	/* Now that we are in protected mode,
+	 * jump to a 32 bit code segment.
+	 */
+	data32 ljmp $0x10, $RELOCATED(1f)
+1:
+	.code32
+	movw $0x18, %ax
+	mov %ax, %ds
+	mov %ax, %es
+	mov %ax, %fs
+	mov %ax, %gs
+	mov %ax, %ss
+
+	/* restore proper idt */
+	lidt idtarg
+
+	/* and exit */
+	mov __stack, %esp
+	popal
+	ret
+#endif
+
 	.globl __run_interrupt
 __run_interrupt = RELOCATED(.)
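
Note on the far call into the VSA2 image: the lcall operands split the flat physical
entry address VSA2_ENTRY_POINT (0x60020) into a real-mode segment:offset pair, which
only works for addresses below 1MB. A minimal C sketch of that arithmetic, for
illustration only (the helper names are made up here and are not part of the patch):

#include <stdint.h>
#include <stdio.h>

#define VSA2_ENTRY_POINT 0x60020

/* Split a flat physical address (< 1MB) into a real-mode segment:offset pair,
 * mirroring the lcall operand computation in __run_vsa. */
static uint32_t rm_segment(uint32_t phys) { return (phys & 0xffff0000) >> 4; }
static uint32_t rm_offset(uint32_t phys)  { return phys & 0xffff; }

int main(void)
{
	/* segment 0x6000, offset 0x0020 -> physical 0x6000 * 16 + 0x20 = 0x60020 */
	printf("lcall $0x%04x, $0x%04x\n",
	       rm_segment(VSA2_ENTRY_POINT), rm_offset(VSA2_ENTRY_POINT));
	return 0;
}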
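
Note on the calling convention: __run_vsa is entered from C with regparm=0, so after
pushal (32 bytes of saved registers) and the 4-byte return address, the first stack
argument is at 36(%ebp) and the second at 40(%ebp). The low word of the first argument
ends up in AX (via CX) and the second argument stays in EDX when the VSA2 entry point
is called. A hedged sketch of a C-level caller under those assumptions; the prototype,
parameter names, and wrapper below are illustrative only and are not the actual x86.c
code introduced elsewhere in this change:

#include <stdint.h>

/* __run_vsa is the assembly entry point added by this patch; this C prototype
 * and the parameter meaning are assumptions made for this sketch. Both values
 * are passed on the stack: the first reaches AX, the second reaches EDX when
 * the far call into the VSA2 image is made. */
void __run_vsa(uint32_t ax_arg, uint32_t edx_arg);

/* Illustrative wrapper, not the real do_vsmbios()/x86.c implementation. */
static void run_vsa_init(uint32_t ax_arg, uint32_t edx_arg)
{
	__run_vsa(ax_arg, edx_arg);
}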