author     Elyes HAOUAS <ehaouas@noos.fr>           2018-12-27 09:14:07 +0100
committer  Kyösti Mälkki <kyosti.malkki@gmail.com>  2018-12-28 13:10:39 +0000
commit     dea45c1060ac2fc1be89e4c2e51a9f88246f8ae8 (patch)
tree       dd6f2281b9b18e5c076fceffee6c813bc421a22f /src/drivers/amd
parent     2cc351da5f57e78b44eff50a5c1297a1ab2b79ee (diff)
drivers/amd/agesa/cache_as_ram.S: Fix coding style
Change-Id: Iada9b3ba71b991b6f9c7ebb5f300c8d28829ab4f
Signed-off-by: Elyes HAOUAS <ehaouas@noos.fr>
Reviewed-on: https://review.coreboot.org/c/30452
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
Reviewed-by: Kyösti Mälkki <kyosti.malkki@gmail.com>
Diffstat (limited to 'src/drivers/amd')
-rw-r--r--  src/drivers/amd/agesa/cache_as_ram.S | 162
1 file changed, 81 insertions(+), 81 deletions(-)
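
The insertion and deletion counts match and every removed line reappears with identical content, so the change below is whitespace-only. A minimal sketch of the kind of hunk involved (assuming the fix replaces leading spaces with tabs, as coreboot's coding style requires; the whitespace is spelled out here because it is otherwise invisible):

	-[leading spaces]movd %eax, %mm0
	+[leading tab]movd %eax, %mm0
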
diff --git a/src/drivers/amd/agesa/cache_as_ram.S b/src/drivers/amd/agesa/cache_as_ram.S
index 50242f7a54..09302d7d0e 100644
--- a/src/drivers/amd/agesa/cache_as_ram.S
+++ b/src/drivers/amd/agesa/cache_as_ram.S
@@ -33,86 +33,86 @@
_cache_as_ram_setup:
- /* Preserve BIST. */
- movd %eax, %mm0
+ /* Preserve BIST. */
+ movd %eax, %mm0
- post_code(0xa0)
+ post_code(0xa0)
- /* enable SSE2 128bit instructions */
- /* Turn on OSFXSR [BIT9] and OSXMMEXCPT [BIT10] onto CR4 register */
+ /* enable SSE2 128bit instructions */
+ /* Turn on OSFXSR [BIT9] and OSXMMEXCPT [BIT10] onto CR4 register */
- movl %cr4, %eax
- orl $(3 << 9), %eax
- movl %eax, %cr4
+ movl %cr4, %eax
+ orl $(3 << 9), %eax
+ movl %eax, %cr4
- post_code(0xa1)
+ post_code(0xa1)
- AMD_ENABLE_STACK
+ AMD_ENABLE_STACK
- /* Align the stack. */
- and $0xFFFFFFF0, %esp
+ /* Align the stack. */
+ and $0xFFFFFFF0, %esp
#ifdef __x86_64__
- /* switch to 64 bit long mode */
- mov %esi, %ecx
- add $0, %ecx # core number
- xor %eax, %eax
- lea (0x1000+0x23)(%ecx), %edi
- mov %edi, (%ecx)
- mov %eax, 4(%ecx)
-
- lea 0x1000(%ecx), %edi
- movl $0x000000e3, 0x00(%edi)
- movl %eax, 0x04(%edi)
- movl $0x400000e3, 0x08(%edi)
- movl %eax, 0x0c(%edi)
- movl $0x800000e3, 0x10(%edi)
- movl %eax, 0x14(%edi)
- movl $0xc00000e3, 0x18(%edi)
- movl %eax, 0x1c(%edi)
-
- # load ROM based identity mapped page tables
- mov %ecx, %eax
- mov %eax, %cr3
-
- # enable PAE
- mov %cr4, %eax
- bts $5, %eax
- mov %eax, %cr4
-
- # enable long mode
- mov $0xC0000080, %ecx
- rdmsr
- bts $8, %eax
- wrmsr
-
- # enable paging
- mov %cr0, %eax
- bts $31, %eax
- mov %eax, %cr0
-
- # use call far to switch to 64-bit code segment
- ljmp $0x18, $1f
+ /* switch to 64 bit long mode */
+ mov %esi, %ecx
+ add $0, %ecx # core number
+ xor %eax, %eax
+ lea (0x1000+0x23)(%ecx), %edi
+ mov %edi, (%ecx)
+ mov %eax, 4(%ecx)
+
+ lea 0x1000(%ecx), %edi
+ movl $0x000000e3, 0x00(%edi)
+ movl %eax, 0x04(%edi)
+ movl $0x400000e3, 0x08(%edi)
+ movl %eax, 0x0c(%edi)
+ movl $0x800000e3, 0x10(%edi)
+ movl %eax, 0x14(%edi)
+ movl $0xc00000e3, 0x18(%edi)
+ movl %eax, 0x1c(%edi)
+
+ # load ROM based identity mapped page tables
+ mov %ecx, %eax
+ mov %eax, %cr3
+
+ # enable PAE
+ mov %cr4, %eax
+ bts $5, %eax
+ mov %eax, %cr4
+
+ # enable long mode
+ mov $0xC0000080, %ecx
+ rdmsr
+ bts $8, %eax
+ wrmsr
+
+ # enable paging
+ mov %cr0, %eax
+ bts $31, %eax
+ mov %eax, %cr0
+
+ # use call far to switch to 64-bit code segment
+ ljmp $0x18, $1f
1:
#endif
- call early_all_cores
+ call early_all_cores
- /* Must maintain 16-byte stack alignment here. */
- pushl $0x0
- pushl $0x0
- pushl $0x0
- movd %mm0, %eax /* bist */
- pushl %eax
- call romstage_main
+ /* Must maintain 16-byte stack alignment here. */
+ pushl $0x0
+ pushl $0x0
+ pushl $0x0
+ movd %mm0, %eax /* bist */
+ pushl %eax
+ call romstage_main
#if IS_ENABLED(CONFIG_POSTCAR_STAGE)
/* We do not return. Execution continues with run_postcar_phase()
* calling to chipset_teardown_car below.
*/
- jmp postcar_entry_failure
+ jmp postcar_entry_failure
chipset_teardown_car:
@@ -120,53 +120,53 @@ chipset_teardown_car:
* Retrieve return address from stack as it will get trashed below if
* execution is utilizing the cache-as-ram stack.
*/
- pop %esp
+ pop %esp
#else
- movl %eax, %esp
+ movl %eax, %esp
/* Register %esp is new stacktop for remaining of romstage. */
#endif
- /* Disable cache */
- movl %cr0, %eax
- orl $CR0_CacheDisable, %eax
- movl %eax, %cr0
+ /* Disable cache */
+ movl %cr0, %eax
+ orl $CR0_CacheDisable, %eax
+ movl %eax, %cr0
/* Register %esp is preserved in AMD_DISABLE_STACK. */
- AMD_DISABLE_STACK
+ AMD_DISABLE_STACK
#if IS_ENABLED(CONFIG_POSTCAR_STAGE)
- jmp *%esp
+ jmp *%esp
#else
- /* enable cache */
- movl %cr0, %eax
- andl $0x9fffffff, %eax
- movl %eax, %cr0
+ /* enable cache */
+ movl %cr0, %eax
+ andl $0x9fffffff, %eax
+ movl %eax, %cr0
- call romstage_after_car
+ call romstage_after_car
#endif
- /* Should never see this postcode */
- post_code(0xaf)
+ /* Should never see this postcode */
+ post_code(0xaf)
stop:
- hlt
- jmp stop
+ hlt
+ jmp stop
/* These are here for linking purposes. */
.weak early_all_cores, romstage_main
early_all_cores:
romstage_main:
postcar_entry_failure:
- /* Should never see this postcode */
- post_code(0xae)
- jmp stop
+ /* Should never see this postcode */
+ post_code(0xae)
+ jmp stop
_cache_as_ram_setup_end:
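
For readers tracing the 64-bit branch of the patch above, the magic numbers decode as follows. This is an explanatory sketch only; the symbolic names below are illustrative and are not defined in the coreboot tree:

	/* CR4: orl $(3 << 9) sets bits 9 and 10 */
	.equ CR4_OSFXSR,     (1 << 9)    /* OS supports FXSAVE/FXRSTOR (SSE state) */
	.equ CR4_OSXMMEXCPT, (1 << 10)   /* OS handles unmasked SIMD FP exceptions */

	/* Long-mode switch: PAE, then EFER.LME, then paging */
	.equ CR4_PAE,        (1 << 5)    /* bts $5 on %cr4: Physical Address Extension */
	.equ MSR_EFER,       0xC0000080  /* rdmsr/wrmsr target; bit 8 (LME) enables long mode */
	.equ CR0_PG,         (1 << 31)   /* bts $31 on %cr0: turn on paging */

	/* ROM-based identity map: one PML4 entry at the base, four 1 GiB PDPT entries at +0x1000 */
	.equ PML4E_FLAGS,    0x23        /* present | writable | accessed */
	.equ PDPTE_FLAGS,    0xe3        /* present | writable | accessed | dirty | page size (1 GiB) */

	/* ljmp $0x18, $1f reloads %cs with the 64-bit code segment selector */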