/* SPDX-License-Identifier: GPL-2.0-only */

/*
 * Cache-as-RAM (CAR) bootblock entry.
 *
 * Runs in 32-bit protected mode with no RAM initialized. Puts all APs
 * into wait-for-SIPI, programs the MTRRs so one variable MTRR covers
 * the CAR region as write-back and another covers the XIP flash window
 * as write-protect, primes the cache with the CAR region, sets up a
 * stack inside CAR, and calls bootblock_c_entry_bist(tsc, bist).
 *
 * Inputs (set by earlier reset-vector code):
 *   %mm0 = BIST, %mm1 = tsc[31:0], %mm2 = tsc[63:32]
 * Never returns.
 */

/* NOTE(review): include operands were missing in the original; restored
   from the symbols used below (MTRR_*, CR0_*, post_code) — confirm paths. */
#include <cpu/x86/mtrr.h>
#include <cpu/x86/cache.h>
#include <cpu/x86/post_code.h>

.section .init
.global bootblock_pre_c_entry

.code32
_cache_as_ram_setup:

bootblock_pre_c_entry:

cache_as_ram:
	post_code(0x20)

	/* Send INIT IPI to all excluding ourself. */
	movl	$0x000C4500, %eax	/* INIT, edge, all-excluding-self */
	movl	$0xFEE00300, %esi	/* local APIC ICR, low dword */
	movl	%eax, (%esi)

	/* All CPUs need to be in Wait for SIPI state */
wait_for_sipi:
	movl	(%esi), %eax
	bt	$12, %eax		/* ICR delivery-status: send pending? */
	jc	wait_for_sipi

	post_code(0x22)

	/* Clear/disable fixed MTRRs.
	 * %eax:%edx = 0 is the value written to every fixed MTRR;
	 * %ebx walks the .word table of MSR numbers backwards.
	 */
	mov	$fixed_mtrr_list_size, %ebx
	xor	%eax, %eax
	xor	%edx, %edx

clear_fixed_mtrr:
	add	$-2, %ebx		/* sets ZF; movzwl/wrmsr below do not
					   touch flags, so jnz sees this add */
	movzwl	fixed_mtrr_list(%ebx), %ecx
	wrmsr
	jnz	clear_fixed_mtrr

	/* Figure out how many MTRRs we have, and clear them out */
	mov	$MTRR_CAP_MSR, %ecx
	rdmsr
	movzb	%al, %ebx		/* Number of variable MTRRs */
	mov	$MTRR_PHYS_BASE(0), %ecx
	xor	%eax, %eax
	xor	%edx, %edx

clear_var_mtrr:
	wrmsr				/* zero MTRR_PHYS_BASE(i) */
	inc	%ecx
	wrmsr				/* zero MTRR_PHYS_MASK(i): valid off */
	inc	%ecx
	dec	%ebx
	jnz	clear_var_mtrr

	post_code(0x22)

	/* Configure the default memory type to uncacheable. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	andl	$(~0x00000cff), %eax	/* clear E, FE and default type bits */
	wrmsr

	/* Determine CPU_ADDR_BITS and load PHYSMASK high word to %edx. */
	movl	$0x80000008, %eax	/* CPUID: %al = physical address bits */
	cpuid
	movb	%al, %cl
	sub	$32, %cl
	movl	$1, %edx
	shl	%cl, %edx
	subl	$1, %edx		/* %edx = (1 << (bits - 32)) - 1 */

	/* Preload high word of address mask (in %edx) for Variable
	 * MTRRs 0 and 1; the low words are filled in further down.
	 */
addrsize_set_high:
	xorl	%eax, %eax
	movl	$MTRR_PHYS_MASK(0), %ecx
	wrmsr
	movl	$MTRR_PHYS_MASK(1), %ecx
	wrmsr

	post_code(0x23)

	/* Set Cache-as-RAM base address. */
	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	$_car_mtrr_start, %eax
	orl	$MTRR_TYPE_WRBACK, %eax
	xorl	%edx, %edx
	wrmsr

	post_code(0x24)

	/* Set Cache-as-RAM mask. */
	movl	$(MTRR_PHYS_MASK(0)), %ecx
	rdmsr				/* reload preloaded high word (%edx) */
	movl	$_car_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(0x25)

	/* Enable MTRR. */
	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

	/* Enable L2 cache. */
	movl	$0x11e, %ecx		/* MSR 0x11e, bit 8 = L2 enable;
					   TODO: use a named constant */
	rdmsr
	orl	$(1 << 8), %eax
	wrmsr

	/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	invd				/* drop stale lines before enabling */
	movl	%eax, %cr0

	/* Clear the cache memory region. This will also fill up the cache. */
	cld
	xorl	%eax, %eax
	movl	$_car_mtrr_start, %edi
	movl	$_car_mtrr_size, %ecx
	shr	$2, %ecx		/* byte count -> dword count */
	rep	stosl

	post_code(0x26)

	/* Enable Cache-as-RAM mode by disabling cache. */
	movl	%cr0, %eax
	orl	$CR0_CacheDisable, %eax
	movl	%eax, %cr0

	/* Enable cache for our code in Flash because we do XIP here */
	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	movl	$_program, %eax
	andl	$_xip_mtrr_mask, %eax	/* align base down to the XIP window */
	orl	$MTRR_TYPE_WRPROT, %eax
	wrmsr

	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr				/* keep preloaded high word (%edx) */
	movl	$_xip_mtrr_mask, %eax
	orl	$MTRR_PHYS_MASK_VALID, %eax
	wrmsr

	post_code(0x28)

	/* Enable cache. */
	movl	%cr0, %eax
	andl	$(~(CR0_CacheDisable | CR0_NoWriteThrough)), %eax
	movl	%eax, %cr0

	/* Setup the stack. */
	mov	$_ecar_stack, %esp

	/* Need to align stack to 16 bytes at call instruction. Account for
	   the pushes below. */
	andl	$0xfffffff0, %esp
	subl	$4, %esp		/* 4 + 3*4 pushed = 16-byte aligned */

	/* push TSC and BIST to stack */
	movd	%mm0, %eax
	pushl	%eax			/* BIST */
	movd	%mm2, %eax
	pushl	%eax			/* tsc[63:32] */
	movd	%mm1, %eax
	pushl	%eax			/* tsc[31:0] */

before_c_entry:
	post_code(0x29)
	call	bootblock_c_entry_bist

	/* Should never see this postcode */
	post_code(POST_DEAD_CODE)

.Lhlt:
	hlt
	jmp	.Lhlt

/* MSR numbers of the fixed-range MTRRs cleared above (16-bit each). */
fixed_mtrr_list:
	.word	MTRR_FIX_64K_00000
	.word	MTRR_FIX_16K_80000
	.word	MTRR_FIX_16K_A0000
	.word	MTRR_FIX_4K_C0000
	.word	MTRR_FIX_4K_C8000
	.word	MTRR_FIX_4K_D0000
	.word	MTRR_FIX_4K_D8000
	.word	MTRR_FIX_4K_E0000
	.word	MTRR_FIX_4K_E8000
	.word	MTRR_FIX_4K_F0000
	.word	MTRR_FIX_4K_F8000
fixed_mtrr_list_size = . - fixed_mtrr_list

_cache_as_ram_setup_end: