From 86091f94b6ca58f4b8795503b274492d6a935c15 Mon Sep 17 00:00:00 2001
From: Alexandru Gagniuc
Date: Wed, 30 Sep 2015 20:23:09 -0700
Subject: cpu/mtrr.h: Fix macro names for MTRR registers

We use UNDERSCORE_CASE. For the MTRR macros that refer to an MSR, we
also remove the _MSR suffix, as they are, by definition, MSRs.

Change-Id: Id4483a75d62cf1b478a9105ee98a8f55140ce0ef
Signed-off-by: Alexandru Gagniuc
Reviewed-on: http://review.coreboot.org/11761
Reviewed-by: Aaron Durbin
Tested-by: build bot (Jenkins)
---
 src/cpu/intel/car/cache_as_ram.inc         | 80 +++++++++++++++---------------
 src/cpu/intel/car/cache_as_ram_ht.inc      | 46 ++++++++---------
 src/cpu/intel/fsp_model_406dx/bootblock.c  |  8 +--
 src/cpu/intel/haswell/bootblock.c          | 12 ++---
 src/cpu/intel/haswell/cache_as_ram.inc     | 38 +++++++-------
 src/cpu/intel/haswell/romstage.c           |  8 +--
 src/cpu/intel/haswell/smmrelocate.c        | 12 ++---
 src/cpu/intel/model_2065x/bootblock.c      |  8 +--
 src/cpu/intel/model_2065x/cache_as_ram.inc | 42 ++++++++--------
 src/cpu/intel/model_206ax/bootblock.c      |  8 +--
 src/cpu/intel/model_206ax/cache_as_ram.inc | 48 +++++++++---------
 src/cpu/intel/model_6ex/cache_as_ram.inc   | 38 +++++++-------
 src/cpu/intel/smm/gen1/smmrelocate.c       |  8 +--
 13 files changed, 178 insertions(+), 178 deletions(-)
(limited to 'src/cpu/intel')
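The rename is mechanical: CamelCase macro names become UNDERSCORE_CASE, and the
_MSR suffix is dropped where the macro itself names an MSR. For reference, a
sketch of the mapping with the MSR addresses involved (addresses per the Intel
SDM; the authoritative definitions live in src/include/cpu/x86/mtrr.h, so treat
this as illustration, not part of the change):

	#define MTRR_CAP_MSR		0x0fe			/* was MTRRcap_MSR */
	#define MTRR_DEF_TYPE_MSR	0x2ff			/* was MTRRdefType_MSR */
	#define MTRR_DEF_TYPE_EN	(1 << 11)		/* was MTRRdefTypeEn */
	#define MTRR_DEF_TYPE_FIX_EN	(1 << 10)		/* was MTRRdefTypeFixEn */
	#define MTRR_PHYS_BASE(reg)	(0x200 + 2 * (reg))	/* was MTRRphysBase_MSR(reg) */
	#define MTRR_PHYS_MASK(reg)	(0x201 + 2 * (reg))	/* was MTRRphysMask_MSR(reg) */
	#define MTRR_PHYS_MASK_VALID	(1 << 11)		/* was MTRRphysMaskValid */
	#define SMRR_PHYS_BASE		0x1f2			/* was SMRRphysBase_MSR */
	#define SMRR_PHYS_MASK		0x1f3			/* was SMRRphysMask_MSR */

diff --git a/src/cpu/intel/car/cache_as_ram.inc b/src/cpu/intel/car/cache_as_ram.inc
index f9be6e890f..6ef8604456 100644
--- a/src/cpu/intel/car/cache_as_ram.inc
+++ b/src/cpu/intel/car/cache_as_ram.inc
@@ -54,7 +54,7 @@ CacheAsRam:
	 */
	xorl	%eax, %eax
	xorl	%edx, %edx
-	movl	$MTRRfix64K_00000_MSR, %ecx
+	movl	$MTRR_FIX_64K_00000, %ecx
	wrmsr

	/*
@@ -102,16 +102,16 @@ SIPI_Delay:

	/* Wait for the Logical AP to complete initialization. */
LogicalAP_SIPINotdone:
-	movl	$MTRRfix64K_00000_MSR, %ecx
+	movl	$MTRR_FIX_64K_00000, %ecx
	rdmsr
	orl	%eax, %eax
	jz	LogicalAP_SIPINotdone

NotHtProcessor:
	/* Set the default memory type and enable fixed and variable MTRRs. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	xorl	%edx, %edx
-	movl	$(MTRRdefTypeEn | MTRRdefTypeFixEn), %eax
+	movl	$(MTRR_DEF_TYPE_EN | MTRR_DEF_TYPE_FIX_EN), %eax
	wrmsr

	/* Clear all MTRRs. */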
@@ -131,35 +131,35 @@ clear_fixed_var_mtrr:

all_mtrr_msrs:
	/* fixed MTRR MSRs */
-	.long	MTRRfix64K_00000_MSR
-	.long	MTRRfix16K_80000_MSR
-	.long	MTRRfix16K_A0000_MSR
-	.long	MTRRfix4K_C0000_MSR
-	.long	MTRRfix4K_C8000_MSR
-	.long	MTRRfix4K_D0000_MSR
-	.long	MTRRfix4K_D8000_MSR
-	.long	MTRRfix4K_E0000_MSR
-	.long	MTRRfix4K_E8000_MSR
-	.long	MTRRfix4K_F0000_MSR
-	.long	MTRRfix4K_F8000_MSR
+	.long	MTRR_FIX_64K_00000
+	.long	MTRR_FIX_16K_80000
+	.long	MTRR_FIX_16K_A0000
+	.long	MTRR_FIX_4K_C0000
+	.long	MTRR_FIX_4K_C8000
+	.long	MTRR_FIX_4K_D0000
+	.long	MTRR_FIX_4K_D8000
+	.long	MTRR_FIX_4K_E0000
+	.long	MTRR_FIX_4K_E8000
+	.long	MTRR_FIX_4K_F0000
+	.long	MTRR_FIX_4K_F8000

	/* var MTRR MSRs */
-	.long	MTRRphysBase_MSR(0)
-	.long	MTRRphysMask_MSR(0)
-	.long	MTRRphysBase_MSR(1)
-	.long	MTRRphysMask_MSR(1)
-	.long	MTRRphysBase_MSR(2)
-	.long	MTRRphysMask_MSR(2)
-	.long	MTRRphysBase_MSR(3)
-	.long	MTRRphysMask_MSR(3)
-	.long	MTRRphysBase_MSR(4)
-	.long	MTRRphysMask_MSR(4)
-	.long	MTRRphysBase_MSR(5)
-	.long	MTRRphysMask_MSR(5)
-	.long	MTRRphysBase_MSR(6)
-	.long	MTRRphysMask_MSR(6)
-	.long	MTRRphysBase_MSR(7)
-	.long	MTRRphysMask_MSR(7)
+	.long	MTRR_PHYS_BASE(0)
+	.long	MTRR_PHYS_MASK(0)
+	.long	MTRR_PHYS_BASE(1)
+	.long	MTRR_PHYS_MASK(1)
+	.long	MTRR_PHYS_BASE(2)
+	.long	MTRR_PHYS_MASK(2)
+	.long	MTRR_PHYS_BASE(3)
+	.long	MTRR_PHYS_MASK(3)
+	.long	MTRR_PHYS_BASE(4)
+	.long	MTRR_PHYS_MASK(4)
+	.long	MTRR_PHYS_BASE(5)
+	.long	MTRR_PHYS_MASK(5)
+	.long	MTRR_PHYS_BASE(6)
+	.long	MTRR_PHYS_MASK(6)
+	.long	MTRR_PHYS_BASE(7)
+	.long	MTRR_PHYS_MASK(7)

	.long	0x000	/* NULL, end of table */
@@ -219,13 +219,13 @@ clear_fixed_var_mtrr_out:

#if CacheSize > 0x8000
	/* Enable caching for 32K-64K using fixed MTRR. */
-	movl	$MTRRfix4K_C0000_MSR, %ecx
+	movl	$MTRR_FIX_4K_C0000, %ecx
	simplemask CacheSize, 0x8000
	wrmsr
#endif

	/* Enable caching for 0-32K using fixed MTRR. */
-	movl	$MTRRfix4K_C8000_MSR, %ecx
+	movl	$MTRR_FIX_4K_C8000, %ecx
	simplemask CacheSize, 0
	wrmsr

@@ -235,7 +235,7 @@ clear_fixed_var_mtrr_out:
	 * Enable write base caching so we can do execute in place (XIP)
	 * on the flash ROM.
	 */
-	movl	$MTRRphysBase_MSR(1), %ecx
+	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	/*
	 * IMPORTANT: The following calculation _must_ be done at runtime. See
@@ -246,9 +246,9 @@ clear_fixed_var_mtrr_out:
	orl	$MTRR_TYPE_WRBACK, %eax
	wrmsr

-	movl	$MTRRphysMask_MSR(1), %ecx
+	movl	$MTRR_PHYS_MASK(1), %ecx
	movl	$0x0000000f, %edx
-	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	wrmsr
#endif /* CONFIG_XIP_ROM_SIZE */

@@ -332,13 +332,13 @@ lout:
	movl	%eax, %cr0

	/* Clear sth. */
-	movl	$MTRRfix4K_C8000_MSR, %ecx
+	movl	$MTRR_FIX_4K_C8000, %ecx
	xorl	%edx, %edx
	xorl	%eax, %eax
	wrmsr

#if CONFIG_DCACHE_RAM_SIZE > 0x8000
-	movl	$MTRRfix4K_C0000_MSR, %ecx
+	movl	$MTRR_FIX_4K_C0000, %ecx
	wrmsr
#endif

@@ -346,9 +346,9 @@ lout:
	 * Set the default memory type and disable fixed
	 * and enable variable MTRRs.
	 */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	xorl	%edx, %edx
-	movl	$MTRRdefTypeEn, %eax /* Enable variable and disable fixed MTRRs. */
+	movl	$MTRR_DEF_TYPE_EN, %eax /* Enable variable and disable fixed MTRRs. */
	wrmsr

	/* Enable cache. */
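The sequence at NotHtProcessor above reads naturally in C: write
MTRR_DEF_TYPE_MSR with both enable bits set and a default memory type of UC
(type 0 in the low byte). A minimal sketch, assuming msr_t and wrmsr() as
declared in coreboot's <cpu/x86/msr.h>:

	/* Default memory type UC, fixed and variable MTRRs enabled. */
	static void enable_mtrrs_uc_default(void)
	{
		msr_t msr;

		msr.hi = 0;
		msr.lo = MTRR_DEF_TYPE_EN | MTRR_DEF_TYPE_FIX_EN;
		wrmsr(MTRR_DEF_TYPE_MSR, msr);
	}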
diff --git a/src/cpu/intel/car/cache_as_ram_ht.inc b/src/cpu/intel/car/cache_as_ram_ht.inc
index 193ad418c0..6eb50bacaa 100644
--- a/src/cpu/intel/car/cache_as_ram_ht.inc
+++ b/src/cpu/intel/car/cache_as_ram_ht.inc
@@ -61,7 +61,7 @@ clear_mtrrs:
	post_code(0x21)

	/* Configure the default memory type to uncacheable. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	andl	$(~0x00000cff), %eax
	wrmsr
@@ -95,9 +95,9 @@ addrsize_no_MSR:
	 */
addrsize_set_high:
	xorl	%eax, %eax
-	movl	$MTRRphysMask_MSR(0), %ecx
+	movl	$MTRR_PHYS_MASK(0), %ecx
	wrmsr
-	movl	$MTRRphysMask_MSR(1), %ecx
+	movl	$MTRR_PHYS_MASK(1), %ecx
	wrmsr
	movl	$LAPIC_BASE_MSR, %ecx
	not	%edx
@@ -188,7 +188,7 @@ hyper_threading_cpu:
	post_code(0x26)

	/* Wait for sibling CPU to start. */
-1:	movl	$(MTRRphysBase_MSR(0)), %ecx
+1:	movl	$(MTRR_PHYS_BASE(0)), %ecx
	rdmsr
	andl	%eax, %eax
	jnz	sipi_complete
@@ -211,7 +211,7 @@ ap_init:
	post_code(0x28)

	/* MTRR registers are shared between HT siblings. */
-	movl	$(MTRRphysBase_MSR(0)), %ecx
+	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	$(1<<12), %eax
	xorl	%edx, %edx
	wrmsr
@@ -230,21 +230,21 @@ sipi_complete:
	post_code(0x2a)

	/* Set Cache-as-RAM base address. */
-	movl	$(MTRRphysBase_MSR(0)), %ecx
+	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	$(CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK), %eax
	xorl	%edx, %edx
	wrmsr

	/* Set Cache-as-RAM mask. */
-	movl	$(MTRRphysMask_MSR(0)), %ecx
+	movl	$(MTRR_PHYS_MASK(0)), %ecx
	rdmsr
-	movl	$(~(CACHE_AS_RAM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CACHE_AS_RAM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	wrmsr

	/* Enable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	orl	$MTRRdefTypeEn, %eax
+	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

	post_code(0x2b)
@@ -308,7 +308,7 @@ no_msr_11e:

#if CONFIG_XIP_ROM_SIZE
	/* Enable cache for our code in Flash because we do XIP here */
-	movl	$MTRRphysBase_MSR(1), %ecx
+	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	/*
	 * IMPORTANT: The following calculation _must_ be done at runtime. See
@@ -319,9 +319,9 @@ no_msr_11e:
	orl	$MTRR_TYPE_WRBACK, %eax
	wrmsr

-	movl	$MTRRphysMask_MSR(1), %ecx
+	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
-	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	wrmsr
#endif /* CONFIG_XIP_ROM_SIZE */
@@ -356,9 +356,9 @@ no_msr_11e:
	post_code(0x34)

	/* Disable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	andl	$(~MTRRdefTypeEn), %eax
+	andl	$(~MTRR_DEF_TYPE_EN), %eax
	wrmsr

	post_code(0x35)
@@ -382,24 +382,24 @@ no_msr_11e:
	post_code(0x38)

	/* Enable Write Back and Speculative Reads for low RAM. */
-	movl	$MTRRphysBase_MSR(0), %ecx
+	movl	$MTRR_PHYS_BASE(0), %ecx
	movl	$(0x00000000 | MTRR_TYPE_WRBACK), %eax
	xorl	%edx, %edx
	wrmsr
-	movl	$MTRRphysMask_MSR(0), %ecx
+	movl	$MTRR_PHYS_MASK(0), %ecx
	rdmsr
-	movl	$(~(CONFIG_RAMTOP - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CONFIG_RAMTOP - 1) | MTRR_PHYS_MASK_VALID), %eax
	wrmsr

#if CACHE_ROM_SIZE
	/* Enable caching and Speculative Reads for Flash ROM device. */
-	movl	$MTRRphysBase_MSR(1), %ecx
+	movl	$MTRR_PHYS_BASE(1), %ecx
	movl	$(CACHE_ROM_BASE | MTRR_TYPE_WRPROT), %eax
	xorl	%edx, %edx
	wrmsr
-	movl	$MTRRphysMask_MSR(1), %ecx
+	movl	$MTRR_PHYS_MASK(1), %ecx
	rdmsr
-	movl	$(~(CACHE_ROM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CACHE_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	wrmsr
#endif
@@ -413,9 +413,9 @@ no_msr_11e:
	post_code(0x3a)

	/* Enable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	orl	$MTRRdefTypeEn, %eax
+	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

	post_code(0x3b)
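The value written to MTRR_PHYS_MASK(0) above encodes both the region size and
the valid bit. A small host-side program to check the arithmetic (the 32 KiB
size is an assumption for illustration; CACHE_AS_RAM_SIZE varies by board):

	#include <stdio.h>
	#include <stdint.h>

	#define MTRR_PHYS_MASK_VALID	(1 << 11)	/* bit 11, per the SDM */

	int main(void)
	{
		uint32_t size = 32 * 1024;	/* example CACHE_AS_RAM_SIZE */
		uint32_t mask_lo = ~(size - 1) | MTRR_PHYS_MASK_VALID;

		/* prints PHYS_MASK.lo = 0xffff8800 */
		printf("PHYS_MASK.lo = 0x%08x\n", (unsigned)mask_lo);
		return 0;
	}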
diff --git a/src/cpu/intel/fsp_model_406dx/bootblock.c b/src/cpu/intel/fsp_model_406dx/bootblock.c
index a685eaac71..ef949917a5 100644
--- a/src/cpu/intel/fsp_model_406dx/bootblock.c
+++ b/src/cpu/intel/fsp_model_406dx/bootblock.c
@@ -56,10 +56,10 @@ static void set_var_mtrr(int reg, uint32_t base, uint32_t size, int type)
	msr_t basem, maskm;
	basem.lo = base | type;
	basem.hi = 0;
-	wrmsr(MTRRphysBase_MSR(reg), basem);
-	maskm.lo = ~(size - 1) | MTRRphysMaskValid;
+	wrmsr(MTRR_PHYS_BASE(reg), basem);
+	maskm.lo = ~(size - 1) | MTRR_PHYS_MASK_VALID;
	maskm.hi = (1 << (CONFIG_CPU_ADDR_BITS - 32)) - 1;
-	wrmsr(MTRRphysMask_MSR(reg), maskm);
+	wrmsr(MTRR_PHYS_MASK(reg), maskm);
}

static void enable_rom_caching(void)
@@ -74,7 +74,7 @@ static void enable_rom_caching(void)
	/* Enable Variable MTRRs */
	msr.hi = 0x00000000;
	msr.lo = 0x00000800;
-	wrmsr(MTRRdefType_MSR, msr);
+	wrmsr(MTRR_DEF_TYPE_MSR, msr);
}

static void set_no_evict_mode_msr(void)
diff --git a/src/cpu/intel/haswell/bootblock.c b/src/cpu/intel/haswell/bootblock.c
index f5d0f6cf41..8d0c53cee6 100644
--- a/src/cpu/intel/haswell/bootblock.c
+++ b/src/cpu/intel/haswell/bootblock.c
@@ -44,10 +44,10 @@ static void set_var_mtrr(
	msr_t basem, maskm;
	basem.lo = base | type;
	basem.hi = 0;
-	wrmsr(MTRRphysBase_MSR(reg), basem);
-	maskm.lo = ~(size - 1) | MTRRphysMaskValid;
+	wrmsr(MTRR_PHYS_BASE(reg), basem);
+	maskm.lo = ~(size - 1) | MTRR_PHYS_MASK_VALID;
	maskm.hi = (1 << (CONFIG_CPU_ADDR_BITS - 32)) - 1;
-	wrmsr(MTRRphysMask_MSR(reg), maskm);
+	wrmsr(MTRR_PHYS_MASK(reg), maskm);
}

static void enable_rom_caching(void)
@@ -61,7 +61,7 @@ static void enable_rom_caching(void)
	/* Enable Variable MTRRs */
	msr.hi = 0x00000000;
	msr.lo = 0x00000800;
-	wrmsr(MTRRdefType_MSR, msr);
+	wrmsr(MTRR_DEF_TYPE_MSR, msr);
}

static void set_flex_ratio_to_tdp_nominal(void)
@@ -113,12 +113,12 @@ static void set_flex_ratio_to_tdp_nominal(void)
static void check_for_clean_reset(void)
{
	msr_t msr;
-	msr = rdmsr(MTRRdefType_MSR);
+	msr = rdmsr(MTRR_DEF_TYPE_MSR);

	/* Use the MTRR default type MSR as a proxy for detecting INIT#.
	 * Reset the system if any known bits are set in that MSR. That is
	 * an indication of the CPU not being properly reset. */
-	if (msr.lo & (MTRRdefTypeEn | MTRRdefTypeFixEn)) {
+	if (msr.lo & (MTRR_DEF_TYPE_EN | MTRR_DEF_TYPE_FIX_EN)) {
		outb(0x0, 0xcf9);
		outb(0x6, 0xcf9);
		halt();
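The check_for_clean_reset() hunk above is a nice illustration of why the enable
bits matter: after a proper cold reset MTRR_DEF_TYPE_MSR reads back as 0, so a
set enable bit means the CPU came through INIT# with MSR state intact, and the
board is forced through a full reset via the standard 0xcf9 reset-control port.
Reduced to its core (a sketch using the renamed macros):

	msr_t msr = rdmsr(MTRR_DEF_TYPE_MSR);
	if (msr.lo & (MTRR_DEF_TYPE_EN | MTRR_DEF_TYPE_FIX_EN)) {
		outb(0x0, 0xcf9);
		outb(0x6, 0xcf9);	/* request a full reset */
		halt();
	}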
diff --git a/src/cpu/intel/haswell/cache_as_ram.inc b/src/cpu/intel/haswell/cache_as_ram.inc
index 0978bfb0ac..b8df2a1b76 100644
--- a/src/cpu/intel/haswell/cache_as_ram.inc
+++ b/src/cpu/intel/haswell/cache_as_ram.inc
@@ -73,31 +73,31 @@ clear_mtrrs:
	post_code(0x22)

	/* Configure the default memory type to uncacheable. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	andl	$(~0x00000cff), %eax
	wrmsr

	post_code(0x23)

	/* Set Cache-as-RAM base address. */
-	movl	$(MTRRphysBase_MSR(0)), %ecx
+	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	$(CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK), %eax
	xorl	%edx, %edx
	wrmsr

	post_code(0x24)

	/* Set Cache-as-RAM mask. */
-	movl	$(MTRRphysMask_MSR(0)), %ecx
-	movl	$(~(CACHE_AS_RAM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(MTRR_PHYS_MASK(0)), %ecx
+	movl	$(~(CACHE_AS_RAM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	movl	$CPU_PHYSMASK_HI, %edx
	wrmsr

	post_code(0x25)

	/* Enable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	orl	$MTRRdefTypeEn, %eax
+	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

	/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
@@ -134,7 +134,7 @@ clear_mtrrs:
	movl	%eax, %cr0

	/* Enable cache for our code in Flash because we do XIP here */
-	movl	$MTRRphysBase_MSR(1), %ecx
+	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	/*
	 * IMPORTANT: The following calculation _must_ be done at runtime. See
@@ -145,19 +145,19 @@ clear_mtrrs:
	orl	$MTRR_TYPE_WRPROT, %eax
	wrmsr

-	movl	$MTRRphysMask_MSR(1), %ecx
+	movl	$MTRR_PHYS_MASK(1), %ecx
	movl	$CPU_PHYSMASK_HI, %edx
-	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	wrmsr

	post_code(0x27)

	/* Enable caching for ram init code to run faster */
-	movl	$MTRRphysBase_MSR(2), %ecx
+	movl	$MTRR_PHYS_BASE(2), %ecx
	movl	$(CACHE_MRC_BASE | MTRR_TYPE_WRPROT), %eax
	xorl	%edx, %edx
	wrmsr
-	movl	$MTRRphysMask_MSR(2), %ecx
-	movl	$(CACHE_MRC_MASK | MTRRphysMaskValid), %eax
+	movl	$MTRR_PHYS_MASK(2), %ecx
+	movl	$(CACHE_MRC_MASK | MTRR_PHYS_MASK_VALID), %eax
	movl	$CPU_PHYSMASK_HI, %edx
	wrmsr

@@ -197,9 +197,9 @@ before_romstage:
	post_code(0x31)

	/* Disable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	andl	$(~MTRRdefTypeEn), %eax
+	andl	$(~MTRR_DEF_TYPE_EN), %eax
	wrmsr

	post_code(0x31)
@@ -220,9 +220,9 @@ before_romstage:
	/* Clear MTRR that was used to cache MRC */
	xorl	%eax, %eax
	xorl	%edx, %edx
-	movl	$MTRRphysBase_MSR(2), %ecx
+	movl	$MTRR_PHYS_BASE(2), %ecx
	wrmsr
-	movl	$MTRRphysMask_MSR(2), %ecx
+	movl	$MTRR_PHYS_MASK(2), %ecx
	wrmsr

	post_code(0x33)
@@ -246,7 +246,7 @@ before_romstage:

	/* Get number of MTRRs. */
	popl	%ebx
-	movl	$MTRRphysBase_MSR(0), %ecx
+	movl	$MTRR_PHYS_BASE(0), %ecx
1:
	testl	%ebx, %ebx
	jz	1f
@@ -279,9 +279,9 @@ before_romstage:
	post_code(0x3a)

	/* Enable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	orl	$MTRRdefTypeEn, %eax
+	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

	post_code(0x3b)
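The CAR setup at the top of this file reads naturally as C. A sketch under the
same constants (msr_t and wrmsr() per coreboot's <cpu/x86/msr.h>; the real code
must of course run as assembly this early, before any stack exists):

	static void setup_car_mtrr(void)
	{
		msr_t base, mask;

		base.lo = CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK;
		base.hi = 0;
		wrmsr(MTRR_PHYS_BASE(0), base);

		mask.lo = ~(CACHE_AS_RAM_SIZE - 1) | MTRR_PHYS_MASK_VALID;
		mask.hi = CPU_PHYSMASK_HI;
		wrmsr(MTRR_PHYS_MASK(0), mask);
	}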
diff --git a/src/cpu/intel/haswell/romstage.c b/src/cpu/intel/haswell/romstage.c
index 9c238ca578..3bb1090073 100644
--- a/src/cpu/intel/haswell/romstage.c
+++ b/src/cpu/intel/haswell/romstage.c
@@ -117,14 +117,14 @@ static void *setup_romstage_stack_after_car(void)

	/* Cache the ROM as WP just below 4GiB. */
	slot = stack_push(slot, mtrr_mask_upper); /* upper mask */
-	slot = stack_push(slot, ~(CACHE_ROM_SIZE - 1) | MTRRphysMaskValid);
+	slot = stack_push(slot, ~(CACHE_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID);
	slot = stack_push(slot, 0); /* upper base */
	slot = stack_push(slot, ~(CACHE_ROM_SIZE - 1) | MTRR_TYPE_WRPROT);
	num_mtrrs++;

	/* Cache RAM as WB from 0 -> CONFIG_RAMTOP. */
	slot = stack_push(slot, mtrr_mask_upper); /* upper mask */
-	slot = stack_push(slot, ~(CONFIG_RAMTOP - 1) | MTRRphysMaskValid);
+	slot = stack_push(slot, ~(CONFIG_RAMTOP - 1) | MTRR_PHYS_MASK_VALID);
	slot = stack_push(slot, 0); /* upper base */
	slot = stack_push(slot, 0 | MTRR_TYPE_WRBACK);
	num_mtrrs++;
@@ -135,7 +135,7 @@ static void *setup_romstage_stack_after_car(void)
	 * be 8MiB aligned. Set this area as cacheable so it can be used later
	 * for ramstage before setting up the entire RAM as cacheable. */
	slot = stack_push(slot, mtrr_mask_upper); /* upper mask */
-	slot = stack_push(slot, ~((8 << 20) - 1) | MTRRphysMaskValid);
+	slot = stack_push(slot, ~((8 << 20) - 1) | MTRR_PHYS_MASK_VALID);
	slot = stack_push(slot, 0); /* upper base */
	slot = stack_push(slot, (top_of_ram - (8 << 20)) | MTRR_TYPE_WRBACK);
	num_mtrrs++;
@@ -146,7 +146,7 @@ static void *setup_romstage_stack_after_car(void)
	 * to cacheable it provides faster access when relocating the SMM
	 * handler as well as using the TSEG region for other purposes. */
	slot = stack_push(slot, mtrr_mask_upper); /* upper mask */
-	slot = stack_push(slot, ~((8 << 20) - 1) | MTRRphysMaskValid);
+	slot = stack_push(slot, ~((8 << 20) - 1) | MTRR_PHYS_MASK_VALID);
	slot = stack_push(slot, 0); /* upper base */
	slot = stack_push(slot, top_of_ram | MTRR_TYPE_WRBACK);
	num_mtrrs++;
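Each cached region above occupies four 32-bit stack slots, pushed mask.hi,
mask.lo, base.hi, base.lo so that the consuming loop in cache_as_ram.inc can
pop them back in programming order and feed consecutive wrmsr operations. As a
pseudo-struct (field names are illustrative only, not a coreboot API):

	struct mtrr_slots {		/* popped lowest address first */
		uint32_t base_lo;	/* -> MTRR_PHYS_BASE(n), low half */
		uint32_t base_hi;
		uint32_t mask_lo;	/* -> MTRR_PHYS_MASK(n), low half */
		uint32_t mask_hi;
	};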
diff --git a/src/cpu/intel/haswell/smmrelocate.c b/src/cpu/intel/haswell/smmrelocate.c
index ab94f9a973..00e2d5568b 100644
--- a/src/cpu/intel/haswell/smmrelocate.c
+++ b/src/cpu/intel/haswell/smmrelocate.c
@@ -73,8 +73,8 @@ static inline void write_smrr(struct smm_relocation_params *relo_params)
{
	printk(BIOS_DEBUG, "Writing SMRR. base = 0x%08x, mask=0x%08x\n",
	       relo_params->smrr_base.lo, relo_params->smrr_mask.lo);
-	wrmsr(SMRRphysBase_MSR, relo_params->smrr_base);
-	wrmsr(SMRRphysMask_MSR, relo_params->smrr_mask);
+	wrmsr(SMRR_PHYS_BASE, relo_params->smrr_base);
+	wrmsr(SMRR_PHYS_MASK, relo_params->smrr_mask);
}

static inline void write_emrr(struct smm_relocation_params *relo_params)
@@ -214,7 +214,7 @@ static void asmlinkage cpu_smm_do_relocation(void *arg)
	update_save_state(cpu, relo_params, runtime);

	/* Write EMRR and SMRR MSRs based on indicated support. */
-	mtrr_cap = rdmsr(MTRRcap_MSR);
+	mtrr_cap = rdmsr(MTRR_CAP_MSR);
	if (mtrr_cap.lo & SMRR_SUPPORTED)
		write_smrr(relo_params);

@@ -272,7 +272,7 @@ static void fill_in_relocation_params(struct device *dev,
	/* SMRR has 32-bits of valid address aligned to 4KiB. */
	params->smrr_base.lo = (params->smram_base & rmask) | MTRR_TYPE_WRBACK;
	params->smrr_base.hi = 0;
-	params->smrr_mask.lo = (~(tseg_size - 1) & rmask) | MTRRphysMaskValid;
+	params->smrr_mask.lo = (~(tseg_size - 1) & rmask) | MTRR_PHYS_MASK_VALID;
	params->smrr_mask.hi = 0;

	/* The EMRR and UNCORE_EMRR are at IEDBASE + 2MiB */
@@ -283,14 +283,14 @@ static void fill_in_relocation_params(struct device *dev,
	 * on the number of physical address bits supported. */
	params->emrr_base.lo = emrr_base | MTRR_TYPE_WRBACK;
	params->emrr_base.hi = 0;
-	params->emrr_mask.lo = (~(emrr_size - 1) & rmask) | MTRRphysMaskValid;
+	params->emrr_mask.lo = (~(emrr_size - 1) & rmask) | MTRR_PHYS_MASK_VALID;
	params->emrr_mask.hi = (1 << (phys_bits - 32)) - 1;

	/* UNCORE_EMRR has 39 bits of valid address aligned to 4KiB. */
	params->uncore_emrr_base.lo = emrr_base;
	params->uncore_emrr_base.hi = 0;
	params->uncore_emrr_mask.lo = (~(emrr_size - 1) & rmask) |
-		MTRRphysMaskValid;
+		MTRR_PHYS_MASK_VALID;
	params->uncore_emrr_mask.hi = (1 << (39 - 32)) - 1;
}
diff --git a/src/cpu/intel/model_2065x/bootblock.c b/src/cpu/intel/model_2065x/bootblock.c
index b6a2442fbf..edffe14af1 100644
--- a/src/cpu/intel/model_2065x/bootblock.c
+++ b/src/cpu/intel/model_2065x/bootblock.c
@@ -43,10 +43,10 @@ static void set_var_mtrr(
	msr_t basem, maskm;
	basem.lo = base | type;
	basem.hi = 0;
-	wrmsr(MTRRphysBase_MSR(reg), basem);
-	maskm.lo = ~(size - 1) | MTRRphysMaskValid;
+	wrmsr(MTRR_PHYS_BASE(reg), basem);
+	maskm.lo = ~(size - 1) | MTRR_PHYS_MASK_VALID;
	maskm.hi = (1 << (CONFIG_CPU_ADDR_BITS - 32)) - 1;
-	wrmsr(MTRRphysMask_MSR(reg), maskm);
+	wrmsr(MTRR_PHYS_MASK(reg), maskm);
}

static void enable_rom_caching(void)
@@ -60,7 +60,7 @@ static void enable_rom_caching(void)
	/* Enable Variable MTRRs */
	msr.hi = 0x00000000;
	msr.lo = 0x00000800;
-	wrmsr(MTRRdefType_MSR, msr);
+	wrmsr(MTRR_DEF_TYPE_MSR, msr);
}

static void set_flex_ratio_to_tdp_nominal(void)
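The relocation code above gates the SMRR write on capability, since SMRR
support is not universal; the gen1 variant at the end of this patch does the
same. The gist, reduced to a sketch (SMRR_SUPPORTED is bit 11 of MTRR_CAP_MSR
per the SDM):

	msr_t mtrr_cap = rdmsr(MTRR_CAP_MSR);
	if (mtrr_cap.lo & SMRR_SUPPORTED) {
		wrmsr(SMRR_PHYS_BASE, relo_params->smrr_base);
		wrmsr(SMRR_PHYS_MASK, relo_params->smrr_mask);
	}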
diff --git a/src/cpu/intel/model_2065x/cache_as_ram.inc b/src/cpu/intel/model_2065x/cache_as_ram.inc
index cfa3b6b3bf..f36af2b3fc 100644
--- a/src/cpu/intel/model_2065x/cache_as_ram.inc
+++ b/src/cpu/intel/model_2065x/cache_as_ram.inc
@@ -48,8 +48,8 @@ wait_for_sipi:
	jc	wait_for_sipi

	post_code(0x21)
-	/* Clean-up MTRRdefType_MSR. */
-	movl	$MTRRdefType_MSR, %ecx
+	/* Clean-up MTRR_DEF_TYPE_MSR. */
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr
@@ -69,7 +69,7 @@ clear_mtrrs:
	jnz	clear_mtrrs

	/* Zero out all variable range MTRRs. */
-	movl	$MTRRcap_MSR, %ecx
+	movl	$MTRR_CAP_MSR, %ecx
	rdmsr
	andl	$0xff, %eax
	shl	$1, %eax
@@ -85,24 +85,24 @@ clear_var_mtrrs:
	post_code(0x23)

	/* Set Cache-as-RAM base address. */
-	movl	$(MTRRphysBase_MSR(0)), %ecx
+	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	$(CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK), %eax
	xorl	%edx, %edx
	wrmsr

	post_code(0x24)

	/* Set Cache-as-RAM mask. */
-	movl	$(MTRRphysMask_MSR(0)), %ecx
-	movl	$(~(CACHE_AS_RAM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(MTRR_PHYS_MASK(0)), %ecx
+	movl	$(~(CACHE_AS_RAM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	movl	$CPU_PHYSMASK_HI, %edx
	wrmsr

	post_code(0x25)

	/* Enable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	orl	$MTRRdefTypeEn, %eax
+	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

	/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
@@ -139,7 +139,7 @@ clear_var_mtrrs:
	movl	%eax, %cr0

	/* Enable cache for our code in Flash because we do XIP here */
-	movl	$MTRRphysBase_MSR(1), %ecx
+	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	/*
	 * IMPORTANT: The following calculation _must_ be done at runtime. See
@@ -150,9 +150,9 @@ clear_var_mtrrs:
	orl	$MTRR_TYPE_WRPROT, %eax
	wrmsr

-	movl	$MTRRphysMask_MSR(1), %ecx
+	movl	$MTRR_PHYS_MASK(1), %ecx
	movl	$CPU_PHYSMASK_HI, %edx
-	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	wrmsr

	post_code(0x27)
@@ -189,9 +189,9 @@ before_romstage:
	post_code(0x31)

	/* Disable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	andl	$(~MTRRdefTypeEn), %eax
+	andl	$(~MTRR_DEF_TYPE_EN), %eax
	wrmsr

	post_code(0x31)
@@ -228,12 +228,12 @@ before_romstage:
	/* Enable Write Back and Speculative Reads for the first MB
	 * and ramstage. */
-	movl	$MTRRphysBase_MSR(0), %ecx
+	movl	$MTRR_PHYS_BASE(0), %ecx
	movl	$(0x00000000 | MTRR_TYPE_WRBACK), %eax
	xorl	%edx, %edx
	wrmsr
-	movl	$MTRRphysMask_MSR(0), %ecx
-	movl	$(~(CONFIG_RAMTOP - 1) | MTRRphysMaskValid), %eax
+	movl	$MTRR_PHYS_MASK(0), %ecx
+	movl	$(~(CONFIG_RAMTOP - 1) | MTRR_PHYS_MASK_VALID), %eax
	movl	$CPU_PHYSMASK_HI, %edx	// 36bit address space
	wrmsr

@@ -241,12 +241,12 @@ before_romstage:
	/* Enable Caching and speculative Reads for the
	 * complete ROM now that we actually have RAM. */
-	movl	$MTRRphysBase_MSR(1), %ecx
+	movl	$MTRR_PHYS_BASE(1), %ecx
	movl	$(CACHE_ROM_BASE | MTRR_TYPE_WRPROT), %eax
	xorl	%edx, %edx
	wrmsr
-	movl	$MTRRphysMask_MSR(1), %ecx
-	movl	$(~(CACHE_ROM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$MTRR_PHYS_MASK(1), %ecx
+	movl	$(~(CACHE_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	movl	$CPU_PHYSMASK_HI, %edx
	wrmsr
#endif
@@ -261,9 +261,9 @@ before_romstage:
	post_code(0x3a)

	/* Enable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	orl	$MTRRdefTypeEn, %eax
+	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

	post_code(0x3b)
diff --git a/src/cpu/intel/model_206ax/bootblock.c b/src/cpu/intel/model_206ax/bootblock.c
index d41afb2431..416b484e11 100644
--- a/src/cpu/intel/model_206ax/bootblock.c
+++ b/src/cpu/intel/model_206ax/bootblock.c
@@ -44,10 +44,10 @@ static void set_var_mtrr(
	msr_t basem, maskm;
	basem.lo = base | type;
	basem.hi = 0;
-	wrmsr(MTRRphysBase_MSR(reg), basem);
-	maskm.lo = ~(size - 1) | MTRRphysMaskValid;
+	wrmsr(MTRR_PHYS_BASE(reg), basem);
+	maskm.lo = ~(size - 1) | MTRR_PHYS_MASK_VALID;
	maskm.hi = (1 << (CONFIG_CPU_ADDR_BITS - 32)) - 1;
-	wrmsr(MTRRphysMask_MSR(reg), maskm);
+	wrmsr(MTRR_PHYS_MASK(reg), maskm);
}

static void enable_rom_caching(void)
@@ -61,7 +61,7 @@ static void enable_rom_caching(void)
	/* Enable Variable MTRRs */
	msr.hi = 0x00000000;
	msr.lo = 0x00000800;
-	wrmsr(MTRRdefType_MSR, msr);
+	wrmsr(MTRR_DEF_TYPE_MSR, msr);
}

static void set_flex_ratio_to_tdp_nominal(void)
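In the set_var_mtrr() helpers renamed above, the interesting line is
maskm.hi = (1 << (CONFIG_CPU_ADDR_BITS - 32)) - 1: the mask must cover every
supported physical address bit or the range match fails. Worked out for a
hypothetical 36-bit part (the 36 is an assumption for illustration):

	#include <stdio.h>

	int main(void)
	{
		int addr_bits = 36;	/* stand-in for CONFIG_CPU_ADDR_BITS */
		unsigned mask_hi = (1u << (addr_bits - 32)) - 1;

		printf("PHYS_MASK.hi = 0x%x\n", mask_hi);	/* 0xf = bits 35:32 */
		return 0;
	}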
diff --git a/src/cpu/intel/model_206ax/cache_as_ram.inc b/src/cpu/intel/model_206ax/cache_as_ram.inc
index a3f1c649c1..04c0808252 100644
--- a/src/cpu/intel/model_206ax/cache_as_ram.inc
+++ b/src/cpu/intel/model_206ax/cache_as_ram.inc
@@ -68,31 +68,31 @@ clear_mtrrs:
	post_code(0x22)

	/* Configure the default memory type to uncacheable. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	andl	$(~0x00000cff), %eax
	wrmsr

	post_code(0x23)

	/* Set Cache-as-RAM base address. */
-	movl	$(MTRRphysBase_MSR(0)), %ecx
+	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	$(CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK), %eax
	xorl	%edx, %edx
	wrmsr

	post_code(0x24)

	/* Set Cache-as-RAM mask. */
-	movl	$(MTRRphysMask_MSR(0)), %ecx
-	movl	$(~(CACHE_AS_RAM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(MTRR_PHYS_MASK(0)), %ecx
+	movl	$(~(CACHE_AS_RAM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	movl	$CPU_PHYSMASK_HI, %edx
	wrmsr

	post_code(0x25)

	/* Enable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	orl	$MTRRdefTypeEn, %eax
+	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

	/* Enable cache (CR0.CD = 0, CR0.NW = 0). */
@@ -129,7 +129,7 @@ clear_mtrrs:
	movl	%eax, %cr0

	/* Enable cache for our code in Flash because we do XIP here */
-	movl	$MTRRphysBase_MSR(1), %ecx
+	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	/*
	 * IMPORTANT: The following calculation _must_ be done at runtime. See
@@ -140,19 +140,19 @@ clear_mtrrs:
	orl	$MTRR_TYPE_WRPROT, %eax
	wrmsr

-	movl	$MTRRphysMask_MSR(1), %ecx
+	movl	$MTRR_PHYS_MASK(1), %ecx
	movl	$CPU_PHYSMASK_HI, %edx
-	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	wrmsr

	post_code(0x27)

	/* Enable caching for ram init code to run faster */
-	movl	$MTRRphysBase_MSR(2), %ecx
+	movl	$MTRR_PHYS_BASE(2), %ecx
	movl	$(CACHE_MRC_BASE | MTRR_TYPE_WRPROT), %eax
	xorl	%edx, %edx
	wrmsr
-	movl	$MTRRphysMask_MSR(2), %ecx
-	movl	$(CACHE_MRC_MASK | MTRRphysMaskValid), %eax
+	movl	$MTRR_PHYS_MASK(2), %ecx
+	movl	$(CACHE_MRC_MASK | MTRR_PHYS_MASK_VALID), %eax
	movl	$CPU_PHYSMASK_HI, %edx
	wrmsr

@@ -189,9 +189,9 @@ before_romstage:
	post_code(0x31)

	/* Disable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	andl	$(~MTRRdefTypeEn), %eax
+	andl	$(~MTRR_DEF_TYPE_EN), %eax
	wrmsr

	post_code(0x31)
@@ -212,9 +212,9 @@ before_romstage:
	/* Clear MTRR that was used to cache MRC */
	xorl	%eax, %eax
	xorl	%edx, %edx
-	movl	$MTRRphysBase_MSR(2), %ecx
+	movl	$MTRR_PHYS_BASE(2), %ecx
	wrmsr
-	movl	$MTRRphysMask_MSR(2), %ecx
+	movl	$MTRR_PHYS_MASK(2), %ecx
	wrmsr

	post_code(0x33)
@@ -236,12 +236,12 @@ before_romstage:
	/* Enable Write Back and Speculative Reads for the first MB
	 * and ramstage. */
-	movl	$MTRRphysBase_MSR(0), %ecx
+	movl	$MTRR_PHYS_BASE(0), %ecx
	movl	$(0x00000000 | MTRR_TYPE_WRBACK), %eax
	xorl	%edx, %edx
	wrmsr
-	movl	$MTRRphysMask_MSR(0), %ecx
-	movl	$(~(CONFIG_RAMTOP - 1) | MTRRphysMaskValid), %eax
+	movl	$MTRR_PHYS_MASK(0), %ecx
+	movl	$(~(CONFIG_RAMTOP - 1) | MTRR_PHYS_MASK_VALID), %eax
	movl	$CPU_PHYSMASK_HI, %edx	// 36bit address space
	wrmsr

@@ -249,12 +249,12 @@ before_romstage:
	/* Enable Caching and speculative Reads for the
	 * complete ROM now that we actually have RAM. */
-	movl	$MTRRphysBase_MSR(1), %ecx
+	movl	$MTRR_PHYS_BASE(1), %ecx
	movl	$(CACHE_ROM_BASE | MTRR_TYPE_WRPROT), %eax
	xorl	%edx, %edx
	wrmsr
-	movl	$MTRRphysMask_MSR(1), %ecx
-	movl	$(~(CACHE_ROM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$MTRR_PHYS_MASK(1), %ecx
+	movl	$(~(CACHE_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	movl	$CPU_PHYSMASK_HI, %edx
	wrmsr
#endif
@@ -269,9 +269,9 @@ before_romstage:
	post_code(0x3a)

	/* Enable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	orl	$MTRRdefTypeEn, %eax
+	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

	post_code(0x3b)
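The "must be done at runtime" comment that recurs in these CAR files concerns
the XIP base: it is derived from an address inside the currently executing ROM
image, aligned down to the XIP window, rather than from a link-time constant
(the full source carries a coreboot mailing-list reference on the elided next
line). Roughly, with illustrative names:

	uint32_t xip_base = code_addr & ~(CONFIG_XIP_ROM_SIZE - 1);
	base.lo = xip_base | MTRR_TYPE_WRPROT;	/* or WRBACK on older cores */
	base.hi = 0;
	wrmsr(MTRR_PHYS_BASE(1), base);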
diff --git a/src/cpu/intel/model_6ex/cache_as_ram.inc b/src/cpu/intel/model_6ex/cache_as_ram.inc
index 271b7568d9..16244fb4f5 100644
--- a/src/cpu/intel/model_6ex/cache_as_ram.inc
+++ b/src/cpu/intel/model_6ex/cache_as_ram.inc
@@ -52,27 +52,27 @@ clear_mtrrs:
	jnz	clear_mtrrs

	/* Configure the default memory type to uncacheable. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
	andl	$(~0x00000cff), %eax
	wrmsr

	/* Set Cache-as-RAM base address. */
-	movl	$(MTRRphysBase_MSR(0)), %ecx
+	movl	$(MTRR_PHYS_BASE(0)), %ecx
	movl	$(CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK), %eax
	xorl	%edx, %edx
	wrmsr

	/* Set Cache-as-RAM mask. */
-	movl	$(MTRRphysMask_MSR(0)), %ecx
-	movl	$(~(CACHE_AS_RAM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(MTRR_PHYS_MASK(0)), %ecx
+	movl	$(~(CACHE_AS_RAM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	movl	$CPU_PHYSMASK_HI, %edx
	wrmsr

	/* Enable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	orl	$MTRRdefTypeEn, %eax
+	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

	/* Enable L2 cache. */
@@ -102,7 +102,7 @@ clear_mtrrs:

#if CONFIG_XIP_ROM_SIZE
	/* Enable cache for our code in Flash because we do XIP here */
-	movl	$MTRRphysBase_MSR(1), %ecx
+	movl	$MTRR_PHYS_BASE(1), %ecx
	xorl	%edx, %edx
	/*
	 * IMPORTANT: The following calculation _must_ be done at runtime. See
@@ -113,9 +113,9 @@ clear_mtrrs:
	orl	$MTRR_TYPE_WRBACK, %eax
	wrmsr

-	movl	$MTRRphysMask_MSR(1), %ecx
+	movl	$MTRR_PHYS_MASK(1), %ecx
	movl	$CPU_PHYSMASK_HI, %edx
-	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	wrmsr
#endif /* CONFIG_XIP_ROM_SIZE */
@@ -150,9 +150,9 @@ clear_mtrrs:
	post_code(0x31)

	/* Disable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	andl	$(~MTRRdefTypeEn), %eax
+	andl	$(~MTRR_DEF_TYPE_EN), %eax
	wrmsr

	post_code(0x31)
@@ -176,23 +176,23 @@ clear_mtrrs:
	post_code(0x38)

	/* Enable Write Back and Speculative Reads for low RAM. */
-	movl	$MTRRphysBase_MSR(0), %ecx
+	movl	$MTRR_PHYS_BASE(0), %ecx
	movl	$(0x00000000 | MTRR_TYPE_WRBACK), %eax
	xorl	%edx, %edx
	wrmsr
-	movl	$MTRRphysMask_MSR(0), %ecx
-	movl	$(~(CONFIG_RAMTOP - 1) | MTRRphysMaskValid), %eax
+	movl	$MTRR_PHYS_MASK(0), %ecx
+	movl	$(~(CONFIG_RAMTOP - 1) | MTRR_PHYS_MASK_VALID), %eax
	movl	$CPU_PHYSMASK_HI, %edx
	wrmsr

#if CACHE_ROM_SIZE
	/* Enable caching and Speculative Reads for Flash ROM device. */
-	movl	$MTRRphysBase_MSR(1), %ecx
+	movl	$MTRR_PHYS_BASE(1), %ecx
	movl	$(CACHE_ROM_BASE | MTRR_TYPE_WRPROT), %eax
	xorl	%edx, %edx
	wrmsr
-	movl	$MTRRphysMask_MSR(1), %ecx
-	movl	$(~(CACHE_ROM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$MTRR_PHYS_MASK(1), %ecx
+	movl	$(~(CACHE_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
	movl	$CPU_PHYSMASK_HI, %edx
	wrmsr
#endif
@@ -207,9 +207,9 @@ clear_mtrrs:
	post_code(0x3a)

	/* Enable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
	rdmsr
-	orl	$MTRRdefTypeEn, %eax
+	orl	$MTRR_DEF_TYPE_EN, %eax
	wrmsr

	post_code(0x3b)
diff --git a/src/cpu/intel/smm/gen1/smmrelocate.c b/src/cpu/intel/smm/gen1/smmrelocate.c
index bc14444dbf..41ec39cab0 100644
--- a/src/cpu/intel/smm/gen1/smmrelocate.c
+++ b/src/cpu/intel/smm/gen1/smmrelocate.c
@@ -63,8 +63,8 @@ static inline void write_smrr(struct smm_relocation_params *relo_params)
{
	printk(BIOS_DEBUG, "Writing SMRR. base = 0x%08x, mask=0x%08x\n",
	       relo_params->smrr_base.lo, relo_params->smrr_mask.lo);
-	wrmsr(SMRRphysBase_MSR, relo_params->smrr_base);
-	wrmsr(SMRRphysMask_MSR, relo_params->smrr_mask);
+	wrmsr(SMRR_PHYS_BASE, relo_params->smrr_base);
+	wrmsr(SMRR_PHYS_MASK, relo_params->smrr_mask);
}

/* The relocation work is actually performed in SMM context, but the code
@@ -109,7 +109,7 @@ static void asmlinkage cpu_smm_do_relocation(void *arg)
	       save_state->smbase, save_state->iedbase, save_state);

	/* Write SMRR MSRs based on indicated support. */
-	mtrr_cap = rdmsr(MTRRcap_MSR);
+	mtrr_cap = rdmsr(MTRR_CAP_MSR);
	if (mtrr_cap.lo & SMRR_SUPPORTED)
		write_smrr(relo_params);

@@ -142,7 +142,7 @@ static void fill_in_relocation_params(struct smm_relocation_params *params)
	/* SMRR has 32-bits of valid address aligned to 4KiB. */
	params->smrr_base.lo = (params->smram_base & rmask) | MTRR_TYPE_WRBACK;
	params->smrr_base.hi = 0;
-	params->smrr_mask.lo = (~(tseg_size - 1) & rmask) | MTRRphysMaskValid;
+	params->smrr_mask.lo = (~(tseg_size - 1) & rmask) | MTRR_PHYS_MASK_VALID;
	params->smrr_mask.hi = 0;
}
--