#ifndef CPU_AMD_MTRR_H
#define CPU_AMD_MTRR_H
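
/* The variable I/O range registers (IORRs) occupy two base/mask MSR pairs. */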
#define IORR_FIRST 0xC0010016
#define IORR_LAST 0xC0010019
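
/*
 * RdDram/WrDram attribute bits in the fixed-range MTRRs. They are only
 * visible while SYSCFG_MSR_MtrrFixDramModEn is set.
 */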
#define MTRR_READ_MEM			(1 << 4)
#define MTRR_WRITE_MEM			(1 << 3)
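
/* SYSCFG MSR: TOM2 enable, MTRR DRAM-attribute extensions and related bits. */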
#define SYSCFG_MSR			0xC0010010
#define SYSCFG_MSR_TOM2WB		(1 << 22)
#define SYSCFG_MSR_TOM2En		(1 << 21)
#define SYSCFG_MSR_MtrrVarDramEn	(1 << 20)
#define SYSCFG_MSR_MtrrFixDramModEn	(1 << 19)
#define SYSCFG_MSR_MtrrFixDramEn	(1 << 18)
#define SYSCFG_MSR_UcLockEn		(1 << 17)
#define SYSCFG_MSR_ChxToDirtyDis	(1 << 16)
#define SYSCFG_MSR_ClVicBlkEn		(1 << 11)
#define SYSCFG_MSR_SetDirtyEnO		(1 << 10)
#define SYSCFG_MSR_SetDirtyEnS		(1 << 9)
#define SYSCFG_MSR_SetDirtyEnE		(1 << 8)
#define SYSCFG_MSR_SysVicLimitMask	((1 << 8) - (1 << 5))
#define SYSCFG_MSR_SysAckLimitMask	((1 << 5) - (1 << 0))
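
/* Base/mask MSR addresses for variable IORR pair 'reg' (0 or 1). */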
#define IORRBase_MSR(reg) (0xC0010016 + 2 * (reg))
#define IORRMask_MSR(reg) (0xC0010016 + 2 * (reg) + 1)
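
/*
 * TOP_MEM/TOP_MEM2: first address above the DRAM below 4GiB resp. above 4GiB.
 * The assembler does not understand the 'ul' suffix, hence the two variants.
 */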
#if defined(__ASSEMBLER__)
#define TOP_MEM		0xC001001A
#define TOP_MEM2	0xC001001D
#else
#define TOP_MEM		0xC001001Aul
#define TOP_MEM2	0xC001001Dul
#endif
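
/* TOM/TOM2 have 8MiB granularity; these mask the bits below that boundary. */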
#define TOP_MEM_MASK			0x007fffff
#define TOP_MEM_MASK_KB			(TOP_MEM_MASK >> 10)

#if !defined(__PRE_RAM__) && !defined(__ASSEMBLER__)

#include <cpu/x86/msr.h>

/* Set up all fixed and variable MTRRs on an AMD CPU. */
void amd_setup_mtrrs(void);

/*
 * Access to some AMD MSRs is "password"-protected: the CPU requires EDI to
 * hold the magic value 0x9c5a203a during the RDMSR/WRMSR instruction.
 */
static inline __attribute__((always_inline)) msr_t rdmsr_amd(unsigned index)
{
	msr_t result;
	__asm__ __volatile__ (
		"rdmsr"
		: "=a" (result.lo), "=d" (result.hi)
		: "c" (index), "D" (0x9c5a203a)
	);
	return result;
}

static inline __attribute__((always_inline)) void wrmsr_amd(unsigned index, msr_t msr)
{
	__asm__ __volatile__ (
		"wrmsr"
		: /* No outputs */
		: "c" (index), "a" (msr.lo), "d" (msr.hi), "D" (0x9c5a203a)
	);
}

/* To distribute topmem MSRs to APs. */
void setup_bsp_ramtop(void);
uint64_t bsp_topmem(void);
uint64_t bsp_topmem2(void);
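
/*
 * Sketch of the intended flow (not an additional interface): the BSP calls
 * setup_bsp_ramtop() to cache its TOP_MEM/TOP_MEM2 values, and each AP then
 * programs its own MSRs from bsp_topmem()/bsp_topmem2().
 */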

#endif /* !__PRE_RAM__ && !__ASSEMBLER__ */

#endif /* CPU_AMD_MTRR_H */