path: root/src/include/cpu/x86/lapic.h
#ifndef CPU_X86_LAPIC_H
#define CPU_X86_LAPIC_H

#include <arch/cpu.h>
#include <arch/mmio.h>
#include <cpu/x86/lapic_def.h>
#include <cpu/x86/msr.h>
#include <halt.h>
#include <stdint.h>

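/* Check the x2APIC enable bit in the IA32_APIC_BASE MSR. */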
static inline bool is_x2apic_mode(void)
{
	msr_t msr;
	msr = rdmsr(LAPIC_BASE_MSR);
	return ((msr.lo & LAPIC_BASE_X2APIC_ENABLED) == LAPIC_BASE_X2APIC_ENABLED);
}

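/*
 * Send an IPI through the x2APIC ICR MSR: the destination APIC ID goes in
 * the upper 32 bits and the delivery command in the lower 32 bits.
 */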
static inline void x2apic_send_ipi(uint32_t icrlow, uint32_t apicid)
{
	msr_t icr;
	icr.hi = apicid;
	icr.lo = icrlow;
	wrmsr(X2APIC_MSR_ICR_ADDRESS, icr);
}

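/*
 * Read a local APIC register, using the MSR interface in x2APIC mode
 * (MSR index = x2APIC base + MMIO offset / 16) and the MMIO mapping at
 * LAPIC_DEFAULT_BASE otherwise.
 */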
static __always_inline uint32_t lapic_read(unsigned int reg)
{
	uint32_t value, index;
	msr_t msr;

	if (is_x2apic_mode()) {
		index = X2APIC_MSR_BASE_ADDRESS + (uint32_t)(reg >> 4);
		msr = rdmsr(index);
		value = msr.lo;
	} else {
		value = read32((volatile void *)(uintptr_t)(LAPIC_DEFAULT_BASE + reg));
	}
	return value;
}

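/* Write a local APIC register through the MSR or MMIO interface, as above. */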
static __always_inline void lapic_write(unsigned int reg, uint32_t v)
{
	msr_t msr;
	uint32_t index;
	if (is_x2apic_mode()) {
		index = X2APIC_MSR_BASE_ADDRESS + (uint32_t)(reg >> 4);
		msr.hi = 0x0;
		msr.lo = v;
		wrmsr(index, msr);
	} else {
		write32((volatile void *)(uintptr_t)(LAPIC_DEFAULT_BASE + reg), v);
	}
}

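/*
 * Spin until the ICR delivery status bit reports idle. In x2APIC mode the
 * bit is not implemented (it reads as zero), so this returns immediately.
 */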
static __always_inline void lapic_wait_icr_idle(void)
{
	do { } while (lapic_read(LAPIC_ICR) & LAPIC_ICR_BUSY);
}

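/*
 * Program the LAPIC base address to LAPIC_DEFAULT_BASE and set the global
 * enable bit in the IA32_APIC_BASE MSR.
 */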
static inline void enable_lapic(void)
{
	msr_t msr;
	msr = rdmsr(LAPIC_BASE_MSR);
	msr.hi &= 0xffffff00;
	msr.lo &= ~LAPIC_BASE_MSR_ADDR_MASK;
	msr.lo |= LAPIC_DEFAULT_BASE;
	msr.lo |= LAPIC_BASE_MSR_ENABLE;
	wrmsr(LAPIC_BASE_MSR, msr);
}

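/* Clear the global enable bit in the IA32_APIC_BASE MSR. */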
static inline void disable_lapic(void)
{
	msr_t msr;
	msr = rdmsr(LAPIC_BASE_MSR);
	msr.lo &= ~LAPIC_BASE_MSR_ENABLE;
	wrmsr(LAPIC_BASE_MSR, msr);
}

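/*
 * Return the initial APIC ID reported by CPUID: the full 32-bit x2APIC ID
 * from leaf 0xb (EDX) when available, otherwise bits 31:24 of leaf 1 EBX.
 */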
static __always_inline unsigned int initial_lapicid(void)
{
	uint32_t lapicid;
	if (is_x2apic_mode() && cpuid_get_max_func() >= 0xb)
		lapicid = cpuid_ext(0xb, 0).edx;
	else
		lapicid = cpuid_ebx(1) >> 24;
	return lapicid;
}

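/* Return the current APIC ID from the LAPIC ID register. */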
static __always_inline unsigned int lapicid(void)
{
	uint32_t lapicid = lapic_read(LAPIC_ID);

	/* In xAPIC mode the ID lives in bits 31:24 of the register. */
	if (!is_x2apic_mode())
		lapicid >>= 24;
	return lapicid;
}

#if !CONFIG(AP_IN_SIPI_WAIT)
/*
 * If the AP needs to go back to SIPI wait, the non-inlined version of this
 * function from lapic_cpu_init.c is used instead.
 */
static __always_inline void stop_this_cpu(void)
{
	/* Called by an AP when it is ready to halt and wait for a new task */
	halt();
}
#else
void stop_this_cpu(void);
#endif

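/*
 * Write a LAPIC register with xchg, which carries an implicit lock and acts
 * as a full barrier. This always uses the xAPIC MMIO mapping, so it is only
 * meaningful when the APIC is not in x2APIC mode.
 */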
static inline void lapic_write_atomic(unsigned long reg, uint32_t v)
{
	volatile uint32_t *ptr;

	ptr = (volatile uint32_t *)(LAPIC_DEFAULT_BASE + reg);

	asm volatile ("xchgl %0, %1\n"
		      : "+r" (v), "+m" (*(ptr))
		      : : "memory", "cc");
}

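/* These wrappers map to a plain read and an atomic (xchg) write respectively. */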
# define lapic_read_around(x) lapic_read(x)
# define lapic_write_around(x, y) lapic_write_atomic((x), (y))

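/* Set up virtual wire mode so legacy PIC interrupts can be delivered (ExtINT
 * on LINT0, NMI on LINT1). */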
void lapic_virtual_wire_mode_init(void);

/* Check whether the local APIC needs to be initialized. */
static inline int need_lapic_init(void)
{
	return CONFIG(SMP) || CONFIG(IOAPIC);
}

static inline void setup_lapic(void)
{
	if (need_lapic_init())
		lapic_virtual_wire_mode_init();
	else
		disable_lapic();
}

struct device;
int start_cpu(struct device *cpu);

#endif /* CPU_X86_LAPIC_H */