blob: 62341104a44c65efbd662c40aff9130484c65272 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
|
/* SPDX-License-Identifier: GPL-2.0-only */
#ifndef CPU_X86_CACHE
#define CPU_X86_CACHE
#include <cpu/x86/cr.h>
#define CR0_CacheDisable (CR0_CD)
#define CR0_NoWriteThrough (CR0_NW)
#define CPUID_FEATURE_CLFLUSH_BIT 19
#if !defined(__ASSEMBLER__)
/* Write back all modified cache lines to memory and invalidate the
 * caches. The "memory" clobber keeps the compiler from caching values
 * across the flush.
 */
static inline void wbinvd(void)
{
	__asm__ __volatile__("wbinvd" : : : "memory");
}
/* Invalidate the caches without writing modified lines back — dirty
 * data is discarded. The "memory" clobber orders this against
 * surrounding memory accesses.
 */
static inline void invd(void)
{
	__asm__ __volatile__("invd" : : : "memory");
}
/* Flush the cache line containing addr from the cache hierarchy.
 *
 * The original form passed addr only as an "r" input with no memory
 * operand or clobber, so the compiler had no dependency between the
 * flush and pending stores to that line and could reorder them past
 * it. Using a "+m" operand on the pointed-to byte tells the compiler
 * this asm reads and writes that memory, forcing prior stores to the
 * line to be emitted before the flush.
 */
static inline void clflush(void *addr)
{
	asm volatile("clflush %0" : "+m" (*(volatile char *)addr));
}
/* The following functions require __always_inline because of the AMD
 * STOP_CAR_AND_CPU routine: once it disables cache-as-RAM, the
 * cache-as-RAM stack can no longer be used. Called functions must
 * therefore be inlined to avoid stack usage, and the compiler must
 * keep local variables in registers rather than allocating them on
 * the stack. With gcc 4.5.0, some functions declared as inline were
 * not being inlined, so the __always_inline qualifier is added to
 * their declarations to force inlining.
 */
/* Enable the CPU caches by clearing both the Cache Disable (CD) and
 * Not Write-through (NW) bits in CR0. Must stay __always_inline: see
 * the cache-as-RAM note above.
 */
static __always_inline void enable_cache(void)
{
	const CRx_TYPE clear_bits = CR0_CD | CR0_NW;
	write_cr0(read_cr0() & ~clear_bits);
}
/* Disable the CPU caches, flushing all dirty lines to memory.
 *
 * NOTE(review): the statement order here looks deliberate — wbinvd()
 * before write_cr0() writes dirty lines back while caching is still
 * on, and the second wbinvd() after setting CR0.CD invalidates
 * anything that became dirty in between. Do not reorder. Must stay
 * __always_inline: see the cache-as-RAM note above.
 */
static __always_inline void disable_cache(void)
{
/* Disable and write back the cache */
CRx_TYPE cr0;
cr0 = read_cr0();
/* Set Cache Disable; NW is left unchanged. */
cr0 |= CR0_CD;
wbinvd();
write_cr0(cr0);
wbinvd();
}
void x86_enable_cache(void);
#endif /* !__ASSEMBLER__ */
#endif /* CPU_X86_CACHE */
|