Diffstat (limited to 'src/include/cpu/x86/msr.h')
 src/include/cpu/x86/msr.h | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)
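
The change is mechanical: the patch pulls in <compiler.h> and swaps every spelled-out __attribute__((always_inline)) for the __always_inline shorthand. Given the one-for-one substitution of "static inline __attribute__((always_inline))" with "static __always_inline" in the hunks below, the macro presumably expands along these lines (a minimal sketch; the actual definition in <compiler.h> may differ):

/* Sketch only: assumed shape of the shorthand provided by <compiler.h>. */
#ifndef __always_inline
#define __always_inline inline __attribute__((always_inline))
#endif
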
diff --git a/src/include/cpu/x86/msr.h b/src/include/cpu/x86/msr.h
index 74c2521af5..290c54a499 100644
--- a/src/include/cpu/x86/msr.h
+++ b/src/include/cpu/x86/msr.h
@@ -1,6 +1,8 @@
#ifndef CPU_X86_MSR_H
#define CPU_X86_MSR_H
+#include <compiler.h>
+
/* Intel SDM: Table 2-1
* IA-32 architectural MSR: Extended Feature Enable Register
*/
@@ -50,19 +52,18 @@ msr_t soc_msr_read(unsigned int index);
void soc_msr_write(unsigned int index, msr_t msr);
/* Handle MSR references in the other source code */
-static inline __attribute__((always_inline)) msr_t rdmsr(unsigned int index)
+static __always_inline msr_t rdmsr(unsigned int index)
{
return soc_msr_read(index);
}
-static inline __attribute__((always_inline)) void wrmsr(unsigned int index,
- msr_t msr)
+static __always_inline void wrmsr(unsigned int index, msr_t msr)
{
soc_msr_write(index, msr);
}
#else /* CONFIG_SOC_SETS_MSRS */
-/* The following functions require the always_inline due to AMD
+/* The following functions require the __always_inline due to AMD
* function STOP_CAR_AND_CPU that disables cache as
* RAM, the cache as RAM stack can no longer be used. Called
* functions must be inlined to avoid stack usage. Also, the
@@ -70,9 +71,9 @@ static inline __attribute__((always_inline)) void wrmsr(unsigned int index,
* allocated them from the stack. With gcc 4.5.0, some functions
* declared as inline are not being inlined. This patch forces
* these functions to always be inlined by adding the qualifier
- * __attribute__((always_inline)) to their declaration.
+ * __always_inline to their declaration.
*/
-static inline __attribute__((always_inline)) msr_t rdmsr(unsigned int index)
+static __always_inline msr_t rdmsr(unsigned int index)
{
msr_t result;
__asm__ __volatile__ (
@@ -83,8 +84,7 @@ static inline __attribute__((always_inline)) msr_t rdmsr(unsigned int index)
return result;
}
-static inline __attribute__((always_inline)) void wrmsr(unsigned int index,
- msr_t msr)
+static __always_inline void wrmsr(unsigned int index, msr_t msr)
{
__asm__ __volatile__ (
"wrmsr"