-rw-r--r--  src/include/cpu/x86/cache.h  14
-rw-r--r--  src/include/cpu/x86/msr.h    14
2 files changed, 24 insertions, 4 deletions
diff --git a/src/include/cpu/x86/cache.h b/src/include/cpu/x86/cache.h
index 65979fff77..a473d66c8f 100644
--- a/src/include/cpu/x86/cache.h
+++ b/src/include/cpu/x86/cache.h
@@ -74,7 +74,17 @@ static inline void invd(void)
asm volatile("invd" ::: "memory");
}
-static inline void enable_cache(void)
+/* The following functions require always_inline because of the AMD
+ * STOP_CAR_AND_CPU function: once it disables cache-as-RAM, the
+ * cache-as-RAM stack can no longer be used. Called functions must
+ * therefore be inlined to avoid stack usage. Also, the compiler
+ * must keep local variables register based and not allocate them
+ * from the stack. With gcc 4.5.0, some functions declared as
+ * inline are not being inlined. This patch forces these functions
+ * to always be inlined by adding the qualifier
+ * __attribute__((always_inline)) to their declarations.
+ */
+static inline __attribute__((always_inline)) void enable_cache(void)
{
unsigned long cr0;
cr0 = read_cr0();
@@ -82,7 +92,7 @@ static inline void enable_cache(void)
write_cr0(cr0);
}
-static inline void disable_cache(void)
+static inline __attribute__((always_inline)) void disable_cache(void)
{
/* Disable and write back the cache */
unsigned long cr0;
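
For reference, a minimal sketch of the pattern these comments describe follows. It is not part of the patch; the *_sketch function names are made up for illustration, and the CR0 handling follows the generic x86 cache-disable convention rather than the exact coreboot code. The point is that helpers marked __attribute__((always_inline)) are expanded into their callers, so the whole sequence compiles to straight-line code with no call/ret and no stack frame once the cache-as-RAM stack is gone.

/* Illustrative sketch only (not from the patch): force-inlined helpers
 * that keep their locals in registers, so they remain usable after the
 * cache-as-RAM stack has been torn down. 32-bit x86, GCC syntax. */
static inline __attribute__((always_inline)) unsigned long read_cr0_sketch(void)
{
	unsigned long value;
	__asm__ __volatile__("mov %%cr0, %0" : "=r"(value));
	return value;
}

static inline __attribute__((always_inline)) void write_cr0_sketch(unsigned long value)
{
	__asm__ __volatile__("mov %0, %%cr0" : : "r"(value));
}

static inline __attribute__((always_inline)) void disable_cache_sketch(void)
{
	/* Set CD (CR0 bit 30) and flush: with every call force-inlined,
	 * this becomes straight-line code with no stack usage. */
	unsigned long cr0 = read_cr0_sketch();
	write_cr0_sketch(cr0 | (1UL << 30));
	__asm__ __volatile__("wbinvd" ::: "memory");
}

In the headers touched by this commit, enable_cache(), disable_cache(), rdmsr() and wrmsr() get exactly this treatment: the existing static inline definitions are kept and only the always_inline qualifier is added.
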
diff --git a/src/include/cpu/x86/msr.h b/src/include/cpu/x86/msr.h
index a201ef42f2..e8bc195307 100644
--- a/src/include/cpu/x86/msr.h
+++ b/src/include/cpu/x86/msr.h
@@ -29,7 +29,17 @@ typedef struct msrinit_struct
msr_t msr;
} msrinit_t;
-static inline msr_t rdmsr(unsigned index)
+/* The following functions require always_inline because of the AMD
+ * STOP_CAR_AND_CPU function: once it disables cache-as-RAM, the
+ * cache-as-RAM stack can no longer be used. Called functions must
+ * therefore be inlined to avoid stack usage. Also, the compiler
+ * must keep local variables register based and not allocate them
+ * from the stack. With gcc 4.5.0, some functions declared as
+ * inline are not being inlined. This patch forces these functions
+ * to always be inlined by adding the qualifier
+ * __attribute__((always_inline)) to their declarations.
+ */
+static inline __attribute__((always_inline)) msr_t rdmsr(unsigned index)
{
msr_t result;
__asm__ __volatile__ (
@@ -40,7 +50,7 @@ static inline msr_t rdmsr(unsigned index)
return result;
}
-static inline void wrmsr(unsigned index, msr_t msr)
+static inline __attribute__((always_inline)) void wrmsr(unsigned index, msr_t msr)
{
__asm__ __volatile__ (
"wrmsr"