From 75a62e76486f63f6dadb5492c205570ace81e9d5 Mon Sep 17 00:00:00 2001
From: Aaron Durbin <adurbin@chromium.org>
Date: Thu, 13 Sep 2018 02:10:45 -0600
Subject: complier.h: add __always_inline and use it in code base

Add a __always_inline macro that wraps __attribute__((always_inline))
and replace current users with the macro, excluding files under
src/vendorcode.

Change-Id: Ic57e474c1d2ca7cc0405ac677869f78a28d3e529
Signed-off-by: Aaron Durbin <adurbin@chromium.org>
Reviewed-on: https://review.coreboot.org/28587
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
Reviewed-by: Julius Werner <jwerner@chromium.org>
---
 src/arch/arm/include/arch/hlt.h     | 4 +++-
 src/arch/arm/include/smp/spinlock.h | 4 ++--
 2 files changed, 5 insertions(+), 3 deletions(-)

(limited to 'src/arch/arm/include')

diff --git a/src/arch/arm/include/arch/hlt.h b/src/arch/arm/include/arch/hlt.h
index a6a67576fd..fd2aac9c9f 100644
--- a/src/arch/arm/include/arch/hlt.h
+++ b/src/arch/arm/include/arch/hlt.h
@@ -14,7 +14,9 @@
 #ifndef ARCH_HLT_H
 #define ARCH_HLT_H
 
-static inline __attribute__((always_inline)) void hlt(void)
+#include <compiler.h>
+
+static __always_inline void hlt(void)
 {
 	for (;;) ;
 }
diff --git a/src/arch/arm/include/smp/spinlock.h b/src/arch/arm/include/smp/spinlock.h
index f98900a66b..189bf2c507 100644
--- a/src/arch/arm/include/smp/spinlock.h
+++ b/src/arch/arm/include/smp/spinlock.h
@@ -33,7 +33,7 @@ typedef struct {
 #define spin_is_locked(x)	(*(volatile char *)(&(x)->lock) != 0)
 #define spin_unlock_wait(x)	do { barrier(); } while (spin_is_locked(x))
 
-static inline __attribute__((always_inline)) void spin_lock(spinlock_t *lock)
+static __always_inline void spin_lock(spinlock_t *lock)
 {
 	unsigned long tmp;
 	__asm__ __volatile__ (
@@ -49,7 +49,7 @@ static inline __attribute__((always_inline)) void spin_lock(spinlock_t *lock)
 	barrier();
 }
 
-static inline __attribute__((always_inline)) void spin_unlock(spinlock_t *lock)
+static __always_inline void spin_unlock(spinlock_t *lock)
 {
 	__asm__ __volatile__(
 		"	str	%1, [%0]\n"
-- 
cgit v1.2.3