author    Kyösti Mälkki <kyosti.malkki@gmail.com>  2019-08-19 16:14:15 +0300
committer Kyösti Mälkki <kyosti.malkki@gmail.com>  2019-11-22 06:25:53 +0000
commit    f8dc4bc0224f18a33fcf19e3d754ac96a383a863 (patch)
tree      058f1c05945113a64b8b22c88fde815efdaa6212 /src/arch
parent    9bb16cd9c5b0fdf198f2b78c193d1a02f4f51338 (diff)
arch/x86: Remove spinlocks inside CAR
This was only used with amdfam10h-15h, where cache coherency between
nodes was supposed to be guaranteed with this code. We may want a
cleaner and more generic approach for this, possibly utilising .data
sections.

Change-Id: I00da5c2b0570c26f2e3bb464274485cc2c08c8f0
Signed-off-by: Kyösti Mälkki <kyosti.malkki@gmail.com>
Reviewed-on: https://review.coreboot.org/c/coreboot/+/34929
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
Reviewed-by: Arthur Heymans <arthur@aheymans.xyz>
Reviewed-by: Aaron Durbin <adurbin@chromium.org>
Diffstat (limited to 'src/arch')
-rw-r--r--  src/arch/x86/include/arch/smp/spinlock.h | 26
1 file changed, 6 insertions(+), 20 deletions(-)
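
For context, a minimal sketch (not part of this patch) of how the macros this
header keeps are used from ramstage code; the lock and counter names are
hypothetical. After this change the same code still compiles in romstage and
earlier stages, where DECLARE_SPIN_LOCK(), spin_lock() and spin_unlock()
expand to no-ops because STAGE_HAS_SPINLOCKS evaluates to false there.

#include <arch/smp/spinlock.h>

/* Hypothetical lock and shared state, for illustration only. */
DECLARE_SPIN_LOCK(counter_lock)
static int shared_counter;

static void bump_counter(void)
{
	spin_lock(&counter_lock);   /* spin until this CPU owns the lock */
	shared_counter++;           /* critical section */
	spin_unlock(&counter_lock); /* release for the next CPU */
}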
diff --git a/src/arch/x86/include/arch/smp/spinlock.h b/src/arch/x86/include/arch/smp/spinlock.h
index f9186787af..8bdb125223 100644
--- a/src/arch/x86/include/arch/smp/spinlock.h
+++ b/src/arch/x86/include/arch/smp/spinlock.h
@@ -14,11 +14,6 @@
#ifndef ARCH_SMP_SPINLOCK_H
#define ARCH_SMP_SPINLOCK_H
-#if !defined(__PRE_RAM__) \
- || CONFIG(HAVE_ROMSTAGE_CONSOLE_SPINLOCK) \
- || CONFIG(HAVE_ROMSTAGE_NVRAM_CBFS_SPINLOCK) \
- || CONFIG(HAVE_ROMSTAGE_MICROCODE_CBFS_SPINLOCK)
-
/*
* Your basic SMP spinlocks, allowing only a single CPU anywhere
*/
@@ -27,23 +22,14 @@ typedef struct {
volatile unsigned int lock;
} spinlock_t;
-#ifdef __PRE_RAM__
-spinlock_t *romstage_console_lock(void);
-void initialize_romstage_console_lock(void);
-spinlock_t *romstage_nvram_cbfs_lock(void);
-void initialize_romstage_nvram_cbfs_lock(void);
-spinlock_t *romstage_microcode_cbfs_lock(void);
-void initialize_romstage_microcode_cbfs_lock(void);
-#endif
-
#define SPIN_LOCK_UNLOCKED { 1 }
-#ifndef __PRE_RAM__
+#define STAGE_HAS_SPINLOCKS !ENV_ROMSTAGE_OR_BEFORE
+
+#if STAGE_HAS_SPINLOCKS
+
#define DECLARE_SPIN_LOCK(x) \
static spinlock_t x = SPIN_LOCK_UNLOCKED;
-#else
-#define DECLARE_SPIN_LOCK(x)
-#endif
/*
* Simple spin lock operations. There are two variants, one clears IRQ's
@@ -93,7 +79,7 @@ static __always_inline void cpu_relax(void)
__asm__ __volatile__("rep;nop" : : : "memory");
}
-#else /* !__PRE_RAM__ */
+#else
#define DECLARE_SPIN_LOCK(x)
#define barrier() do {} while (0)
@@ -103,6 +89,6 @@ static __always_inline void cpu_relax(void)
#define spin_unlock(lock) do {} while (0)
#define cpu_relax() do {} while (0)
-#endif /* !__PRE_RAM__ */
+#endif
#endif /* ARCH_SMP_SPINLOCK_H */
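
The accessors removed above were how pre-RAM (__PRE_RAM__) code obtained the
shared locks during cache-as-RAM. A hypothetical reconstruction of the kind of
call site they implied before this patch is sketched below; it is not taken
from the tree.

#ifdef __PRE_RAM__
	/* Serialise console output between nodes while still in CAR. */
	spin_lock(romstage_console_lock());
	/* ... emit output from exactly one node at a time ... */
	spin_unlock(romstage_console_lock());
#endif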