author     John David Anglin <dave.anglin@bell.net>    2020-10-02 21:41:52 +0200
committer  Helge Deller <deller@gmx.de>                2020-10-15 08:10:39 +0200
commit     f173e3a751708b7f5e0b77dcdd4ac3c2287b735e (patch)
tree       6454ea28d3de7c9533e10ce6f57756b8e6e4eedf /arch/parisc
parent     b47cf497469fae14fcbf63526150c8c71e636c20 (diff)
parisc: Improve spinlock handling
Use READ_ONCE() to check if spinlock is locked.
The other changes are cleanups.

Signed-off-by: John David Anglin <dave.anglin@bell.net>
Signed-off-by: Helge Deller <deller@gmx.de>
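For readers unfamiliar with the primitive: READ_ONCE() makes the compiler emit exactly one, un-torn load of the lock word each time the expression is evaluated, so the check cannot be cached, merged, or hoisted. A rough sketch of the idea, using a simplified stand-in for the real kernel macro (which also handles non-scalar sizes and adds instrumentation):

	/* Rough sketch only: simplified stand-in for the kernel's READ_ONCE().
	 * Casting through a volatile pointer forces one real load per
	 * evaluation instead of letting the compiler reuse an old value.
	 */
	#define read_once_sketch(x)	(*(const volatile typeof(x) *)&(x))

	/* How arch_spin_is_locked() uses it: the parisc lock word is 1 when
	 * the lock is free and 0 when it is held (LDCW takes the lock by
	 * clearing the word).
	 */
	static inline int spin_is_locked_sketch(unsigned int *lock_word)
	{
		return read_once_sketch(*lock_word) == 0;
	}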
Diffstat (limited to 'arch/parisc')
-rw-r--r--  arch/parisc/include/asm/spinlock.h | 23
1 file changed, 13 insertions(+), 10 deletions(-)
diff --git a/arch/parisc/include/asm/spinlock.h b/arch/parisc/include/asm/spinlock.h
index 51b6c47f802f..fa5ee8a45dbd 100644
--- a/arch/parisc/include/asm/spinlock.h
+++ b/arch/parisc/include/asm/spinlock.h
@@ -10,13 +10,21 @@
 static inline int arch_spin_is_locked(arch_spinlock_t *x)
 {
 	volatile unsigned int *a = __ldcw_align(x);
-	return *a == 0;
+	return READ_ONCE(*a) == 0;
 }
 
-#define arch_spin_lock(lock) arch_spin_lock_flags(lock, 0)
+static inline void arch_spin_lock(arch_spinlock_t *x)
+{
+	volatile unsigned int *a;
+
+	a = __ldcw_align(x);
+	while (__ldcw(a) == 0)
+		while (*a == 0)
+			continue;
+}
 
 static inline void arch_spin_lock_flags(arch_spinlock_t *x,
-					 unsigned long flags)
+					unsigned long flags)
 {
 	volatile unsigned int *a;
 
@@ -25,10 +33,8 @@ static inline void arch_spin_lock_flags(arch_spinlock_t *x,
 		while (*a == 0)
 			if (flags & PSW_SM_I) {
 				local_irq_enable();
-				cpu_relax();
 				local_irq_disable();
-			} else
-				cpu_relax();
+			}
 }
 #define arch_spin_lock_flags arch_spin_lock_flags
 
@@ -44,12 +50,9 @@ static inline void arch_spin_unlock(arch_spinlock_t *x)
 static inline int arch_spin_trylock(arch_spinlock_t *x)
 {
 	volatile unsigned int *a;
-	int ret;
 
 	a = __ldcw_align(x);
-	ret = __ldcw(a) != 0;
-
-	return ret;
+	return __ldcw(a) != 0;
 }
 
 /*
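The new arch_spin_lock() above follows the classic test-and-test-and-set shape: if the atomic LDCW (load-and-clear-word) fails, the CPU waits on cheap plain reads of the lock word and only retries LDCW once the word looks free again, which avoids hammering the cache line with atomic operations. A standalone sketch of that pattern, with a hypothetical load_and_clear() helper standing in for parisc's __ldcw():

	/* Sketch of the spin-then-retry pattern used by arch_spin_lock() above.
	 * load_and_clear() is a hypothetical stand-in for the LDCW instruction:
	 * it atomically reads the word and sets it to 0, so reading back a
	 * nonzero value means the lock was free and is now ours.
	 */
	static inline unsigned int load_and_clear(volatile unsigned int *a)
	{
		return __atomic_exchange_n(a, 0, __ATOMIC_ACQUIRE);
	}

	static void spin_lock_sketch(volatile unsigned int *a)
	{
		while (load_and_clear(a) == 0)	/* lock was already held...    */
			while (*a == 0)		/* ...so wait with plain reads  */
				continue;	/* until it looks free again    */
	}

	static void spin_unlock_sketch(volatile unsigned int *a)
	{
		__atomic_store_n(a, 1, __ATOMIC_RELEASE);	/* mark the word free */
	}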