Diffstat (limited to 'include/asm-generic')
-rw-r--r--   include/asm-generic/barrier.h     10
-rw-r--r--   include/asm-generic/qspinlock.h    4
2 files changed, 14 insertions, 0 deletions
diff --git a/include/asm-generic/barrier.h b/include/asm-generic/barrier.h
index fec97dc34de7..798027bb89be 100644
--- a/include/asm-generic/barrier.h
+++ b/include/asm-generic/barrier.h
@@ -240,5 +240,15 @@ do { \
 })
 #endif
 
+/*
+ * pmem_wmb() ensures that all stores for which the modification
+ * are written to persistent storage by preceding instructions have
+ * updated persistent storage before any data access or data transfer
+ * caused by subsequent instructions is initiated.
+ */
+#ifndef pmem_wmb
+#define pmem_wmb()	wmb()
+#endif
+
 #endif /* !__ASSEMBLY__ */
 #endif /* __ASM_GENERIC_BARRIER_H */
diff --git a/include/asm-generic/qspinlock.h b/include/asm-generic/qspinlock.h
index 2b26cd729b94..4fe7fd0fe834 100644
--- a/include/asm-generic/qspinlock.h
+++ b/include/asm-generic/qspinlock.h
@@ -13,6 +13,7 @@
 #include <asm-generic/qspinlock_types.h>
 #include <linux/atomic.h>
 
+#ifndef queued_spin_is_locked
 /**
  * queued_spin_is_locked - is the spinlock locked?
  * @lock: Pointer to queued spinlock structure
@@ -26,6 +27,7 @@ static __always_inline int queued_spin_is_locked(struct qspinlock *lock)
 	 */
 	return atomic_read(&lock->val);
 }
+#endif
 
 /**
  * queued_spin_value_unlocked - is the spinlock structure unlocked?
@@ -68,6 +70,7 @@ static __always_inline int queued_spin_trylock(struct qspinlock *lock)
 
 extern void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
 
+#ifndef queued_spin_lock
 /**
  * queued_spin_lock - acquire a queued spinlock
  * @lock: Pointer to queued spinlock structure
@@ -81,6 +84,7 @@ static __always_inline void queued_spin_lock(struct qspinlock *lock)
 
 	queued_spin_lock_slowpath(lock, val);
 }
+#endif
 
 #ifndef queued_spin_unlock
 /**
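
Note (not part of the commit): the new "#ifndef pmem_wmb" guard means the wmb() fallback is only used when the architecture has not already supplied its own barrier. A minimal sketch of how an override slots in; the architecture name "foo" and the foo_persistent_sync() primitive are hypothetical stand-ins, not anything defined by this patch:

/* arch/foo/include/asm/barrier.h -- hypothetical, for illustration only */

/*
 * foo_persistent_sync() stands in for whatever instruction this
 * architecture uses to order prior stores all the way to persistent
 * storage.
 */
#define pmem_wmb()	foo_persistent_sync()

/* Included last: its "#ifndef pmem_wmb" now skips the wmb() fallback. */
#include <asm-generic/barrier.h>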
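
Similarly (also not part of the commit), the new "#ifndef queued_spin_is_locked" / "#ifndef queued_spin_lock" guards let an architecture supply its own fast path and compile the generic one out by defining the macro before including the generic header. A sketch under the same hypothetical-architecture assumption; the function body simply mirrors the generic fast path and is for illustration only:

/* arch/foo/include/asm/qspinlock.h -- hypothetical, for illustration only */
#include <asm-generic/qspinlock_types.h>
#include <linux/atomic.h>

extern void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);

static __always_inline void queued_spin_lock(struct qspinlock *lock)
{
	int val = 0;

	/* Uncontended fast path: acquire-cmpxchg 0 -> _Q_LOCKED_VAL. */
	if (likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL)))
		return;

	/* An architecture could branch to a paravirt-aware slowpath here. */
	queued_spin_lock_slowpath(lock, val);
}
#define queued_spin_lock queued_spin_lock	/* hide the generic definition */

#include <asm-generic/qspinlock.h>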