Diffstat (limited to 'arch/openrisc')
-rw-r--r--	arch/openrisc/include/asm/atomic.h	42
-rw-r--r--	arch/openrisc/include/asm/cmpxchg.h	4
2 files changed, 28 insertions, 18 deletions
diff --git a/arch/openrisc/include/asm/atomic.h b/arch/openrisc/include/asm/atomic.h
index b589fac39b92..326167e4783a 100644
--- a/arch/openrisc/include/asm/atomic.h
+++ b/arch/openrisc/include/asm/atomic.h
@@ -13,7 +13,7 @@
 
 /* Atomically perform op with v->counter and i */
 #define ATOMIC_OP(op)							\
-static inline void atomic_##op(int i, atomic_t *v)			\
+static inline void arch_atomic_##op(int i, atomic_t *v)		\
 {									\
 	int tmp;							\
 									\
@@ -30,7 +30,7 @@ static inline void atomic_##op(int i, atomic_t *v)		\
 
 /* Atomically perform op with v->counter and i, return the result */
 #define ATOMIC_OP_RETURN(op)						\
-static inline int atomic_##op##_return(int i, atomic_t *v)		\
+static inline int arch_atomic_##op##_return(int i, atomic_t *v)	\
 {									\
 	int tmp;							\
 									\
@@ -49,7 +49,7 @@ static inline int atomic_##op##_return(int i, atomic_t *v)	\
 
 /* Atomically perform op with v->counter and i, return orig v->counter */
 #define ATOMIC_FETCH_OP(op)						\
-static inline int atomic_fetch_##op(int i, atomic_t *v)		\
+static inline int arch_atomic_fetch_##op(int i, atomic_t *v)		\
 {									\
 	int tmp, old;							\
 									\
@@ -75,6 +75,8 @@ ATOMIC_FETCH_OP(and)
 ATOMIC_FETCH_OP(or)
 ATOMIC_FETCH_OP(xor)
 
+ATOMIC_OP(add)
+ATOMIC_OP(sub)
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
@@ -83,16 +85,18 @@ ATOMIC_OP(xor)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-#define atomic_add_return	atomic_add_return
-#define atomic_sub_return	atomic_sub_return
-#define atomic_fetch_add	atomic_fetch_add
-#define atomic_fetch_sub	atomic_fetch_sub
-#define atomic_fetch_and	atomic_fetch_and
-#define atomic_fetch_or		atomic_fetch_or
-#define atomic_fetch_xor	atomic_fetch_xor
-#define atomic_and		atomic_and
-#define atomic_or		atomic_or
-#define atomic_xor		atomic_xor
+#define arch_atomic_add_return	arch_atomic_add_return
+#define arch_atomic_sub_return	arch_atomic_sub_return
+#define arch_atomic_fetch_add	arch_atomic_fetch_add
+#define arch_atomic_fetch_sub	arch_atomic_fetch_sub
+#define arch_atomic_fetch_and	arch_atomic_fetch_and
+#define arch_atomic_fetch_or	arch_atomic_fetch_or
+#define arch_atomic_fetch_xor	arch_atomic_fetch_xor
+#define arch_atomic_add		arch_atomic_add
+#define arch_atomic_sub		arch_atomic_sub
+#define arch_atomic_and		arch_atomic_and
+#define arch_atomic_or		arch_atomic_or
+#define arch_atomic_xor		arch_atomic_xor
 
 /*
  * Atomically add a to v->counter as long as v is not already u.
@@ -100,7 +104,7 @@ ATOMIC_OP(xor)
  *
  * This is often used through atomic_inc_not_zero()
  */
-static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
+static inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int old, tmp;
 
@@ -119,8 +123,14 @@ static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 	return old;
 }
 
-#define atomic_fetch_add_unless	atomic_fetch_add_unless
+#define arch_atomic_fetch_add_unless	arch_atomic_fetch_add_unless
 
-#include <asm-generic/atomic.h>
+#define arch_atomic_read(v)		READ_ONCE((v)->counter)
+#define arch_atomic_set(v,i)		WRITE_ONCE((v)->counter, (i))
+
+#include <asm/cmpxchg.h>
+
+#define arch_atomic_xchg(ptr, v)		(arch_xchg(&(ptr)->counter, (v)))
+#define arch_atomic_cmpxchg(v, old, new)	(arch_cmpxchg(&((v)->counter), (old), (new)))
 
 #endif /* __ASM_OPENRISC_ATOMIC_H */
diff --git a/arch/openrisc/include/asm/cmpxchg.h b/arch/openrisc/include/asm/cmpxchg.h
index f9cd43a39d72..79fd16162ccb 100644
--- a/arch/openrisc/include/asm/cmpxchg.h
+++ b/arch/openrisc/include/asm/cmpxchg.h
@@ -132,7 +132,7 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 	}
 }
 
-#define cmpxchg(ptr, o, n)						\
+#define arch_cmpxchg(ptr, o, n)						\
 ({									\
 	(__typeof__(*(ptr))) __cmpxchg((ptr),				\
 				       (unsigned long)(o),		\
@@ -161,7 +161,7 @@ static inline unsigned long __xchg(volatile void *ptr, unsigned long with,
 	}
 }
 
-#define xchg(ptr, with)							\
+#define arch_xchg(ptr, with)						\
 ({									\
 	(__typeof__(*(ptr))) __xchg((ptr),				\
 				    (unsigned long)(with),		\
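
Note on the fetch_add_unless primitive renamed above: it atomically adds a to v->counter unless the counter already equals u, and returns the previous value; as the in-file comment says, it is what atomic_inc_not_zero() is built on. Below is a minimal user-space sketch of those semantics using GCC/Clang __atomic builtins rather than the header's actual OpenRISC implementation; the demo_* names are hypothetical and only for illustration.

#include <stdio.h>

/* Hypothetical user-space stand-in for atomic_t. */
typedef struct { int counter; } demo_atomic_t;

/*
 * Semantic sketch of arch_atomic_fetch_add_unless(): add 'a' to the
 * counter unless it currently equals 'u'; return the old value.
 * Modelled here as a compare-and-swap retry loop, not the kernel's code.
 */
static int demo_fetch_add_unless(demo_atomic_t *v, int a, int u)
{
	int old = __atomic_load_n(&v->counter, __ATOMIC_RELAXED);

	while (old != u) {
		/* On CAS failure, 'old' is refreshed with the current value. */
		if (__atomic_compare_exchange_n(&v->counter, &old, old + a,
						0, __ATOMIC_SEQ_CST,
						__ATOMIC_RELAXED))
			break;
	}
	return old;
}

int main(void)
{
	demo_atomic_t refcount = { .counter = 1 };

	/* Succeeds: counter is 1 (not 0), so it becomes 2 and 1 is returned. */
	printf("old=%d new=%d\n", demo_fetch_add_unless(&refcount, 1, 0),
	       refcount.counter);

	refcount.counter = 0;
	/* Refuses: counter is already 0, so it stays 0 and 0 is returned. */
	printf("old=%d new=%d\n", demo_fetch_add_unless(&refcount, 1, 0),
	       refcount.counter);
	return 0;
}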
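The cmpxchg.h hunks only rename cmpxchg()/xchg() to arch_cmpxchg()/arch_xchg(); their behaviour is unchanged, and the new arch_atomic_xchg()/arch_atomic_cmpxchg() macros above are thin wrappers around them. As a reminder of the xchg semantics those wrappers expose, here is a short user-space sketch (again with __atomic builtins; the demo_* names are hypothetical).

#include <stdio.h>

/*
 * Semantic sketch of xchg(): atomically store the new value and return
 * the previous one. A classic use is claiming a one-shot flag so that
 * only the first caller wins.
 */
static int demo_xchg(int *ptr, int new_val)
{
	return __atomic_exchange_n(ptr, new_val, __ATOMIC_SEQ_CST);
}

int main(void)
{
	int claimed = 0;

	if (demo_xchg(&claimed, 1) == 0)
		printf("first caller claimed the flag\n");
	if (demo_xchg(&claimed, 1) == 0)
		printf("this line is not reached\n");
	else
		printf("flag was already claimed\n");
	return 0;
}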