author    | Max Filippov <jcmvbkbc@gmail.com> | 2018-12-20 17:18:12 -0800
committer | Max Filippov <jcmvbkbc@gmail.com> | 2019-05-07 10:36:31 -0700
commit    | f7c34874f04a80d6c39a32f08da2529e59602d3c (patch)
tree      | a225ab153205701d24bee7423fc950a9009cb533 /arch/xtensa/include/asm/bitops.h
parent    | d065fcf12c21348cbc125460f75332f467518fb1 (diff)
xtensa: add exclusive atomics support
Implement atomic primitives using the exclusive access opcodes available in recent xtensa cores.
Since l32ex/s32ex don't provide any memory ordering guarantees, don't define __smp_mb__before_atomic/__smp_mb__after_atomic, so that the generic fallbacks, which issue memw, are used instead.
Signed-off-by: Max Filippov <jcmvbkbc@gmail.com>
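
For readers unfamiliar with load-exclusive/store-exclusive loops, here is a rough, portable C11 analogue of the l32ex/s32ex/getex retry loop that the diff below adds for set_bit(). It is illustrative only and not part of the commit; the function name is hypothetical, and it assumes a 32-bit unsigned long, as on xtensa. The weak compare-exchange may fail spuriously, mirroring a failed exclusive store, and relaxed ordering mirrors the fact that l32ex/s32ex themselves impose no ordering.

/*
 * Illustrative sketch only, not part of the commit.
 */
#include <stdatomic.h>

static inline void sketch_set_bit(unsigned int bit, _Atomic unsigned long *p)
{
	unsigned long mask = 1UL << (bit & 31);
	unsigned long old;

	p += bit >> 5;	/* select the 32-bit word holding the bit */

	/* ~l32ex: read the current value of the word */
	old = atomic_load_explicit(p, memory_order_relaxed);

	/*
	 * ~s32ex/getex/beqz: try to store old|mask; on failure the CAS
	 * reloads 'old' with the current value and the loop retries,
	 * just as the assembly branches back to label 1.
	 */
	while (!atomic_compare_exchange_weak_explicit(p, &old, old | mask,
						      memory_order_relaxed,
						      memory_order_relaxed))
		;
}

Because each retry re-reads the word before recomputing the result, the loop tolerates concurrent writers, which is the same forward-progress structure the getex/beqz pair provides in the assembly below.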
Diffstat (limited to 'arch/xtensa/include/asm/bitops.h')
-rw-r--r-- | arch/xtensa/include/asm/bitops.h | 121
1 file changed, 120 insertions(+), 1 deletion(-)
diff --git a/arch/xtensa/include/asm/bitops.h b/arch/xtensa/include/asm/bitops.h
index 345d28aead65..aeb15f4c755b 100644
--- a/arch/xtensa/include/asm/bitops.h
+++ b/arch/xtensa/include/asm/bitops.h
@@ -96,7 +96,126 @@ static inline unsigned long __fls(unsigned long word)
 
 #include <asm-generic/bitops/fls64.h>
 
-#if XCHAL_HAVE_S32C1I
+#if XCHAL_HAVE_EXCLUSIVE
+
+static inline void set_bit(unsigned int bit, volatile unsigned long *p)
+{
+	unsigned long tmp;
+	unsigned long mask = 1UL << (bit & 31);
+
+	p += bit >> 5;
+
+	__asm__ __volatile__(
+			"1:     l32ex   %0, %2\n"
+			"       or      %0, %0, %1\n"
+			"       s32ex   %0, %2\n"
+			"       getex   %0\n"
+			"       beqz    %0, 1b\n"
+			: "=&a" (tmp)
+			: "a" (mask), "a" (p)
+			: "memory");
+}
+
+static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
+{
+	unsigned long tmp;
+	unsigned long mask = 1UL << (bit & 31);
+
+	p += bit >> 5;
+
+	__asm__ __volatile__(
+			"1:     l32ex   %0, %2\n"
+			"       and     %0, %0, %1\n"
+			"       s32ex   %0, %2\n"
+			"       getex   %0\n"
+			"       beqz    %0, 1b\n"
+			: "=&a" (tmp)
+			: "a" (~mask), "a" (p)
+			: "memory");
+}
+
+static inline void change_bit(unsigned int bit, volatile unsigned long *p)
+{
+	unsigned long tmp;
+	unsigned long mask = 1UL << (bit & 31);
+
+	p += bit >> 5;
+
+	__asm__ __volatile__(
+			"1:     l32ex   %0, %2\n"
+			"       xor     %0, %0, %1\n"
+			"       s32ex   %0, %2\n"
+			"       getex   %0\n"
+			"       beqz    %0, 1b\n"
+			: "=&a" (tmp)
+			: "a" (mask), "a" (p)
+			: "memory");
+}
+
+static inline int
+test_and_set_bit(unsigned int bit, volatile unsigned long *p)
+{
+	unsigned long tmp, value;
+	unsigned long mask = 1UL << (bit & 31);
+
+	p += bit >> 5;
+
+	__asm__ __volatile__(
+			"1:     l32ex   %1, %3\n"
+			"       or      %0, %1, %2\n"
+			"       s32ex   %0, %3\n"
+			"       getex   %0\n"
+			"       beqz    %0, 1b\n"
+			: "=&a" (tmp), "=&a" (value)
+			: "a" (mask), "a" (p)
+			: "memory");
+
+	return value & mask;
+}
+
+static inline int
+test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
+{
+	unsigned long tmp, value;
+	unsigned long mask = 1UL << (bit & 31);
+
+	p += bit >> 5;
+
+	__asm__ __volatile__(
+			"1:     l32ex   %1, %3\n"
+			"       and     %0, %1, %2\n"
+			"       s32ex   %0, %3\n"
+			"       getex   %0\n"
+			"       beqz    %0, 1b\n"
+			: "=&a" (tmp), "=&a" (value)
+			: "a" (~mask), "a" (p)
+			: "memory");
+
+	return value & mask;
+}
+
+static inline int
+test_and_change_bit(unsigned int bit, volatile unsigned long *p)
+{
+	unsigned long tmp, value;
+	unsigned long mask = 1UL << (bit & 31);
+
+	p += bit >> 5;
+
+	__asm__ __volatile__(
+			"1:     l32ex   %1, %3\n"
+			"       xor     %0, %1, %2\n"
+			"       s32ex   %0, %3\n"
+			"       getex   %0\n"
+			"       beqz    %0, 1b\n"
+			: "=&a" (tmp), "=&a" (value)
+			: "a" (mask), "a" (p)
+			: "memory");
+
+	return value & mask;
+}
+
+#elif XCHAL_HAVE_S32C1I
 
 static inline void set_bit(unsigned int bit, volatile unsigned long *p)
 {
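
For context on the ordering note in the commit message (illustrative, not part of this diff): since the commit leaves __smp_mb__before_atomic()/__smp_mb__after_atomic() undefined for xtensa, the generic fallbacks in include/asm-generic/barrier.h take effect. They are roughly of the following shape, and on xtensa the barrier ultimately expands to a memw instruction; exact details vary by kernel version.

/* Rough shape of the generic fallbacks (include/asm-generic/barrier.h):
 * if an architecture does not define these hooks, a full barrier is used
 * around atomic ops that lack ordering of their own. */
#ifndef __smp_mb__before_atomic
#define __smp_mb__before_atomic()	__smp_mb()
#endif

#ifndef __smp_mb__after_atomic
#define __smp_mb__after_atomic()	__smp_mb()
#endif

/* xtensa's mb(), approximately (arch/xtensa/include/asm/barrier.h):
 * the memw memory-barrier instruction these fallbacks rely on. */
#define mb()	({ __asm__ __volatile__("memw" : : : "memory"); })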