From 068550631fbe0b7fb41625cea6fb204fdc8cb224 Mon Sep 17 00:00:00 2001
From: Andrzej Hajda
Date: Wed, 18 Jan 2023 16:44:44 +0100
Subject: locking/arch: Rename all internal __xchg() names to __arch_xchg()

Decrease the probability of this internal facility to be used by
driver code.

Signed-off-by: Andrzej Hajda
Signed-off-by: Peter Zijlstra (Intel)
Signed-off-by: Ingo Molnar
Reviewed-by: Arnd Bergmann
Reviewed-by: Andi Shyti
Acked-by: Geert Uytterhoeven [m68k]
Acked-by: Palmer Dabbelt [riscv]
Link: https://lore.kernel.org/r/20230118154450.73842-1-andrzej.hajda@intel.com
Cc: Linus Torvalds
---
 arch/hexagon/include/asm/cmpxchg.h | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

(limited to 'arch/hexagon')

diff --git a/arch/hexagon/include/asm/cmpxchg.h b/arch/hexagon/include/asm/cmpxchg.h
index cdb705e1496a..bf6cf5579cf4 100644
--- a/arch/hexagon/include/asm/cmpxchg.h
+++ b/arch/hexagon/include/asm/cmpxchg.h
@@ -9,7 +9,7 @@
 #define _ASM_CMPXCHG_H
 
 /*
- * __xchg - atomically exchange a register and a memory location
+ * __arch_xchg - atomically exchange a register and a memory location
  * @x: value to swap
  * @ptr: pointer to memory
  * @size: size of the value
@@ -19,8 +19,8 @@
  * Note: there was an errata for V2 about .new's and memw_locked.
  *
  */
-static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
-				   int size)
+static inline unsigned long
+__arch_xchg(unsigned long x, volatile void *ptr, int size)
 {
 	unsigned long retval;
 
@@ -42,8 +42,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
  * Atomically swap the contents of a register with memory.  Should be atomic
  * between multiple CPU's and within interrupts on the same CPU.
  */
-#define arch_xchg(ptr, v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v), (ptr), \
-	sizeof(*(ptr))))
+#define arch_xchg(ptr, v) ((__typeof__(*(ptr)))__arch_xchg((unsigned long)(v), (ptr), \
+						sizeof(*(ptr))))
 
 /*
  * see rt-mutex-design.txt; cmpxchg supposedly checks if *ptr == A and swaps.
--
cgit v1.2.3
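
For context on the interface being renamed: the hexagon header hides a size-dispatched
internal exchange helper (now __arch_xchg()) behind the arch_xchg() macro, which casts
the result back to the pointed-to type. The minimal userspace sketch below models only
that shape, using GCC/Clang __atomic builtins rather than hexagon's memw_locked
load-locked/store-conditional loop; the sketch_* names are hypothetical and are not
part of the kernel.

/*
 * Minimal userspace sketch (not the kernel implementation): an internal,
 * size-dispatched exchange helper hidden behind a casting macro, mirroring
 * the interface shape changed by the patch above.  Uses compiler __atomic
 * builtins instead of hexagon's memw_locked loop; sketch_* names are
 * illustrative only.
 */
#include <stdio.h>

static inline unsigned long
sketch_arch_xchg(unsigned long x, volatile void *ptr, int size)
{
	/* Dispatch on operand size, like the kernel helper's 'size' argument. */
	switch (size) {
	case 4:
		return __atomic_exchange_n((volatile unsigned int *)ptr,
					   (unsigned int)x, __ATOMIC_SEQ_CST);
	case 8:
		return __atomic_exchange_n((volatile unsigned long long *)ptr,
					   (unsigned long long)x, __ATOMIC_SEQ_CST);
	default:
		return 0;	/* unsupported size in this sketch */
	}
}

/* Same shape as the arch_xchg() wrapper in the patch: cast + sizeof dispatch. */
#define sketch_xchg(ptr, v)						  \
	((__typeof__(*(ptr)))sketch_arch_xchg((unsigned long)(v), (ptr), \
					      sizeof(*(ptr))))

int main(void)
{
	unsigned int counter = 1;
	unsigned int old = sketch_xchg(&counter, 5u);

	printf("old=%u new=%u\n", old, counter);	/* prints: old=1 new=5 */
	return 0;
}

Because callers go through the wrapper macro and never touch the helper directly, the
rename from __xchg() to __arch_xchg() is invisible to them, which is exactly why it can
be applied across architectures without touching driver code.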