/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

/*
 * On x86-64 a 64-bit cmpxchg is simply the native word-size operation,
 * so these wrappers only assert the operand size and forward to the
 * plain arch_cmpxchg*() variants.
 */
#define arch_cmpxchg64(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg((ptr), (o), (n));					\
})

#define arch_cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_local((ptr), (o), (n));				\
})

#define arch_try_cmpxchg64(ptr, po, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_try_cmpxchg((ptr), (po), (n));				\
})

#define arch_try_cmpxchg64_local(ptr, po, n)				\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_try_cmpxchg_local((ptr), (po), (n));			\
})

/* Access a 128-bit operand either as a whole or as its two 64-bit halves. */
union __u128_halves {
	u128 full;
	struct {
		u64 low, high;
	};
};

/*
 * 128-bit cmpxchg via CMPXCHG16B: the expected value goes in rdx:rax and
 * the replacement in rcx:rbx; the instruction leaves the prior memory
 * contents in rdx:rax, so the macro evaluates to the value the location
 * held before the (attempted) exchange.  _lock is LOCK_PREFIX for the
 * SMP-safe variants and empty for the _local ones.
 */
#define __arch_cmpxchg128(_ptr, _old, _new, _lock)			\
({									\
	union __u128_halves o = { .full = (_old), },			\
			    n = { .full = (_new), };			\
									\
	asm volatile(_lock "cmpxchg16b %[ptr]"				\
		     : [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	o.full;								\
})

static __always_inline u128 arch_cmpxchg128(volatile u128 *ptr, u128 old, u128 new)
{
	return __arch_cmpxchg128(ptr, old, new, LOCK_PREFIX);
}
#define arch_cmpxchg128 arch_cmpxchg128

/* Like arch_cmpxchg128(), but without the LOCK prefix: not SMP-safe. */
static __always_inline u128 arch_cmpxchg128_local(volatile u128 *ptr, u128 old, u128 new)
{
	return __arch_cmpxchg128(ptr, old, new,);
}
#define arch_cmpxchg128_local arch_cmpxchg128_local

/*
 * Try-variant of the 128-bit cmpxchg: success is read back from ZF via
 * CC_SET()/CC_OUT().  On failure the value actually found in memory is
 * copied back through _oldp, as the try_cmpxchg() family expects.
 */
#define __arch_try_cmpxchg128(_ptr, _oldp, _new, _lock)			\
({									\
	union __u128_halves o = { .full = *(_oldp), },			\
			    n = { .full = (_new), };			\
	bool ret;							\
									\
	asm volatile(_lock "cmpxchg16b %[ptr]"				\
		     CC_SET(e)						\
		     : CC_OUT(e) (ret),					\
		       [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

static __always_inline bool arch_try_cmpxchg128(volatile u128 *ptr, u128 *oldp, u128 new)
{
	return __arch_try_cmpxchg128(ptr, oldp, new, LOCK_PREFIX);
}
#define arch_try_cmpxchg128 arch_try_cmpxchg128

static __always_inline bool arch_try_cmpxchg128_local(volatile u128 *ptr, u128 *oldp, u128 new)
{
	return __arch_try_cmpxchg128(ptr, oldp, new,);
}
#define arch_try_cmpxchg128_local arch_try_cmpxchg128_local

/* CMPXCHG16B is optional; it is advertised via the CX16 CPUID feature bit. */
#define system_has_cmpxchg128()		boot_cpu_has(X86_FEATURE_CX16)

#endif /* _ASM_X86_CMPXCHG_64_H */
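
/*
 * Illustrative usage sketch, not part of the header above.  The function
 * below and its pointer/generation layout are hypothetical; real kernel
 * code would normally reach these operations through the generic
 * try_cmpxchg128() wrapper, and may only use them when
 * system_has_cmpxchg128() reports that the CPU implements CMPXCHG16B.
 */
#include <linux/types.h>
#include <asm/cpufeature.h>
#include <asm/cmpxchg.h>

/*
 * Atomically replace the pointer held in the low 64 bits of *slot while
 * bumping the generation count in the high 64 bits (a common ABA guard).
 * Returns false if CMPXCHG16B is unavailable or the slot no longer holds
 * @old_ptr.
 */
static inline bool tagged_ptr_replace(u128 *slot, u64 old_ptr, u64 new_ptr)
{
	u128 old, new;

	if (!system_has_cmpxchg128())
		return false;		/* caller must fall back to a lock */

	old = *slot;			/* racy snapshot; cmpxchg revalidates */
	do {
		union __u128_halves cur = { .full = old };

		if (cur.low != old_ptr)
			return false;	/* lost the race to another updater */

		new = ((u128)(cur.high + 1) << 64) | new_ptr;
	} while (!arch_try_cmpxchg128(slot, &old, new));

	return true;
}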