src/arch/arm64/include/armv8/arch/barrier.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/barrier.h
 */
#ifndef __ASM_ARM_BARRIER_H
#define __ASM_ARM_BARRIER_H

#ifndef __ASSEMBLER__

#define sevl()		asm volatile("sevl" : : : "memory")
#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")
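
/*
 * sev() signals the event register on all cores, sevl() only on the local
 * one; wfe()/wfi() put the core into a low-power state until an event or
 * an interrupt arrives.
 *
 * Illustrative sketch (not part of the original header; `entry_flag` is
 * hypothetical): the usual sevl()/wfe() pairing for a core spinning on a
 * flag that another core sets and then follows with sev().
 *
 *	sevl();			// prime the local event register so that
 *	do {			// the first wfe() falls straight through
 *		wfe();		// afterwards, sleep until a sev() arrives
 *	} while (!force_read(entry_flag));
 */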

#define isb()		asm volatile("isb" : : : "memory")
#define dsb()		asm volatile("dsb sy" : : : "memory")
#define dmb()		asm volatile("dmb sy" : : : "memory")
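
/*
 * isb flushes the pipeline so that earlier context-changing operations
 * (e.g. system register writes) take effect before the next instruction
 * is fetched; dsb sy stalls until all outstanding memory accesses have
 * completed; dmb sy only orders memory accesses relative to each other.
 */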

#define mb()		dsb()
#define rmb()		asm volatile("dsb ld" : : : "memory")
#define wmb()		asm volatile("dsb st" : : : "memory")
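
/*
 * mb()/rmb()/wmb() are the generic full/read/write barriers, mapped to
 * the matching DSB variants. Illustrative sketch (not part of the
 * original header; descriptor and register names are hypothetical):
 * publish a DMA descriptor before ringing a device doorbell, so the
 * device can never observe the doorbell ahead of the descriptor.
 *
 *	desc->addr = buf_phys;
 *	desc->len = len;
 *	wmb();			// descriptor words land before the doorbell
 *	write32(&regs->doorbell, 1);
 */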

#if CONFIG(SMP)
#define barrier() __asm__ __volatile__("": : :"memory")
#endif
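
/*
 * barrier() is a compiler-only fence: it keeps the compiler from
 * reordering memory accesses across it but emits no instruction, unlike
 * the dmb/dsb macros above.
 */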

#define nop()		asm volatile("nop")

#define force_read(x) (*(volatile typeof(x) *)&(x))
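
/*
 * force_read(x) casts through volatile to force a fresh load from memory
 * each time, preventing the compiler from caching a previously read value
 * of x, e.g. while polling a location another core or device updates.
 */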

#define load_acquire(p) \
({									\
	typeof(*p) ___p1;						\
	switch (sizeof(*p)) {						\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	}								\
	___p1;								\
})

#define store_release(p, v)						\
do {									\
	switch (sizeof(*p)) {						\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	}								\
} while (0)
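
/*
 * load_acquire()/store_release() compile to ldar/stlr and implement the
 * standard acquire/release message-passing idiom. Illustrative sketch
 * (not part of the original header; `data` and `ready` are hypothetical):
 *
 *	// producer:
 *	data = 42;
 *	store_release(&ready, 1);	// all earlier stores visible first
 *
 *	// consumer:
 *	while (!load_acquire(&ready))
 *		;
 *	// reads after the acquire see data == 42
 */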

#define load_acquire_exclusive(p) \
({									\
	typeof(*p) ___p1;						\
	switch (sizeof(*p)) {						\
	case 4:								\
		asm volatile ("ldaxr %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 8:								\
		asm volatile ("ldaxr %0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	}								\
	___p1;								\
})

/* Returns 1 on success. */
#define store_release_exclusive(p, v)					\
({									\
	int ret;							\
	switch (sizeof(*p)) {						\
	case 4:								\
		asm volatile ("stlxr %w0, %w2, %1"			\
				: "=&r" (ret), "=Q" (*p) : "r" (v)	\
				: "memory");				\
		break;							\
	case 8:								\
		asm volatile ("stlxr %w0, %2, %1"			\
				: "=&r" (ret), "=Q" (*p) : "r" (v)	\
				: "memory");				\
		break;							\
	}								\
	!ret;								\
})
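
/*
 * The exclusive pair compiles to ldaxr/stlxr and supports lock-free
 * read-modify-write loops: the store only succeeds if no other observer
 * touched the location since the exclusive load. Illustrative sketch
 * (not part of the original header; `counter` is a hypothetical
 * uint32_t pointer):
 *
 *	uint32_t v;
 *	do {
 *		v = load_acquire_exclusive(counter);
 *	} while (!store_release_exclusive(counter, v + 1));
 */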

#endif	/* __ASSEMBLER__ */

#endif	/* __ASM_ARM_BARRIER_H */