summaryrefslogtreecommitdiffstats
path: root/src/arch/riscv/include/arch/smp/atomic.h
blob: ba20efbc64b870496bd13b0d023460c1a9d8cb4d (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
/* SPDX-License-Identifier: BSD-4-Clause-UC */

/* Minimal atomic counter primitives for RISC-V SMP code. */

#ifndef _RISCV_ATOMIC_H
#define _RISCV_ATOMIC_H

#include <arch/encoding.h>

/* Atomic integer counter. The field is volatile so that plain reads and
 * writes (atomic_read/atomic_set below) always hit memory and are never
 * cached in a register across accesses. */
typedef struct { volatile int counter; } atomic_t;

/* Disable machine-mode interrupts by clearing mstatus.MIE.
 * NOTE(review): the fallback implementations below rely on clear_csr()
 * returning the *previous* mstatus value — confirm against
 * arch/encoding.h. */
#define disable_irqsave() clear_csr(mstatus, MSTATUS_MIE)
/* Restore the interrupt-enable state saved by disable_irqsave(): re-set
 * MIE only if it was set in the saved flags (a no-op otherwise). */
#define enable_irqrestore(flags) set_csr(mstatus, (flags) & MSTATUS_MIE)

/* Plain (non-RMW) accessors; atomicity of a single aligned int access is
 * assumed from the ISA, the volatile qualifier prevents reordering by the
 * compiler only. */
#define atomic_set(v, val) ((v)->counter = (val))
#define atomic_read(v)     ((v)->counter)

#ifdef __riscv_atomic
/* The toolchain targets a core with the "A" (atomic) extension: map the
 * read-modify-write operations onto the GCC __sync builtins. Each builtin
 * returns the value the counter held *before* the operation, matching the
 * fallback functions below. */
# define atomic_add(v, inc)       __sync_fetch_and_add(&((v)->counter), inc)
# define atomic_swap(v, swp)      __sync_lock_test_and_set(&((v)->counter), swp)
# define atomic_cas(v, cmp, swp)  __sync_val_compare_and_swap(&((v)->counter), \
					cmp, swp)
# define atomic_inc(v)            atomic_add(v, 1)
# define atomic_dec(v)            atomic_add(v, -1)
#else
/* No "A" extension: emulate atomicity by masking machine-mode interrupts
 * around a plain read-modify-write. This is only safe on a single hart. */
/* Atomically add @inc to the counter; returns the value it held before
 * the addition (fetch-and-add semantics). */
static inline int atomic_add(atomic_t *v, int inc)
{
	long flags = disable_irqsave();
	int old = v->counter;

	v->counter = old + inc;
	enable_irqrestore(flags);
	return old;
}

/* Atomically replace the counter with @swp; returns the previous value
 * (exchange semantics). */
static inline int atomic_swap(atomic_t *v, int swp)
{
	long flags = disable_irqsave();
	int old = v->counter;

	v->counter = swp;
	enable_irqrestore(flags);
	return old;
}

/* Atomic compare-and-swap: store @swp only if the counter equals @cmp.
 * Always returns the value observed before the operation, so callers can
 * detect success by comparing the result against @cmp. The store is done
 * unconditionally (writing back the old value on mismatch) to keep the
 * volatile access pattern uniform. */
static inline int atomic_cas(atomic_t *v, int cmp, int swp)
{
	long flags = disable_irqsave();
	int old = v->counter;

	v->counter = (cmp == old) ? swp : old;
	enable_irqrestore(flags);
	return old;
}

/* Atomically increment the counter; returns its pre-increment value. */
static inline int atomic_inc(atomic_t *v)
{
	return atomic_add(v, +1);
}

/* Atomically decrement the counter; returns its pre-decrement value. */
static inline int atomic_dec(atomic_t *v)
{
	return atomic_add(v, -1);
}
#endif //__riscv_atomic

#endif //_RISCV_ATOMIC_H