aboutsummaryrefslogtreecommitdiff
path: root/sys/linux/arm64/arch/atomic.h
blob: 2fa4b04758d9eea8b45b853744261dbe0f85e272 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
/*
 * atomic·ll: load-linked — read *p with an exclusive, acquire-ordered load.
 * LDAXR marks the location in the exclusive monitor; a following
 * atomic·sc succeeds only if no other observer wrote it in between.
 * Returns the value read from *p.
 */
static inline int
atomic·ll(volatile int *p)
{
	int v;
	/* "Q": memory operand addressed by a single base register, the
	   only addressing mode ldaxr accepts; %w0 = 32-bit view of v */
	__asm__ __volatile__ ("ldaxr %w0,%1" : "=r"(v) : "Q"(*p));
	return v;
}

/*
 * atomic·sc: store-conditional — try to write v to *p with a
 * release-ordered exclusive store (STLXR). The store takes effect only
 * if the exclusive monitor set by a prior atomic·ll is still intact.
 * Returns nonzero (1) on success, 0 if the caller must retry the
 * whole ll/sc sequence.
 */
static inline int
atomic·sc(volatile int *p, int v)
{
	int r;
	/* stlxr writes a status word: 0 = store succeeded, 1 = failed;
	   hence the inversion below. "=&r" (early-clobber) keeps the
	   status register distinct from the value operand. */
	__asm__ __volatile__ ("stlxr %w0,%w2,%1" : "=&r"(r), "=Q"(*p) : "r"(v) : "memory");
	return !r;
}

/*
 * atomic·barrier: full data memory barrier over the inner-shareable
 * domain (dmb ish) — orders all prior loads/stores before all later
 * ones as seen by other cores. The "memory" clobber also stops the
 * compiler from reordering memory accesses across it.
 */
static inline void
atomic·barrier()
{
	__asm__ __volatile__ ("dmb ish" : : : "memory");
}

/*
 * atomic·cas: compare-and-swap built on the ll/sc primitives above.
 * Atomically replaces *p with s iff *p currently equals t.
 * Returns the value observed in *p: equal to t on success, the
 * differing value on failure.
 */
static inline int
atomic·cas(volatile int *p, int t, int s)
{
	for(;;){
		int cur = atomic·ll(p);
		if(cur != t){
			/* mismatch: fence before reporting the observed
			   value (a successful sc would have provided the
			   release ordering itself) */
			atomic·barrier();
			return cur;
		}
		if(atomic·sc(p, s))
			return cur;
		/* sc lost the exclusive monitor — retry */
	}
}

/*
 * atomic·llp: pointer-width load-linked — exclusive acquire load of the
 * pointer stored at p. Same contract as atomic·ll but for a 64-bit
 * pointer slot (%0 without the w modifier selects the full x register).
 * Returns the pointer value read from *p.
 */
static inline void
*atomic·llp(volatile void *p)
{
	void *v;
	/* p is cast to "volatile pointer-to-void-pointer" so the "Q"
	   operand covers the full 8-byte slot */
	__asm__ __volatile__ ("ldaxr %0, %1" : "=r"(v) : "Q"(*(void *volatile *)p));
	return v;
}

/*
 * atomic·scp: pointer-width store-conditional — try to write pointer v
 * into the slot at p with a release-ordered exclusive store; succeeds
 * only if the exclusive monitor set by a prior atomic·llp is intact.
 * Returns nonzero (1) on success, 0 if the ll/sc sequence must retry.
 *
 * Fix: the parameter was declared `volatile int *p`, although the body
 * accesses it as *(void *volatile *)p and atomic·casp passes a
 * `volatile void *`. Declaring it `volatile void *` matches atomic·llp
 * and atomic·casp; the change is backward-compatible since any object
 * pointer converts implicitly to void *.
 */
static inline int
atomic·scp(volatile void *p, void *v)
{
	int r;
	/* stlxr status: 0 = stored, 1 = lost exclusivity; %2 without the
	   w modifier stores the full 64-bit pointer register */
	__asm__ __volatile__ ("stlxr %w0,%2,%1" : "=&r"(r), "=Q"(*(void *volatile *)p) : "r"(v) : "memory");
	return !r;
}

/*
 * atomic·casp: compare-and-swap on a pointer slot, built from the
 * pointer-width ll/sc primitives. Atomically replaces the pointer at p
 * with s iff it currently equals t.
 * Returns the pointer observed at p: equal to t on success, the
 * differing pointer on failure.
 */
static inline void
*atomic·casp(volatile void *p, void *t, void *s)
{
	for(;;){
		void *cur = atomic·llp(p);
		if(cur != t){
			/* mismatch: fence before reporting the observed
			   pointer (a successful scp would have provided
			   the release ordering itself) */
			atomic·barrier();
			return cur;
		}
		if(atomic·scp(p, s))
			return cur;
		/* scp lost the exclusive monitor — retry */
	}
}

/*
 * atomic·ctz64: count trailing zero bits of x.
 * rbit reverses the 64-bit operand's bit order, so clz of the reversed
 * value equals the trailing-zero count of the original.
 * Returns 64 when x == 0 (clz of an all-zero 64-bit operand is 64).
 */
static inline int
atomic·ctz64(uint64 x)
{
	/* no volatile: pure computation, safe for the compiler to CSE */
	__asm__(
		"	rbit %0, %1\n"
		"	clz %0, %0\n"
		: "=r"(x) : "r"(x));
	return x;
}

/*
 * atomic·clz64: count leading zero bits of x via the clz instruction.
 * Returns 64 when x == 0 (clz of an all-zero 64-bit operand is 64).
 */
static inline int
atomic·clz64(uint64 x)
{
	/* no volatile: pure computation, safe for the compiler to CSE */
	__asm__("clz %0, %1" : "=r"(x) : "r"(x));
	return x;
}