xref: /aosp_15_r20/external/musl/arch/sh/atomic_arch.h (revision c9945492fdd68bbe62686c5b452b4dc1be3f8453)
#include "libc.h"

#if defined(__SH4A__)

/* SH-4A provides load-linked/store-conditional via movli.l and movco.l,
 * so the ll/sc primitives are defined directly. */
#define a_ll a_ll
static inline int a_ll(volatile int *p)
{
	int v;
	/* Load *p and set the link for a following movco.l; "z" is r0. */
	__asm__ __volatile__ ("movli.l @%1, %0" : "=z"(v) : "r"(p), "m"(*p));
	return v;
}

#define a_sc a_sc
static inline int a_sc(volatile int *p, int v)
{
	int r;
	/* movco.l stores r0 to *p only if the link from movli.l is still
	 * held; movt then copies the T (success) bit into r. */
	__asm__ __volatile__ (
		"movco.l %2, @%3 ; movt %0"
		: "=r"(r), "=m"(*p) : "z"(v), "r"(p) : "memory", "cc");
	return r;
}

#define a_barrier a_barrier
static inline void a_barrier()
{
	/* synco: SH-4A data synchronization barrier. */
	__asm__ __volatile__ ("synco" ::: "memory");
}

/* Bracket the generic ll/sc-based atomic operations with full barriers. */
#define a_pre_llsc a_barrier
#define a_post_llsc a_barrier

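/* Illustrative only: this header does not define a_cas on the __SH4A__
 * path; a generic layer can synthesize compare-and-swap from the ll/sc
 * primitives above, roughly as sketched below. The helper name
 * a_cas_from_llsc is hypothetical, the sketch assumes the
 * a_ll/a_sc/a_pre_llsc/a_post_llsc definitions above, and it is guarded
 * out so it has no effect on compilation. */
#if 0
static inline int a_cas_from_llsc(volatile int *p, int t, int s)
{
	int old;
	a_pre_llsc();
	/* Retry while the location holds the expected value but the
	 * store-conditional fails (link lost to a racing access). */
	do old = a_ll(p);
	while (old == t && !a_sc(p, s));
	a_post_llsc();
	return old;
}
#endif
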
#else

#define a_cas a_cas
/* Pre-SH-4A: compare-and-swap is performed by calling through
 * __sh_cas_ptr, a hidden pointer to a CAS routine selected at runtime. */
extern hidden const void *__sh_cas_ptr;
static inline int a_cas(volatile int *p, int t, int s)
{
	/* Calling convention of the routine at __sh_cas_ptr: address in
	 * r0 ("z"), expected value in r2, new value in r3; the old value
	 * of *p comes back in r3. r1 and pr are clobbered. */
	register int r1 __asm__("r1");
	register int r2 __asm__("r2") = t;
	register int r3 __asm__("r3") = s;
	__asm__ __volatile__ (
		"jsr @%4 ; nop"
		: "=r"(r1), "+r"(r3) : "z"(p), "r"(r2), "r"(__sh_cas_ptr)
		: "memory", "pr", "cc");
	return r3;
}

#endif
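
/* Illustrative only: a sketch of how a higher layer can build other
 * atomics on top of a_cas. The helper name a_fetch_add_via_cas is
 * hypothetical and not part of this header; the block is guarded out
 * so it has no effect on compilation. */
#if 0
static inline int a_fetch_add_via_cas(volatile int *p, int v)
{
	int old;
	/* Classic CAS loop: reread and retry until the swap succeeds,
	 * i.e. a_cas observes the value we just read. */
	do old = *p;
	while (a_cas(p, old, old + v) != old);
	return old;
}
#endif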