/*
 * Copyright (c) 2018, Nordic Semiconductor ASA
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * 3. Neither the name of the copyright holder nor the names of its
 *    contributors may be used to endorse or promote products derived from this
 *    software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
#include "nrfx_atomic.h"

#ifndef NRFX_ATOMIC_USE_BUILT_IN
#define NRFX_ATOMIC_USE_BUILT_IN 0
#endif // NRFX_ATOMIC_USE_BUILT_IN

#if ((__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U))
#define NRFX_ATOMIC_STREX_LDREX_PRESENT
#endif

#if (NRFX_ATOMIC_USE_BUILT_IN == 0) && defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
#include "nrfx_atomic_internal.h"
#endif

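// Each operation below has three implementations, selected at compile time:
// compiler __atomic built-ins (NRFX_ATOMIC_USE_BUILT_IN), LDREX/STREX loops on
// cores that provide them (NRFX_ATOMIC_STREX_LDREX_PRESENT), or a critical
// section fallback on cores without exclusive access instructions.

// Atomic exchange: stores value in *p_data and returns its previous contents.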
uint32_t nrfx_atomic_u32_fetch_store(nrfx_atomic_u32_t * p_data, uint32_t value)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    return __atomic_exchange_n(p_data, value, __ATOMIC_SEQ_CST);
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    uint32_t old_val;
    uint32_t new_val;
    NRFX_ATOMIC_OP(mov, old_val, new_val, p_data, value);
    (void) new_val;
    return old_val;
#else
    NRFX_CRITICAL_SECTION_ENTER();
    uint32_t old_val = *p_data;
    *p_data = value;
    NRFX_CRITICAL_SECTION_EXIT();
    return old_val;
#endif // NRFX_ATOMIC_USE_BUILT_IN
}

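// Atomic store: writes value to *p_data and returns the value written.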
uint32_t nrfx_atomic_u32_store(nrfx_atomic_u32_t * p_data, uint32_t value)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    __atomic_store_n(p_data, value, __ATOMIC_SEQ_CST);
    return value;
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    uint32_t old_val;
    uint32_t new_val;
    NRFX_ATOMIC_OP(mov, old_val, new_val, p_data, value);
    (void) old_val;
    return new_val;
#else
    NRFX_CRITICAL_SECTION_ENTER();
    *p_data = value;
    NRFX_CRITICAL_SECTION_EXIT();
    return value;
#endif // NRFX_ATOMIC_USE_BUILT_IN
}

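// Atomic bitwise OR returning the previous value of *p_data.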
uint32_t nrfx_atomic_u32_fetch_or(nrfx_atomic_u32_t * p_data, uint32_t value)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    return __atomic_fetch_or(p_data, value, __ATOMIC_SEQ_CST);
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    uint32_t old_val;
    uint32_t new_val;
    NRFX_ATOMIC_OP(orr, old_val, new_val, p_data, value);
    (void) new_val;
    return old_val;
#else
    NRFX_CRITICAL_SECTION_ENTER();
    uint32_t old_val = *p_data;
    *p_data |= value;
    NRFX_CRITICAL_SECTION_EXIT();
    return old_val;
#endif // NRFX_ATOMIC_USE_BUILT_IN
}

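// Atomic bitwise OR returning the new value of *p_data.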
uint32_t nrfx_atomic_u32_or(nrfx_atomic_u32_t * p_data, uint32_t value)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    return __atomic_or_fetch(p_data, value, __ATOMIC_SEQ_CST);
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    uint32_t old_val;
    uint32_t new_val;
    NRFX_ATOMIC_OP(orr, old_val, new_val, p_data, value);
    (void) old_val;
    return new_val;
#else
    NRFX_CRITICAL_SECTION_ENTER();
    *p_data |= value;
    uint32_t new_value = *p_data;
    NRFX_CRITICAL_SECTION_EXIT();
    return new_value;
#endif // NRFX_ATOMIC_USE_BUILT_IN
}

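// Atomic bitwise AND returning the previous value of *p_data.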
uint32_t nrfx_atomic_u32_fetch_and(nrfx_atomic_u32_t * p_data, uint32_t value)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    return __atomic_fetch_and(p_data, value, __ATOMIC_SEQ_CST);
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    uint32_t old_val;
    uint32_t new_val;
    NRFX_ATOMIC_OP(and, old_val, new_val, p_data, value);
    (void) new_val;
    return old_val;
#else
    NRFX_CRITICAL_SECTION_ENTER();
    uint32_t old_val = *p_data;
    *p_data &= value;
    NRFX_CRITICAL_SECTION_EXIT();
    return old_val;
#endif // NRFX_ATOMIC_USE_BUILT_IN
}

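// Atomic bitwise AND returning the new value of *p_data.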
uint32_t nrfx_atomic_u32_and(nrfx_atomic_u32_t * p_data, uint32_t value)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    return __atomic_and_fetch(p_data, value, __ATOMIC_SEQ_CST);
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    uint32_t old_val;
    uint32_t new_val;
    NRFX_ATOMIC_OP(and, old_val, new_val, p_data, value);
    (void) old_val;
    return new_val;
#else
    NRFX_CRITICAL_SECTION_ENTER();
    *p_data &= value;
    uint32_t new_value = *p_data;
    NRFX_CRITICAL_SECTION_EXIT();
    return new_value;
#endif // NRFX_ATOMIC_USE_BUILT_IN
}

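// Atomic bitwise XOR returning the previous value of *p_data.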
uint32_t nrfx_atomic_u32_fetch_xor(nrfx_atomic_u32_t * p_data, uint32_t value)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    return __atomic_fetch_xor(p_data, value, __ATOMIC_SEQ_CST);
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    uint32_t old_val;
    uint32_t new_val;
    NRFX_ATOMIC_OP(eor, old_val, new_val, p_data, value);
    (void) new_val;
    return old_val;
#else
    NRFX_CRITICAL_SECTION_ENTER();
    uint32_t old_val = *p_data;
    *p_data ^= value;
    NRFX_CRITICAL_SECTION_EXIT();
    return old_val;
#endif // NRFX_ATOMIC_USE_BUILT_IN
}

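// Atomic bitwise XOR returning the new value of *p_data.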
uint32_t nrfx_atomic_u32_xor(nrfx_atomic_u32_t * p_data, uint32_t value)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    return __atomic_xor_fetch(p_data, value, __ATOMIC_SEQ_CST);
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    uint32_t old_val;
    uint32_t new_val;
    NRFX_ATOMIC_OP(eor, old_val, new_val, p_data, value);
    (void) old_val;
    return new_val;
#else
    NRFX_CRITICAL_SECTION_ENTER();
    *p_data ^= value;
    uint32_t new_value = *p_data;
    NRFX_CRITICAL_SECTION_EXIT();
    return new_value;
#endif // NRFX_ATOMIC_USE_BUILT_IN
}

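// Atomic addition returning the previous value of *p_data.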
uint32_t nrfx_atomic_u32_fetch_add(nrfx_atomic_u32_t * p_data, uint32_t value)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    return __atomic_fetch_add(p_data, value, __ATOMIC_SEQ_CST);
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    uint32_t old_val;
    uint32_t new_val;
    NRFX_ATOMIC_OP(add, old_val, new_val, p_data, value);
    (void) new_val;
    return old_val;
#else
    NRFX_CRITICAL_SECTION_ENTER();
    uint32_t old_val = *p_data;
    *p_data += value;
    NRFX_CRITICAL_SECTION_EXIT();
    return old_val;
#endif // NRFX_ATOMIC_USE_BUILT_IN
}

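// Atomic addition returning the new value of *p_data.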
uint32_t nrfx_atomic_u32_add(nrfx_atomic_u32_t * p_data, uint32_t value)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    return __atomic_add_fetch(p_data, value, __ATOMIC_SEQ_CST);
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    uint32_t old_val;
    uint32_t new_val;
    NRFX_ATOMIC_OP(add, old_val, new_val, p_data, value);
    (void) old_val;
    return new_val;
#else
    NRFX_CRITICAL_SECTION_ENTER();
    *p_data += value;
    uint32_t new_value = *p_data;
    NRFX_CRITICAL_SECTION_EXIT();
    return new_value;
#endif // NRFX_ATOMIC_USE_BUILT_IN
}

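// Atomic subtraction returning the previous value of *p_data.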
uint32_t nrfx_atomic_u32_fetch_sub(nrfx_atomic_u32_t * p_data, uint32_t value)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    return __atomic_fetch_sub(p_data, value, __ATOMIC_SEQ_CST);
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    uint32_t old_val;
    uint32_t new_val;
    NRFX_ATOMIC_OP(sub, old_val, new_val, p_data, value);
    (void) new_val;
    return old_val;
#else
    NRFX_CRITICAL_SECTION_ENTER();
    uint32_t old_val = *p_data;
    *p_data -= value;
    NRFX_CRITICAL_SECTION_EXIT();
    return old_val;
#endif // NRFX_ATOMIC_USE_BUILT_IN
}

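// Atomic subtraction returning the new value of *p_data.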
uint32_t nrfx_atomic_u32_sub(nrfx_atomic_u32_t * p_data, uint32_t value)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    return __atomic_sub_fetch(p_data, value, __ATOMIC_SEQ_CST);
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    uint32_t old_val;
    uint32_t new_val;
    NRFX_ATOMIC_OP(sub, old_val, new_val, p_data, value);
    (void) old_val;
    return new_val;
#else
    NRFX_CRITICAL_SECTION_ENTER();
    *p_data -= value;
    uint32_t new_value = *p_data;
    NRFX_CRITICAL_SECTION_EXIT();
    return new_value;
#endif // NRFX_ATOMIC_USE_BUILT_IN
}

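// Atomic compare-and-exchange: if *p_data equals *p_expected, desired is
// written and true is returned; otherwise *p_expected is updated with the
// current value and false is returned.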
bool nrfx_atomic_u32_cmp_exch(nrfx_atomic_u32_t * p_data,
                              uint32_t          * p_expected,
                              uint32_t            desired)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    return __atomic_compare_exchange(p_data,
                                     p_expected,
                                     &desired,
                                     1,
                                     __ATOMIC_SEQ_CST,
                                     __ATOMIC_SEQ_CST);
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    return nrfx_atomic_internal_cmp_exch(p_data, p_expected, desired);
#else
    bool result;
    NRFX_CRITICAL_SECTION_ENTER();
    if (*p_data == *p_expected)
    {
        *p_data = desired;
        result = true;
    }
    else
    {
        *p_expected = *p_data;
        result = false;
    }
    NRFX_CRITICAL_SECTION_EXIT();
    return result;
#endif
}

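// Atomic subtraction performed only if *p_data is higher or same as value;
// returns the previous value of *p_data.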
uint32_t nrfx_atomic_u32_fetch_sub_hs(nrfx_atomic_u32_t * p_data, uint32_t value)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    uint32_t expected = *p_data;
    uint32_t new_val;
    do {
        if (expected >= value)
        {
            new_val = expected - value;
        }
        else
        {
            new_val = expected;
        }
    } while (!__atomic_compare_exchange(p_data, &expected, &new_val,
                                        1, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST));
    return expected;
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    uint32_t old_val;
    uint32_t new_val;
    NRFX_ATOMIC_OP(sub_hs, old_val, new_val, p_data, value);
    (void) new_val;
    return old_val;
#else
    NRFX_CRITICAL_SECTION_ENTER();
    uint32_t old_val = *p_data;
    // Subtract only when the stored value is higher or same, matching the
    // "hs" semantics of the other implementation branches.
    if (old_val >= value)
    {
        *p_data = old_val - value;
    }
    NRFX_CRITICAL_SECTION_EXIT();
    return old_val;
#endif // NRFX_ATOMIC_USE_BUILT_IN
}

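// Atomic subtraction performed only if *p_data is higher or same as value;
// returns the new value of *p_data.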
uint32_t nrfx_atomic_u32_sub_hs(nrfx_atomic_u32_t * p_data, uint32_t value)
{
#if NRFX_ATOMIC_USE_BUILT_IN
    uint32_t expected = *p_data;
    uint32_t new_val;
    do {
        if (expected >= value)
        {
            new_val = expected - value;
        }
        else
        {
            new_val = expected;
        }
    } while (!__atomic_compare_exchange(p_data, &expected, &new_val,
                                        1, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST));
    return new_val;
#elif defined(NRFX_ATOMIC_STREX_LDREX_PRESENT)
    uint32_t old_val;
    uint32_t new_val;
    NRFX_ATOMIC_OP(sub_hs, old_val, new_val, p_data, value);
    (void) old_val;
    return new_val;
#else
    NRFX_CRITICAL_SECTION_ENTER();
    // Subtract only when the stored value is higher or same, matching the
    // "hs" semantics of the other implementation branches.
    if (*p_data >= value)
    {
        *p_data -= value;
    }
    uint32_t new_value = *p_data;
    NRFX_CRITICAL_SECTION_EXIT();
    return new_value;
#endif // NRFX_ATOMIC_USE_BUILT_IN
}

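// Sets the flag and returns its previous value.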
uint32_t nrfx_atomic_flag_set_fetch(nrfx_atomic_flag_t * p_data)
{
    return nrfx_atomic_u32_fetch_or(p_data, 1);
}

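// Sets the flag and returns its new value.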
uint32_t nrfx_atomic_flag_set(nrfx_atomic_flag_t * p_data)
{
    return nrfx_atomic_u32_or(p_data, 1);
}

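// Clears the flag and returns its previous value.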
uint32_t nrfx_atomic_flag_clear_fetch(nrfx_atomic_flag_t * p_data)
{
    return nrfx_atomic_u32_fetch_and(p_data, 0);
}

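// Clears the flag and returns its new value.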
uint32_t nrfx_atomic_flag_clear(nrfx_atomic_flag_t * p_data)
{
    return nrfx_atomic_u32_and(p_data, 0);
}