1*225f4ba4SMatthias Ringwald /**************************************************************************//**
2*225f4ba4SMatthias Ringwald * @file cmsis_armclang.h
3*225f4ba4SMatthias Ringwald * @brief CMSIS compiler armclang (Arm Compiler 6) header file
4*225f4ba4SMatthias Ringwald * @version V5.0.4
5*225f4ba4SMatthias Ringwald * @date 10. January 2018
6*225f4ba4SMatthias Ringwald ******************************************************************************/
7*225f4ba4SMatthias Ringwald /*
8*225f4ba4SMatthias Ringwald * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
9*225f4ba4SMatthias Ringwald *
10*225f4ba4SMatthias Ringwald * SPDX-License-Identifier: Apache-2.0
11*225f4ba4SMatthias Ringwald *
12*225f4ba4SMatthias Ringwald * Licensed under the Apache License, Version 2.0 (the License); you may
13*225f4ba4SMatthias Ringwald * not use this file except in compliance with the License.
14*225f4ba4SMatthias Ringwald * You may obtain a copy of the License at
15*225f4ba4SMatthias Ringwald *
16*225f4ba4SMatthias Ringwald * www.apache.org/licenses/LICENSE-2.0
17*225f4ba4SMatthias Ringwald *
18*225f4ba4SMatthias Ringwald * Unless required by applicable law or agreed to in writing, software
19*225f4ba4SMatthias Ringwald * distributed under the License is distributed on an AS IS BASIS, WITHOUT
20*225f4ba4SMatthias Ringwald * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21*225f4ba4SMatthias Ringwald * See the License for the specific language governing permissions and
22*225f4ba4SMatthias Ringwald * limitations under the License.
23*225f4ba4SMatthias Ringwald */
24*225f4ba4SMatthias Ringwald
25*225f4ba4SMatthias Ringwald /*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */
26*225f4ba4SMatthias Ringwald
27*225f4ba4SMatthias Ringwald #ifndef __CMSIS_ARMCLANG_H
28*225f4ba4SMatthias Ringwald #define __CMSIS_ARMCLANG_H
29*225f4ba4SMatthias Ringwald
30*225f4ba4SMatthias Ringwald #pragma clang system_header /* treat file as system include file */
31*225f4ba4SMatthias Ringwald
32*225f4ba4SMatthias Ringwald #ifndef __ARM_COMPAT_H
33*225f4ba4SMatthias Ringwald #include <arm_compat.h> /* Compatibility header for Arm Compiler 5 intrinsics */
34*225f4ba4SMatthias Ringwald #endif
35*225f4ba4SMatthias Ringwald
36*225f4ba4SMatthias Ringwald /* CMSIS compiler specific defines */
37*225f4ba4SMatthias Ringwald #ifndef __ASM
38*225f4ba4SMatthias Ringwald #define __ASM __asm
39*225f4ba4SMatthias Ringwald #endif
40*225f4ba4SMatthias Ringwald #ifndef __INLINE
41*225f4ba4SMatthias Ringwald #define __INLINE __inline
42*225f4ba4SMatthias Ringwald #endif
43*225f4ba4SMatthias Ringwald #ifndef __STATIC_INLINE
44*225f4ba4SMatthias Ringwald #define __STATIC_INLINE static __inline
45*225f4ba4SMatthias Ringwald #endif
46*225f4ba4SMatthias Ringwald #ifndef __STATIC_FORCEINLINE
47*225f4ba4SMatthias Ringwald #define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
48*225f4ba4SMatthias Ringwald #endif
49*225f4ba4SMatthias Ringwald #ifndef __NO_RETURN
50*225f4ba4SMatthias Ringwald #define __NO_RETURN __attribute__((__noreturn__))
51*225f4ba4SMatthias Ringwald #endif
52*225f4ba4SMatthias Ringwald #ifndef __USED
53*225f4ba4SMatthias Ringwald #define __USED __attribute__((used))
54*225f4ba4SMatthias Ringwald #endif
55*225f4ba4SMatthias Ringwald #ifndef __WEAK
56*225f4ba4SMatthias Ringwald #define __WEAK __attribute__((weak))
57*225f4ba4SMatthias Ringwald #endif
58*225f4ba4SMatthias Ringwald #ifndef __PACKED
59*225f4ba4SMatthias Ringwald #define __PACKED __attribute__((packed, aligned(1)))
60*225f4ba4SMatthias Ringwald #endif
61*225f4ba4SMatthias Ringwald #ifndef __PACKED_STRUCT
62*225f4ba4SMatthias Ringwald #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
63*225f4ba4SMatthias Ringwald #endif
64*225f4ba4SMatthias Ringwald #ifndef __PACKED_UNION
65*225f4ba4SMatthias Ringwald #define __PACKED_UNION union __attribute__((packed, aligned(1)))
66*225f4ba4SMatthias Ringwald #endif
67*225f4ba4SMatthias Ringwald #ifndef __UNALIGNED_UINT32 /* deprecated */
68*225f4ba4SMatthias Ringwald #pragma clang diagnostic push
69*225f4ba4SMatthias Ringwald #pragma clang diagnostic ignored "-Wpacked"
70*225f4ba4SMatthias Ringwald /*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */
71*225f4ba4SMatthias Ringwald struct __attribute__((packed)) T_UINT32 { uint32_t v; };
72*225f4ba4SMatthias Ringwald #pragma clang diagnostic pop
73*225f4ba4SMatthias Ringwald #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
74*225f4ba4SMatthias Ringwald #endif
75*225f4ba4SMatthias Ringwald #ifndef __UNALIGNED_UINT16_WRITE
76*225f4ba4SMatthias Ringwald #pragma clang diagnostic push
77*225f4ba4SMatthias Ringwald #pragma clang diagnostic ignored "-Wpacked"
78*225f4ba4SMatthias Ringwald /*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
79*225f4ba4SMatthias Ringwald __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
80*225f4ba4SMatthias Ringwald #pragma clang diagnostic pop
81*225f4ba4SMatthias Ringwald #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
82*225f4ba4SMatthias Ringwald #endif
83*225f4ba4SMatthias Ringwald #ifndef __UNALIGNED_UINT16_READ
84*225f4ba4SMatthias Ringwald #pragma clang diagnostic push
85*225f4ba4SMatthias Ringwald #pragma clang diagnostic ignored "-Wpacked"
86*225f4ba4SMatthias Ringwald /*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
87*225f4ba4SMatthias Ringwald __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
88*225f4ba4SMatthias Ringwald #pragma clang diagnostic pop
89*225f4ba4SMatthias Ringwald #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
90*225f4ba4SMatthias Ringwald #endif
91*225f4ba4SMatthias Ringwald #ifndef __UNALIGNED_UINT32_WRITE
92*225f4ba4SMatthias Ringwald #pragma clang diagnostic push
93*225f4ba4SMatthias Ringwald #pragma clang diagnostic ignored "-Wpacked"
94*225f4ba4SMatthias Ringwald /*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
95*225f4ba4SMatthias Ringwald __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
96*225f4ba4SMatthias Ringwald #pragma clang diagnostic pop
97*225f4ba4SMatthias Ringwald #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
98*225f4ba4SMatthias Ringwald #endif
99*225f4ba4SMatthias Ringwald #ifndef __UNALIGNED_UINT32_READ
100*225f4ba4SMatthias Ringwald #pragma clang diagnostic push
101*225f4ba4SMatthias Ringwald #pragma clang diagnostic ignored "-Wpacked"
102*225f4ba4SMatthias Ringwald /*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */
103*225f4ba4SMatthias Ringwald __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
104*225f4ba4SMatthias Ringwald #pragma clang diagnostic pop
105*225f4ba4SMatthias Ringwald #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
106*225f4ba4SMatthias Ringwald #endif
107*225f4ba4SMatthias Ringwald #ifndef __ALIGNED
108*225f4ba4SMatthias Ringwald #define __ALIGNED(x) __attribute__((aligned(x)))
109*225f4ba4SMatthias Ringwald #endif
110*225f4ba4SMatthias Ringwald #ifndef __RESTRICT
111*225f4ba4SMatthias Ringwald #define __RESTRICT __restrict
112*225f4ba4SMatthias Ringwald #endif
113*225f4ba4SMatthias Ringwald
114*225f4ba4SMatthias Ringwald
115*225f4ba4SMatthias Ringwald /* ########################### Core Function Access ########################### */
116*225f4ba4SMatthias Ringwald /** \ingroup CMSIS_Core_FunctionInterface
117*225f4ba4SMatthias Ringwald \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
118*225f4ba4SMatthias Ringwald @{
119*225f4ba4SMatthias Ringwald */
120*225f4ba4SMatthias Ringwald
121*225f4ba4SMatthias Ringwald /**
122*225f4ba4SMatthias Ringwald \brief Enable IRQ Interrupts
123*225f4ba4SMatthias Ringwald \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
124*225f4ba4SMatthias Ringwald Can only be executed in Privileged modes.
125*225f4ba4SMatthias Ringwald */
126*225f4ba4SMatthias Ringwald /* intrinsic void __enable_irq(); see arm_compat.h */
127*225f4ba4SMatthias Ringwald
128*225f4ba4SMatthias Ringwald
129*225f4ba4SMatthias Ringwald /**
130*225f4ba4SMatthias Ringwald \brief Disable IRQ Interrupts
131*225f4ba4SMatthias Ringwald \details Disables IRQ interrupts by setting the I-bit in the CPSR.
132*225f4ba4SMatthias Ringwald Can only be executed in Privileged modes.
133*225f4ba4SMatthias Ringwald */
134*225f4ba4SMatthias Ringwald /* intrinsic void __disable_irq(); see arm_compat.h */
135*225f4ba4SMatthias Ringwald
136*225f4ba4SMatthias Ringwald
137*225f4ba4SMatthias Ringwald /**
138*225f4ba4SMatthias Ringwald \brief Get Control Register
139*225f4ba4SMatthias Ringwald \details Returns the content of the Control Register.
140*225f4ba4SMatthias Ringwald \return Control Register value
141*225f4ba4SMatthias Ringwald */
__get_CONTROL(void)142*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
143*225f4ba4SMatthias Ringwald {
144*225f4ba4SMatthias Ringwald uint32_t result;
145*225f4ba4SMatthias Ringwald
146*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, control" : "=r" (result) );
147*225f4ba4SMatthias Ringwald return(result);
148*225f4ba4SMatthias Ringwald }
149*225f4ba4SMatthias Ringwald
150*225f4ba4SMatthias Ringwald
151*225f4ba4SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
152*225f4ba4SMatthias Ringwald /**
153*225f4ba4SMatthias Ringwald \brief Get Control Register (non-secure)
154*225f4ba4SMatthias Ringwald \details Returns the content of the non-secure Control Register when in secure mode.
155*225f4ba4SMatthias Ringwald \return non-secure Control Register value
156*225f4ba4SMatthias Ringwald */
__TZ_get_CONTROL_NS(void)157*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
158*225f4ba4SMatthias Ringwald {
159*225f4ba4SMatthias Ringwald uint32_t result;
160*225f4ba4SMatthias Ringwald
161*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
162*225f4ba4SMatthias Ringwald return(result);
163*225f4ba4SMatthias Ringwald }
164*225f4ba4SMatthias Ringwald #endif
165*225f4ba4SMatthias Ringwald
166*225f4ba4SMatthias Ringwald
167*225f4ba4SMatthias Ringwald /**
168*225f4ba4SMatthias Ringwald \brief Set Control Register
169*225f4ba4SMatthias Ringwald \details Writes the given value to the Control Register.
170*225f4ba4SMatthias Ringwald \param [in] control Control Register value to set
171*225f4ba4SMatthias Ringwald */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  /* MSR write to CONTROL; "memory" clobber stops the compiler from
     reordering memory accesses across the privilege/stack change. */
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}
176*225f4ba4SMatthias Ringwald
177*225f4ba4SMatthias Ringwald
178*225f4ba4SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
179*225f4ba4SMatthias Ringwald /**
180*225f4ba4SMatthias Ringwald \brief Set Control Register (non-secure)
181*225f4ba4SMatthias Ringwald \details Writes the given value to the non-secure Control Register when in secure state.
182*225f4ba4SMatthias Ringwald \param [in] control Control Register value to set
183*225f4ba4SMatthias Ringwald */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  /* MSR write to non-secure CONTROL (secure state only); "memory"
     clobber keeps memory accesses ordered around the change. */
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
188*225f4ba4SMatthias Ringwald #endif
189*225f4ba4SMatthias Ringwald
190*225f4ba4SMatthias Ringwald
191*225f4ba4SMatthias Ringwald /**
192*225f4ba4SMatthias Ringwald \brief Get IPSR Register
193*225f4ba4SMatthias Ringwald \details Returns the content of the IPSR Register.
194*225f4ba4SMatthias Ringwald \return IPSR Register value
195*225f4ba4SMatthias Ringwald */
__get_IPSR(void)196*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_IPSR(void)
197*225f4ba4SMatthias Ringwald {
198*225f4ba4SMatthias Ringwald uint32_t result;
199*225f4ba4SMatthias Ringwald
200*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
201*225f4ba4SMatthias Ringwald return(result);
202*225f4ba4SMatthias Ringwald }
203*225f4ba4SMatthias Ringwald
204*225f4ba4SMatthias Ringwald
205*225f4ba4SMatthias Ringwald /**
206*225f4ba4SMatthias Ringwald \brief Get APSR Register
207*225f4ba4SMatthias Ringwald \details Returns the content of the APSR Register.
208*225f4ba4SMatthias Ringwald \return APSR Register value
209*225f4ba4SMatthias Ringwald */
__get_APSR(void)210*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_APSR(void)
211*225f4ba4SMatthias Ringwald {
212*225f4ba4SMatthias Ringwald uint32_t result;
213*225f4ba4SMatthias Ringwald
214*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, apsr" : "=r" (result) );
215*225f4ba4SMatthias Ringwald return(result);
216*225f4ba4SMatthias Ringwald }
217*225f4ba4SMatthias Ringwald
218*225f4ba4SMatthias Ringwald
219*225f4ba4SMatthias Ringwald /**
220*225f4ba4SMatthias Ringwald \brief Get xPSR Register
221*225f4ba4SMatthias Ringwald \details Returns the content of the xPSR Register.
222*225f4ba4SMatthias Ringwald \return xPSR Register value
223*225f4ba4SMatthias Ringwald */
__get_xPSR(void)224*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_xPSR(void)
225*225f4ba4SMatthias Ringwald {
226*225f4ba4SMatthias Ringwald uint32_t result;
227*225f4ba4SMatthias Ringwald
228*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
229*225f4ba4SMatthias Ringwald return(result);
230*225f4ba4SMatthias Ringwald }
231*225f4ba4SMatthias Ringwald
232*225f4ba4SMatthias Ringwald
233*225f4ba4SMatthias Ringwald /**
234*225f4ba4SMatthias Ringwald \brief Get Process Stack Pointer
235*225f4ba4SMatthias Ringwald \details Returns the current value of the Process Stack Pointer (PSP).
236*225f4ba4SMatthias Ringwald \return PSP Register value
237*225f4ba4SMatthias Ringwald */
__get_PSP(void)238*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_PSP(void)
239*225f4ba4SMatthias Ringwald {
240*225f4ba4SMatthias Ringwald uint32_t result;
241*225f4ba4SMatthias Ringwald
242*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, psp" : "=r" (result) );
243*225f4ba4SMatthias Ringwald return(result);
244*225f4ba4SMatthias Ringwald }
245*225f4ba4SMatthias Ringwald
246*225f4ba4SMatthias Ringwald
247*225f4ba4SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
248*225f4ba4SMatthias Ringwald /**
249*225f4ba4SMatthias Ringwald \brief Get Process Stack Pointer (non-secure)
250*225f4ba4SMatthias Ringwald \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
251*225f4ba4SMatthias Ringwald \return PSP Register value
252*225f4ba4SMatthias Ringwald */
__TZ_get_PSP_NS(void)253*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
254*225f4ba4SMatthias Ringwald {
255*225f4ba4SMatthias Ringwald uint32_t result;
256*225f4ba4SMatthias Ringwald
257*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
258*225f4ba4SMatthias Ringwald return(result);
259*225f4ba4SMatthias Ringwald }
260*225f4ba4SMatthias Ringwald #endif
261*225f4ba4SMatthias Ringwald
262*225f4ba4SMatthias Ringwald
263*225f4ba4SMatthias Ringwald /**
264*225f4ba4SMatthias Ringwald \brief Set Process Stack Pointer
265*225f4ba4SMatthias Ringwald \details Assigns the given value to the Process Stack Pointer (PSP).
266*225f4ba4SMatthias Ringwald \param [in] topOfProcStack Process Stack Pointer value to set
267*225f4ba4SMatthias Ringwald */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  /* MSR write to PSP; clobber list deliberately empty (matches CMSIS). */
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}
272*225f4ba4SMatthias Ringwald
273*225f4ba4SMatthias Ringwald
274*225f4ba4SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
275*225f4ba4SMatthias Ringwald /**
276*225f4ba4SMatthias Ringwald \brief Set Process Stack Pointer (non-secure)
277*225f4ba4SMatthias Ringwald \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
278*225f4ba4SMatthias Ringwald \param [in] topOfProcStack Process Stack Pointer value to set
279*225f4ba4SMatthias Ringwald */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  /* MSR write to non-secure PSP (secure state only). */
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
284*225f4ba4SMatthias Ringwald #endif
285*225f4ba4SMatthias Ringwald
286*225f4ba4SMatthias Ringwald
287*225f4ba4SMatthias Ringwald /**
288*225f4ba4SMatthias Ringwald \brief Get Main Stack Pointer
289*225f4ba4SMatthias Ringwald \details Returns the current value of the Main Stack Pointer (MSP).
290*225f4ba4SMatthias Ringwald \return MSP Register value
291*225f4ba4SMatthias Ringwald */
__get_MSP(void)292*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_MSP(void)
293*225f4ba4SMatthias Ringwald {
294*225f4ba4SMatthias Ringwald uint32_t result;
295*225f4ba4SMatthias Ringwald
296*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, msp" : "=r" (result) );
297*225f4ba4SMatthias Ringwald return(result);
298*225f4ba4SMatthias Ringwald }
299*225f4ba4SMatthias Ringwald
300*225f4ba4SMatthias Ringwald
301*225f4ba4SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
302*225f4ba4SMatthias Ringwald /**
303*225f4ba4SMatthias Ringwald \brief Get Main Stack Pointer (non-secure)
304*225f4ba4SMatthias Ringwald \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
305*225f4ba4SMatthias Ringwald \return MSP Register value
306*225f4ba4SMatthias Ringwald */
__TZ_get_MSP_NS(void)307*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
308*225f4ba4SMatthias Ringwald {
309*225f4ba4SMatthias Ringwald uint32_t result;
310*225f4ba4SMatthias Ringwald
311*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
312*225f4ba4SMatthias Ringwald return(result);
313*225f4ba4SMatthias Ringwald }
314*225f4ba4SMatthias Ringwald #endif
315*225f4ba4SMatthias Ringwald
316*225f4ba4SMatthias Ringwald
317*225f4ba4SMatthias Ringwald /**
318*225f4ba4SMatthias Ringwald \brief Set Main Stack Pointer
319*225f4ba4SMatthias Ringwald \details Assigns the given value to the Main Stack Pointer (MSP).
320*225f4ba4SMatthias Ringwald \param [in] topOfMainStack Main Stack Pointer value to set
321*225f4ba4SMatthias Ringwald */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  /* MSR write to MSP; clobber list deliberately empty (matches CMSIS). */
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}
326*225f4ba4SMatthias Ringwald
327*225f4ba4SMatthias Ringwald
328*225f4ba4SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
329*225f4ba4SMatthias Ringwald /**
330*225f4ba4SMatthias Ringwald \brief Set Main Stack Pointer (non-secure)
331*225f4ba4SMatthias Ringwald \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
332*225f4ba4SMatthias Ringwald \param [in] topOfMainStack Main Stack Pointer value to set
333*225f4ba4SMatthias Ringwald */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  /* MSR write to non-secure MSP (secure state only). */
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
338*225f4ba4SMatthias Ringwald #endif
339*225f4ba4SMatthias Ringwald
340*225f4ba4SMatthias Ringwald
341*225f4ba4SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
342*225f4ba4SMatthias Ringwald /**
343*225f4ba4SMatthias Ringwald \brief Get Stack Pointer (non-secure)
344*225f4ba4SMatthias Ringwald \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
345*225f4ba4SMatthias Ringwald \return SP Register value
346*225f4ba4SMatthias Ringwald */
__TZ_get_SP_NS(void)347*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
348*225f4ba4SMatthias Ringwald {
349*225f4ba4SMatthias Ringwald uint32_t result;
350*225f4ba4SMatthias Ringwald
351*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
352*225f4ba4SMatthias Ringwald return(result);
353*225f4ba4SMatthias Ringwald }
354*225f4ba4SMatthias Ringwald
355*225f4ba4SMatthias Ringwald
356*225f4ba4SMatthias Ringwald /**
357*225f4ba4SMatthias Ringwald \brief Set Stack Pointer (non-secure)
358*225f4ba4SMatthias Ringwald \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
359*225f4ba4SMatthias Ringwald \param [in] topOfStack Stack Pointer value to set
360*225f4ba4SMatthias Ringwald */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  /* MSR write to the non-secure current SP (secure state only). */
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
365*225f4ba4SMatthias Ringwald #endif
366*225f4ba4SMatthias Ringwald
367*225f4ba4SMatthias Ringwald
368*225f4ba4SMatthias Ringwald /**
369*225f4ba4SMatthias Ringwald \brief Get Priority Mask
370*225f4ba4SMatthias Ringwald \details Returns the current state of the priority mask bit from the Priority Mask Register.
371*225f4ba4SMatthias Ringwald \return Priority Mask value
372*225f4ba4SMatthias Ringwald */
__get_PRIMASK(void)373*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
374*225f4ba4SMatthias Ringwald {
375*225f4ba4SMatthias Ringwald uint32_t result;
376*225f4ba4SMatthias Ringwald
377*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, primask" : "=r" (result) );
378*225f4ba4SMatthias Ringwald return(result);
379*225f4ba4SMatthias Ringwald }
380*225f4ba4SMatthias Ringwald
381*225f4ba4SMatthias Ringwald
382*225f4ba4SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
383*225f4ba4SMatthias Ringwald /**
384*225f4ba4SMatthias Ringwald \brief Get Priority Mask (non-secure)
385*225f4ba4SMatthias Ringwald \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
386*225f4ba4SMatthias Ringwald \return Priority Mask value
387*225f4ba4SMatthias Ringwald */
__TZ_get_PRIMASK_NS(void)388*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
389*225f4ba4SMatthias Ringwald {
390*225f4ba4SMatthias Ringwald uint32_t result;
391*225f4ba4SMatthias Ringwald
392*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
393*225f4ba4SMatthias Ringwald return(result);
394*225f4ba4SMatthias Ringwald }
395*225f4ba4SMatthias Ringwald #endif
396*225f4ba4SMatthias Ringwald
397*225f4ba4SMatthias Ringwald
398*225f4ba4SMatthias Ringwald /**
399*225f4ba4SMatthias Ringwald \brief Set Priority Mask
400*225f4ba4SMatthias Ringwald \details Assigns the given value to the Priority Mask Register.
401*225f4ba4SMatthias Ringwald \param [in] priMask Priority Mask
402*225f4ba4SMatthias Ringwald */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  /* MSR write to PRIMASK; "memory" clobber acts as a compiler barrier
     so accesses are not moved into/out of the masked region. */
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
407*225f4ba4SMatthias Ringwald
408*225f4ba4SMatthias Ringwald
409*225f4ba4SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
410*225f4ba4SMatthias Ringwald /**
411*225f4ba4SMatthias Ringwald \brief Set Priority Mask (non-secure)
412*225f4ba4SMatthias Ringwald \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
413*225f4ba4SMatthias Ringwald \param [in] priMask Priority Mask
414*225f4ba4SMatthias Ringwald */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  /* MSR write to non-secure PRIMASK (secure state only); "memory"
     clobber acts as a compiler barrier. */
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
419*225f4ba4SMatthias Ringwald #endif
420*225f4ba4SMatthias Ringwald
421*225f4ba4SMatthias Ringwald
422*225f4ba4SMatthias Ringwald #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
423*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
424*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
425*225f4ba4SMatthias Ringwald /**
426*225f4ba4SMatthias Ringwald \brief Enable FIQ
427*225f4ba4SMatthias Ringwald \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
428*225f4ba4SMatthias Ringwald Can only be executed in Privileged modes.
429*225f4ba4SMatthias Ringwald */
430*225f4ba4SMatthias Ringwald #define __enable_fault_irq __enable_fiq /* see arm_compat.h */
431*225f4ba4SMatthias Ringwald
432*225f4ba4SMatthias Ringwald
433*225f4ba4SMatthias Ringwald /**
434*225f4ba4SMatthias Ringwald \brief Disable FIQ
435*225f4ba4SMatthias Ringwald \details Disables FIQ interrupts by setting the F-bit in the CPSR.
436*225f4ba4SMatthias Ringwald Can only be executed in Privileged modes.
437*225f4ba4SMatthias Ringwald */
438*225f4ba4SMatthias Ringwald #define __disable_fault_irq __disable_fiq /* see arm_compat.h */
439*225f4ba4SMatthias Ringwald
440*225f4ba4SMatthias Ringwald
441*225f4ba4SMatthias Ringwald /**
442*225f4ba4SMatthias Ringwald \brief Get Base Priority
443*225f4ba4SMatthias Ringwald \details Returns the current value of the Base Priority register.
444*225f4ba4SMatthias Ringwald \return Base Priority register value
445*225f4ba4SMatthias Ringwald */
__get_BASEPRI(void)446*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
447*225f4ba4SMatthias Ringwald {
448*225f4ba4SMatthias Ringwald uint32_t result;
449*225f4ba4SMatthias Ringwald
450*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, basepri" : "=r" (result) );
451*225f4ba4SMatthias Ringwald return(result);
452*225f4ba4SMatthias Ringwald }
453*225f4ba4SMatthias Ringwald
454*225f4ba4SMatthias Ringwald
455*225f4ba4SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
456*225f4ba4SMatthias Ringwald /**
457*225f4ba4SMatthias Ringwald \brief Get Base Priority (non-secure)
458*225f4ba4SMatthias Ringwald \details Returns the current value of the non-secure Base Priority register when in secure state.
459*225f4ba4SMatthias Ringwald \return Base Priority register value
460*225f4ba4SMatthias Ringwald */
__TZ_get_BASEPRI_NS(void)461*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
462*225f4ba4SMatthias Ringwald {
463*225f4ba4SMatthias Ringwald uint32_t result;
464*225f4ba4SMatthias Ringwald
465*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
466*225f4ba4SMatthias Ringwald return(result);
467*225f4ba4SMatthias Ringwald }
468*225f4ba4SMatthias Ringwald #endif
469*225f4ba4SMatthias Ringwald
470*225f4ba4SMatthias Ringwald
471*225f4ba4SMatthias Ringwald /**
472*225f4ba4SMatthias Ringwald \brief Set Base Priority
473*225f4ba4SMatthias Ringwald \details Assigns the given value to the Base Priority register.
474*225f4ba4SMatthias Ringwald \param [in] basePri Base Priority value to set
475*225f4ba4SMatthias Ringwald */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  /* MSR write to BASEPRI; "memory" clobber acts as a compiler barrier. */
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}
480*225f4ba4SMatthias Ringwald
481*225f4ba4SMatthias Ringwald
482*225f4ba4SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
483*225f4ba4SMatthias Ringwald /**
484*225f4ba4SMatthias Ringwald \brief Set Base Priority (non-secure)
485*225f4ba4SMatthias Ringwald \details Assigns the given value to the non-secure Base Priority register when in secure state.
486*225f4ba4SMatthias Ringwald \param [in] basePri Base Priority value to set
487*225f4ba4SMatthias Ringwald */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  /* MSR write to non-secure BASEPRI (secure state only); "memory"
     clobber acts as a compiler barrier. */
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
492*225f4ba4SMatthias Ringwald #endif
493*225f4ba4SMatthias Ringwald
494*225f4ba4SMatthias Ringwald
495*225f4ba4SMatthias Ringwald /**
496*225f4ba4SMatthias Ringwald \brief Set Base Priority with condition
497*225f4ba4SMatthias Ringwald \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
498*225f4ba4SMatthias Ringwald or the new value increases the BASEPRI priority level.
499*225f4ba4SMatthias Ringwald \param [in] basePri Base Priority value to set
500*225f4ba4SMatthias Ringwald */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  /* BASEPRI_MAX: the hardware applies the write only if it raises the
     masking level (conditional update — no read-modify-write needed). */
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
505*225f4ba4SMatthias Ringwald
506*225f4ba4SMatthias Ringwald
507*225f4ba4SMatthias Ringwald /**
508*225f4ba4SMatthias Ringwald \brief Get Fault Mask
509*225f4ba4SMatthias Ringwald \details Returns the current value of the Fault Mask register.
510*225f4ba4SMatthias Ringwald \return Fault Mask register value
511*225f4ba4SMatthias Ringwald */
__get_FAULTMASK(void)512*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
513*225f4ba4SMatthias Ringwald {
514*225f4ba4SMatthias Ringwald uint32_t result;
515*225f4ba4SMatthias Ringwald
516*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
517*225f4ba4SMatthias Ringwald return(result);
518*225f4ba4SMatthias Ringwald }
519*225f4ba4SMatthias Ringwald
520*225f4ba4SMatthias Ringwald
521*225f4ba4SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
522*225f4ba4SMatthias Ringwald /**
523*225f4ba4SMatthias Ringwald \brief Get Fault Mask (non-secure)
524*225f4ba4SMatthias Ringwald \details Returns the current value of the non-secure Fault Mask register when in secure state.
525*225f4ba4SMatthias Ringwald \return Fault Mask register value
526*225f4ba4SMatthias Ringwald */
__TZ_get_FAULTMASK_NS(void)527*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
528*225f4ba4SMatthias Ringwald {
529*225f4ba4SMatthias Ringwald uint32_t result;
530*225f4ba4SMatthias Ringwald
531*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
532*225f4ba4SMatthias Ringwald return(result);
533*225f4ba4SMatthias Ringwald }
534*225f4ba4SMatthias Ringwald #endif
535*225f4ba4SMatthias Ringwald
536*225f4ba4SMatthias Ringwald
537*225f4ba4SMatthias Ringwald /**
538*225f4ba4SMatthias Ringwald \brief Set Fault Mask
539*225f4ba4SMatthias Ringwald \details Assigns the given value to the Fault Mask register.
540*225f4ba4SMatthias Ringwald \param [in] faultMask Fault Mask value to set
541*225f4ba4SMatthias Ringwald */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  /* MSR write to FAULTMASK; "memory" clobber acts as a compiler barrier. */
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}
546*225f4ba4SMatthias Ringwald
547*225f4ba4SMatthias Ringwald
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Fault Mask (non-secure)
  \details Assigns the given value to the non-secure Fault Mask register when in secure state.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  /* Secure state only: write the non-secure banked FAULTMASK. */
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif
559*225f4ba4SMatthias Ringwald
560*225f4ba4SMatthias Ringwald #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
561*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
562*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
563*225f4ba4SMatthias Ringwald
564*225f4ba4SMatthias Ringwald
565*225f4ba4SMatthias Ringwald #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
566*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
567*225f4ba4SMatthias Ringwald
568*225f4ba4SMatthias Ringwald /**
569*225f4ba4SMatthias Ringwald \brief Get Process Stack Pointer Limit
570*225f4ba4SMatthias Ringwald Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
571*225f4ba4SMatthias Ringwald Stack Pointer Limit register hence zero is returned always in non-secure
572*225f4ba4SMatthias Ringwald mode.
573*225f4ba4SMatthias Ringwald
574*225f4ba4SMatthias Ringwald \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
575*225f4ba4SMatthias Ringwald \return PSPLIM Register value
576*225f4ba4SMatthias Ringwald */
__get_PSPLIM(void)577*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
578*225f4ba4SMatthias Ringwald {
579*225f4ba4SMatthias Ringwald #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
580*225f4ba4SMatthias Ringwald (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
581*225f4ba4SMatthias Ringwald // without main extensions, the non-secure PSPLIM is RAZ/WI
582*225f4ba4SMatthias Ringwald return 0U;
583*225f4ba4SMatthias Ringwald #else
584*225f4ba4SMatthias Ringwald uint32_t result;
585*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, psplim" : "=r" (result) );
586*225f4ba4SMatthias Ringwald return result;
587*225f4ba4SMatthias Ringwald #endif
588*225f4ba4SMatthias Ringwald }
589*225f4ba4SMatthias Ringwald
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always in non-secure
  mode.

  \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \return  PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  /* No Main Extension: the non-secure PSPLIM reads as zero (RAZ/WI). */
  return 0U;
#else
  uint32_t limit_ns;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (limit_ns) );
  return limit_ns;
#endif
}
#endif
612*225f4ba4SMatthias Ringwald
613*225f4ba4SMatthias Ringwald
614*225f4ba4SMatthias Ringwald /**
615*225f4ba4SMatthias Ringwald \brief Set Process Stack Pointer Limit
616*225f4ba4SMatthias Ringwald Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
617*225f4ba4SMatthias Ringwald Stack Pointer Limit register hence the write is silently ignored in non-secure
618*225f4ba4SMatthias Ringwald mode.
619*225f4ba4SMatthias Ringwald
620*225f4ba4SMatthias Ringwald \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
621*225f4ba4SMatthias Ringwald \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
622*225f4ba4SMatthias Ringwald */
__set_PSPLIM(uint32_t ProcStackPtrLimit)623*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
624*225f4ba4SMatthias Ringwald {
625*225f4ba4SMatthias Ringwald #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
626*225f4ba4SMatthias Ringwald (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
627*225f4ba4SMatthias Ringwald // without main extensions, the non-secure PSPLIM is RAZ/WI
628*225f4ba4SMatthias Ringwald (void)ProcStackPtrLimit;
629*225f4ba4SMatthias Ringwald #else
630*225f4ba4SMatthias Ringwald __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
631*225f4ba4SMatthias Ringwald #endif
632*225f4ba4SMatthias Ringwald }
633*225f4ba4SMatthias Ringwald
634*225f4ba4SMatthias Ringwald
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Process Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored in non-secure
  mode.

  \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  /* No Main Extension: the non-secure PSPLIM write is ignored (RAZ/WI). */
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif
655*225f4ba4SMatthias Ringwald
656*225f4ba4SMatthias Ringwald
657*225f4ba4SMatthias Ringwald /**
658*225f4ba4SMatthias Ringwald \brief Get Main Stack Pointer Limit
659*225f4ba4SMatthias Ringwald Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
660*225f4ba4SMatthias Ringwald Stack Pointer Limit register hence zero is returned always.
661*225f4ba4SMatthias Ringwald
662*225f4ba4SMatthias Ringwald \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
663*225f4ba4SMatthias Ringwald \return MSPLIM Register value
664*225f4ba4SMatthias Ringwald */
__get_MSPLIM(void)665*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
666*225f4ba4SMatthias Ringwald {
667*225f4ba4SMatthias Ringwald #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
668*225f4ba4SMatthias Ringwald (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
669*225f4ba4SMatthias Ringwald // without main extensions, the non-secure MSPLIM is RAZ/WI
670*225f4ba4SMatthias Ringwald return 0U;
671*225f4ba4SMatthias Ringwald #else
672*225f4ba4SMatthias Ringwald uint32_t result;
673*225f4ba4SMatthias Ringwald __ASM volatile ("MRS %0, msplim" : "=r" (result) );
674*225f4ba4SMatthias Ringwald return result;
675*225f4ba4SMatthias Ringwald #endif
676*225f4ba4SMatthias Ringwald }
677*225f4ba4SMatthias Ringwald
678*225f4ba4SMatthias Ringwald
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Main Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always.

  \details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \return  MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  /* No Main Extension: the non-secure MSPLIM reads as zero (RAZ/WI). */
  return 0U;
#else
  uint32_t limit_ns;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (limit_ns) );
  return limit_ns;
#endif
}
#endif
700*225f4ba4SMatthias Ringwald
701*225f4ba4SMatthias Ringwald
702*225f4ba4SMatthias Ringwald /**
703*225f4ba4SMatthias Ringwald \brief Set Main Stack Pointer Limit
704*225f4ba4SMatthias Ringwald Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
705*225f4ba4SMatthias Ringwald Stack Pointer Limit register hence the write is silently ignored.
706*225f4ba4SMatthias Ringwald
707*225f4ba4SMatthias Ringwald \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
708*225f4ba4SMatthias Ringwald \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set
709*225f4ba4SMatthias Ringwald */
__set_MSPLIM(uint32_t MainStackPtrLimit)710*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
711*225f4ba4SMatthias Ringwald {
712*225f4ba4SMatthias Ringwald #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
713*225f4ba4SMatthias Ringwald (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
714*225f4ba4SMatthias Ringwald // without main extensions, the non-secure MSPLIM is RAZ/WI
715*225f4ba4SMatthias Ringwald (void)MainStackPtrLimit;
716*225f4ba4SMatthias Ringwald #else
717*225f4ba4SMatthias Ringwald __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
718*225f4ba4SMatthias Ringwald #endif
719*225f4ba4SMatthias Ringwald }
720*225f4ba4SMatthias Ringwald
721*225f4ba4SMatthias Ringwald
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Main Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored.

  \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \param [in]    MainStackPtrLimit  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  /* No Main Extension: the non-secure MSPLIM write is ignored (RAZ/WI). */
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif
741*225f4ba4SMatthias Ringwald
742*225f4ba4SMatthias Ringwald #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
743*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
744*225f4ba4SMatthias Ringwald
/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
           When no FPU is present or not used, the register does not exist and 0 is returned.
  \return  Floating Point Status/Control register value
 */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED   == 1U))     )
#define __get_FPSCR      (uint32_t)__builtin_arm_get_fpscr
#else
#define __get_FPSCR()      ((uint32_t)0U)
#endif

/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
           When no FPU is present or not used, the write is discarded.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED   == 1U))     )
#define __set_FPSCR      __builtin_arm_set_fpscr
#else
#define __set_FPSCR(x)      ((void)(x))
#endif
768*225f4ba4SMatthias Ringwald
769*225f4ba4SMatthias Ringwald
770*225f4ba4SMatthias Ringwald /*@} end of CMSIS_Core_RegAccFunctions */
771*225f4ba4SMatthias Ringwald
772*225f4ba4SMatthias Ringwald
773*225f4ba4SMatthias Ringwald /* ########################## Core Instruction Access ######################### */
774*225f4ba4SMatthias Ringwald /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
775*225f4ba4SMatthias Ringwald Access to dedicated instructions
776*225f4ba4SMatthias Ringwald @{
777*225f4ba4SMatthias Ringwald */
778*225f4ba4SMatthias Ringwald
/* Define macros for porting to both thumb1 and thumb2.
 * Thumb-1 instructions can only address the low registers (r0-r7), selected
 * by the inline-asm constraint "l"; Thumb-2 (and ARM) can use any general
 * register, selected by constraint "r". */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif
789*225f4ba4SMatthias Ringwald
/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP          __builtin_arm_nop

/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI          __builtin_arm_wfi


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE          __builtin_arm_wfe


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV          __builtin_arm_sev
816*225f4ba4SMatthias Ringwald
817*225f4ba4SMatthias Ringwald
/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
/* Note: no trailing semicolon inside the expansion; the previous definition
   ended in ';' which made e.g. "if (c) __ISB(); else ..." a syntax error. */
#define __ISB()        __builtin_arm_isb(0xF)

/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()        __builtin_arm_dsb(0xF)


/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
#define __DMB()        __builtin_arm_dmb(0xF)
840*225f4ba4SMatthias Ringwald
841*225f4ba4SMatthias Ringwald
/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV(value)   __builtin_bswap32(value)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV16(value) __ROR(__REV(value), 16)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REVSH(value) (int16_t)__builtin_bswap16(value)
867*225f4ba4SMatthias Ringwald
868*225f4ba4SMatthias Ringwald
869*225f4ba4SMatthias Ringwald /**
870*225f4ba4SMatthias Ringwald \brief Rotate Right in unsigned value (32 bit)
871*225f4ba4SMatthias Ringwald \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
872*225f4ba4SMatthias Ringwald \param [in] op1 Value to rotate
873*225f4ba4SMatthias Ringwald \param [in] op2 Number of Bits to rotate
874*225f4ba4SMatthias Ringwald \return Rotated value
875*225f4ba4SMatthias Ringwald */
__ROR(uint32_t op1,uint32_t op2)876*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
877*225f4ba4SMatthias Ringwald {
878*225f4ba4SMatthias Ringwald op2 %= 32U;
879*225f4ba4SMatthias Ringwald if (op2 == 0U)
880*225f4ba4SMatthias Ringwald {
881*225f4ba4SMatthias Ringwald return op1;
882*225f4ba4SMatthias Ringwald }
883*225f4ba4SMatthias Ringwald return (op1 >> op2) | (op1 << (32U - op2));
884*225f4ba4SMatthias Ringwald }
885*225f4ba4SMatthias Ringwald
886*225f4ba4SMatthias Ringwald
/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)     __ASM volatile ("bkpt "#value)


/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __RBIT            __builtin_arm_rbit
904*225f4ba4SMatthias Ringwald
905*225f4ba4SMatthias Ringwald /**
906*225f4ba4SMatthias Ringwald \brief Count leading zeros
907*225f4ba4SMatthias Ringwald \details Counts the number of leading zeros of a data value.
908*225f4ba4SMatthias Ringwald \param [in] value Value to count the leading zeros
909*225f4ba4SMatthias Ringwald \return number of leading zeros in value
910*225f4ba4SMatthias Ringwald */
911*225f4ba4SMatthias Ringwald #define __CLZ (uint8_t)__builtin_clz
912*225f4ba4SMatthias Ringwald
913*225f4ba4SMatthias Ringwald
914*225f4ba4SMatthias Ringwald #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
915*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
916*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
917*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes a exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB        (uint8_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes a exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
#define __LDREXH        (uint16_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes a exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
#define __LDREXW        (uint32_t)__builtin_arm_ldrex


/**
  \brief   STR Exclusive (8 bit)
  \details Executes a exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXB        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (16 bit)
  \details Executes a exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXH        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (32 bit)
  \details Executes a exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXW        (uint32_t)__builtin_arm_strex


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX             __builtin_arm_clrex
983*225f4ba4SMatthias Ringwald
984*225f4ba4SMatthias Ringwald #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
985*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
986*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
987*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
988*225f4ba4SMatthias Ringwald
989*225f4ba4SMatthias Ringwald
990*225f4ba4SMatthias Ringwald #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
991*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
992*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
993*225f4ba4SMatthias Ringwald
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT             __builtin_arm_ssat


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT             __builtin_arm_usat
1012*225f4ba4SMatthias Ringwald
1013*225f4ba4SMatthias Ringwald
1014*225f4ba4SMatthias Ringwald /**
1015*225f4ba4SMatthias Ringwald \brief Rotate Right with Extend (32 bit)
1016*225f4ba4SMatthias Ringwald \details Moves each bit of a bitstring right by one bit.
1017*225f4ba4SMatthias Ringwald The carry input is shifted in at the left end of the bitstring.
1018*225f4ba4SMatthias Ringwald \param [in] value Value to rotate
1019*225f4ba4SMatthias Ringwald \return Rotated value
1020*225f4ba4SMatthias Ringwald */
__RRX(uint32_t value)1021*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
1022*225f4ba4SMatthias Ringwald {
1023*225f4ba4SMatthias Ringwald uint32_t result;
1024*225f4ba4SMatthias Ringwald
1025*225f4ba4SMatthias Ringwald __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
1026*225f4ba4SMatthias Ringwald return(result);
1027*225f4ba4SMatthias Ringwald }
1028*225f4ba4SMatthias Ringwald
1029*225f4ba4SMatthias Ringwald
1030*225f4ba4SMatthias Ringwald /**
1031*225f4ba4SMatthias Ringwald \brief LDRT Unprivileged (8 bit)
1032*225f4ba4SMatthias Ringwald \details Executes a Unprivileged LDRT instruction for 8 bit value.
1033*225f4ba4SMatthias Ringwald \param [in] ptr Pointer to data
1034*225f4ba4SMatthias Ringwald \return value of type uint8_t at (*ptr)
1035*225f4ba4SMatthias Ringwald */
__LDRBT(volatile uint8_t * ptr)1036*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
1037*225f4ba4SMatthias Ringwald {
1038*225f4ba4SMatthias Ringwald uint32_t result;
1039*225f4ba4SMatthias Ringwald
1040*225f4ba4SMatthias Ringwald __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
1041*225f4ba4SMatthias Ringwald return ((uint8_t) result); /* Add explicit type cast here */
1042*225f4ba4SMatthias Ringwald }
1043*225f4ba4SMatthias Ringwald
1044*225f4ba4SMatthias Ringwald
1045*225f4ba4SMatthias Ringwald /**
1046*225f4ba4SMatthias Ringwald \brief LDRT Unprivileged (16 bit)
1047*225f4ba4SMatthias Ringwald \details Executes a Unprivileged LDRT instruction for 16 bit values.
1048*225f4ba4SMatthias Ringwald \param [in] ptr Pointer to data
1049*225f4ba4SMatthias Ringwald \return value of type uint16_t at (*ptr)
1050*225f4ba4SMatthias Ringwald */
__LDRHT(volatile uint16_t * ptr)1051*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
1052*225f4ba4SMatthias Ringwald {
1053*225f4ba4SMatthias Ringwald uint32_t result;
1054*225f4ba4SMatthias Ringwald
1055*225f4ba4SMatthias Ringwald __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
1056*225f4ba4SMatthias Ringwald return ((uint16_t) result); /* Add explicit type cast here */
1057*225f4ba4SMatthias Ringwald }
1058*225f4ba4SMatthias Ringwald
1059*225f4ba4SMatthias Ringwald
1060*225f4ba4SMatthias Ringwald /**
1061*225f4ba4SMatthias Ringwald \brief LDRT Unprivileged (32 bit)
1062*225f4ba4SMatthias Ringwald \details Executes a Unprivileged LDRT instruction for 32 bit values.
1063*225f4ba4SMatthias Ringwald \param [in] ptr Pointer to data
1064*225f4ba4SMatthias Ringwald \return value of type uint32_t at (*ptr)
1065*225f4ba4SMatthias Ringwald */
__LDRT(volatile uint32_t * ptr)1066*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
1067*225f4ba4SMatthias Ringwald {
1068*225f4ba4SMatthias Ringwald uint32_t result;
1069*225f4ba4SMatthias Ringwald
1070*225f4ba4SMatthias Ringwald __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
1071*225f4ba4SMatthias Ringwald return(result);
1072*225f4ba4SMatthias Ringwald }
1073*225f4ba4SMatthias Ringwald
1074*225f4ba4SMatthias Ringwald
1075*225f4ba4SMatthias Ringwald /**
1076*225f4ba4SMatthias Ringwald \brief STRT Unprivileged (8 bit)
1077*225f4ba4SMatthias Ringwald \details Executes a Unprivileged STRT instruction for 8 bit values.
1078*225f4ba4SMatthias Ringwald \param [in] value Value to store
1079*225f4ba4SMatthias Ringwald \param [in] ptr Pointer to location
1080*225f4ba4SMatthias Ringwald */
__STRBT(uint8_t value,volatile uint8_t * ptr)1081*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
1082*225f4ba4SMatthias Ringwald {
1083*225f4ba4SMatthias Ringwald __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1084*225f4ba4SMatthias Ringwald }
1085*225f4ba4SMatthias Ringwald
1086*225f4ba4SMatthias Ringwald
1087*225f4ba4SMatthias Ringwald /**
1088*225f4ba4SMatthias Ringwald \brief STRT Unprivileged (16 bit)
1089*225f4ba4SMatthias Ringwald \details Executes a Unprivileged STRT instruction for 16 bit values.
1090*225f4ba4SMatthias Ringwald \param [in] value Value to store
1091*225f4ba4SMatthias Ringwald \param [in] ptr Pointer to location
1092*225f4ba4SMatthias Ringwald */
__STRHT(uint16_t value,volatile uint16_t * ptr)1093*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
1094*225f4ba4SMatthias Ringwald {
1095*225f4ba4SMatthias Ringwald __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1096*225f4ba4SMatthias Ringwald }
1097*225f4ba4SMatthias Ringwald
1098*225f4ba4SMatthias Ringwald
1099*225f4ba4SMatthias Ringwald /**
1100*225f4ba4SMatthias Ringwald \brief STRT Unprivileged (32 bit)
1101*225f4ba4SMatthias Ringwald \details Executes a Unprivileged STRT instruction for 32 bit values.
1102*225f4ba4SMatthias Ringwald \param [in] value Value to store
1103*225f4ba4SMatthias Ringwald \param [in] ptr Pointer to location
1104*225f4ba4SMatthias Ringwald */
__STRT(uint32_t value,volatile uint32_t * ptr)1105*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
1106*225f4ba4SMatthias Ringwald {
1107*225f4ba4SMatthias Ringwald __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
1108*225f4ba4SMatthias Ringwald }
1109*225f4ba4SMatthias Ringwald
1110*225f4ba4SMatthias Ringwald #else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1111*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1112*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
1113*225f4ba4SMatthias Ringwald
1114*225f4ba4SMatthias Ringwald /**
1115*225f4ba4SMatthias Ringwald \brief Signed Saturate
1116*225f4ba4SMatthias Ringwald \details Saturates a signed value.
1117*225f4ba4SMatthias Ringwald \param [in] value Value to be saturated
1118*225f4ba4SMatthias Ringwald \param [in] sat Bit position to saturate to (1..32)
1119*225f4ba4SMatthias Ringwald \return Saturated value
1120*225f4ba4SMatthias Ringwald */
__SSAT(int32_t val,uint32_t sat)1121*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
1122*225f4ba4SMatthias Ringwald {
1123*225f4ba4SMatthias Ringwald if ((sat >= 1U) && (sat <= 32U))
1124*225f4ba4SMatthias Ringwald {
1125*225f4ba4SMatthias Ringwald const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
1126*225f4ba4SMatthias Ringwald const int32_t min = -1 - max ;
1127*225f4ba4SMatthias Ringwald if (val > max)
1128*225f4ba4SMatthias Ringwald {
1129*225f4ba4SMatthias Ringwald return max;
1130*225f4ba4SMatthias Ringwald }
1131*225f4ba4SMatthias Ringwald else if (val < min)
1132*225f4ba4SMatthias Ringwald {
1133*225f4ba4SMatthias Ringwald return min;
1134*225f4ba4SMatthias Ringwald }
1135*225f4ba4SMatthias Ringwald }
1136*225f4ba4SMatthias Ringwald return val;
1137*225f4ba4SMatthias Ringwald }
1138*225f4ba4SMatthias Ringwald
1139*225f4ba4SMatthias Ringwald /**
1140*225f4ba4SMatthias Ringwald \brief Unsigned Saturate
1141*225f4ba4SMatthias Ringwald \details Saturates an unsigned value.
1142*225f4ba4SMatthias Ringwald \param [in] value Value to be saturated
1143*225f4ba4SMatthias Ringwald \param [in] sat Bit position to saturate to (0..31)
1144*225f4ba4SMatthias Ringwald \return Saturated value
1145*225f4ba4SMatthias Ringwald */
__USAT(int32_t val,uint32_t sat)1146*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
1147*225f4ba4SMatthias Ringwald {
1148*225f4ba4SMatthias Ringwald if (sat <= 31U)
1149*225f4ba4SMatthias Ringwald {
1150*225f4ba4SMatthias Ringwald const uint32_t max = ((1U << sat) - 1U);
1151*225f4ba4SMatthias Ringwald if (val > (int32_t)max)
1152*225f4ba4SMatthias Ringwald {
1153*225f4ba4SMatthias Ringwald return max;
1154*225f4ba4SMatthias Ringwald }
1155*225f4ba4SMatthias Ringwald else if (val < 0)
1156*225f4ba4SMatthias Ringwald {
1157*225f4ba4SMatthias Ringwald return 0U;
1158*225f4ba4SMatthias Ringwald }
1159*225f4ba4SMatthias Ringwald }
1160*225f4ba4SMatthias Ringwald return (uint32_t)val;
1161*225f4ba4SMatthias Ringwald }
1162*225f4ba4SMatthias Ringwald
1163*225f4ba4SMatthias Ringwald #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1164*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1165*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
1166*225f4ba4SMatthias Ringwald
1167*225f4ba4SMatthias Ringwald
1168*225f4ba4SMatthias Ringwald #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1169*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
1170*225f4ba4SMatthias Ringwald /**
1171*225f4ba4SMatthias Ringwald \brief Load-Acquire (8 bit)
1172*225f4ba4SMatthias Ringwald \details Executes a LDAB instruction for 8 bit value.
1173*225f4ba4SMatthias Ringwald \param [in] ptr Pointer to data
1174*225f4ba4SMatthias Ringwald \return value of type uint8_t at (*ptr)
1175*225f4ba4SMatthias Ringwald */
__LDAB(volatile uint8_t * ptr)1176*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
1177*225f4ba4SMatthias Ringwald {
1178*225f4ba4SMatthias Ringwald uint32_t result;
1179*225f4ba4SMatthias Ringwald
1180*225f4ba4SMatthias Ringwald __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
1181*225f4ba4SMatthias Ringwald return ((uint8_t) result);
1182*225f4ba4SMatthias Ringwald }
1183*225f4ba4SMatthias Ringwald
1184*225f4ba4SMatthias Ringwald
1185*225f4ba4SMatthias Ringwald /**
1186*225f4ba4SMatthias Ringwald \brief Load-Acquire (16 bit)
1187*225f4ba4SMatthias Ringwald \details Executes a LDAH instruction for 16 bit values.
1188*225f4ba4SMatthias Ringwald \param [in] ptr Pointer to data
1189*225f4ba4SMatthias Ringwald \return value of type uint16_t at (*ptr)
1190*225f4ba4SMatthias Ringwald */
__LDAH(volatile uint16_t * ptr)1191*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
1192*225f4ba4SMatthias Ringwald {
1193*225f4ba4SMatthias Ringwald uint32_t result;
1194*225f4ba4SMatthias Ringwald
1195*225f4ba4SMatthias Ringwald __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
1196*225f4ba4SMatthias Ringwald return ((uint16_t) result);
1197*225f4ba4SMatthias Ringwald }
1198*225f4ba4SMatthias Ringwald
1199*225f4ba4SMatthias Ringwald
1200*225f4ba4SMatthias Ringwald /**
1201*225f4ba4SMatthias Ringwald \brief Load-Acquire (32 bit)
1202*225f4ba4SMatthias Ringwald \details Executes a LDA instruction for 32 bit values.
1203*225f4ba4SMatthias Ringwald \param [in] ptr Pointer to data
1204*225f4ba4SMatthias Ringwald \return value of type uint32_t at (*ptr)
1205*225f4ba4SMatthias Ringwald */
__LDA(volatile uint32_t * ptr)1206*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
1207*225f4ba4SMatthias Ringwald {
1208*225f4ba4SMatthias Ringwald uint32_t result;
1209*225f4ba4SMatthias Ringwald
1210*225f4ba4SMatthias Ringwald __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
1211*225f4ba4SMatthias Ringwald return(result);
1212*225f4ba4SMatthias Ringwald }
1213*225f4ba4SMatthias Ringwald
1214*225f4ba4SMatthias Ringwald
1215*225f4ba4SMatthias Ringwald /**
1216*225f4ba4SMatthias Ringwald \brief Store-Release (8 bit)
1217*225f4ba4SMatthias Ringwald \details Executes a STLB instruction for 8 bit values.
1218*225f4ba4SMatthias Ringwald \param [in] value Value to store
1219*225f4ba4SMatthias Ringwald \param [in] ptr Pointer to location
1220*225f4ba4SMatthias Ringwald */
__STLB(uint8_t value,volatile uint8_t * ptr)1221*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
1222*225f4ba4SMatthias Ringwald {
1223*225f4ba4SMatthias Ringwald __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1224*225f4ba4SMatthias Ringwald }
1225*225f4ba4SMatthias Ringwald
1226*225f4ba4SMatthias Ringwald
1227*225f4ba4SMatthias Ringwald /**
1228*225f4ba4SMatthias Ringwald \brief Store-Release (16 bit)
1229*225f4ba4SMatthias Ringwald \details Executes a STLH instruction for 16 bit values.
1230*225f4ba4SMatthias Ringwald \param [in] value Value to store
1231*225f4ba4SMatthias Ringwald \param [in] ptr Pointer to location
1232*225f4ba4SMatthias Ringwald */
__STLH(uint16_t value,volatile uint16_t * ptr)1233*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
1234*225f4ba4SMatthias Ringwald {
1235*225f4ba4SMatthias Ringwald __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1236*225f4ba4SMatthias Ringwald }
1237*225f4ba4SMatthias Ringwald
1238*225f4ba4SMatthias Ringwald
1239*225f4ba4SMatthias Ringwald /**
1240*225f4ba4SMatthias Ringwald \brief Store-Release (32 bit)
1241*225f4ba4SMatthias Ringwald \details Executes a STL instruction for 32 bit values.
1242*225f4ba4SMatthias Ringwald \param [in] value Value to store
1243*225f4ba4SMatthias Ringwald \param [in] ptr Pointer to location
1244*225f4ba4SMatthias Ringwald */
__STL(uint32_t value,volatile uint32_t * ptr)1245*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
1246*225f4ba4SMatthias Ringwald {
1247*225f4ba4SMatthias Ringwald __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1248*225f4ba4SMatthias Ringwald }
1249*225f4ba4SMatthias Ringwald
1250*225f4ba4SMatthias Ringwald
/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes a LDAB exclusive instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return        value of type uint8_t at (*ptr)
 */
/* Object-like macro: __LDAEXB(ptr) expands to (uint8_t)__builtin_arm_ldaex(ptr);
   the argument list is taken from the call site. */
#define     __LDAEXB                 (uint8_t)__builtin_arm_ldaex
1258*225f4ba4SMatthias Ringwald
1259*225f4ba4SMatthias Ringwald
/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes a LDAH exclusive instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
/* Object-like macro: __LDAEXH(ptr) expands to (uint16_t)__builtin_arm_ldaex(ptr);
   the argument list is taken from the call site. */
#define     __LDAEXH                 (uint16_t)__builtin_arm_ldaex
1267*225f4ba4SMatthias Ringwald
1268*225f4ba4SMatthias Ringwald
/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes a LDA exclusive instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
/* Object-like macro: __LDAEX(ptr) expands to (uint32_t)__builtin_arm_ldaex(ptr);
   the argument list is taken from the call site. */
#define     __LDAEX                  (uint32_t)__builtin_arm_ldaex
1276*225f4ba4SMatthias Ringwald
1277*225f4ba4SMatthias Ringwald
/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes a STLB exclusive instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
/* Object-like macro: __STLEXB(value, ptr) expands to
   (uint32_t)__builtin_arm_stlex(value, ptr); arguments come from the call site. */
#define     __STLEXB                 (uint32_t)__builtin_arm_stlex
1287*225f4ba4SMatthias Ringwald
1288*225f4ba4SMatthias Ringwald
/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes a STLH exclusive instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
/* Object-like macro: __STLEXH(value, ptr) expands to
   (uint32_t)__builtin_arm_stlex(value, ptr); arguments come from the call site. */
#define     __STLEXH                 (uint32_t)__builtin_arm_stlex
1298*225f4ba4SMatthias Ringwald
1299*225f4ba4SMatthias Ringwald
/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes a STL exclusive instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
/* Object-like macro: __STLEX(value, ptr) expands to
   (uint32_t)__builtin_arm_stlex(value, ptr); arguments come from the call site. */
#define     __STLEX                  (uint32_t)__builtin_arm_stlex
1309*225f4ba4SMatthias Ringwald
1310*225f4ba4SMatthias Ringwald #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1311*225f4ba4SMatthias Ringwald (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
1312*225f4ba4SMatthias Ringwald
1313*225f4ba4SMatthias Ringwald /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
1314*225f4ba4SMatthias Ringwald
1315*225f4ba4SMatthias Ringwald
1316*225f4ba4SMatthias Ringwald /* ################### Compiler specific Intrinsics ########################### */
1317*225f4ba4SMatthias Ringwald /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
1318*225f4ba4SMatthias Ringwald Access to dedicated SIMD instructions
1319*225f4ba4SMatthias Ringwald @{
1320*225f4ba4SMatthias Ringwald */
1321*225f4ba4SMatthias Ringwald
1322*225f4ba4SMatthias Ringwald #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1323*225f4ba4SMatthias Ringwald
__SADD8(uint32_t op1,uint32_t op2)1324*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
1325*225f4ba4SMatthias Ringwald {
1326*225f4ba4SMatthias Ringwald uint32_t result;
1327*225f4ba4SMatthias Ringwald
1328*225f4ba4SMatthias Ringwald __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1329*225f4ba4SMatthias Ringwald return(result);
1330*225f4ba4SMatthias Ringwald }
1331*225f4ba4SMatthias Ringwald
__QADD8(uint32_t op1,uint32_t op2)1332*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
1333*225f4ba4SMatthias Ringwald {
1334*225f4ba4SMatthias Ringwald uint32_t result;
1335*225f4ba4SMatthias Ringwald
1336*225f4ba4SMatthias Ringwald __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1337*225f4ba4SMatthias Ringwald return(result);
1338*225f4ba4SMatthias Ringwald }
1339*225f4ba4SMatthias Ringwald
__SHADD8(uint32_t op1,uint32_t op2)1340*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
1341*225f4ba4SMatthias Ringwald {
1342*225f4ba4SMatthias Ringwald uint32_t result;
1343*225f4ba4SMatthias Ringwald
1344*225f4ba4SMatthias Ringwald __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1345*225f4ba4SMatthias Ringwald return(result);
1346*225f4ba4SMatthias Ringwald }
1347*225f4ba4SMatthias Ringwald
__UADD8(uint32_t op1,uint32_t op2)1348*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
1349*225f4ba4SMatthias Ringwald {
1350*225f4ba4SMatthias Ringwald uint32_t result;
1351*225f4ba4SMatthias Ringwald
1352*225f4ba4SMatthias Ringwald __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1353*225f4ba4SMatthias Ringwald return(result);
1354*225f4ba4SMatthias Ringwald }
1355*225f4ba4SMatthias Ringwald
__UQADD8(uint32_t op1,uint32_t op2)1356*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
1357*225f4ba4SMatthias Ringwald {
1358*225f4ba4SMatthias Ringwald uint32_t result;
1359*225f4ba4SMatthias Ringwald
1360*225f4ba4SMatthias Ringwald __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1361*225f4ba4SMatthias Ringwald return(result);
1362*225f4ba4SMatthias Ringwald }
1363*225f4ba4SMatthias Ringwald
__UHADD8(uint32_t op1,uint32_t op2)1364*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
1365*225f4ba4SMatthias Ringwald {
1366*225f4ba4SMatthias Ringwald uint32_t result;
1367*225f4ba4SMatthias Ringwald
1368*225f4ba4SMatthias Ringwald __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1369*225f4ba4SMatthias Ringwald return(result);
1370*225f4ba4SMatthias Ringwald }
1371*225f4ba4SMatthias Ringwald
1372*225f4ba4SMatthias Ringwald
__SSUB8(uint32_t op1,uint32_t op2)1373*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
1374*225f4ba4SMatthias Ringwald {
1375*225f4ba4SMatthias Ringwald uint32_t result;
1376*225f4ba4SMatthias Ringwald
1377*225f4ba4SMatthias Ringwald __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1378*225f4ba4SMatthias Ringwald return(result);
1379*225f4ba4SMatthias Ringwald }
1380*225f4ba4SMatthias Ringwald
__QSUB8(uint32_t op1,uint32_t op2)1381*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
1382*225f4ba4SMatthias Ringwald {
1383*225f4ba4SMatthias Ringwald uint32_t result;
1384*225f4ba4SMatthias Ringwald
1385*225f4ba4SMatthias Ringwald __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1386*225f4ba4SMatthias Ringwald return(result);
1387*225f4ba4SMatthias Ringwald }
1388*225f4ba4SMatthias Ringwald
__SHSUB8(uint32_t op1,uint32_t op2)1389*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
1390*225f4ba4SMatthias Ringwald {
1391*225f4ba4SMatthias Ringwald uint32_t result;
1392*225f4ba4SMatthias Ringwald
1393*225f4ba4SMatthias Ringwald __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1394*225f4ba4SMatthias Ringwald return(result);
1395*225f4ba4SMatthias Ringwald }
1396*225f4ba4SMatthias Ringwald
__USUB8(uint32_t op1,uint32_t op2)1397*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
1398*225f4ba4SMatthias Ringwald {
1399*225f4ba4SMatthias Ringwald uint32_t result;
1400*225f4ba4SMatthias Ringwald
1401*225f4ba4SMatthias Ringwald __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1402*225f4ba4SMatthias Ringwald return(result);
1403*225f4ba4SMatthias Ringwald }
1404*225f4ba4SMatthias Ringwald
__UQSUB8(uint32_t op1,uint32_t op2)1405*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
1406*225f4ba4SMatthias Ringwald {
1407*225f4ba4SMatthias Ringwald uint32_t result;
1408*225f4ba4SMatthias Ringwald
1409*225f4ba4SMatthias Ringwald __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1410*225f4ba4SMatthias Ringwald return(result);
1411*225f4ba4SMatthias Ringwald }
1412*225f4ba4SMatthias Ringwald
__UHSUB8(uint32_t op1,uint32_t op2)1413*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
1414*225f4ba4SMatthias Ringwald {
1415*225f4ba4SMatthias Ringwald uint32_t result;
1416*225f4ba4SMatthias Ringwald
1417*225f4ba4SMatthias Ringwald __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1418*225f4ba4SMatthias Ringwald return(result);
1419*225f4ba4SMatthias Ringwald }
1420*225f4ba4SMatthias Ringwald
1421*225f4ba4SMatthias Ringwald
__SADD16(uint32_t op1,uint32_t op2)1422*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
1423*225f4ba4SMatthias Ringwald {
1424*225f4ba4SMatthias Ringwald uint32_t result;
1425*225f4ba4SMatthias Ringwald
1426*225f4ba4SMatthias Ringwald __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1427*225f4ba4SMatthias Ringwald return(result);
1428*225f4ba4SMatthias Ringwald }
1429*225f4ba4SMatthias Ringwald
__QADD16(uint32_t op1,uint32_t op2)1430*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
1431*225f4ba4SMatthias Ringwald {
1432*225f4ba4SMatthias Ringwald uint32_t result;
1433*225f4ba4SMatthias Ringwald
1434*225f4ba4SMatthias Ringwald __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1435*225f4ba4SMatthias Ringwald return(result);
1436*225f4ba4SMatthias Ringwald }
1437*225f4ba4SMatthias Ringwald
__SHADD16(uint32_t op1,uint32_t op2)1438*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
1439*225f4ba4SMatthias Ringwald {
1440*225f4ba4SMatthias Ringwald uint32_t result;
1441*225f4ba4SMatthias Ringwald
1442*225f4ba4SMatthias Ringwald __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1443*225f4ba4SMatthias Ringwald return(result);
1444*225f4ba4SMatthias Ringwald }
1445*225f4ba4SMatthias Ringwald
__UADD16(uint32_t op1,uint32_t op2)1446*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
1447*225f4ba4SMatthias Ringwald {
1448*225f4ba4SMatthias Ringwald uint32_t result;
1449*225f4ba4SMatthias Ringwald
1450*225f4ba4SMatthias Ringwald __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1451*225f4ba4SMatthias Ringwald return(result);
1452*225f4ba4SMatthias Ringwald }
1453*225f4ba4SMatthias Ringwald
__UQADD16(uint32_t op1,uint32_t op2)1454*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
1455*225f4ba4SMatthias Ringwald {
1456*225f4ba4SMatthias Ringwald uint32_t result;
1457*225f4ba4SMatthias Ringwald
1458*225f4ba4SMatthias Ringwald __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1459*225f4ba4SMatthias Ringwald return(result);
1460*225f4ba4SMatthias Ringwald }
1461*225f4ba4SMatthias Ringwald
__UHADD16(uint32_t op1,uint32_t op2)1462*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
1463*225f4ba4SMatthias Ringwald {
1464*225f4ba4SMatthias Ringwald uint32_t result;
1465*225f4ba4SMatthias Ringwald
1466*225f4ba4SMatthias Ringwald __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1467*225f4ba4SMatthias Ringwald return(result);
1468*225f4ba4SMatthias Ringwald }
1469*225f4ba4SMatthias Ringwald
__SSUB16(uint32_t op1,uint32_t op2)1470*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
1471*225f4ba4SMatthias Ringwald {
1472*225f4ba4SMatthias Ringwald uint32_t result;
1473*225f4ba4SMatthias Ringwald
1474*225f4ba4SMatthias Ringwald __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1475*225f4ba4SMatthias Ringwald return(result);
1476*225f4ba4SMatthias Ringwald }
1477*225f4ba4SMatthias Ringwald
__QSUB16(uint32_t op1,uint32_t op2)1478*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
1479*225f4ba4SMatthias Ringwald {
1480*225f4ba4SMatthias Ringwald uint32_t result;
1481*225f4ba4SMatthias Ringwald
1482*225f4ba4SMatthias Ringwald __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1483*225f4ba4SMatthias Ringwald return(result);
1484*225f4ba4SMatthias Ringwald }
1485*225f4ba4SMatthias Ringwald
__SHSUB16(uint32_t op1,uint32_t op2)1486*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
1487*225f4ba4SMatthias Ringwald {
1488*225f4ba4SMatthias Ringwald uint32_t result;
1489*225f4ba4SMatthias Ringwald
1490*225f4ba4SMatthias Ringwald __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1491*225f4ba4SMatthias Ringwald return(result);
1492*225f4ba4SMatthias Ringwald }
1493*225f4ba4SMatthias Ringwald
__USUB16(uint32_t op1,uint32_t op2)1494*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
1495*225f4ba4SMatthias Ringwald {
1496*225f4ba4SMatthias Ringwald uint32_t result;
1497*225f4ba4SMatthias Ringwald
1498*225f4ba4SMatthias Ringwald __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1499*225f4ba4SMatthias Ringwald return(result);
1500*225f4ba4SMatthias Ringwald }
1501*225f4ba4SMatthias Ringwald
__UQSUB16(uint32_t op1,uint32_t op2)1502*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
1503*225f4ba4SMatthias Ringwald {
1504*225f4ba4SMatthias Ringwald uint32_t result;
1505*225f4ba4SMatthias Ringwald
1506*225f4ba4SMatthias Ringwald __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1507*225f4ba4SMatthias Ringwald return(result);
1508*225f4ba4SMatthias Ringwald }
1509*225f4ba4SMatthias Ringwald
__UHSUB16(uint32_t op1,uint32_t op2)1510*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
1511*225f4ba4SMatthias Ringwald {
1512*225f4ba4SMatthias Ringwald uint32_t result;
1513*225f4ba4SMatthias Ringwald
1514*225f4ba4SMatthias Ringwald __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1515*225f4ba4SMatthias Ringwald return(result);
1516*225f4ba4SMatthias Ringwald }
1517*225f4ba4SMatthias Ringwald
__SASX(uint32_t op1,uint32_t op2)1518*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
1519*225f4ba4SMatthias Ringwald {
1520*225f4ba4SMatthias Ringwald uint32_t result;
1521*225f4ba4SMatthias Ringwald
1522*225f4ba4SMatthias Ringwald __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1523*225f4ba4SMatthias Ringwald return(result);
1524*225f4ba4SMatthias Ringwald }
1525*225f4ba4SMatthias Ringwald
__QASX(uint32_t op1,uint32_t op2)1526*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
1527*225f4ba4SMatthias Ringwald {
1528*225f4ba4SMatthias Ringwald uint32_t result;
1529*225f4ba4SMatthias Ringwald
1530*225f4ba4SMatthias Ringwald __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1531*225f4ba4SMatthias Ringwald return(result);
1532*225f4ba4SMatthias Ringwald }
1533*225f4ba4SMatthias Ringwald
__SHASX(uint32_t op1,uint32_t op2)1534*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
1535*225f4ba4SMatthias Ringwald {
1536*225f4ba4SMatthias Ringwald uint32_t result;
1537*225f4ba4SMatthias Ringwald
1538*225f4ba4SMatthias Ringwald __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1539*225f4ba4SMatthias Ringwald return(result);
1540*225f4ba4SMatthias Ringwald }
1541*225f4ba4SMatthias Ringwald
__UASX(uint32_t op1,uint32_t op2)1542*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
1543*225f4ba4SMatthias Ringwald {
1544*225f4ba4SMatthias Ringwald uint32_t result;
1545*225f4ba4SMatthias Ringwald
1546*225f4ba4SMatthias Ringwald __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1547*225f4ba4SMatthias Ringwald return(result);
1548*225f4ba4SMatthias Ringwald }
1549*225f4ba4SMatthias Ringwald
__UQASX(uint32_t op1,uint32_t op2)1550*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
1551*225f4ba4SMatthias Ringwald {
1552*225f4ba4SMatthias Ringwald uint32_t result;
1553*225f4ba4SMatthias Ringwald
1554*225f4ba4SMatthias Ringwald __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1555*225f4ba4SMatthias Ringwald return(result);
1556*225f4ba4SMatthias Ringwald }
1557*225f4ba4SMatthias Ringwald
__UHASX(uint32_t op1,uint32_t op2)1558*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
1559*225f4ba4SMatthias Ringwald {
1560*225f4ba4SMatthias Ringwald uint32_t result;
1561*225f4ba4SMatthias Ringwald
1562*225f4ba4SMatthias Ringwald __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1563*225f4ba4SMatthias Ringwald return(result);
1564*225f4ba4SMatthias Ringwald }
1565*225f4ba4SMatthias Ringwald
__SSAX(uint32_t op1,uint32_t op2)1566*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
1567*225f4ba4SMatthias Ringwald {
1568*225f4ba4SMatthias Ringwald uint32_t result;
1569*225f4ba4SMatthias Ringwald
1570*225f4ba4SMatthias Ringwald __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1571*225f4ba4SMatthias Ringwald return(result);
1572*225f4ba4SMatthias Ringwald }
1573*225f4ba4SMatthias Ringwald
__QSAX(uint32_t op1,uint32_t op2)1574*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
1575*225f4ba4SMatthias Ringwald {
1576*225f4ba4SMatthias Ringwald uint32_t result;
1577*225f4ba4SMatthias Ringwald
1578*225f4ba4SMatthias Ringwald __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1579*225f4ba4SMatthias Ringwald return(result);
1580*225f4ba4SMatthias Ringwald }
1581*225f4ba4SMatthias Ringwald
__SHSAX(uint32_t op1,uint32_t op2)1582*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
1583*225f4ba4SMatthias Ringwald {
1584*225f4ba4SMatthias Ringwald uint32_t result;
1585*225f4ba4SMatthias Ringwald
1586*225f4ba4SMatthias Ringwald __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1587*225f4ba4SMatthias Ringwald return(result);
1588*225f4ba4SMatthias Ringwald }
1589*225f4ba4SMatthias Ringwald
__USAX(uint32_t op1,uint32_t op2)1590*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
1591*225f4ba4SMatthias Ringwald {
1592*225f4ba4SMatthias Ringwald uint32_t result;
1593*225f4ba4SMatthias Ringwald
1594*225f4ba4SMatthias Ringwald __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1595*225f4ba4SMatthias Ringwald return(result);
1596*225f4ba4SMatthias Ringwald }
1597*225f4ba4SMatthias Ringwald
__UQSAX(uint32_t op1,uint32_t op2)1598*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
1599*225f4ba4SMatthias Ringwald {
1600*225f4ba4SMatthias Ringwald uint32_t result;
1601*225f4ba4SMatthias Ringwald
1602*225f4ba4SMatthias Ringwald __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1603*225f4ba4SMatthias Ringwald return(result);
1604*225f4ba4SMatthias Ringwald }
1605*225f4ba4SMatthias Ringwald
__UHSAX(uint32_t op1,uint32_t op2)1606*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
1607*225f4ba4SMatthias Ringwald {
1608*225f4ba4SMatthias Ringwald uint32_t result;
1609*225f4ba4SMatthias Ringwald
1610*225f4ba4SMatthias Ringwald __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1611*225f4ba4SMatthias Ringwald return(result);
1612*225f4ba4SMatthias Ringwald }
1613*225f4ba4SMatthias Ringwald
__USAD8(uint32_t op1,uint32_t op2)1614*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
1615*225f4ba4SMatthias Ringwald {
1616*225f4ba4SMatthias Ringwald uint32_t result;
1617*225f4ba4SMatthias Ringwald
1618*225f4ba4SMatthias Ringwald __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1619*225f4ba4SMatthias Ringwald return(result);
1620*225f4ba4SMatthias Ringwald }
1621*225f4ba4SMatthias Ringwald
__USADA8(uint32_t op1,uint32_t op2,uint32_t op3)1622*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
1623*225f4ba4SMatthias Ringwald {
1624*225f4ba4SMatthias Ringwald uint32_t result;
1625*225f4ba4SMatthias Ringwald
1626*225f4ba4SMatthias Ringwald __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1627*225f4ba4SMatthias Ringwald return(result);
1628*225f4ba4SMatthias Ringwald }
1629*225f4ba4SMatthias Ringwald
/**
  \brief   Dual 16-bit signed saturate (SSAT16).
  \details Saturates each signed 16-bit halfword of ARG1 to the range of a
           signed ARG2-bit value. ARG2 must be a compile-time constant
           (enforced by the "I" inline-asm constraint).
 */
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
1636*225f4ba4SMatthias Ringwald
/**
  \brief   Dual 16-bit unsigned saturate (USAT16).
  \details Saturates each signed 16-bit halfword of ARG1 to the range of an
           unsigned ARG2-bit value. ARG2 must be a compile-time constant
           (enforced by the "I" inline-asm constraint).
 */
#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
1643*225f4ba4SMatthias Ringwald
__UXTB16(uint32_t op1)1644*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
1645*225f4ba4SMatthias Ringwald {
1646*225f4ba4SMatthias Ringwald uint32_t result;
1647*225f4ba4SMatthias Ringwald
1648*225f4ba4SMatthias Ringwald __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
1649*225f4ba4SMatthias Ringwald return(result);
1650*225f4ba4SMatthias Ringwald }
1651*225f4ba4SMatthias Ringwald
__UXTAB16(uint32_t op1,uint32_t op2)1652*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
1653*225f4ba4SMatthias Ringwald {
1654*225f4ba4SMatthias Ringwald uint32_t result;
1655*225f4ba4SMatthias Ringwald
1656*225f4ba4SMatthias Ringwald __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1657*225f4ba4SMatthias Ringwald return(result);
1658*225f4ba4SMatthias Ringwald }
1659*225f4ba4SMatthias Ringwald
__SXTB16(uint32_t op1)1660*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
1661*225f4ba4SMatthias Ringwald {
1662*225f4ba4SMatthias Ringwald uint32_t result;
1663*225f4ba4SMatthias Ringwald
1664*225f4ba4SMatthias Ringwald __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
1665*225f4ba4SMatthias Ringwald return(result);
1666*225f4ba4SMatthias Ringwald }
1667*225f4ba4SMatthias Ringwald
__SXTAB16(uint32_t op1,uint32_t op2)1668*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
1669*225f4ba4SMatthias Ringwald {
1670*225f4ba4SMatthias Ringwald uint32_t result;
1671*225f4ba4SMatthias Ringwald
1672*225f4ba4SMatthias Ringwald __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1673*225f4ba4SMatthias Ringwald return(result);
1674*225f4ba4SMatthias Ringwald }
1675*225f4ba4SMatthias Ringwald
__SMUAD(uint32_t op1,uint32_t op2)1676*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
1677*225f4ba4SMatthias Ringwald {
1678*225f4ba4SMatthias Ringwald uint32_t result;
1679*225f4ba4SMatthias Ringwald
1680*225f4ba4SMatthias Ringwald __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1681*225f4ba4SMatthias Ringwald return(result);
1682*225f4ba4SMatthias Ringwald }
1683*225f4ba4SMatthias Ringwald
__SMUADX(uint32_t op1,uint32_t op2)1684*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
1685*225f4ba4SMatthias Ringwald {
1686*225f4ba4SMatthias Ringwald uint32_t result;
1687*225f4ba4SMatthias Ringwald
1688*225f4ba4SMatthias Ringwald __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1689*225f4ba4SMatthias Ringwald return(result);
1690*225f4ba4SMatthias Ringwald }
1691*225f4ba4SMatthias Ringwald
__SMLAD(uint32_t op1,uint32_t op2,uint32_t op3)1692*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
1693*225f4ba4SMatthias Ringwald {
1694*225f4ba4SMatthias Ringwald uint32_t result;
1695*225f4ba4SMatthias Ringwald
1696*225f4ba4SMatthias Ringwald __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1697*225f4ba4SMatthias Ringwald return(result);
1698*225f4ba4SMatthias Ringwald }
1699*225f4ba4SMatthias Ringwald
__SMLADX(uint32_t op1,uint32_t op2,uint32_t op3)1700*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
1701*225f4ba4SMatthias Ringwald {
1702*225f4ba4SMatthias Ringwald uint32_t result;
1703*225f4ba4SMatthias Ringwald
1704*225f4ba4SMatthias Ringwald __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1705*225f4ba4SMatthias Ringwald return(result);
1706*225f4ba4SMatthias Ringwald }
1707*225f4ba4SMatthias Ringwald
/**
  \brief   Dual 16-bit signed multiply with 64-bit accumulate (SMLALD).
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands
  \param [in]  acc  64-bit accumulator
  \return          acc + (op1.lo * op2.lo) + (op1.hi * op2.hi)
 */
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* SMLALD reads/writes the 64-bit accumulator as a register pair; the union
     maps that pair onto a uint64_t without pointer type-punning. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

/* The low word of the accumulator lives in w32[0] on little-endian targets
   and in w32[1] on big-endian ones; the "0"/"1" constraints tie the input
   accumulator halves to the same registers as the outputs. */
#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1724*225f4ba4SMatthias Ringwald
/**
  \brief   Dual 16-bit signed multiply with exchanged halfwords and 64-bit
           accumulate (SMLALDX).
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands (halfwords exchanged)
  \param [in]  acc  64-bit accumulator
  \return          acc + (op1.lo * op2.hi) + (op1.hi * op2.lo)
 */
__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* 64-bit accumulator is passed to the instruction as a register pair via
     the union; see __SMLALD for the endianness handling. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1741*225f4ba4SMatthias Ringwald
__SMUSD(uint32_t op1,uint32_t op2)1742*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
1743*225f4ba4SMatthias Ringwald {
1744*225f4ba4SMatthias Ringwald uint32_t result;
1745*225f4ba4SMatthias Ringwald
1746*225f4ba4SMatthias Ringwald __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1747*225f4ba4SMatthias Ringwald return(result);
1748*225f4ba4SMatthias Ringwald }
1749*225f4ba4SMatthias Ringwald
__SMUSDX(uint32_t op1,uint32_t op2)1750*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
1751*225f4ba4SMatthias Ringwald {
1752*225f4ba4SMatthias Ringwald uint32_t result;
1753*225f4ba4SMatthias Ringwald
1754*225f4ba4SMatthias Ringwald __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1755*225f4ba4SMatthias Ringwald return(result);
1756*225f4ba4SMatthias Ringwald }
1757*225f4ba4SMatthias Ringwald
__SMLSD(uint32_t op1,uint32_t op2,uint32_t op3)1758*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
1759*225f4ba4SMatthias Ringwald {
1760*225f4ba4SMatthias Ringwald uint32_t result;
1761*225f4ba4SMatthias Ringwald
1762*225f4ba4SMatthias Ringwald __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1763*225f4ba4SMatthias Ringwald return(result);
1764*225f4ba4SMatthias Ringwald }
1765*225f4ba4SMatthias Ringwald
__SMLSDX(uint32_t op1,uint32_t op2,uint32_t op3)1766*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
1767*225f4ba4SMatthias Ringwald {
1768*225f4ba4SMatthias Ringwald uint32_t result;
1769*225f4ba4SMatthias Ringwald
1770*225f4ba4SMatthias Ringwald __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1771*225f4ba4SMatthias Ringwald return(result);
1772*225f4ba4SMatthias Ringwald }
1773*225f4ba4SMatthias Ringwald
/**
  \brief   Dual 16-bit signed multiply-subtract with 64-bit accumulate (SMLSLD).
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands
  \param [in]  acc  64-bit accumulator
  \return          acc + (op1.lo * op2.lo) - (op1.hi * op2.hi)
 */
__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* 64-bit accumulator is passed to the instruction as a register pair via
     the union; see __SMLALD for the endianness handling. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1790*225f4ba4SMatthias Ringwald
/**
  \brief   Dual 16-bit signed multiply-subtract with exchanged halfwords and
           64-bit accumulate (SMLSLDX).
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands (halfwords exchanged)
  \param [in]  acc  64-bit accumulator
  \return          acc + (op1.lo * op2.hi) - (op1.hi * op2.lo)
 */
__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* 64-bit accumulator is passed to the instruction as a register pair via
     the union; see __SMLALD for the endianness handling. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1807*225f4ba4SMatthias Ringwald
__SEL(uint32_t op1,uint32_t op2)1808*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
1809*225f4ba4SMatthias Ringwald {
1810*225f4ba4SMatthias Ringwald uint32_t result;
1811*225f4ba4SMatthias Ringwald
1812*225f4ba4SMatthias Ringwald __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1813*225f4ba4SMatthias Ringwald return(result);
1814*225f4ba4SMatthias Ringwald }
1815*225f4ba4SMatthias Ringwald
__QADD(int32_t op1,int32_t op2)1816*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
1817*225f4ba4SMatthias Ringwald {
1818*225f4ba4SMatthias Ringwald int32_t result;
1819*225f4ba4SMatthias Ringwald
1820*225f4ba4SMatthias Ringwald __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1821*225f4ba4SMatthias Ringwald return(result);
1822*225f4ba4SMatthias Ringwald }
1823*225f4ba4SMatthias Ringwald
__QSUB(int32_t op1,int32_t op2)1824*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2)
1825*225f4ba4SMatthias Ringwald {
1826*225f4ba4SMatthias Ringwald int32_t result;
1827*225f4ba4SMatthias Ringwald
1828*225f4ba4SMatthias Ringwald __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1829*225f4ba4SMatthias Ringwald return(result);
1830*225f4ba4SMatthias Ringwald }
1831*225f4ba4SMatthias Ringwald
/* NOTE(review): inline-assembly implementations of __PKHBT/__PKHTB, disabled
   in favor of the pure-C macros below; kept for reference. The asm forms
   require ARG3 to be an assembler shift immediate ("I" constraint). */
#if 0
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                            \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
#endif
1850*225f4ba4SMatthias Ringwald
/**
  \brief   Pack halfwords, bottom-then-top (PKHBT): low halfword of ARG1
           combined with the high halfword of (ARG2 << ARG3).
  \note    Function-like macro: ARG2 and ARG3 may be evaluated more than once;
           avoid arguments with side effects.
 */
#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

/**
  \brief   Pack halfwords, top-then-bottom (PKHTB): high halfword of ARG1
           combined with the low halfword of (ARG2 >> ARG3).
  \note    Function-like macro: ARG2 and ARG3 may be evaluated more than once;
           avoid arguments with side effects.
 */
#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
1856*225f4ba4SMatthias Ringwald
__SMMLA(int32_t op1,int32_t op2,int32_t op3)1857*225f4ba4SMatthias Ringwald __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
1858*225f4ba4SMatthias Ringwald {
1859*225f4ba4SMatthias Ringwald int32_t result;
1860*225f4ba4SMatthias Ringwald
1861*225f4ba4SMatthias Ringwald __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
1862*225f4ba4SMatthias Ringwald return(result);
1863*225f4ba4SMatthias Ringwald }
1864*225f4ba4SMatthias Ringwald
1865*225f4ba4SMatthias Ringwald #endif /* (__ARM_FEATURE_DSP == 1) */
1866*225f4ba4SMatthias Ringwald /*@} end of group CMSIS_SIMD_intrinsics */
1867*225f4ba4SMatthias Ringwald
1868*225f4ba4SMatthias Ringwald
1869*225f4ba4SMatthias Ringwald #endif /* __CMSIS_ARMCLANG_H */
1870