xref: /btstack/port/stm32-l451-miromico-sx1280/Drivers/CMSIS/Include/cmsis_armclang.h (revision 2fd737d36a1de5d778cacc671d4b4d8c4f3fed82)
1*2fd737d3SMatthias Ringwald /**************************************************************************//**
2*2fd737d3SMatthias Ringwald  * @file     cmsis_armclang.h
3*2fd737d3SMatthias Ringwald  * @brief    CMSIS compiler armclang (Arm Compiler 6) header file
4*2fd737d3SMatthias Ringwald  * @version  V5.0.4
5*2fd737d3SMatthias Ringwald  * @date     10. January 2018
6*2fd737d3SMatthias Ringwald  ******************************************************************************/
7*2fd737d3SMatthias Ringwald /*
8*2fd737d3SMatthias Ringwald  * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
9*2fd737d3SMatthias Ringwald  *
10*2fd737d3SMatthias Ringwald  * SPDX-License-Identifier: Apache-2.0
11*2fd737d3SMatthias Ringwald  *
12*2fd737d3SMatthias Ringwald  * Licensed under the Apache License, Version 2.0 (the License); you may
13*2fd737d3SMatthias Ringwald  * not use this file except in compliance with the License.
14*2fd737d3SMatthias Ringwald  * You may obtain a copy of the License at
15*2fd737d3SMatthias Ringwald  *
16*2fd737d3SMatthias Ringwald  * www.apache.org/licenses/LICENSE-2.0
17*2fd737d3SMatthias Ringwald  *
18*2fd737d3SMatthias Ringwald  * Unless required by applicable law or agreed to in writing, software
19*2fd737d3SMatthias Ringwald  * distributed under the License is distributed on an AS IS BASIS, WITHOUT
20*2fd737d3SMatthias Ringwald  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21*2fd737d3SMatthias Ringwald  * See the License for the specific language governing permissions and
22*2fd737d3SMatthias Ringwald  * limitations under the License.
23*2fd737d3SMatthias Ringwald  */
24*2fd737d3SMatthias Ringwald 
25*2fd737d3SMatthias Ringwald /*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */
26*2fd737d3SMatthias Ringwald 
27*2fd737d3SMatthias Ringwald #ifndef __CMSIS_ARMCLANG_H
28*2fd737d3SMatthias Ringwald #define __CMSIS_ARMCLANG_H
29*2fd737d3SMatthias Ringwald 
30*2fd737d3SMatthias Ringwald #pragma clang system_header   /* treat file as system include file */
31*2fd737d3SMatthias Ringwald 
32*2fd737d3SMatthias Ringwald #ifndef __ARM_COMPAT_H
33*2fd737d3SMatthias Ringwald #include <arm_compat.h>    /* Compatibility header for Arm Compiler 5 intrinsics */
34*2fd737d3SMatthias Ringwald #endif
35*2fd737d3SMatthias Ringwald 
36*2fd737d3SMatthias Ringwald /* CMSIS compiler specific defines */
37*2fd737d3SMatthias Ringwald #ifndef   __ASM
38*2fd737d3SMatthias Ringwald   #define __ASM                                  __asm
39*2fd737d3SMatthias Ringwald #endif
40*2fd737d3SMatthias Ringwald #ifndef   __INLINE
41*2fd737d3SMatthias Ringwald   #define __INLINE                               __inline
42*2fd737d3SMatthias Ringwald #endif
43*2fd737d3SMatthias Ringwald #ifndef   __STATIC_INLINE
44*2fd737d3SMatthias Ringwald   #define __STATIC_INLINE                        static __inline
45*2fd737d3SMatthias Ringwald #endif
46*2fd737d3SMatthias Ringwald #ifndef   __STATIC_FORCEINLINE
47*2fd737d3SMatthias Ringwald   #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static __inline
48*2fd737d3SMatthias Ringwald #endif
49*2fd737d3SMatthias Ringwald #ifndef   __NO_RETURN
50*2fd737d3SMatthias Ringwald   #define __NO_RETURN                            __attribute__((__noreturn__))
51*2fd737d3SMatthias Ringwald #endif
52*2fd737d3SMatthias Ringwald #ifndef   __USED
53*2fd737d3SMatthias Ringwald   #define __USED                                 __attribute__((used))
54*2fd737d3SMatthias Ringwald #endif
55*2fd737d3SMatthias Ringwald #ifndef   __WEAK
56*2fd737d3SMatthias Ringwald   #define __WEAK                                 __attribute__((weak))
57*2fd737d3SMatthias Ringwald #endif
58*2fd737d3SMatthias Ringwald #ifndef   __PACKED
59*2fd737d3SMatthias Ringwald   #define __PACKED                               __attribute__((packed, aligned(1)))
60*2fd737d3SMatthias Ringwald #endif
61*2fd737d3SMatthias Ringwald #ifndef   __PACKED_STRUCT
62*2fd737d3SMatthias Ringwald   #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
63*2fd737d3SMatthias Ringwald #endif
64*2fd737d3SMatthias Ringwald #ifndef   __PACKED_UNION
65*2fd737d3SMatthias Ringwald   #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
66*2fd737d3SMatthias Ringwald #endif
67*2fd737d3SMatthias Ringwald #ifndef   __UNALIGNED_UINT32        /* deprecated */
68*2fd737d3SMatthias Ringwald   #pragma clang diagnostic push
69*2fd737d3SMatthias Ringwald   #pragma clang diagnostic ignored "-Wpacked"
70*2fd737d3SMatthias Ringwald /*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */
71*2fd737d3SMatthias Ringwald   struct __attribute__((packed)) T_UINT32 { uint32_t v; };
72*2fd737d3SMatthias Ringwald   #pragma clang diagnostic pop
73*2fd737d3SMatthias Ringwald   #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
74*2fd737d3SMatthias Ringwald #endif
75*2fd737d3SMatthias Ringwald #ifndef   __UNALIGNED_UINT16_WRITE
76*2fd737d3SMatthias Ringwald   #pragma clang diagnostic push
77*2fd737d3SMatthias Ringwald   #pragma clang diagnostic ignored "-Wpacked"
78*2fd737d3SMatthias Ringwald /*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
79*2fd737d3SMatthias Ringwald   __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
80*2fd737d3SMatthias Ringwald   #pragma clang diagnostic pop
81*2fd737d3SMatthias Ringwald   #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
82*2fd737d3SMatthias Ringwald #endif
83*2fd737d3SMatthias Ringwald #ifndef   __UNALIGNED_UINT16_READ
84*2fd737d3SMatthias Ringwald   #pragma clang diagnostic push
85*2fd737d3SMatthias Ringwald   #pragma clang diagnostic ignored "-Wpacked"
86*2fd737d3SMatthias Ringwald /*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
87*2fd737d3SMatthias Ringwald   __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
88*2fd737d3SMatthias Ringwald   #pragma clang diagnostic pop
89*2fd737d3SMatthias Ringwald   #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
90*2fd737d3SMatthias Ringwald #endif
91*2fd737d3SMatthias Ringwald #ifndef   __UNALIGNED_UINT32_WRITE
92*2fd737d3SMatthias Ringwald   #pragma clang diagnostic push
93*2fd737d3SMatthias Ringwald   #pragma clang diagnostic ignored "-Wpacked"
94*2fd737d3SMatthias Ringwald /*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
95*2fd737d3SMatthias Ringwald   __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
96*2fd737d3SMatthias Ringwald   #pragma clang diagnostic pop
97*2fd737d3SMatthias Ringwald   #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
98*2fd737d3SMatthias Ringwald #endif
99*2fd737d3SMatthias Ringwald #ifndef   __UNALIGNED_UINT32_READ
100*2fd737d3SMatthias Ringwald   #pragma clang diagnostic push
101*2fd737d3SMatthias Ringwald   #pragma clang diagnostic ignored "-Wpacked"
102*2fd737d3SMatthias Ringwald /*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */
103*2fd737d3SMatthias Ringwald   __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
104*2fd737d3SMatthias Ringwald   #pragma clang diagnostic pop
105*2fd737d3SMatthias Ringwald   #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
106*2fd737d3SMatthias Ringwald #endif
107*2fd737d3SMatthias Ringwald #ifndef   __ALIGNED
108*2fd737d3SMatthias Ringwald   #define __ALIGNED(x)                           __attribute__((aligned(x)))
109*2fd737d3SMatthias Ringwald #endif
110*2fd737d3SMatthias Ringwald #ifndef   __RESTRICT
111*2fd737d3SMatthias Ringwald   #define __RESTRICT                             __restrict
112*2fd737d3SMatthias Ringwald #endif
113*2fd737d3SMatthias Ringwald 
114*2fd737d3SMatthias Ringwald 
115*2fd737d3SMatthias Ringwald /* ###########################  Core Function Access  ########################### */
116*2fd737d3SMatthias Ringwald /** \ingroup  CMSIS_Core_FunctionInterface
117*2fd737d3SMatthias Ringwald     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
118*2fd737d3SMatthias Ringwald   @{
119*2fd737d3SMatthias Ringwald  */
120*2fd737d3SMatthias Ringwald 
121*2fd737d3SMatthias Ringwald /**
122*2fd737d3SMatthias Ringwald   \brief   Enable IRQ Interrupts
123*2fd737d3SMatthias Ringwald   \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
124*2fd737d3SMatthias Ringwald            Can only be executed in Privileged modes.
125*2fd737d3SMatthias Ringwald  */
126*2fd737d3SMatthias Ringwald /* intrinsic void __enable_irq();  see arm_compat.h */
127*2fd737d3SMatthias Ringwald 
128*2fd737d3SMatthias Ringwald 
129*2fd737d3SMatthias Ringwald /**
130*2fd737d3SMatthias Ringwald   \brief   Disable IRQ Interrupts
131*2fd737d3SMatthias Ringwald   \details Disables IRQ interrupts by setting the I-bit in the CPSR.
132*2fd737d3SMatthias Ringwald            Can only be executed in Privileged modes.
133*2fd737d3SMatthias Ringwald  */
134*2fd737d3SMatthias Ringwald /* intrinsic void __disable_irq();  see arm_compat.h */
135*2fd737d3SMatthias Ringwald 
136*2fd737d3SMatthias Ringwald 
137*2fd737d3SMatthias Ringwald /**
138*2fd737d3SMatthias Ringwald   \brief   Get Control Register
139*2fd737d3SMatthias Ringwald   \details Returns the content of the Control Register.
140*2fd737d3SMatthias Ringwald   \return               Control Register value
141*2fd737d3SMatthias Ringwald  */
__get_CONTROL(void)142*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
143*2fd737d3SMatthias Ringwald {
144*2fd737d3SMatthias Ringwald   uint32_t result;
145*2fd737d3SMatthias Ringwald 
146*2fd737d3SMatthias Ringwald   __ASM volatile ("MRS %0, control" : "=r" (result) );
147*2fd737d3SMatthias Ringwald   return(result);
148*2fd737d3SMatthias Ringwald }
149*2fd737d3SMatthias Ringwald 
150*2fd737d3SMatthias Ringwald 
151*2fd737d3SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
152*2fd737d3SMatthias Ringwald /**
153*2fd737d3SMatthias Ringwald   \brief   Get Control Register (non-secure)
154*2fd737d3SMatthias Ringwald   \details Returns the content of the non-secure Control Register when in secure mode.
155*2fd737d3SMatthias Ringwald   \return               non-secure Control Register value
156*2fd737d3SMatthias Ringwald  */
__TZ_get_CONTROL_NS(void)157*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
158*2fd737d3SMatthias Ringwald {
159*2fd737d3SMatthias Ringwald   uint32_t result;
160*2fd737d3SMatthias Ringwald 
161*2fd737d3SMatthias Ringwald   __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
162*2fd737d3SMatthias Ringwald   return(result);
163*2fd737d3SMatthias Ringwald }
164*2fd737d3SMatthias Ringwald #endif
165*2fd737d3SMatthias Ringwald 
166*2fd737d3SMatthias Ringwald 
167*2fd737d3SMatthias Ringwald /**
168*2fd737d3SMatthias Ringwald   \brief   Set Control Register
169*2fd737d3SMatthias Ringwald   \details Writes the given value to the Control Register.
170*2fd737d3SMatthias Ringwald   \param [in]    control  Control Register value to set
171*2fd737d3SMatthias Ringwald  */
__set_CONTROL(uint32_t control)172*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
173*2fd737d3SMatthias Ringwald {
174*2fd737d3SMatthias Ringwald   __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
175*2fd737d3SMatthias Ringwald }
176*2fd737d3SMatthias Ringwald 
177*2fd737d3SMatthias Ringwald 
178*2fd737d3SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
179*2fd737d3SMatthias Ringwald /**
180*2fd737d3SMatthias Ringwald   \brief   Set Control Register (non-secure)
181*2fd737d3SMatthias Ringwald   \details Writes the given value to the non-secure Control Register when in secure state.
182*2fd737d3SMatthias Ringwald   \param [in]    control  Control Register value to set
183*2fd737d3SMatthias Ringwald  */
__TZ_set_CONTROL_NS(uint32_t control)184*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
185*2fd737d3SMatthias Ringwald {
186*2fd737d3SMatthias Ringwald   __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
187*2fd737d3SMatthias Ringwald }
188*2fd737d3SMatthias Ringwald #endif
189*2fd737d3SMatthias Ringwald 
190*2fd737d3SMatthias Ringwald 
191*2fd737d3SMatthias Ringwald /**
192*2fd737d3SMatthias Ringwald   \brief   Get IPSR Register
193*2fd737d3SMatthias Ringwald   \details Returns the content of the IPSR Register.
194*2fd737d3SMatthias Ringwald   \return               IPSR Register value
195*2fd737d3SMatthias Ringwald  */
__get_IPSR(void)196*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_IPSR(void)
197*2fd737d3SMatthias Ringwald {
198*2fd737d3SMatthias Ringwald   uint32_t result;
199*2fd737d3SMatthias Ringwald 
200*2fd737d3SMatthias Ringwald   __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
201*2fd737d3SMatthias Ringwald   return(result);
202*2fd737d3SMatthias Ringwald }
203*2fd737d3SMatthias Ringwald 
204*2fd737d3SMatthias Ringwald 
205*2fd737d3SMatthias Ringwald /**
206*2fd737d3SMatthias Ringwald   \brief   Get APSR Register
207*2fd737d3SMatthias Ringwald   \details Returns the content of the APSR Register.
208*2fd737d3SMatthias Ringwald   \return               APSR Register value
209*2fd737d3SMatthias Ringwald  */
__get_APSR(void)210*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_APSR(void)
211*2fd737d3SMatthias Ringwald {
212*2fd737d3SMatthias Ringwald   uint32_t result;
213*2fd737d3SMatthias Ringwald 
214*2fd737d3SMatthias Ringwald   __ASM volatile ("MRS %0, apsr" : "=r" (result) );
215*2fd737d3SMatthias Ringwald   return(result);
216*2fd737d3SMatthias Ringwald }
217*2fd737d3SMatthias Ringwald 
218*2fd737d3SMatthias Ringwald 
219*2fd737d3SMatthias Ringwald /**
220*2fd737d3SMatthias Ringwald   \brief   Get xPSR Register
221*2fd737d3SMatthias Ringwald   \details Returns the content of the xPSR Register.
222*2fd737d3SMatthias Ringwald   \return               xPSR Register value
223*2fd737d3SMatthias Ringwald  */
__get_xPSR(void)224*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_xPSR(void)
225*2fd737d3SMatthias Ringwald {
226*2fd737d3SMatthias Ringwald   uint32_t result;
227*2fd737d3SMatthias Ringwald 
228*2fd737d3SMatthias Ringwald   __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
229*2fd737d3SMatthias Ringwald   return(result);
230*2fd737d3SMatthias Ringwald }
231*2fd737d3SMatthias Ringwald 
232*2fd737d3SMatthias Ringwald 
233*2fd737d3SMatthias Ringwald /**
234*2fd737d3SMatthias Ringwald   \brief   Get Process Stack Pointer
235*2fd737d3SMatthias Ringwald   \details Returns the current value of the Process Stack Pointer (PSP).
236*2fd737d3SMatthias Ringwald   \return               PSP Register value
237*2fd737d3SMatthias Ringwald  */
__get_PSP(void)238*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_PSP(void)
239*2fd737d3SMatthias Ringwald {
240*2fd737d3SMatthias Ringwald   uint32_t result;
241*2fd737d3SMatthias Ringwald 
242*2fd737d3SMatthias Ringwald   __ASM volatile ("MRS %0, psp"  : "=r" (result) );
243*2fd737d3SMatthias Ringwald   return(result);
244*2fd737d3SMatthias Ringwald }
245*2fd737d3SMatthias Ringwald 
246*2fd737d3SMatthias Ringwald 
247*2fd737d3SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
248*2fd737d3SMatthias Ringwald /**
249*2fd737d3SMatthias Ringwald   \brief   Get Process Stack Pointer (non-secure)
250*2fd737d3SMatthias Ringwald   \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
251*2fd737d3SMatthias Ringwald   \return               PSP Register value
252*2fd737d3SMatthias Ringwald  */
__TZ_get_PSP_NS(void)253*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
254*2fd737d3SMatthias Ringwald {
255*2fd737d3SMatthias Ringwald   uint32_t result;
256*2fd737d3SMatthias Ringwald 
257*2fd737d3SMatthias Ringwald   __ASM volatile ("MRS %0, psp_ns"  : "=r" (result) );
258*2fd737d3SMatthias Ringwald   return(result);
259*2fd737d3SMatthias Ringwald }
260*2fd737d3SMatthias Ringwald #endif
261*2fd737d3SMatthias Ringwald 
262*2fd737d3SMatthias Ringwald 
263*2fd737d3SMatthias Ringwald /**
264*2fd737d3SMatthias Ringwald   \brief   Set Process Stack Pointer
265*2fd737d3SMatthias Ringwald   \details Assigns the given value to the Process Stack Pointer (PSP).
266*2fd737d3SMatthias Ringwald   \param [in]    topOfProcStack  Process Stack Pointer value to set
267*2fd737d3SMatthias Ringwald  */
__set_PSP(uint32_t topOfProcStack)268*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
269*2fd737d3SMatthias Ringwald {
270*2fd737d3SMatthias Ringwald   __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
271*2fd737d3SMatthias Ringwald }
272*2fd737d3SMatthias Ringwald 
273*2fd737d3SMatthias Ringwald 
274*2fd737d3SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
275*2fd737d3SMatthias Ringwald /**
276*2fd737d3SMatthias Ringwald   \brief   Set Process Stack Pointer (non-secure)
277*2fd737d3SMatthias Ringwald   \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
278*2fd737d3SMatthias Ringwald   \param [in]    topOfProcStack  Process Stack Pointer value to set
279*2fd737d3SMatthias Ringwald  */
__TZ_set_PSP_NS(uint32_t topOfProcStack)280*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
281*2fd737d3SMatthias Ringwald {
282*2fd737d3SMatthias Ringwald   __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
283*2fd737d3SMatthias Ringwald }
284*2fd737d3SMatthias Ringwald #endif
285*2fd737d3SMatthias Ringwald 
286*2fd737d3SMatthias Ringwald 
287*2fd737d3SMatthias Ringwald /**
288*2fd737d3SMatthias Ringwald   \brief   Get Main Stack Pointer
289*2fd737d3SMatthias Ringwald   \details Returns the current value of the Main Stack Pointer (MSP).
290*2fd737d3SMatthias Ringwald   \return               MSP Register value
291*2fd737d3SMatthias Ringwald  */
__get_MSP(void)292*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_MSP(void)
293*2fd737d3SMatthias Ringwald {
294*2fd737d3SMatthias Ringwald   uint32_t result;
295*2fd737d3SMatthias Ringwald 
296*2fd737d3SMatthias Ringwald   __ASM volatile ("MRS %0, msp" : "=r" (result) );
297*2fd737d3SMatthias Ringwald   return(result);
298*2fd737d3SMatthias Ringwald }
299*2fd737d3SMatthias Ringwald 
300*2fd737d3SMatthias Ringwald 
301*2fd737d3SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
302*2fd737d3SMatthias Ringwald /**
303*2fd737d3SMatthias Ringwald   \brief   Get Main Stack Pointer (non-secure)
304*2fd737d3SMatthias Ringwald   \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
305*2fd737d3SMatthias Ringwald   \return               MSP Register value
306*2fd737d3SMatthias Ringwald  */
__TZ_get_MSP_NS(void)307*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
308*2fd737d3SMatthias Ringwald {
309*2fd737d3SMatthias Ringwald   uint32_t result;
310*2fd737d3SMatthias Ringwald 
311*2fd737d3SMatthias Ringwald   __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
312*2fd737d3SMatthias Ringwald   return(result);
313*2fd737d3SMatthias Ringwald }
314*2fd737d3SMatthias Ringwald #endif
315*2fd737d3SMatthias Ringwald 
316*2fd737d3SMatthias Ringwald 
317*2fd737d3SMatthias Ringwald /**
318*2fd737d3SMatthias Ringwald   \brief   Set Main Stack Pointer
319*2fd737d3SMatthias Ringwald   \details Assigns the given value to the Main Stack Pointer (MSP).
320*2fd737d3SMatthias Ringwald   \param [in]    topOfMainStack  Main Stack Pointer value to set
321*2fd737d3SMatthias Ringwald  */
__set_MSP(uint32_t topOfMainStack)322*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
323*2fd737d3SMatthias Ringwald {
324*2fd737d3SMatthias Ringwald   __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
325*2fd737d3SMatthias Ringwald }
326*2fd737d3SMatthias Ringwald 
327*2fd737d3SMatthias Ringwald 
328*2fd737d3SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
329*2fd737d3SMatthias Ringwald /**
330*2fd737d3SMatthias Ringwald   \brief   Set Main Stack Pointer (non-secure)
331*2fd737d3SMatthias Ringwald   \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
332*2fd737d3SMatthias Ringwald   \param [in]    topOfMainStack  Main Stack Pointer value to set
333*2fd737d3SMatthias Ringwald  */
__TZ_set_MSP_NS(uint32_t topOfMainStack)334*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
335*2fd737d3SMatthias Ringwald {
336*2fd737d3SMatthias Ringwald   __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
337*2fd737d3SMatthias Ringwald }
338*2fd737d3SMatthias Ringwald #endif
339*2fd737d3SMatthias Ringwald 
340*2fd737d3SMatthias Ringwald 
341*2fd737d3SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
342*2fd737d3SMatthias Ringwald /**
343*2fd737d3SMatthias Ringwald   \brief   Get Stack Pointer (non-secure)
344*2fd737d3SMatthias Ringwald   \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
345*2fd737d3SMatthias Ringwald   \return               SP Register value
346*2fd737d3SMatthias Ringwald  */
__TZ_get_SP_NS(void)347*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
348*2fd737d3SMatthias Ringwald {
349*2fd737d3SMatthias Ringwald   uint32_t result;
350*2fd737d3SMatthias Ringwald 
351*2fd737d3SMatthias Ringwald   __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
352*2fd737d3SMatthias Ringwald   return(result);
353*2fd737d3SMatthias Ringwald }
354*2fd737d3SMatthias Ringwald 
355*2fd737d3SMatthias Ringwald 
356*2fd737d3SMatthias Ringwald /**
357*2fd737d3SMatthias Ringwald   \brief   Set Stack Pointer (non-secure)
358*2fd737d3SMatthias Ringwald   \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
359*2fd737d3SMatthias Ringwald   \param [in]    topOfStack  Stack Pointer value to set
360*2fd737d3SMatthias Ringwald  */
__TZ_set_SP_NS(uint32_t topOfStack)361*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
362*2fd737d3SMatthias Ringwald {
363*2fd737d3SMatthias Ringwald   __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
364*2fd737d3SMatthias Ringwald }
365*2fd737d3SMatthias Ringwald #endif
366*2fd737d3SMatthias Ringwald 
367*2fd737d3SMatthias Ringwald 
368*2fd737d3SMatthias Ringwald /**
369*2fd737d3SMatthias Ringwald   \brief   Get Priority Mask
370*2fd737d3SMatthias Ringwald   \details Returns the current state of the priority mask bit from the Priority Mask Register.
371*2fd737d3SMatthias Ringwald   \return               Priority Mask value
372*2fd737d3SMatthias Ringwald  */
__get_PRIMASK(void)373*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
374*2fd737d3SMatthias Ringwald {
375*2fd737d3SMatthias Ringwald   uint32_t result;
376*2fd737d3SMatthias Ringwald 
377*2fd737d3SMatthias Ringwald   __ASM volatile ("MRS %0, primask" : "=r" (result) );
378*2fd737d3SMatthias Ringwald   return(result);
379*2fd737d3SMatthias Ringwald }
380*2fd737d3SMatthias Ringwald 
381*2fd737d3SMatthias Ringwald 
382*2fd737d3SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
383*2fd737d3SMatthias Ringwald /**
384*2fd737d3SMatthias Ringwald   \brief   Get Priority Mask (non-secure)
385*2fd737d3SMatthias Ringwald   \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
386*2fd737d3SMatthias Ringwald   \return               Priority Mask value
387*2fd737d3SMatthias Ringwald  */
__TZ_get_PRIMASK_NS(void)388*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
389*2fd737d3SMatthias Ringwald {
390*2fd737d3SMatthias Ringwald   uint32_t result;
391*2fd737d3SMatthias Ringwald 
392*2fd737d3SMatthias Ringwald   __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
393*2fd737d3SMatthias Ringwald   return(result);
394*2fd737d3SMatthias Ringwald }
395*2fd737d3SMatthias Ringwald #endif
396*2fd737d3SMatthias Ringwald 
397*2fd737d3SMatthias Ringwald 
398*2fd737d3SMatthias Ringwald /**
399*2fd737d3SMatthias Ringwald   \brief   Set Priority Mask
400*2fd737d3SMatthias Ringwald   \details Assigns the given value to the Priority Mask Register.
401*2fd737d3SMatthias Ringwald   \param [in]    priMask  Priority Mask
402*2fd737d3SMatthias Ringwald  */
__set_PRIMASK(uint32_t priMask)403*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
404*2fd737d3SMatthias Ringwald {
405*2fd737d3SMatthias Ringwald   __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
406*2fd737d3SMatthias Ringwald }
407*2fd737d3SMatthias Ringwald 
408*2fd737d3SMatthias Ringwald 
409*2fd737d3SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
410*2fd737d3SMatthias Ringwald /**
411*2fd737d3SMatthias Ringwald   \brief   Set Priority Mask (non-secure)
412*2fd737d3SMatthias Ringwald   \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
413*2fd737d3SMatthias Ringwald   \param [in]    priMask  Priority Mask
414*2fd737d3SMatthias Ringwald  */
__TZ_set_PRIMASK_NS(uint32_t priMask)415*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
416*2fd737d3SMatthias Ringwald {
417*2fd737d3SMatthias Ringwald   __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
418*2fd737d3SMatthias Ringwald }
419*2fd737d3SMatthias Ringwald #endif
420*2fd737d3SMatthias Ringwald 
421*2fd737d3SMatthias Ringwald 
422*2fd737d3SMatthias Ringwald #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
423*2fd737d3SMatthias Ringwald      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
424*2fd737d3SMatthias Ringwald      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
425*2fd737d3SMatthias Ringwald /**
426*2fd737d3SMatthias Ringwald   \brief   Enable FIQ
427*2fd737d3SMatthias Ringwald   \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
428*2fd737d3SMatthias Ringwald            Can only be executed in Privileged modes.
429*2fd737d3SMatthias Ringwald  */
430*2fd737d3SMatthias Ringwald #define __enable_fault_irq                __enable_fiq   /* see arm_compat.h */
431*2fd737d3SMatthias Ringwald 
432*2fd737d3SMatthias Ringwald 
433*2fd737d3SMatthias Ringwald /**
434*2fd737d3SMatthias Ringwald   \brief   Disable FIQ
435*2fd737d3SMatthias Ringwald   \details Disables FIQ interrupts by setting the F-bit in the CPSR.
436*2fd737d3SMatthias Ringwald            Can only be executed in Privileged modes.
437*2fd737d3SMatthias Ringwald  */
438*2fd737d3SMatthias Ringwald #define __disable_fault_irq               __disable_fiq   /* see arm_compat.h */
439*2fd737d3SMatthias Ringwald 
440*2fd737d3SMatthias Ringwald 
441*2fd737d3SMatthias Ringwald /**
442*2fd737d3SMatthias Ringwald   \brief   Get Base Priority
443*2fd737d3SMatthias Ringwald   \details Returns the current value of the Base Priority register.
444*2fd737d3SMatthias Ringwald   \return               Base Priority register value
445*2fd737d3SMatthias Ringwald  */
__get_BASEPRI(void)446*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
447*2fd737d3SMatthias Ringwald {
448*2fd737d3SMatthias Ringwald   uint32_t result;
449*2fd737d3SMatthias Ringwald 
450*2fd737d3SMatthias Ringwald   __ASM volatile ("MRS %0, basepri" : "=r" (result) );
451*2fd737d3SMatthias Ringwald   return(result);
452*2fd737d3SMatthias Ringwald }
453*2fd737d3SMatthias Ringwald 
454*2fd737d3SMatthias Ringwald 
455*2fd737d3SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
456*2fd737d3SMatthias Ringwald /**
457*2fd737d3SMatthias Ringwald   \brief   Get Base Priority (non-secure)
458*2fd737d3SMatthias Ringwald   \details Returns the current value of the non-secure Base Priority register when in secure state.
459*2fd737d3SMatthias Ringwald   \return               Base Priority register value
460*2fd737d3SMatthias Ringwald  */
__TZ_get_BASEPRI_NS(void)461*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
462*2fd737d3SMatthias Ringwald {
463*2fd737d3SMatthias Ringwald   uint32_t result;
464*2fd737d3SMatthias Ringwald 
465*2fd737d3SMatthias Ringwald   __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
466*2fd737d3SMatthias Ringwald   return(result);
467*2fd737d3SMatthias Ringwald }
468*2fd737d3SMatthias Ringwald #endif
469*2fd737d3SMatthias Ringwald 
470*2fd737d3SMatthias Ringwald 
471*2fd737d3SMatthias Ringwald /**
472*2fd737d3SMatthias Ringwald   \brief   Set Base Priority
473*2fd737d3SMatthias Ringwald   \details Assigns the given value to the Base Priority register.
474*2fd737d3SMatthias Ringwald   \param [in]    basePri  Base Priority value to set
475*2fd737d3SMatthias Ringwald  */
__set_BASEPRI(uint32_t basePri)476*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
477*2fd737d3SMatthias Ringwald {
478*2fd737d3SMatthias Ringwald   __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
479*2fd737d3SMatthias Ringwald }
480*2fd737d3SMatthias Ringwald 
481*2fd737d3SMatthias Ringwald 
482*2fd737d3SMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
483*2fd737d3SMatthias Ringwald /**
484*2fd737d3SMatthias Ringwald   \brief   Set Base Priority (non-secure)
485*2fd737d3SMatthias Ringwald   \details Assigns the given value to the non-secure Base Priority register when in secure state.
486*2fd737d3SMatthias Ringwald   \param [in]    basePri  Base Priority value to set
487*2fd737d3SMatthias Ringwald  */
__TZ_set_BASEPRI_NS(uint32_t basePri)488*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
489*2fd737d3SMatthias Ringwald {
490*2fd737d3SMatthias Ringwald   __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
491*2fd737d3SMatthias Ringwald }
492*2fd737d3SMatthias Ringwald #endif
493*2fd737d3SMatthias Ringwald 
494*2fd737d3SMatthias Ringwald 
495*2fd737d3SMatthias Ringwald /**
496*2fd737d3SMatthias Ringwald   \brief   Set Base Priority with condition
497*2fd737d3SMatthias Ringwald   \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
498*2fd737d3SMatthias Ringwald            or the new value increases the BASEPRI priority level.
499*2fd737d3SMatthias Ringwald   \param [in]    basePri  Base Priority value to set
500*2fd737d3SMatthias Ringwald  */
__set_BASEPRI_MAX(uint32_t basePri)501*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
502*2fd737d3SMatthias Ringwald {
503*2fd737d3SMatthias Ringwald   __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
504*2fd737d3SMatthias Ringwald }
505*2fd737d3SMatthias Ringwald 
506*2fd737d3SMatthias Ringwald 
/**
  \brief   Get Fault Mask
  \details Returns the current value of the Fault Mask register.
  \return               Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  /* Plain MRS read of FAULTMASK into a general-purpose register. */
  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}
519*2fd737d3SMatthias Ringwald 
520*2fd737d3SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Fault Mask (non-secure)
  \details Returns the current value of the non-secure Fault Mask register when in secure state.
  \return               Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  /* Only compiled for secure code (CMSE == 3); FAULTMASK_NS is the
     non-secure banked copy of FAULTMASK. */
  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif
535*2fd737d3SMatthias Ringwald 
536*2fd737d3SMatthias Ringwald 
/**
  \brief   Set Fault Mask
  \details Assigns the given value to the Fault Mask register.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  /* "memory" clobber prevents the compiler from reordering memory accesses
     across the mask change. */
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}
546*2fd737d3SMatthias Ringwald 
547*2fd737d3SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Fault Mask (non-secure)
  \details Assigns the given value to the non-secure Fault Mask register when in secure state.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  /* Only compiled for secure code (CMSE == 3). */
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif
559*2fd737d3SMatthias Ringwald 
560*2fd737d3SMatthias Ringwald #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
561*2fd737d3SMatthias Ringwald            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
562*2fd737d3SMatthias Ringwald            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
563*2fd737d3SMatthias Ringwald 
564*2fd737d3SMatthias Ringwald 
565*2fd737d3SMatthias Ringwald #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
566*2fd737d3SMatthias Ringwald      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
567*2fd737d3SMatthias Ringwald 
/**
  \brief   Get Process Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always in non-secure
  mode.

  \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
  \return               PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
  /* PSPLIM is accessible only with Main Extensions or when compiled for the
     secure state (CMSE == 3); otherwise the register reads as zero. */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
  return result;
#endif
}
589*2fd737d3SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always in non-secure
  mode.

  \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \return               PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
  /* PSPLIM_NS exists only with Main Extensions; baseline cores read it as zero. */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
  return result;
#endif
}
#endif
612*2fd737d3SMatthias Ringwald 
613*2fd737d3SMatthias Ringwald 
/**
  \brief   Set Process Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored in non-secure
  mode.

  \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
  /* PSPLIM is writable only with Main Extensions or in the secure state;
     otherwise the value is consumed to keep the call side-effect free. */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}
633*2fd737d3SMatthias Ringwald 
634*2fd737d3SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
/**
  \brief   Set Process Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored in non-secure
  mode.

  \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
  /* PSPLIM_NS is writable only with Main Extensions; otherwise discard. */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif
655*2fd737d3SMatthias Ringwald 
656*2fd737d3SMatthias Ringwald 
/**
  \brief   Get Main Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always.

  \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
  \return               MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
  /* MSPLIM is accessible only with Main Extensions or when compiled for the
     secure state (CMSE == 3); otherwise the register reads as zero. */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}
677*2fd737d3SMatthias Ringwald 
678*2fd737d3SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
/**
  \brief   Get Main Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always.

  \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state.
  \return               MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
  /* MSPLIM_NS exists only with Main Extensions; baseline cores read it as zero. */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif
700*2fd737d3SMatthias Ringwald 
701*2fd737d3SMatthias Ringwald 
/**
  \brief   Set Main Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored.

  \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
  /* MSPLIM is writable only with Main Extensions or in the secure state;
     otherwise the value is consumed to keep the call side-effect free. */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}
720*2fd737d3SMatthias Ringwald 
721*2fd737d3SMatthias Ringwald 
#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
/**
  \brief   Set Main Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored.

  \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
  /* MSPLIM_NS is writable only with Main Extensions; otherwise discard. */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif
741*2fd737d3SMatthias Ringwald 
742*2fd737d3SMatthias Ringwald #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
743*2fd737d3SMatthias Ringwald            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
744*2fd737d3SMatthias Ringwald 
/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return               Floating Point Status/Control register value
 */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#define __get_FPSCR      (uint32_t)__builtin_arm_get_fpscr
#else
/* No FPU present/used: the stub always reads as 0. */
#define __get_FPSCR()      ((uint32_t)0U)
#endif

/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#define __set_FPSCR      __builtin_arm_set_fpscr
#else
/* No FPU present/used: the argument is evaluated and discarded. */
#define __set_FPSCR(x)      ((void)(x))
#endif
768*2fd737d3SMatthias Ringwald 
769*2fd737d3SMatthias Ringwald 
770*2fd737d3SMatthias Ringwald /*@} end of CMSIS_Core_RegAccFunctions */
771*2fd737d3SMatthias Ringwald 
772*2fd737d3SMatthias Ringwald 
773*2fd737d3SMatthias Ringwald /* ##########################  Core Instruction Access  ######################### */
774*2fd737d3SMatthias Ringwald /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
775*2fd737d3SMatthias Ringwald   Access to dedicated instructions
776*2fd737d3SMatthias Ringwald   @{
777*2fd737d3SMatthias Ringwald */
778*2fd737d3SMatthias Ringwald 
/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r"
 * (These constraints are used by inline-assembly helpers below, e.g. __RRX.) */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif
789*2fd737d3SMatthias Ringwald 
/* Hint instructions, mapped onto the corresponding clang/armclang builtins.
   These are object-like macros: callers write e.g. __NOP() and the call
   operator applies to the builtin itself. */
/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP          __builtin_arm_nop

/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI          __builtin_arm_wfi


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE          __builtin_arm_wfe


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV          __builtin_arm_sev
816*2fd737d3SMatthias Ringwald 
817*2fd737d3SMatthias Ringwald 
/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
/* No trailing ';' in the expansion: the caller terminates the statement, so the
   macro is safe in 'if (c) __ISB(); else ...' and expression contexts.
   (Later CMSIS releases apply the same fix.) */
#define __ISB()        __builtin_arm_isb(0xF)
825*2fd737d3SMatthias Ringwald 
/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
/* No trailing ';' in the expansion: the caller terminates the statement, so the
   macro is safe in 'if (c) __DSB(); else ...' and expression contexts.
   (Later CMSIS releases apply the same fix.) */
#define __DSB()        __builtin_arm_dsb(0xF)
832*2fd737d3SMatthias Ringwald 
833*2fd737d3SMatthias Ringwald 
/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
/* No trailing ';' in the expansion: the caller terminates the statement, so the
   macro is safe in 'if (c) __DMB(); else ...' and expression contexts.
   (Later CMSIS releases apply the same fix.) */
#define __DMB()        __builtin_arm_dmb(0xF)
840*2fd737d3SMatthias Ringwald 
841*2fd737d3SMatthias Ringwald 
/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV(value)   __builtin_bswap32(value)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
/* Full 32-bit byte swap followed by a 16-bit rotate swaps the bytes within
   each halfword; relies on __ROR defined later in this file. */
#define __REV16(value) __ROR(__REV(value), 16)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
/* The int16_t cast yields a signed (sign-extendable) 16-bit result. */
#define __REVSH(value) (int16_t)__builtin_bswap16(value)
867*2fd737d3SMatthias Ringwald 
868*2fd737d3SMatthias Ringwald 
869*2fd737d3SMatthias Ringwald /**
870*2fd737d3SMatthias Ringwald   \brief   Rotate Right in unsigned value (32 bit)
871*2fd737d3SMatthias Ringwald   \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
872*2fd737d3SMatthias Ringwald   \param [in]    op1  Value to rotate
873*2fd737d3SMatthias Ringwald   \param [in]    op2  Number of Bits to rotate
874*2fd737d3SMatthias Ringwald   \return               Rotated value
875*2fd737d3SMatthias Ringwald  */
__ROR(uint32_t op1,uint32_t op2)876*2fd737d3SMatthias Ringwald __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
877*2fd737d3SMatthias Ringwald {
878*2fd737d3SMatthias Ringwald   op2 %= 32U;
879*2fd737d3SMatthias Ringwald   if (op2 == 0U)
880*2fd737d3SMatthias Ringwald   {
881*2fd737d3SMatthias Ringwald     return op1;
882*2fd737d3SMatthias Ringwald   }
883*2fd737d3SMatthias Ringwald   return (op1 >> op2) | (op1 << (32U - op2));
884*2fd737d3SMatthias Ringwald }
885*2fd737d3SMatthias Ringwald 
886*2fd737d3SMatthias Ringwald 
/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
/* 'value' is stringized ("#value") into the instruction text, so it must be a
   compile-time literal, not a runtime expression. */
#define __BKPT(value)     __ASM volatile ("bkpt "#value)


/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __RBIT            __builtin_arm_rbit
904*2fd737d3SMatthias Ringwald 
905*2fd737d3SMatthias Ringwald /**
906*2fd737d3SMatthias Ringwald   \brief   Count leading zeros
907*2fd737d3SMatthias Ringwald   \details Counts the number of leading zeros of a data value.
908*2fd737d3SMatthias Ringwald   \param [in]  value  Value to count the leading zeros
909*2fd737d3SMatthias Ringwald   \return             number of leading zeros in value
910*2fd737d3SMatthias Ringwald  */
911*2fd737d3SMatthias Ringwald #define __CLZ             (uint8_t)__builtin_clz
912*2fd737d3SMatthias Ringwald 
913*2fd737d3SMatthias Ringwald 
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/* Exclusive load/store helpers for cores that implement LDREX/STREX.
   NOTE(review): these map to the clang __builtin_arm_ldrex/strex builtins,
   which appear to select the access width from the pointer type, with the
   casts fixing the result type — confirm against the ACLE/clang builtin
   documentation. */
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes a exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB        (uint8_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes a exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define __LDREXH        (uint16_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes a exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define __LDREXW        (uint32_t)__builtin_arm_ldrex


/**
  \brief   STR Exclusive (8 bit)
  \details Executes a exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXB        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (16 bit)
  \details Executes a exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXH        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (32 bit)
  \details Executes a exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXW        (uint32_t)__builtin_arm_strex


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX             __builtin_arm_clrex

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
988*2fd737d3SMatthias Ringwald 
989*2fd737d3SMatthias Ringwald 
990*2fd737d3SMatthias Ringwald #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
991*2fd737d3SMatthias Ringwald      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
992*2fd737d3SMatthias Ringwald      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
993*2fd737d3SMatthias Ringwald 
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
/* NOTE(review): the __builtin_arm_ssat/usat builtins encode 'sat' into the
   instruction, so it presumably must be a compile-time constant — confirm
   against the ACLE documentation. */
#define __SSAT             __builtin_arm_ssat


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT             __builtin_arm_usat
1012*2fd737d3SMatthias Ringwald 
1013*2fd737d3SMatthias Ringwald 
/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  /* Register constraints are supplied by the __CMSIS_GCC_OUT_REG /
     __CMSIS_GCC_USE_REG helper macros defined elsewhere in this file. */
  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
1028*2fd737d3SMatthias Ringwald 
1029*2fd737d3SMatthias Ringwald 
/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an unprivileged LDRBT instruction for an 8 bit value.
           The access is performed with unprivileged (user) access rights,
           even when executed from a privileged mode.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  /* "Q": memory operand addressable from a single base register. */
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);    /* Add explicit type cast here */
}
1043*2fd737d3SMatthias Ringwald 
1044*2fd737d3SMatthias Ringwald 
/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an unprivileged LDRHT instruction for 16 bit values.
           The access is performed with unprivileged (user) access rights,
           even when executed from a privileged mode.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);    /* Add explicit type cast here */
}
1058*2fd737d3SMatthias Ringwald 
1059*2fd737d3SMatthias Ringwald 
/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an unprivileged LDRT instruction for 32 bit values.
           The access is performed with unprivileged (user) access rights,
           even when executed from a privileged mode.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
1073*2fd737d3SMatthias Ringwald 
1074*2fd737d3SMatthias Ringwald 
/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an unprivileged STRBT instruction for 8 bit values.
           The store is performed with unprivileged (user) access rights,
           even when executed from a privileged mode.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  /* 'value' (operand %1) is stored to *ptr (memory output operand %0). */
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1085*2fd737d3SMatthias Ringwald 
1086*2fd737d3SMatthias Ringwald 
/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an unprivileged STRHT instruction for 16 bit values.
           The store is performed with unprivileged (user) access rights,
           even when executed from a privileged mode.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1097*2fd737d3SMatthias Ringwald 
1098*2fd737d3SMatthias Ringwald 
/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT instruction for 32 bit values.
           The store is performed with unprivileged (user) access rights,
           even when executed from a privileged mode.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
1109*2fd737d3SMatthias Ringwald 
1110*2fd737d3SMatthias Ringwald #else  /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1111*2fd737d3SMatthias Ringwald            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1112*2fd737d3SMatthias Ringwald            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
1113*2fd737d3SMatthias Ringwald 
/**
  \brief   Signed Saturate
  \details Saturates a signed value to the range representable in 'sat' bits
           (software fallback for cores without the SSAT instruction).
  \param [in]    val  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  /* A 'sat' outside 1..32 leaves the value untouched. */
  if ((sat < 1U) || (sat > 32U))
  {
    return val;
  }

  /* Representable range of a 'sat'-bit two's-complement value. */
  const int32_t upper = (int32_t)((1U << (sat - 1U)) - 1U);
  const int32_t lower = -upper - 1;

  if (val > upper)
  {
    return upper;
  }
  if (val < lower)
  {
    return lower;
  }
  return val;
}
1138*2fd737d3SMatthias Ringwald 
/**
  \brief   Unsigned Saturate
  \details Saturates a signed value to the unsigned range representable in
           'sat' bits (software fallback for cores without USAT).
  \param [in]    val  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  /* A 'sat' above 31 passes the value straight through (cast only). */
  if (sat > 31U)
  {
    return (uint32_t)val;
  }

  /* Largest value representable in 'sat' unsigned bits. */
  const uint32_t limit = (1U << sat) - 1U;

  if (val < 0)
  {
    return 0U;
  }
  if ((uint32_t)val > limit)
  {
    return limit;
  }
  return (uint32_t)val;
}
1162*2fd737d3SMatthias Ringwald 
1163*2fd737d3SMatthias Ringwald #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1164*2fd737d3SMatthias Ringwald            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1165*2fd737d3SMatthias Ringwald            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
1166*2fd737d3SMatthias Ringwald 
1167*2fd737d3SMatthias Ringwald 
1168*2fd737d3SMatthias Ringwald #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1169*2fd737d3SMatthias Ringwald      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/**
  \brief   Load-Acquire (8 bit)
  \details Executes a LDAB instruction for 8 bit value.
           The load has acquire memory-ordering semantics: it is observed
           before memory accesses that follow it in program order.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}
1183*2fd737d3SMatthias Ringwald 
1184*2fd737d3SMatthias Ringwald 
/**
  \brief   Load-Acquire (16 bit)
  \details Executes a LDAH instruction for 16 bit values.
           The load has acquire memory-ordering semantics.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}
1198*2fd737d3SMatthias Ringwald 
1199*2fd737d3SMatthias Ringwald 
/**
  \brief   Load-Acquire (32 bit)
  \details Executes a LDA instruction for 32 bit values.
           The load has acquire memory-ordering semantics.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
1213*2fd737d3SMatthias Ringwald 
1214*2fd737d3SMatthias Ringwald 
/**
  \brief   Store-Release (8 bit)
  \details Executes a STLB instruction for 8 bit values.
           The store has release memory-ordering semantics: it is observed
           after memory accesses that precede it in program order.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  /* 'value' (operand %1) is stored to *ptr (memory output operand %0). */
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1225*2fd737d3SMatthias Ringwald 
1226*2fd737d3SMatthias Ringwald 
/**
  \brief   Store-Release (16 bit)
  \details Executes a STLH instruction for 16 bit values.
           The store has release memory-ordering semantics.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1237*2fd737d3SMatthias Ringwald 
1238*2fd737d3SMatthias Ringwald 
/**
  \brief   Store-Release (32 bit)
  \details Executes a STL instruction for 32 bit values.
           The store has release memory-ordering semantics.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1249*2fd737d3SMatthias Ringwald 
1250*2fd737d3SMatthias Ringwald 
/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes a LDAEXB instruction (load-acquire exclusive) for an
           8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
  \note    __LDAEXB(p) expands to (uint8_t)__builtin_arm_ldaex(p): the cast
           narrows the builtin's return value at the call site.
 */
#define     __LDAEXB                 (uint8_t)__builtin_arm_ldaex
1258*2fd737d3SMatthias Ringwald 
1259*2fd737d3SMatthias Ringwald 
/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes a LDAEXH instruction (load-acquire exclusive) for
           16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
  \note    __LDAEXH(p) expands to (uint16_t)__builtin_arm_ldaex(p): the cast
           narrows the builtin's return value at the call site.
 */
#define     __LDAEXH                 (uint16_t)__builtin_arm_ldaex
1267*2fd737d3SMatthias Ringwald 
1268*2fd737d3SMatthias Ringwald 
/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes a LDAEX instruction (load-acquire exclusive) for
           32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
  \note    __LDAEX(p) expands to (uint32_t)__builtin_arm_ldaex(p).
 */
#define     __LDAEX                  (uint32_t)__builtin_arm_ldaex
1276*2fd737d3SMatthias Ringwald 
1277*2fd737d3SMatthias Ringwald 
/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes a STLEXB instruction (store-release exclusive) for
           8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
  \note    __STLEXB(v, p) expands to (uint32_t)__builtin_arm_stlex(v, p);
           the cast applies to the builtin's success/failure result.
 */
#define     __STLEXB                 (uint32_t)__builtin_arm_stlex
1287*2fd737d3SMatthias Ringwald 
1288*2fd737d3SMatthias Ringwald 
/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes a STLEXH instruction (store-release exclusive) for
           16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
  \note    __STLEXH(v, p) expands to (uint32_t)__builtin_arm_stlex(v, p).
 */
#define     __STLEXH                 (uint32_t)__builtin_arm_stlex
1298*2fd737d3SMatthias Ringwald 
1299*2fd737d3SMatthias Ringwald 
/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes a STLEX instruction (store-release exclusive) for
           32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
  \note    __STLEX(v, p) expands to (uint32_t)__builtin_arm_stlex(v, p).
 */
#define     __STLEX                  (uint32_t)__builtin_arm_stlex
1309*2fd737d3SMatthias Ringwald 
1310*2fd737d3SMatthias Ringwald #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1311*2fd737d3SMatthias Ringwald            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
1312*2fd737d3SMatthias Ringwald 
1313*2fd737d3SMatthias Ringwald /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
1314*2fd737d3SMatthias Ringwald 
1315*2fd737d3SMatthias Ringwald 
1316*2fd737d3SMatthias Ringwald /* ###################  Compiler specific Intrinsics  ########################### */
1317*2fd737d3SMatthias Ringwald /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
1318*2fd737d3SMatthias Ringwald   Access to dedicated SIMD instructions
1319*2fd737d3SMatthias Ringwald   @{
1320*2fd737d3SMatthias Ringwald */
1321*2fd737d3SMatthias Ringwald 
1322*2fd737d3SMatthias Ringwald #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1323*2fd737d3SMatthias Ringwald 
/**
  \brief   SADD8: quad signed 8-bit addition.
  \param [in]  op1  first packed operand (four int8 lanes)
  \param [in]  op2  second packed operand (four int8 lanes)
  \return          packed lane-wise sums
 */
__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1331*2fd737d3SMatthias Ringwald 
/**
  \brief   QADD8: quad signed saturating 8-bit addition.
  \param [in]  op1  first packed operand (four int8 lanes)
  \param [in]  op2  second packed operand (four int8 lanes)
  \return          packed lane-wise sums, each saturated to int8 range
 */
__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1339*2fd737d3SMatthias Ringwald 
/**
  \brief   SHADD8: quad signed 8-bit addition, halving the results.
  \param [in]  op1  first packed operand (four int8 lanes)
  \param [in]  op2  second packed operand (four int8 lanes)
  \return          packed lane-wise (a + b) >> 1 results
 */
__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1347*2fd737d3SMatthias Ringwald 
/**
  \brief   UADD8: quad unsigned 8-bit addition.
  \param [in]  op1  first packed operand (four uint8 lanes)
  \param [in]  op2  second packed operand (four uint8 lanes)
  \return          packed lane-wise sums
 */
__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1355*2fd737d3SMatthias Ringwald 
/**
  \brief   UQADD8: quad unsigned saturating 8-bit addition.
  \param [in]  op1  first packed operand (four uint8 lanes)
  \param [in]  op2  second packed operand (four uint8 lanes)
  \return          packed lane-wise sums, each saturated to uint8 range
 */
__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1363*2fd737d3SMatthias Ringwald 
/**
  \brief   UHADD8: quad unsigned 8-bit addition, halving the results.
  \param [in]  op1  first packed operand (four uint8 lanes)
  \param [in]  op2  second packed operand (four uint8 lanes)
  \return          packed lane-wise (a + b) >> 1 results
 */
__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1371*2fd737d3SMatthias Ringwald 
1372*2fd737d3SMatthias Ringwald 
/**
  \brief   SSUB8: quad signed 8-bit subtraction.
  \param [in]  op1  first packed operand (four int8 lanes)
  \param [in]  op2  second packed operand (four int8 lanes)
  \return          packed lane-wise differences (op1 - op2)
 */
__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1380*2fd737d3SMatthias Ringwald 
/**
  \brief   QSUB8: quad signed saturating 8-bit subtraction.
  \param [in]  op1  first packed operand (four int8 lanes)
  \param [in]  op2  second packed operand (four int8 lanes)
  \return          packed lane-wise differences, each saturated to int8 range
 */
__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1388*2fd737d3SMatthias Ringwald 
/**
  \brief   SHSUB8: quad signed 8-bit subtraction, halving the results.
  \param [in]  op1  first packed operand (four int8 lanes)
  \param [in]  op2  second packed operand (four int8 lanes)
  \return          packed lane-wise (a - b) >> 1 results
 */
__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1396*2fd737d3SMatthias Ringwald 
/**
  \brief   USUB8: quad unsigned 8-bit subtraction.
  \param [in]  op1  first packed operand (four uint8 lanes)
  \param [in]  op2  second packed operand (four uint8 lanes)
  \return          packed lane-wise differences (op1 - op2)
 */
__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1404*2fd737d3SMatthias Ringwald 
/**
  \brief   UQSUB8: quad unsigned saturating 8-bit subtraction.
  \param [in]  op1  first packed operand (four uint8 lanes)
  \param [in]  op2  second packed operand (four uint8 lanes)
  \return          packed lane-wise differences, each saturated to uint8 range
 */
__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1412*2fd737d3SMatthias Ringwald 
/**
  \brief   UHSUB8: quad unsigned 8-bit subtraction, halving the results.
  \param [in]  op1  first packed operand (four uint8 lanes)
  \param [in]  op2  second packed operand (four uint8 lanes)
  \return          packed lane-wise (a - b) >> 1 results
 */
__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1420*2fd737d3SMatthias Ringwald 
1421*2fd737d3SMatthias Ringwald 
/**
  \brief   SADD16: dual signed 16-bit addition.
  \param [in]  op1  first packed operand (two int16 lanes)
  \param [in]  op2  second packed operand (two int16 lanes)
  \return          packed lane-wise sums
 */
__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1429*2fd737d3SMatthias Ringwald 
/**
  \brief   QADD16: dual signed saturating 16-bit addition.
  \param [in]  op1  first packed operand (two int16 lanes)
  \param [in]  op2  second packed operand (two int16 lanes)
  \return          packed lane-wise sums, each saturated to int16 range
 */
__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1437*2fd737d3SMatthias Ringwald 
/**
  \brief   SHADD16: dual signed 16-bit addition, halving the results.
  \param [in]  op1  first packed operand (two int16 lanes)
  \param [in]  op2  second packed operand (two int16 lanes)
  \return          packed lane-wise (a + b) >> 1 results
 */
__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1445*2fd737d3SMatthias Ringwald 
/**
  \brief   UADD16: dual unsigned 16-bit addition.
  \param [in]  op1  first packed operand (two uint16 lanes)
  \param [in]  op2  second packed operand (two uint16 lanes)
  \return          packed lane-wise sums
 */
__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1453*2fd737d3SMatthias Ringwald 
/**
  \brief   UQADD16: dual unsigned saturating 16-bit addition.
  \param [in]  op1  first packed operand (two uint16 lanes)
  \param [in]  op2  second packed operand (two uint16 lanes)
  \return          packed lane-wise sums, each saturated to uint16 range
 */
__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1461*2fd737d3SMatthias Ringwald 
/**
  \brief   UHADD16: dual unsigned 16-bit addition, halving the results.
  \param [in]  op1  first packed operand (two uint16 lanes)
  \param [in]  op2  second packed operand (two uint16 lanes)
  \return          packed lane-wise (a + b) >> 1 results
 */
__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1469*2fd737d3SMatthias Ringwald 
/**
  \brief   SSUB16: dual signed 16-bit subtraction.
  \param [in]  op1  first packed operand (two int16 lanes)
  \param [in]  op2  second packed operand (two int16 lanes)
  \return          packed lane-wise differences (op1 - op2)
 */
__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1477*2fd737d3SMatthias Ringwald 
/**
  \brief   QSUB16: dual signed saturating 16-bit subtraction.
  \param [in]  op1  first packed operand (two int16 lanes)
  \param [in]  op2  second packed operand (two int16 lanes)
  \return          packed lane-wise differences, each saturated to int16 range
 */
__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1485*2fd737d3SMatthias Ringwald 
/**
  \brief   SHSUB16: dual signed 16-bit subtraction, halving the results.
  \param [in]  op1  first packed operand (two int16 lanes)
  \param [in]  op2  second packed operand (two int16 lanes)
  \return          packed lane-wise (a - b) >> 1 results
 */
__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1493*2fd737d3SMatthias Ringwald 
/**
  \brief   USUB16: dual unsigned 16-bit subtraction.
  \param [in]  op1  first packed operand (two uint16 lanes)
  \param [in]  op2  second packed operand (two uint16 lanes)
  \return          packed lane-wise differences (op1 - op2)
 */
__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1501*2fd737d3SMatthias Ringwald 
/**
  \brief   UQSUB16: dual unsigned saturating 16-bit subtraction.
  \param [in]  op1  first packed operand (two uint16 lanes)
  \param [in]  op2  second packed operand (two uint16 lanes)
  \return          packed lane-wise differences, each saturated to uint16 range
 */
__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1509*2fd737d3SMatthias Ringwald 
/**
  \brief   UHSUB16: dual unsigned 16-bit subtraction, halving the results.
  \param [in]  op1  first packed operand (two uint16 lanes)
  \param [in]  op2  second packed operand (two uint16 lanes)
  \return          packed lane-wise (a - b) >> 1 results
 */
__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1517*2fd737d3SMatthias Ringwald 
/**
  \brief   SASX: signed 16-bit add and subtract with exchange.
  \details High half = op1.hi + op2.lo; low half = op1.lo - op2.hi.
  \param [in]  op1  first packed operand (two int16 lanes)
  \param [in]  op2  second packed operand (two int16 lanes)
  \return          packed result
 */
__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1525*2fd737d3SMatthias Ringwald 
/**
  \brief   QASX: saturating signed 16-bit add and subtract with exchange.
  \details High half = sat(op1.hi + op2.lo); low half = sat(op1.lo - op2.hi).
  \param [in]  op1  first packed operand (two int16 lanes)
  \param [in]  op2  second packed operand (two int16 lanes)
  \return          packed result, each half saturated to int16 range
 */
__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1533*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed halving add/subtract with exchange (SHASX).
  \details result[31:16] = (op1[31:16] + op2[15:0])  >> 1;
           result[15:0]  = (op1[15:0]  - op2[31:16]) >> 1.
  \param [in]  op1  first packed halfword operand
  \param [in]  op2  second packed halfword operand (halfwords exchanged)
  \return      packed, halved sum (top) and difference (bottom)
 */
__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1541*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit unsigned add/subtract with exchange (UASX).
  \details result[31:16] = op1[31:16] + op2[15:0];
           result[15:0]  = op1[15:0]  - op2[31:16].
           Also updates the APSR.GE flags (usable by __SEL).
  \param [in]  op1  first packed halfword operand
  \param [in]  op2  second packed halfword operand (halfwords exchanged)
  \return      packed sum (top) and difference (bottom)
 */
__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1549*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit unsigned saturating add/subtract with exchange (UQASX).
  \details result[31:16] = sat(op1[31:16] + op2[15:0]);
           result[15:0]  = sat(op1[15:0]  - op2[31:16]);
           each result saturated to the unsigned range 0..0xFFFF.
  \param [in]  op1  first packed halfword operand
  \param [in]  op2  second packed halfword operand (halfwords exchanged)
  \return      packed, unsigned-saturated sum (top) and difference (bottom)
 */
__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1557*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit unsigned halving add/subtract with exchange (UHASX).
  \details result[31:16] = (op1[31:16] + op2[15:0])  >> 1;
           result[15:0]  = (op1[15:0]  - op2[31:16]) >> 1.
  \param [in]  op1  first packed halfword operand
  \param [in]  op2  second packed halfword operand (halfwords exchanged)
  \return      packed, halved sum (top) and difference (bottom)
 */
__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1565*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed subtract/add with exchange (SSAX).
  \details result[31:16] = op1[31:16] - op2[15:0];
           result[15:0]  = op1[15:0]  + op2[31:16].
           Also updates the APSR.GE flags (usable by __SEL).
  \param [in]  op1  first packed halfword operand
  \param [in]  op2  second packed halfword operand (halfwords exchanged)
  \return      packed difference (top) and sum (bottom)
 */
__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1573*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit saturating subtract/add with exchange (QSAX).
  \details result[31:16] = sat(op1[31:16] - op2[15:0]);
           result[15:0]  = sat(op1[15:0]  + op2[31:16]);
           each result saturated to the signed range -0x8000..0x7FFF.
  \param [in]  op1  first packed halfword operand
  \param [in]  op2  second packed halfword operand (halfwords exchanged)
  \return      packed, signed-saturated difference (top) and sum (bottom)
 */
__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1581*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed halving subtract/add with exchange (SHSAX).
  \details result[31:16] = (op1[31:16] - op2[15:0])  >> 1;
           result[15:0]  = (op1[15:0]  + op2[31:16]) >> 1.
  \param [in]  op1  first packed halfword operand
  \param [in]  op2  second packed halfword operand (halfwords exchanged)
  \return      packed, halved difference (top) and sum (bottom)
 */
__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1589*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit unsigned subtract/add with exchange (USAX).
  \details result[31:16] = op1[31:16] - op2[15:0];
           result[15:0]  = op1[15:0]  + op2[31:16].
           Also updates the APSR.GE flags (usable by __SEL).
  \param [in]  op1  first packed halfword operand
  \param [in]  op2  second packed halfword operand (halfwords exchanged)
  \return      packed difference (top) and sum (bottom)
 */
__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1597*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit unsigned saturating subtract/add with exchange (UQSAX).
  \details result[31:16] = sat(op1[31:16] - op2[15:0]);
           result[15:0]  = sat(op1[15:0]  + op2[31:16]);
           each result saturated to the unsigned range 0..0xFFFF.
  \param [in]  op1  first packed halfword operand
  \param [in]  op2  second packed halfword operand (halfwords exchanged)
  \return      packed, unsigned-saturated difference (top) and sum (bottom)
 */
__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1605*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit unsigned halving subtract/add with exchange (UHSAX).
  \details result[31:16] = (op1[31:16] - op2[15:0])  >> 1;
           result[15:0]  = (op1[15:0]  + op2[31:16]) >> 1.
  \param [in]  op1  first packed halfword operand
  \param [in]  op2  second packed halfword operand (halfwords exchanged)
  \return      packed, halved difference (top) and sum (bottom)
 */
__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1613*2fd737d3SMatthias Ringwald 
/**
  \brief   Unsigned sum of quad 8-bit absolute differences (USAD8).
  \details Computes |op1[7:0]-op2[7:0]| + |op1[15:8]-op2[15:8]| +
           |op1[23:16]-op2[23:16]| + |op1[31:24]-op2[31:24]|.
  \param [in]  op1  four packed unsigned bytes
  \param [in]  op2  four packed unsigned bytes
  \return      sum of the four byte-wise absolute differences
 */
__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1621*2fd737d3SMatthias Ringwald 
/**
  \brief   Unsigned sum of quad 8-bit absolute differences with accumulate (USADA8).
  \details Same byte-wise absolute-difference sum as __USAD8, then adds the
           accumulator op3 to the result.
  \param [in]  op1  four packed unsigned bytes
  \param [in]  op2  four packed unsigned bytes
  \param [in]  op3  accumulation value
  \return      op3 + sum of the four byte-wise absolute differences
 */
__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1629*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed saturate (SSAT16).
  \details Saturates each signed 16-bit halfword of ARG1 to the signed range
           of ARG2 bits. ARG2 must be a compile-time constant (the asm "I"
           constraint requires an immediate). Implemented as a macro (not an
           inline function) for exactly that reason.
 */
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
1636*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit unsigned saturate (USAT16).
  \details Saturates each signed 16-bit halfword of ARG1 to the unsigned range
           of ARG2 bits. ARG2 must be a compile-time constant (asm "I"
           constraint), hence the macro form.
 */
#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
1643*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual zero-extend bytes to halfwords (UXTB16).
  \details result[15:0]  = zero-extend(op1[7:0]);
           result[31:16] = zero-extend(op1[23:16]).
  \param [in]  op1  value containing the two bytes to extend
  \return      two packed zero-extended 16-bit values
 */
__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}
1651*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual zero-extend bytes and add (UXTAB16).
  \details result[15:0]  = op1[15:0]  + zero-extend(op2[7:0]);
           result[31:16] = op1[31:16] + zero-extend(op2[23:16]).
  \param [in]  op1  packed halfword accumulators
  \param [in]  op2  value containing the two bytes to extend and add
  \return      two packed 16-bit sums
 */
__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1659*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual sign-extend bytes to halfwords (SXTB16).
  \details result[15:0]  = sign-extend(op1[7:0]);
           result[31:16] = sign-extend(op1[23:16]).
  \param [in]  op1  value containing the two bytes to extend
  \return      two packed sign-extended 16-bit values
 */
__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}
1667*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual sign-extend bytes and add (SXTAB16).
  \details result[15:0]  = op1[15:0]  + sign-extend(op2[7:0]);
           result[31:16] = op1[31:16] + sign-extend(op2[23:16]).
  \param [in]  op1  packed halfword accumulators
  \param [in]  op2  value containing the two bytes to extend and add
  \return      two packed 16-bit sums
 */
__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1675*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed multiply with addition of products (SMUAD).
  \details result = op1[15:0]*op2[15:0] + op1[31:16]*op2[31:16]
           (signed halfword multiplies).
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands
  \return      sum of the two halfword products
 */
__STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1683*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed multiply-add with exchanged halfwords (SMUADX).
  \details result = op1[15:0]*op2[31:16] + op1[31:16]*op2[15:0]
           (op2 halfwords exchanged before multiplying).
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands
  \return      sum of the two cross halfword products
 */
__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1691*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed multiply-add with 32-bit accumulate (SMLAD).
  \details result = op1[15:0]*op2[15:0] + op1[31:16]*op2[31:16] + op3.
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands
  \param [in]  op3  32-bit accumulation value
  \return      accumulated sum of the two halfword products
 */
__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1699*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed multiply-add, exchanged, 32-bit accumulate (SMLADX).
  \details result = op1[15:0]*op2[31:16] + op1[31:16]*op2[15:0] + op3.
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands (halfwords exchanged)
  \param [in]  op3  32-bit accumulation value
  \return      accumulated sum of the two cross halfword products
 */
__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1707*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed multiply-add with 64-bit accumulate (SMLALD).
  \details acc += op1[15:0]*op2[15:0] + op1[31:16]*op2[31:16].
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands
  \param [in]  acc  64-bit accumulation value
  \return      updated 64-bit accumulator
 */
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* The union splits the 64-bit accumulator into the low/high register pair
     the instruction operates on; word order depends on endianness. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1724*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed multiply-add, exchanged, 64-bit accumulate (SMLALDX).
  \details acc += op1[15:0]*op2[31:16] + op1[31:16]*op2[15:0]
           (op2 halfwords exchanged before multiplying).
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands
  \param [in]  acc  64-bit accumulation value
  \return      updated 64-bit accumulator
 */
__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* Register-pair view of the 64-bit accumulator; word order is endian-dependent. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1741*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed multiply with subtraction of products (SMUSD).
  \details result = op1[15:0]*op2[15:0] - op1[31:16]*op2[31:16].
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands
  \return      difference of the two halfword products
 */
__STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1749*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed multiply-subtract with exchanged halfwords (SMUSDX).
  \details result = op1[15:0]*op2[31:16] - op1[31:16]*op2[15:0].
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands (halfwords exchanged)
  \return      difference of the two cross halfword products
 */
__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1757*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed multiply-subtract with 32-bit accumulate (SMLSD).
  \details result = op1[15:0]*op2[15:0] - op1[31:16]*op2[31:16] + op3.
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands
  \param [in]  op3  32-bit accumulation value
  \return      accumulated difference of the two halfword products
 */
__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1765*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed multiply-subtract, exchanged, 32-bit accumulate (SMLSDX).
  \details result = op1[15:0]*op2[31:16] - op1[31:16]*op2[15:0] + op3.
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands (halfwords exchanged)
  \param [in]  op3  32-bit accumulation value
  \return      accumulated difference of the two cross halfword products
 */
__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1773*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed multiply-subtract with 64-bit accumulate (SMLSLD).
  \details acc += op1[15:0]*op2[15:0] - op1[31:16]*op2[31:16].
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands
  \param [in]  acc  64-bit accumulation value
  \return      updated 64-bit accumulator
 */
__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* Register-pair view of the 64-bit accumulator; word order is endian-dependent. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1790*2fd737d3SMatthias Ringwald 
/**
  \brief   Dual 16-bit signed multiply-subtract, exchanged, 64-bit accumulate (SMLSLDX).
  \details acc += op1[15:0]*op2[31:16] - op1[31:16]*op2[15:0]
           (op2 halfwords exchanged before multiplying).
  \param [in]  op1  two packed signed 16-bit multiplicands
  \param [in]  op2  two packed signed 16-bit multiplicands
  \param [in]  acc  64-bit accumulation value
  \return      updated 64-bit accumulator
 */
__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* Register-pair view of the 64-bit accumulator; word order is endian-dependent. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1807*2fd737d3SMatthias Ringwald 
/**
  \brief   Select bytes based on GE flags (SEL).
  \details For each byte lane n: result byte = op1 byte if APSR.GE[n] is set,
           else op2 byte. The GE flags are set by earlier SIMD instructions
           such as __SADD16/__SSUB16/__UASX.
  \param [in]  op1  bytes selected where GE flag is set
  \param [in]  op2  bytes selected where GE flag is clear
  \return      byte-wise selection of op1/op2
 */
__STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1815*2fd737d3SMatthias Ringwald 
/**
  \brief   32-bit signed saturating add (QADD).
  \details result = sat(op1 + op2), saturated to INT32_MIN..INT32_MAX.
           Sets the APSR Q flag if saturation occurs.
  \param [in]  op1  first summand
  \param [in]  op2  second summand
  \return      saturated 32-bit sum
 */
__STATIC_FORCEINLINE  int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1823*2fd737d3SMatthias Ringwald 
/**
  \brief   32-bit signed saturating subtract (QSUB).
  \details result = sat(op1 - op2), saturated to INT32_MIN..INT32_MAX.
           Sets the APSR Q flag if saturation occurs.
  \param [in]  op1  minuend
  \param [in]  op2  subtrahend
  \return      saturated 32-bit difference
 */
__STATIC_FORCEINLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1831*2fd737d3SMatthias Ringwald 
/* Reference inline-assembly versions of __PKHBT/__PKHTB, kept disabled.
   The asm "I" constraint needs a compile-time-constant shift and cannot
   express every shift form, so the pure-C versions below are used instead. */
#if 0
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
#endif

/* Pack halfword bottom/top (PKHBT): bottom halfword of ARG1 combined with
   the top halfword of (ARG2 << ARG3).
   NOTE(review): ARG2/ARG3 are evaluated once here, but ARG3 should be in
   0..31 — a shift count of 32 is undefined behavior in C. */
#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

/* Pack halfword top/bottom (PKHTB): top halfword of ARG1 combined with
   the bottom halfword of (ARG2 >> ARG3).
   NOTE(review): same constraint — ARG3 should be in 0..31. */
#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
1856*2fd737d3SMatthias Ringwald 
/**
  \brief   32-bit signed multiply (most significant word) with accumulate (SMMLA).
  \details result = op3 + high 32 bits of the 64-bit signed product op1*op2.
  \param [in]  op1  first multiplicand
  \param [in]  op2  second multiplicand
  \param [in]  op3  accumulation value
  \return      op3 + (op1 * op2)[63:32]
 */
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
  return(result);
}
1864*2fd737d3SMatthias Ringwald 
1865*2fd737d3SMatthias Ringwald #endif /* (__ARM_FEATURE_DSP == 1) */
1866*2fd737d3SMatthias Ringwald /*@} end of group CMSIS_SIMD_intrinsics */
1867*2fd737d3SMatthias Ringwald 
1868*2fd737d3SMatthias Ringwald 
1869*2fd737d3SMatthias Ringwald #endif /* __CMSIS_ARMCLANG_H */
1870