/**************************************************************************//**
 * @file     cmsis_gcc.h
 * @brief    CMSIS compiler GCC header file
 * @version  V5.0.4
 * @date     09. April 2018
 ******************************************************************************/
/*
 * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_GCC_H
#define __CMSIS_GCC_H

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"

/* Fallback for __has_builtin */
#ifndef __has_builtin
  #define __has_builtin(x) (0)
#endif

/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT32        /* deprecated */
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
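
/* Usage sketch (illustrative, not part of the original header): reading and
   writing unaligned fields in a byte buffer with the macros above. The
   buffer and offsets below are hypothetical.

     uint8_t buf[8] = {0};
     __UNALIGNED_UINT16_WRITE(&buf[5], (uint16_t)0x1234); // unaligned 16-bit store
     uint16_t w = __UNALIGNED_UINT16_READ(&buf[5]);       // reads back 0x1234
     uint32_t v = __UNALIGNED_UINT32_READ(&buf[1]);       // unaligned 32-bit load
*/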
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif


/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}


/**
  \brief   Disable IRQ Interrupts
  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}
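
/* Usage sketch (illustrative): a minimal critical section protecting shared
   state from interrupt handlers. `shared_counter` is a hypothetical name;
   nesting-safe code saves and restores PRIMASK instead (see
   __get_PRIMASK()/__set_PRIMASK() below).

     __disable_irq();   // mask configurable-priority interrupts
     shared_counter++;  // update shared state without IRQ interference
     __enable_irq();    // unmask interrupts again
*/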


/**
  \brief   Get Control Register
  \details Returns the content of the Control Register.
  \return               Control Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Control Register (non-secure)
  \details Returns the content of the non-secure Control Register when in secure mode.
  \return               non-secure Control Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Control Register
  \details Writes the given value to the Control Register.
  \param [in]    control  Control Register value to set
 */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}
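
/* Usage sketch (illustrative): selecting the Process Stack Pointer for
   Thread mode by setting CONTROL.SPSEL (bit 1, per the Armv7-M
   architecture). An ISB (defined later in this file) should follow a
   CONTROL write so the change takes effect before subsequent instructions.

     __set_CONTROL(__get_CONTROL() | (1U << 1));  // SPSEL = 1: Thread mode uses PSP
     __ISB();                                     // complete the CONTROL update
*/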


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Control Register (non-secure)
  \details Writes the given value to the non-secure Control Register when in secure state.
  \param [in]    control  Control Register value to set
 */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif


/**
  \brief   Get IPSR Register
  \details Returns the content of the IPSR Register.
  \return               IPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get APSR Register
  \details Returns the content of the APSR Register.
  \return               APSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get xPSR Register
  \details Returns the content of the xPSR Register.
  \return               xPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}


/**
  \brief   Get Process Stack Pointer
  \details Returns the current value of the Process Stack Pointer (PSP).
  \return               PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp"  : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
  \return               PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp_ns"  : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Process Stack Pointer
  \details Assigns the given value to the Process Stack Pointer (PSP).
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}
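
/* Usage sketch (illustrative): an RTOS-style task setup that points PSP at
   the top of a dedicated, 8-byte aligned stack before selecting PSP for
   Thread mode. `task_stack` is a hypothetical array.

     static uint64_t task_stack[128];             // 1 KiB task stack
     __set_PSP((uint32_t)&task_stack[128]);       // stack grows down from the top
     __set_CONTROL(__get_CONTROL() | (1U << 1));  // switch Thread mode to PSP
     __ISB();
*/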


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Process Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif


/**
  \brief   Get Main Stack Pointer
  \details Returns the current value of the Main Stack Pointer (MSP).
  \return               MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Main Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
  \return               MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Main Stack Pointer
  \details Assigns the given value to the Main Stack Pointer (MSP).
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Main Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
  \return               SP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}


/**
  \brief   Set Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
  \param [in]    topOfStack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif


/**
  \brief   Get Priority Mask
  \details Returns the current state of the priority mask bit from the Priority Mask Register.
  \return               Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) :: "memory");
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Priority Mask (non-secure)
  \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
  \return               Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) :: "memory");
  return(result);
}
#endif


/**
  \brief   Set Priority Mask
  \details Assigns the given value to the Priority Mask Register.
  \param [in]    priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
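
/* Usage sketch (illustrative): a nesting-safe critical section that saves
   PRIMASK on entry and restores it on exit, so interrupts stay disabled if
   they were already disabled by a caller.

     uint32_t primask = __get_PRIMASK();  // remember the current mask state
     __disable_irq();
     // ... access shared data ...
     __set_PRIMASK(primask);              // restore the previous state
*/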


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Priority Mask (non-secure)
  \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
  \param [in]    priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}


/**
  \brief   Disable FIQ
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}


/**
  \brief   Get Base Priority
  \details Returns the current value of the Base Priority register.
  \return               Base Priority register value
 */
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Base Priority (non-secure)
  \details Returns the current value of the non-secure Base Priority register when in secure state.
  \return               Base Priority register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Base Priority
  \details Assigns the given value to the Base Priority register.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Base Priority (non-secure)
  \details Assigns the given value to the non-secure Base Priority register when in secure state.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif


/**
  \brief   Set Base Priority with condition
  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
           or the new value increases the BASEPRI priority level.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
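
/* Usage sketch (illustrative): masking only interrupts whose priority value
   is numerically greater than or equal to a threshold, while higher-urgency
   interrupts stay enabled. Assumes 4 implemented priority bits (common on
   Cortex-M4 parts such as the STM32F4), so the value is shifted into the
   upper bits of the 8-bit priority field.

     uint32_t old = __get_BASEPRI();
     __set_BASEPRI_MAX(5U << (8U - 4U));  // only raises the mask, never lowers it
     // ... section protected from interrupts with priority value >= 5 ...
     __set_BASEPRI(old);                  // restore the previous mask
*/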


/**
  \brief   Get Fault Mask
  \details Returns the current value of the Fault Mask register.
  \return               Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Fault Mask (non-secure)
  \details Returns the current value of the non-secure Fault Mask register when in secure state.
  \return               Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Fault Mask
  \details Assigns the given value to the Fault Mask register.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Fault Mask (non-secure)
  \details Assigns the given value to the non-secure Fault Mask register when in secure state.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

/**
  \brief   Get Process Stack Pointer Limit
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  non-secure Stack Pointer Limit register, hence zero is always returned
  when in non-secure state.

  \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
  \return               PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer Limit (non-secure)
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  non-secure Stack Pointer Limit register, hence zero is always returned.

  \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \return               PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
  return result;
#endif
}
#endif


/**
  \brief   Set Process Stack Pointer Limit
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  non-secure Stack Pointer Limit register, hence the write is silently
  ignored when in non-secure state.

  \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
/**
  \brief   Set Process Stack Pointer Limit (non-secure)
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  non-secure Stack Pointer Limit register, hence the write is silently
  ignored.

  \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif


/**
  \brief   Get Main Stack Pointer Limit
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  non-secure Stack Pointer Limit register, hence zero is always returned
  when in non-secure state.

  \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
  \return               MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}


#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
/**
  \brief   Get Main Stack Pointer Limit (non-secure)
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  non-secure Stack Pointer Limit register, hence zero is always returned.

  \details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \return               MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


/**
  \brief   Set Main Stack Pointer Limit
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  non-secure Stack Pointer Limit register, hence the write is silently
  ignored when in non-secure state.

  \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
/**
  \brief   Set Main Stack Pointer Limit (non-secure)
  Devices without the ARMv8-M Main Extension (e.g. Cortex-M23) lack the
  non-secure Stack Pointer Limit register, hence the write is silently
  ignored.

  \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */


/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return               Floating Point Status/Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#if __has_builtin(__builtin_arm_get_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  return __builtin_arm_get_fpscr();
#else
  uint32_t result;

  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  return(result);
#endif
#else
  return(0U);
#endif
}


/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#if __has_builtin(__builtin_arm_set_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  __builtin_arm_set_fpscr(fpscr);
#else
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
#endif
#else
  (void)fpscr;
#endif
}
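
/* Usage sketch (illustrative): clearing the cumulative floating-point
   exception flags (FPSCR bits [7:0] on Armv7-M with the FP extension) via
   read-modify-write. With no FPU present/used, __get_FPSCR() reads as 0
   and __set_FPSCR() is a no-op, so the sequence is harmless.

     uint32_t fpscr = __get_FPSCR();
     __set_FPSCR(fpscr & ~0xFFU);  // clear IOC/DZC/OFC/UFC/IXC/IDC sticky flags
*/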


/*@} end of CMSIS_Core_RegAccFunctions */


/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP()                             __ASM volatile ("nop")

/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI()                             __ASM volatile ("wfi")


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE()                             __ASM volatile ("wfe")


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV()                             __ASM volatile ("sev")

/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory
           only after the ISB instruction has completed.
 */
__STATIC_FORCEINLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}


/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
__STATIC_FORCEINLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}


/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
__STATIC_FORCEINLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}
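
/* Usage sketch (illustrative): a typical barrier sequence after changing a
   core configuration register, here relocating the vector table. SCB->VTOR
   and vector_table_in_ram are assumed names from the device/core headers,
   not defined in this file.

     SCB->VTOR = (uint32_t)&vector_table_in_ram;  // hypothetical relocation
     __DSB();  // ensure the register write has completed
     __ISB();  // ensure subsequent instructions use the new setting
*/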
892*a8f7f3fcSMatthias Ringwald 
893*a8f7f3fcSMatthias Ringwald 
894*a8f7f3fcSMatthias Ringwald /**
895*a8f7f3fcSMatthias Ringwald   \brief   Reverse byte order (32 bit)
896*a8f7f3fcSMatthias Ringwald   \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
897*a8f7f3fcSMatthias Ringwald   \param [in]    value  Value to reverse
898*a8f7f3fcSMatthias Ringwald   \return               Reversed value
899*a8f7f3fcSMatthias Ringwald  */
__REV(uint32_t value)900*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
901*a8f7f3fcSMatthias Ringwald {
902*a8f7f3fcSMatthias Ringwald #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
903*a8f7f3fcSMatthias Ringwald   return __builtin_bswap32(value);
904*a8f7f3fcSMatthias Ringwald #else
905*a8f7f3fcSMatthias Ringwald   uint32_t result;
906*a8f7f3fcSMatthias Ringwald 
907*a8f7f3fcSMatthias Ringwald   __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
908*a8f7f3fcSMatthias Ringwald   return result;
909*a8f7f3fcSMatthias Ringwald #endif
910*a8f7f3fcSMatthias Ringwald }
911*a8f7f3fcSMatthias Ringwald 
912*a8f7f3fcSMatthias Ringwald 
913*a8f7f3fcSMatthias Ringwald /**
914*a8f7f3fcSMatthias Ringwald   \brief   Reverse byte order (16 bit)
915*a8f7f3fcSMatthias Ringwald   \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
916*a8f7f3fcSMatthias Ringwald   \param [in]    value  Value to reverse
917*a8f7f3fcSMatthias Ringwald   \return               Reversed value
918*a8f7f3fcSMatthias Ringwald  */
919*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
920*a8f7f3fcSMatthias Ringwald {
921*a8f7f3fcSMatthias Ringwald   uint32_t result;
922*a8f7f3fcSMatthias Ringwald 
923*a8f7f3fcSMatthias Ringwald   __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
924*a8f7f3fcSMatthias Ringwald   return result;
925*a8f7f3fcSMatthias Ringwald }
926*a8f7f3fcSMatthias Ringwald 
927*a8f7f3fcSMatthias Ringwald 
928*a8f7f3fcSMatthias Ringwald /**
929*a8f7f3fcSMatthias Ringwald   \brief   Reverse byte order (16 bit)
930*a8f7f3fcSMatthias Ringwald   \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
931*a8f7f3fcSMatthias Ringwald   \param [in]    value  Value to reverse
932*a8f7f3fcSMatthias Ringwald   \return               Reversed value
933*a8f7f3fcSMatthias Ringwald  */
934*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
935*a8f7f3fcSMatthias Ringwald {
936*a8f7f3fcSMatthias Ringwald #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
937*a8f7f3fcSMatthias Ringwald   return (int16_t)__builtin_bswap16(value);
938*a8f7f3fcSMatthias Ringwald #else
939*a8f7f3fcSMatthias Ringwald   int16_t result;
940*a8f7f3fcSMatthias Ringwald 
941*a8f7f3fcSMatthias Ringwald   __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
942*a8f7f3fcSMatthias Ringwald   return result;
943*a8f7f3fcSMatthias Ringwald #endif
944*a8f7f3fcSMatthias Ringwald }
945*a8f7f3fcSMatthias Ringwald 
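/* Editorial usage sketch (not part of the original header): on the
   little-endian Cortex-M profile, __REV converts a word between host
   and big-endian (network) byte order. Expected results follow the
   examples in the comments above.                                    */
#if 0  /* illustration only */
static void byte_order_demo(void)
{
  uint32_t w = __REV(0x12345678U);        /* 0x78563412      */
  uint32_t h = __REV16(0x12345678U);      /* 0x34127856      */
  int16_t  s = __REVSH((int16_t)0x0080);  /* (int16_t)0x8000 */
  (void)w; (void)h; (void)s;
}
#endif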
946*a8f7f3fcSMatthias Ringwald 
947*a8f7f3fcSMatthias Ringwald /**
948*a8f7f3fcSMatthias Ringwald   \brief   Rotate Right in unsigned value (32 bit)
949*a8f7f3fcSMatthias Ringwald   \details Rotate Right provides the value of the contents of a register rotated by a variable number of bits.
950*a8f7f3fcSMatthias Ringwald   \param [in]    op1  Value to rotate
951*a8f7f3fcSMatthias Ringwald   \param [in]    op2  Number of Bits to rotate
952*a8f7f3fcSMatthias Ringwald   \return               Rotated value
953*a8f7f3fcSMatthias Ringwald  */
954*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
955*a8f7f3fcSMatthias Ringwald {
956*a8f7f3fcSMatthias Ringwald   op2 %= 32U;
957*a8f7f3fcSMatthias Ringwald   if (op2 == 0U)
958*a8f7f3fcSMatthias Ringwald   {
959*a8f7f3fcSMatthias Ringwald     return op1;
960*a8f7f3fcSMatthias Ringwald   }
961*a8f7f3fcSMatthias Ringwald   return (op1 >> op2) | (op1 << (32U - op2));
962*a8f7f3fcSMatthias Ringwald }
963*a8f7f3fcSMatthias Ringwald 
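/* Editorial usage sketch (not part of the original header): unlike a
   plain shift, bits rotated out on the right re-enter on the left.   */
#if 0  /* illustration only */
static uint32_t ror_demo(void)
{
  return __ROR(0x000000FFU, 4U);   /* yields 0xF000000F */
}
#endif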
964*a8f7f3fcSMatthias Ringwald 
965*a8f7f3fcSMatthias Ringwald /**
966*a8f7f3fcSMatthias Ringwald   \brief   Breakpoint
967*a8f7f3fcSMatthias Ringwald   \details Causes the processor to enter Debug state.
968*a8f7f3fcSMatthias Ringwald            Debug tools can use this to investigate system state when the instruction at a particular address is reached.
969*a8f7f3fcSMatthias Ringwald   \param [in]    value  is ignored by the processor.
970*a8f7f3fcSMatthias Ringwald                  If required, a debugger can use it to store additional information about the breakpoint.
971*a8f7f3fcSMatthias Ringwald  */
972*a8f7f3fcSMatthias Ringwald #define __BKPT(value)                       __ASM volatile ("bkpt "#value)
973*a8f7f3fcSMatthias Ringwald 
974*a8f7f3fcSMatthias Ringwald 
975*a8f7f3fcSMatthias Ringwald /**
976*a8f7f3fcSMatthias Ringwald   \brief   Reverse bit order of value
977*a8f7f3fcSMatthias Ringwald   \details Reverses the bit order of the given value.
978*a8f7f3fcSMatthias Ringwald   \param [in]    value  Value to reverse
979*a8f7f3fcSMatthias Ringwald   \return               Reversed value
980*a8f7f3fcSMatthias Ringwald  */
981*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
982*a8f7f3fcSMatthias Ringwald {
983*a8f7f3fcSMatthias Ringwald   uint32_t result;
984*a8f7f3fcSMatthias Ringwald 
985*a8f7f3fcSMatthias Ringwald #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
986*a8f7f3fcSMatthias Ringwald      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
987*a8f7f3fcSMatthias Ringwald      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
988*a8f7f3fcSMatthias Ringwald    __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
989*a8f7f3fcSMatthias Ringwald #else
990*a8f7f3fcSMatthias Ringwald   uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */
991*a8f7f3fcSMatthias Ringwald 
992*a8f7f3fcSMatthias Ringwald   result = value;                      /* r will be reversed bits of v; first get LSB of v */
993*a8f7f3fcSMatthias Ringwald   for (value >>= 1U; value != 0U; value >>= 1U)
994*a8f7f3fcSMatthias Ringwald   {
995*a8f7f3fcSMatthias Ringwald     result <<= 1U;
996*a8f7f3fcSMatthias Ringwald     result |= value & 1U;
997*a8f7f3fcSMatthias Ringwald     s--;
998*a8f7f3fcSMatthias Ringwald   }
999*a8f7f3fcSMatthias Ringwald   result <<= s;                        /* shift when v's highest bits are zero */
1000*a8f7f3fcSMatthias Ringwald #endif
1001*a8f7f3fcSMatthias Ringwald   return result;
1002*a8f7f3fcSMatthias Ringwald }
1003*a8f7f3fcSMatthias Ringwald 
1004*a8f7f3fcSMatthias Ringwald 
1005*a8f7f3fcSMatthias Ringwald /**
1006*a8f7f3fcSMatthias Ringwald   \brief   Count leading zeros
1007*a8f7f3fcSMatthias Ringwald   \details Counts the number of leading zeros of a data value.
1008*a8f7f3fcSMatthias Ringwald   \param [in]  value  Value to count the leading zeros
1009*a8f7f3fcSMatthias Ringwald   \return             number of leading zeros in value
1010*a8f7f3fcSMatthias Ringwald  */
1011*a8f7f3fcSMatthias Ringwald #define __CLZ             (uint8_t)__builtin_clz
1012*a8f7f3fcSMatthias Ringwald 
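/* Editorial usage sketch (not part of the original header): __RBIT and
   __CLZ compose into a count-trailing-zeros operation. Note that
   __builtin_clz(), and therefore __CLZ above, is undefined for 0, so
   that case is handled explicitly here.                               */
#if 0  /* illustration only */
static uint8_t count_trailing_zeros(uint32_t value)
{
  return (value != 0U) ? __CLZ(__RBIT(value)) : 32U;
}
#endif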
1013*a8f7f3fcSMatthias Ringwald 
1014*a8f7f3fcSMatthias Ringwald #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1015*a8f7f3fcSMatthias Ringwald      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1016*a8f7f3fcSMatthias Ringwald      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1017*a8f7f3fcSMatthias Ringwald      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
1018*a8f7f3fcSMatthias Ringwald /**
1019*a8f7f3fcSMatthias Ringwald   \brief   LDR Exclusive (8 bit)
1020*a8f7f3fcSMatthias Ringwald   \details Executes an exclusive LDR instruction for an 8-bit value.
1021*a8f7f3fcSMatthias Ringwald   \param [in]   addr  Pointer to data
1022*a8f7f3fcSMatthias Ringwald   \return             value of type uint8_t at (*addr)
1023*a8f7f3fcSMatthias Ringwald  */
1024*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
1025*a8f7f3fcSMatthias Ringwald {
1026*a8f7f3fcSMatthias Ringwald     uint32_t result;
1027*a8f7f3fcSMatthias Ringwald 
1028*a8f7f3fcSMatthias Ringwald #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1029*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
1030*a8f7f3fcSMatthias Ringwald #else
1031*a8f7f3fcSMatthias Ringwald     /* Prior to GCC 4.8, "Q" is expanded to [rx, #0], which is not
1032*a8f7f3fcSMatthias Ringwald        accepted by the assembler, so the following, less efficient pattern has to be used.
1033*a8f7f3fcSMatthias Ringwald     */
1034*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
1035*a8f7f3fcSMatthias Ringwald #endif
1036*a8f7f3fcSMatthias Ringwald    return ((uint8_t) result);    /* Add explicit type cast here */
1037*a8f7f3fcSMatthias Ringwald }
1038*a8f7f3fcSMatthias Ringwald 
1039*a8f7f3fcSMatthias Ringwald 
1040*a8f7f3fcSMatthias Ringwald /**
1041*a8f7f3fcSMatthias Ringwald   \brief   LDR Exclusive (16 bit)
1042*a8f7f3fcSMatthias Ringwald   \details Executes an exclusive LDR instruction for 16-bit values.
1043*a8f7f3fcSMatthias Ringwald   \param [in]   addr  Pointer to data
1044*a8f7f3fcSMatthias Ringwald   \return        value of type uint16_t at (*addr)
1045*a8f7f3fcSMatthias Ringwald  */
1046*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
1047*a8f7f3fcSMatthias Ringwald {
1048*a8f7f3fcSMatthias Ringwald     uint32_t result;
1049*a8f7f3fcSMatthias Ringwald 
1050*a8f7f3fcSMatthias Ringwald #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1051*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
1052*a8f7f3fcSMatthias Ringwald #else
1053*a8f7f3fcSMatthias Ringwald     /* Prior to GCC 4.8, "Q" is expanded to [rx, #0], which is not
1054*a8f7f3fcSMatthias Ringwald        accepted by the assembler, so the following, less efficient pattern has to be used.
1055*a8f7f3fcSMatthias Ringwald     */
1056*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
1057*a8f7f3fcSMatthias Ringwald #endif
1058*a8f7f3fcSMatthias Ringwald    return ((uint16_t) result);    /* Add explicit type cast here */
1059*a8f7f3fcSMatthias Ringwald }
1060*a8f7f3fcSMatthias Ringwald 
1061*a8f7f3fcSMatthias Ringwald 
1062*a8f7f3fcSMatthias Ringwald /**
1063*a8f7f3fcSMatthias Ringwald   \brief   LDR Exclusive (32 bit)
1064*a8f7f3fcSMatthias Ringwald   \details Executes an exclusive LDR instruction for 32-bit values.
1065*a8f7f3fcSMatthias Ringwald   \param [in]   addr  Pointer to data
1066*a8f7f3fcSMatthias Ringwald   \return        value of type uint32_t at (*addr)
1067*a8f7f3fcSMatthias Ringwald  */
1068*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
1069*a8f7f3fcSMatthias Ringwald {
1070*a8f7f3fcSMatthias Ringwald     uint32_t result;
1071*a8f7f3fcSMatthias Ringwald 
1072*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
1073*a8f7f3fcSMatthias Ringwald    return(result);
1074*a8f7f3fcSMatthias Ringwald }
1075*a8f7f3fcSMatthias Ringwald 
1076*a8f7f3fcSMatthias Ringwald 
1077*a8f7f3fcSMatthias Ringwald /**
1078*a8f7f3fcSMatthias Ringwald   \brief   STR Exclusive (8 bit)
1079*a8f7f3fcSMatthias Ringwald   \details Executes an exclusive STR instruction for 8-bit values.
1080*a8f7f3fcSMatthias Ringwald   \param [in]  value  Value to store
1081*a8f7f3fcSMatthias Ringwald   \param [in]   addr  Pointer to location
1082*a8f7f3fcSMatthias Ringwald   \return          0  Function succeeded
1083*a8f7f3fcSMatthias Ringwald   \return          1  Function failed
1084*a8f7f3fcSMatthias Ringwald  */
1085*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
1086*a8f7f3fcSMatthias Ringwald {
1087*a8f7f3fcSMatthias Ringwald    uint32_t result;
1088*a8f7f3fcSMatthias Ringwald 
1089*a8f7f3fcSMatthias Ringwald    __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
1090*a8f7f3fcSMatthias Ringwald    return(result);
1091*a8f7f3fcSMatthias Ringwald }
1092*a8f7f3fcSMatthias Ringwald 
1093*a8f7f3fcSMatthias Ringwald 
1094*a8f7f3fcSMatthias Ringwald /**
1095*a8f7f3fcSMatthias Ringwald   \brief   STR Exclusive (16 bit)
1096*a8f7f3fcSMatthias Ringwald   \details Executes an exclusive STR instruction for 16-bit values.
1097*a8f7f3fcSMatthias Ringwald   \param [in]  value  Value to store
1098*a8f7f3fcSMatthias Ringwald   \param [in]   addr  Pointer to location
1099*a8f7f3fcSMatthias Ringwald   \return          0  Function succeeded
1100*a8f7f3fcSMatthias Ringwald   \return          1  Function failed
1101*a8f7f3fcSMatthias Ringwald  */
1102*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
1103*a8f7f3fcSMatthias Ringwald {
1104*a8f7f3fcSMatthias Ringwald    uint32_t result;
1105*a8f7f3fcSMatthias Ringwald 
1106*a8f7f3fcSMatthias Ringwald    __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
1107*a8f7f3fcSMatthias Ringwald    return(result);
1108*a8f7f3fcSMatthias Ringwald }
1109*a8f7f3fcSMatthias Ringwald 
1110*a8f7f3fcSMatthias Ringwald 
1111*a8f7f3fcSMatthias Ringwald /**
1112*a8f7f3fcSMatthias Ringwald   \brief   STR Exclusive (32 bit)
1113*a8f7f3fcSMatthias Ringwald   \details Executes an exclusive STR instruction for 32-bit values.
1114*a8f7f3fcSMatthias Ringwald   \param [in]  value  Value to store
1115*a8f7f3fcSMatthias Ringwald   \param [in]   addr  Pointer to location
1116*a8f7f3fcSMatthias Ringwald   \return          0  Function succeeded
1117*a8f7f3fcSMatthias Ringwald   \return          1  Function failed
1118*a8f7f3fcSMatthias Ringwald  */
1119*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
1120*a8f7f3fcSMatthias Ringwald {
1121*a8f7f3fcSMatthias Ringwald    uint32_t result;
1122*a8f7f3fcSMatthias Ringwald 
1123*a8f7f3fcSMatthias Ringwald    __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
1124*a8f7f3fcSMatthias Ringwald    return(result);
1125*a8f7f3fcSMatthias Ringwald }
1126*a8f7f3fcSMatthias Ringwald 
1127*a8f7f3fcSMatthias Ringwald 
1128*a8f7f3fcSMatthias Ringwald /**
1129*a8f7f3fcSMatthias Ringwald   \brief   Remove the exclusive lock
1130*a8f7f3fcSMatthias Ringwald   \details Removes the exclusive lock which is created by LDREX.
1131*a8f7f3fcSMatthias Ringwald  */
1132*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE void __CLREX(void)
1133*a8f7f3fcSMatthias Ringwald {
1134*a8f7f3fcSMatthias Ringwald   __ASM volatile ("clrex" ::: "memory");
1135*a8f7f3fcSMatthias Ringwald }
1136*a8f7f3fcSMatthias Ringwald 
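/* Editorial usage sketch (not part of the original header): the standard
   lock-free read-modify-write loop built from the exclusives above.
   __STREXW returns 0 only while the exclusive monitor is still held, so
   the loop retries whenever another access intervened.                   */
#if 0  /* illustration only */
static uint32_t atomic_increment(volatile uint32_t *counter)
{
  uint32_t val;
  do {
    val = __LDREXW(counter) + 1U;          /* load and set the monitor */
  } while (__STREXW(val, counter) != 0U);  /* store; nonzero = retry   */
  return val;
}
#endif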
1137*a8f7f3fcSMatthias Ringwald #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1138*a8f7f3fcSMatthias Ringwald            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1139*a8f7f3fcSMatthias Ringwald            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1140*a8f7f3fcSMatthias Ringwald            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
1141*a8f7f3fcSMatthias Ringwald 
1142*a8f7f3fcSMatthias Ringwald 
1143*a8f7f3fcSMatthias Ringwald #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1144*a8f7f3fcSMatthias Ringwald      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1145*a8f7f3fcSMatthias Ringwald      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
1146*a8f7f3fcSMatthias Ringwald /**
1147*a8f7f3fcSMatthias Ringwald   \brief   Signed Saturate
1148*a8f7f3fcSMatthias Ringwald   \details Saturates a signed value.
1149*a8f7f3fcSMatthias Ringwald   \param [in]  ARG1  Value to be saturated
1150*a8f7f3fcSMatthias Ringwald   \param [in]  ARG2  Bit position to saturate to (1..32)
1151*a8f7f3fcSMatthias Ringwald   \return             Saturated value
1152*a8f7f3fcSMatthias Ringwald  */
1153*a8f7f3fcSMatthias Ringwald #define __SSAT(ARG1,ARG2) \
1154*a8f7f3fcSMatthias Ringwald __extension__ \
1155*a8f7f3fcSMatthias Ringwald ({                          \
1156*a8f7f3fcSMatthias Ringwald   int32_t __RES, __ARG1 = (ARG1); \
1157*a8f7f3fcSMatthias Ringwald   __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
1158*a8f7f3fcSMatthias Ringwald   __RES; \
1159*a8f7f3fcSMatthias Ringwald  })
1160*a8f7f3fcSMatthias Ringwald 
1161*a8f7f3fcSMatthias Ringwald 
1162*a8f7f3fcSMatthias Ringwald /**
1163*a8f7f3fcSMatthias Ringwald   \brief   Unsigned Saturate
1164*a8f7f3fcSMatthias Ringwald   \details Saturates an unsigned value.
1165*a8f7f3fcSMatthias Ringwald   \param [in]  ARG1  Value to be saturated
1166*a8f7f3fcSMatthias Ringwald   \param [in]  ARG2  Bit position to saturate to (0..31)
1167*a8f7f3fcSMatthias Ringwald   \return             Saturated value
1168*a8f7f3fcSMatthias Ringwald  */
1169*a8f7f3fcSMatthias Ringwald #define __USAT(ARG1,ARG2) \
1170*a8f7f3fcSMatthias Ringwald  __extension__ \
1171*a8f7f3fcSMatthias Ringwald ({                          \
1172*a8f7f3fcSMatthias Ringwald   uint32_t __RES, __ARG1 = (ARG1); \
1173*a8f7f3fcSMatthias Ringwald   __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
1174*a8f7f3fcSMatthias Ringwald   __RES; \
1175*a8f7f3fcSMatthias Ringwald  })
1176*a8f7f3fcSMatthias Ringwald 
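/* Editorial usage sketch (not part of the original header): the bit width
   must be a compile-time constant because of the "I" operand constraint. */
#if 0  /* illustration only */
static void saturate_demo(void)
{
  int32_t  s = __SSAT(300, 8);   /* clamps to 127 (signed range -128..127) */
  uint32_t u = __USAT(-5, 8);    /* clamps to   0 (unsigned range 0..255)  */
  (void)s; (void)u;
}
#endif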
1177*a8f7f3fcSMatthias Ringwald 
1178*a8f7f3fcSMatthias Ringwald /**
1179*a8f7f3fcSMatthias Ringwald   \brief   Rotate Right with Extend (32 bit)
1180*a8f7f3fcSMatthias Ringwald   \details Moves each bit of a bitstring right by one bit.
1181*a8f7f3fcSMatthias Ringwald            The carry input is shifted in at the left end of the bitstring.
1182*a8f7f3fcSMatthias Ringwald   \param [in]    value  Value to rotate
1183*a8f7f3fcSMatthias Ringwald   \return               Rotated value
1184*a8f7f3fcSMatthias Ringwald  */
1185*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
1186*a8f7f3fcSMatthias Ringwald {
1187*a8f7f3fcSMatthias Ringwald   uint32_t result;
1188*a8f7f3fcSMatthias Ringwald 
1189*a8f7f3fcSMatthias Ringwald   __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
1190*a8f7f3fcSMatthias Ringwald   return(result);
1191*a8f7f3fcSMatthias Ringwald }
1192*a8f7f3fcSMatthias Ringwald 
1193*a8f7f3fcSMatthias Ringwald 
1194*a8f7f3fcSMatthias Ringwald /**
1195*a8f7f3fcSMatthias Ringwald   \brief   LDRT Unprivileged (8 bit)
1196*a8f7f3fcSMatthias Ringwald   \details Executes an unprivileged LDRT instruction for an 8-bit value.
1197*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to data
1198*a8f7f3fcSMatthias Ringwald   \return             value of type uint8_t at (*ptr)
1199*a8f7f3fcSMatthias Ringwald  */
1200*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
1201*a8f7f3fcSMatthias Ringwald {
1202*a8f7f3fcSMatthias Ringwald     uint32_t result;
1203*a8f7f3fcSMatthias Ringwald 
1204*a8f7f3fcSMatthias Ringwald #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1205*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
1206*a8f7f3fcSMatthias Ringwald #else
1207*a8f7f3fcSMatthias Ringwald     /* Prior to GCC 4.8, "Q" is expanded to [rx, #0], which is not
1208*a8f7f3fcSMatthias Ringwald        accepted by the assembler, so the following, less efficient pattern has to be used.
1209*a8f7f3fcSMatthias Ringwald     */
1210*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
1211*a8f7f3fcSMatthias Ringwald #endif
1212*a8f7f3fcSMatthias Ringwald    return ((uint8_t) result);    /* Add explicit type cast here */
1213*a8f7f3fcSMatthias Ringwald }
1214*a8f7f3fcSMatthias Ringwald 
1215*a8f7f3fcSMatthias Ringwald 
1216*a8f7f3fcSMatthias Ringwald /**
1217*a8f7f3fcSMatthias Ringwald   \brief   LDRT Unprivileged (16 bit)
1218*a8f7f3fcSMatthias Ringwald   \details Executes an unprivileged LDRT instruction for 16-bit values.
1219*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to data
1220*a8f7f3fcSMatthias Ringwald   \return        value of type uint16_t at (*ptr)
1221*a8f7f3fcSMatthias Ringwald  */
1222*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
1223*a8f7f3fcSMatthias Ringwald {
1224*a8f7f3fcSMatthias Ringwald     uint32_t result;
1225*a8f7f3fcSMatthias Ringwald 
1226*a8f7f3fcSMatthias Ringwald #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
1227*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
1228*a8f7f3fcSMatthias Ringwald #else
1229*a8f7f3fcSMatthias Ringwald     /* Prior to GCC 4.8, "Q" is expanded to [rx, #0], which is not
1230*a8f7f3fcSMatthias Ringwald        accepted by the assembler, so the following, less efficient pattern has to be used.
1231*a8f7f3fcSMatthias Ringwald     */
1232*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
1233*a8f7f3fcSMatthias Ringwald #endif
1234*a8f7f3fcSMatthias Ringwald    return ((uint16_t) result);    /* Add explicit type cast here */
1235*a8f7f3fcSMatthias Ringwald }
1236*a8f7f3fcSMatthias Ringwald 
1237*a8f7f3fcSMatthias Ringwald 
1238*a8f7f3fcSMatthias Ringwald /**
1239*a8f7f3fcSMatthias Ringwald   \brief   LDRT Unprivileged (32 bit)
1240*a8f7f3fcSMatthias Ringwald   \details Executes an unprivileged LDRT instruction for 32-bit values.
1241*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to data
1242*a8f7f3fcSMatthias Ringwald   \return        value of type uint32_t at (*ptr)
1243*a8f7f3fcSMatthias Ringwald  */
1244*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
1245*a8f7f3fcSMatthias Ringwald {
1246*a8f7f3fcSMatthias Ringwald     uint32_t result;
1247*a8f7f3fcSMatthias Ringwald 
1248*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
1249*a8f7f3fcSMatthias Ringwald    return(result);
1250*a8f7f3fcSMatthias Ringwald }
1251*a8f7f3fcSMatthias Ringwald 
1252*a8f7f3fcSMatthias Ringwald 
1253*a8f7f3fcSMatthias Ringwald /**
1254*a8f7f3fcSMatthias Ringwald   \brief   STRT Unprivileged (8 bit)
1255*a8f7f3fcSMatthias Ringwald   \details Executes an unprivileged STRT instruction for 8-bit values.
1256*a8f7f3fcSMatthias Ringwald   \param [in]  value  Value to store
1257*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to location
1258*a8f7f3fcSMatthias Ringwald  */
1259*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
1260*a8f7f3fcSMatthias Ringwald {
1261*a8f7f3fcSMatthias Ringwald    __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1262*a8f7f3fcSMatthias Ringwald }
1263*a8f7f3fcSMatthias Ringwald 
1264*a8f7f3fcSMatthias Ringwald 
1265*a8f7f3fcSMatthias Ringwald /**
1266*a8f7f3fcSMatthias Ringwald   \brief   STRT Unprivileged (16 bit)
1267*a8f7f3fcSMatthias Ringwald   \details Executes an unprivileged STRT instruction for 16-bit values.
1268*a8f7f3fcSMatthias Ringwald   \param [in]  value  Value to store
1269*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to location
1270*a8f7f3fcSMatthias Ringwald  */
1271*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
1272*a8f7f3fcSMatthias Ringwald {
1273*a8f7f3fcSMatthias Ringwald    __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1274*a8f7f3fcSMatthias Ringwald }
1275*a8f7f3fcSMatthias Ringwald 
1276*a8f7f3fcSMatthias Ringwald 
1277*a8f7f3fcSMatthias Ringwald /**
1278*a8f7f3fcSMatthias Ringwald   \brief   STRT Unprivileged (32 bit)
1279*a8f7f3fcSMatthias Ringwald   \details Executes an unprivileged STRT instruction for 32-bit values.
1280*a8f7f3fcSMatthias Ringwald   \param [in]  value  Value to store
1281*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to location
1282*a8f7f3fcSMatthias Ringwald  */
1283*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
1284*a8f7f3fcSMatthias Ringwald {
1285*a8f7f3fcSMatthias Ringwald    __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
1286*a8f7f3fcSMatthias Ringwald }
1287*a8f7f3fcSMatthias Ringwald 
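/* Editorial usage sketch (not part of the original header): the *T
   variants access memory with unprivileged permissions, letting a
   privileged handler dereference a pointer supplied by unprivileged
   code without bypassing the MPU. Hypothetical helper:              */
#if 0  /* illustration only */
static uint8_t read_user_byte(volatile uint8_t *user_ptr)
{
  return __LDRBT(user_ptr);   /* faults if the caller may not read it */
}
#endif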
1288*a8f7f3fcSMatthias Ringwald #else  /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1289*a8f7f3fcSMatthias Ringwald            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1290*a8f7f3fcSMatthias Ringwald            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
1291*a8f7f3fcSMatthias Ringwald 
1292*a8f7f3fcSMatthias Ringwald /**
1293*a8f7f3fcSMatthias Ringwald   \brief   Signed Saturate
1294*a8f7f3fcSMatthias Ringwald   \details Saturates a signed value.
1295*a8f7f3fcSMatthias Ringwald   \param [in]    val  Value to be saturated
1296*a8f7f3fcSMatthias Ringwald   \param [in]    sat  Bit position to saturate to (1..32)
1297*a8f7f3fcSMatthias Ringwald   \return             Saturated value
1298*a8f7f3fcSMatthias Ringwald  */
1299*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
1300*a8f7f3fcSMatthias Ringwald {
1301*a8f7f3fcSMatthias Ringwald   if ((sat >= 1U) && (sat <= 32U))
1302*a8f7f3fcSMatthias Ringwald   {
1303*a8f7f3fcSMatthias Ringwald     const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
1304*a8f7f3fcSMatthias Ringwald     const int32_t min = -1 - max;
1305*a8f7f3fcSMatthias Ringwald     if (val > max)
1306*a8f7f3fcSMatthias Ringwald     {
1307*a8f7f3fcSMatthias Ringwald       return max;
1308*a8f7f3fcSMatthias Ringwald     }
1309*a8f7f3fcSMatthias Ringwald     else if (val < min)
1310*a8f7f3fcSMatthias Ringwald     {
1311*a8f7f3fcSMatthias Ringwald       return min;
1312*a8f7f3fcSMatthias Ringwald     }
1313*a8f7f3fcSMatthias Ringwald   }
1314*a8f7f3fcSMatthias Ringwald   return val;
1315*a8f7f3fcSMatthias Ringwald }
1316*a8f7f3fcSMatthias Ringwald 
1317*a8f7f3fcSMatthias Ringwald /**
1318*a8f7f3fcSMatthias Ringwald   \brief   Unsigned Saturate
1319*a8f7f3fcSMatthias Ringwald   \details Saturates an unsigned value.
1320*a8f7f3fcSMatthias Ringwald   \param [in]    val  Value to be saturated
1321*a8f7f3fcSMatthias Ringwald   \param [in]    sat  Bit position to saturate to (0..31)
1322*a8f7f3fcSMatthias Ringwald   \return             Saturated value
1323*a8f7f3fcSMatthias Ringwald  */
1324*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
1325*a8f7f3fcSMatthias Ringwald {
1326*a8f7f3fcSMatthias Ringwald   if (sat <= 31U)
1327*a8f7f3fcSMatthias Ringwald   {
1328*a8f7f3fcSMatthias Ringwald     const uint32_t max = ((1U << sat) - 1U);
1329*a8f7f3fcSMatthias Ringwald     if (val > (int32_t)max)
1330*a8f7f3fcSMatthias Ringwald     {
1331*a8f7f3fcSMatthias Ringwald       return max;
1332*a8f7f3fcSMatthias Ringwald     }
1333*a8f7f3fcSMatthias Ringwald     else if (val < 0)
1334*a8f7f3fcSMatthias Ringwald     {
1335*a8f7f3fcSMatthias Ringwald       return 0U;
1336*a8f7f3fcSMatthias Ringwald     }
1337*a8f7f3fcSMatthias Ringwald   }
1338*a8f7f3fcSMatthias Ringwald   return (uint32_t)val;
1339*a8f7f3fcSMatthias Ringwald }
1340*a8f7f3fcSMatthias Ringwald 
1341*a8f7f3fcSMatthias Ringwald #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1342*a8f7f3fcSMatthias Ringwald            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1343*a8f7f3fcSMatthias Ringwald            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
1344*a8f7f3fcSMatthias Ringwald 
1345*a8f7f3fcSMatthias Ringwald 
1346*a8f7f3fcSMatthias Ringwald #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1347*a8f7f3fcSMatthias Ringwald      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
1348*a8f7f3fcSMatthias Ringwald /**
1349*a8f7f3fcSMatthias Ringwald   \brief   Load-Acquire (8 bit)
1350*a8f7f3fcSMatthias Ringwald   \details Executes an LDAB instruction for an 8-bit value.
1351*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to data
1352*a8f7f3fcSMatthias Ringwald   \return             value of type uint8_t at (*ptr)
1353*a8f7f3fcSMatthias Ringwald  */
1354*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
1355*a8f7f3fcSMatthias Ringwald {
1356*a8f7f3fcSMatthias Ringwald     uint32_t result;
1357*a8f7f3fcSMatthias Ringwald 
1358*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
1359*a8f7f3fcSMatthias Ringwald    return ((uint8_t) result);
1360*a8f7f3fcSMatthias Ringwald }
1361*a8f7f3fcSMatthias Ringwald 
1362*a8f7f3fcSMatthias Ringwald 
1363*a8f7f3fcSMatthias Ringwald /**
1364*a8f7f3fcSMatthias Ringwald   \brief   Load-Acquire (16 bit)
1365*a8f7f3fcSMatthias Ringwald   \details Executes an LDAH instruction for 16-bit values.
1366*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to data
1367*a8f7f3fcSMatthias Ringwald   \return        value of type uint16_t at (*ptr)
1368*a8f7f3fcSMatthias Ringwald  */
1369*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
1370*a8f7f3fcSMatthias Ringwald {
1371*a8f7f3fcSMatthias Ringwald     uint32_t result;
1372*a8f7f3fcSMatthias Ringwald 
1373*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
1374*a8f7f3fcSMatthias Ringwald    return ((uint16_t) result);
1375*a8f7f3fcSMatthias Ringwald }
1376*a8f7f3fcSMatthias Ringwald 
1377*a8f7f3fcSMatthias Ringwald 
1378*a8f7f3fcSMatthias Ringwald /**
1379*a8f7f3fcSMatthias Ringwald   \brief   Load-Acquire (32 bit)
1380*a8f7f3fcSMatthias Ringwald   \details Executes an LDA instruction for 32-bit values.
1381*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to data
1382*a8f7f3fcSMatthias Ringwald   \return        value of type uint32_t at (*ptr)
1383*a8f7f3fcSMatthias Ringwald  */
1384*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
1385*a8f7f3fcSMatthias Ringwald {
1386*a8f7f3fcSMatthias Ringwald     uint32_t result;
1387*a8f7f3fcSMatthias Ringwald 
1388*a8f7f3fcSMatthias Ringwald    __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
1389*a8f7f3fcSMatthias Ringwald    return(result);
1390*a8f7f3fcSMatthias Ringwald }
1391*a8f7f3fcSMatthias Ringwald 
1392*a8f7f3fcSMatthias Ringwald 
1393*a8f7f3fcSMatthias Ringwald /**
1394*a8f7f3fcSMatthias Ringwald   \brief   Store-Release (8 bit)
1395*a8f7f3fcSMatthias Ringwald   \details Executes an STLB instruction for 8-bit values.
1396*a8f7f3fcSMatthias Ringwald   \param [in]  value  Value to store
1397*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to location
1398*a8f7f3fcSMatthias Ringwald  */
1399*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
1400*a8f7f3fcSMatthias Ringwald {
1401*a8f7f3fcSMatthias Ringwald    __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1402*a8f7f3fcSMatthias Ringwald }
1403*a8f7f3fcSMatthias Ringwald 
1404*a8f7f3fcSMatthias Ringwald 
1405*a8f7f3fcSMatthias Ringwald /**
1406*a8f7f3fcSMatthias Ringwald   \brief   Store-Release (16 bit)
1407*a8f7f3fcSMatthias Ringwald   \details Executes an STLH instruction for 16-bit values.
1408*a8f7f3fcSMatthias Ringwald   \param [in]  value  Value to store
1409*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to location
1410*a8f7f3fcSMatthias Ringwald  */
1411*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
1412*a8f7f3fcSMatthias Ringwald {
1413*a8f7f3fcSMatthias Ringwald    __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1414*a8f7f3fcSMatthias Ringwald }
1415*a8f7f3fcSMatthias Ringwald 
1416*a8f7f3fcSMatthias Ringwald 
1417*a8f7f3fcSMatthias Ringwald /**
1418*a8f7f3fcSMatthias Ringwald   \brief   Store-Release (32 bit)
1419*a8f7f3fcSMatthias Ringwald   \details Executes an STL instruction for 32-bit values.
1420*a8f7f3fcSMatthias Ringwald   \param [in]  value  Value to store
1421*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to location
1422*a8f7f3fcSMatthias Ringwald  */
1423*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
1424*a8f7f3fcSMatthias Ringwald {
1425*a8f7f3fcSMatthias Ringwald    __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
1426*a8f7f3fcSMatthias Ringwald }
1427*a8f7f3fcSMatthias Ringwald 
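/* Editorial usage sketch (not part of the original header): __STL and
   __LDA provide release/acquire ordering without a separate barrier,
   e.g. for a single-slot mailbox shared with an interrupt handler:    */
#if 0  /* illustration only */
volatile uint32_t mailbox;
volatile uint32_t mailbox_full;

static void post(uint32_t data)
{
  mailbox = data;
  __STL(1U, &mailbox_full);        /* release: payload visible before flag */
}

static int poll(uint32_t *out)
{
  if (__LDA(&mailbox_full) != 0U)  /* acquire: pairs with the __STL above  */
  {
    *out = mailbox;
    return 1;
  }
  return 0;
}
#endif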
1428*a8f7f3fcSMatthias Ringwald 
1429*a8f7f3fcSMatthias Ringwald /**
1430*a8f7f3fcSMatthias Ringwald   \brief   Load-Acquire Exclusive (8 bit)
1431*a8f7f3fcSMatthias Ringwald   \details Executes an LDAB exclusive instruction for an 8-bit value.
1432*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to data
1433*a8f7f3fcSMatthias Ringwald   \return             value of type uint8_t at (*ptr)
1434*a8f7f3fcSMatthias Ringwald  */
1435*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
1436*a8f7f3fcSMatthias Ringwald {
1437*a8f7f3fcSMatthias Ringwald     uint32_t result;
1438*a8f7f3fcSMatthias Ringwald 
1439*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) );
1440*a8f7f3fcSMatthias Ringwald    return ((uint8_t) result);
1441*a8f7f3fcSMatthias Ringwald }
1442*a8f7f3fcSMatthias Ringwald 
1443*a8f7f3fcSMatthias Ringwald 
1444*a8f7f3fcSMatthias Ringwald /**
1445*a8f7f3fcSMatthias Ringwald   \brief   Load-Acquire Exclusive (16 bit)
1446*a8f7f3fcSMatthias Ringwald   \details Executes an LDAH exclusive instruction for 16-bit values.
1447*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to data
1448*a8f7f3fcSMatthias Ringwald   \return        value of type uint16_t at (*ptr)
1449*a8f7f3fcSMatthias Ringwald  */
1450*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
1451*a8f7f3fcSMatthias Ringwald {
1452*a8f7f3fcSMatthias Ringwald     uint32_t result;
1453*a8f7f3fcSMatthias Ringwald 
1454*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) );
1455*a8f7f3fcSMatthias Ringwald    return ((uint16_t) result);
1456*a8f7f3fcSMatthias Ringwald }
1457*a8f7f3fcSMatthias Ringwald 
1458*a8f7f3fcSMatthias Ringwald 
1459*a8f7f3fcSMatthias Ringwald /**
1460*a8f7f3fcSMatthias Ringwald   \brief   Load-Acquire Exclusive (32 bit)
1461*a8f7f3fcSMatthias Ringwald   \details Executes an LDA exclusive instruction for 32-bit values.
1462*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to data
1463*a8f7f3fcSMatthias Ringwald   \return        value of type uint32_t at (*ptr)
1464*a8f7f3fcSMatthias Ringwald  */
1465*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
1466*a8f7f3fcSMatthias Ringwald {
1467*a8f7f3fcSMatthias Ringwald     uint32_t result;
1468*a8f7f3fcSMatthias Ringwald 
1469*a8f7f3fcSMatthias Ringwald    __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) );
1470*a8f7f3fcSMatthias Ringwald    return(result);
1471*a8f7f3fcSMatthias Ringwald }
1472*a8f7f3fcSMatthias Ringwald 
1473*a8f7f3fcSMatthias Ringwald 
1474*a8f7f3fcSMatthias Ringwald /**
1475*a8f7f3fcSMatthias Ringwald   \brief   Store-Release Exclusive (8 bit)
1476*a8f7f3fcSMatthias Ringwald   \details Executes an STLB exclusive instruction for 8-bit values.
1477*a8f7f3fcSMatthias Ringwald   \param [in]  value  Value to store
1478*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to location
1479*a8f7f3fcSMatthias Ringwald   \return          0  Function succeeded
1480*a8f7f3fcSMatthias Ringwald   \return          1  Function failed
1481*a8f7f3fcSMatthias Ringwald  */
1482*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
1483*a8f7f3fcSMatthias Ringwald {
1484*a8f7f3fcSMatthias Ringwald    uint32_t result;
1485*a8f7f3fcSMatthias Ringwald 
1486*a8f7f3fcSMatthias Ringwald    __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
1487*a8f7f3fcSMatthias Ringwald    return(result);
1488*a8f7f3fcSMatthias Ringwald }
1489*a8f7f3fcSMatthias Ringwald 
1490*a8f7f3fcSMatthias Ringwald 
1491*a8f7f3fcSMatthias Ringwald /**
1492*a8f7f3fcSMatthias Ringwald   \brief   Store-Release Exclusive (16 bit)
1493*a8f7f3fcSMatthias Ringwald   \details Executes an STLH exclusive instruction for 16-bit values.
1494*a8f7f3fcSMatthias Ringwald   \param [in]  value  Value to store
1495*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to location
1496*a8f7f3fcSMatthias Ringwald   \return          0  Function succeeded
1497*a8f7f3fcSMatthias Ringwald   \return          1  Function failed
1498*a8f7f3fcSMatthias Ringwald  */
1499*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
1500*a8f7f3fcSMatthias Ringwald {
1501*a8f7f3fcSMatthias Ringwald    uint32_t result;
1502*a8f7f3fcSMatthias Ringwald 
1503*a8f7f3fcSMatthias Ringwald    __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
1504*a8f7f3fcSMatthias Ringwald    return(result);
1505*a8f7f3fcSMatthias Ringwald }
1506*a8f7f3fcSMatthias Ringwald 
1507*a8f7f3fcSMatthias Ringwald 
1508*a8f7f3fcSMatthias Ringwald /**
1509*a8f7f3fcSMatthias Ringwald   \brief   Store-Release Exclusive (32 bit)
1510*a8f7f3fcSMatthias Ringwald   \details Executes an STL exclusive instruction for 32-bit values.
1511*a8f7f3fcSMatthias Ringwald   \param [in]  value  Value to store
1512*a8f7f3fcSMatthias Ringwald   \param [in]    ptr  Pointer to location
1513*a8f7f3fcSMatthias Ringwald   \return          0  Function succeeded
1514*a8f7f3fcSMatthias Ringwald   \return          1  Function failed
1515*a8f7f3fcSMatthias Ringwald  */
1516*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
1517*a8f7f3fcSMatthias Ringwald {
1518*a8f7f3fcSMatthias Ringwald    uint32_t result;
1519*a8f7f3fcSMatthias Ringwald 
1520*a8f7f3fcSMatthias Ringwald    __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
1521*a8f7f3fcSMatthias Ringwald    return(result);
1522*a8f7f3fcSMatthias Ringwald }
1523*a8f7f3fcSMatthias Ringwald 
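/* Editorial usage sketch (not part of the original header): a minimal
   spinlock built from the load-acquire/store-release exclusives above,
   assuming `lock` holds 0 when free and 1 when taken.                  */
#if 0  /* illustration only */
static void spin_lock(volatile uint32_t *lock)
{
  do {
    while (__LDAEX(lock) != 0U) { }    /* spin until observed free     */
  } while (__STLEX(1U, lock) != 0U);   /* claim it; nonzero = retry    */
}

static void spin_unlock(volatile uint32_t *lock)
{
  __STL(0U, lock);                     /* store-release on the way out */
}
#endif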
1524*a8f7f3fcSMatthias Ringwald #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1525*a8f7f3fcSMatthias Ringwald            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
1526*a8f7f3fcSMatthias Ringwald 
1527*a8f7f3fcSMatthias Ringwald /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
1528*a8f7f3fcSMatthias Ringwald 
1529*a8f7f3fcSMatthias Ringwald 
1530*a8f7f3fcSMatthias Ringwald /* ###################  Compiler specific Intrinsics  ########################### */
1531*a8f7f3fcSMatthias Ringwald /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
1532*a8f7f3fcSMatthias Ringwald   Access to dedicated SIMD instructions
1533*a8f7f3fcSMatthias Ringwald   @{
1534*a8f7f3fcSMatthias Ringwald */
1535*a8f7f3fcSMatthias Ringwald 
1536*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1537*a8f7f3fcSMatthias Ringwald 
1538*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
1539*a8f7f3fcSMatthias Ringwald {
1540*a8f7f3fcSMatthias Ringwald   uint32_t result;
1541*a8f7f3fcSMatthias Ringwald 
1542*a8f7f3fcSMatthias Ringwald   __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1543*a8f7f3fcSMatthias Ringwald   return(result);
1544*a8f7f3fcSMatthias Ringwald }
1545*a8f7f3fcSMatthias Ringwald 
1546*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
1547*a8f7f3fcSMatthias Ringwald {
1548*a8f7f3fcSMatthias Ringwald   uint32_t result;
1549*a8f7f3fcSMatthias Ringwald 
1550*a8f7f3fcSMatthias Ringwald   __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1551*a8f7f3fcSMatthias Ringwald   return(result);
1552*a8f7f3fcSMatthias Ringwald }
1553*a8f7f3fcSMatthias Ringwald 
1554*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
1555*a8f7f3fcSMatthias Ringwald {
1556*a8f7f3fcSMatthias Ringwald   uint32_t result;
1557*a8f7f3fcSMatthias Ringwald 
1558*a8f7f3fcSMatthias Ringwald   __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1559*a8f7f3fcSMatthias Ringwald   return(result);
1560*a8f7f3fcSMatthias Ringwald }
1561*a8f7f3fcSMatthias Ringwald 
1562*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
1563*a8f7f3fcSMatthias Ringwald {
1564*a8f7f3fcSMatthias Ringwald   uint32_t result;
1565*a8f7f3fcSMatthias Ringwald 
1566*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1567*a8f7f3fcSMatthias Ringwald   return(result);
1568*a8f7f3fcSMatthias Ringwald }
1569*a8f7f3fcSMatthias Ringwald 
1570*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
1571*a8f7f3fcSMatthias Ringwald {
1572*a8f7f3fcSMatthias Ringwald   uint32_t result;
1573*a8f7f3fcSMatthias Ringwald 
1574*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1575*a8f7f3fcSMatthias Ringwald   return(result);
1576*a8f7f3fcSMatthias Ringwald }
1577*a8f7f3fcSMatthias Ringwald 
1578*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
1579*a8f7f3fcSMatthias Ringwald {
1580*a8f7f3fcSMatthias Ringwald   uint32_t result;
1581*a8f7f3fcSMatthias Ringwald 
1582*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1583*a8f7f3fcSMatthias Ringwald   return(result);
1584*a8f7f3fcSMatthias Ringwald }
1585*a8f7f3fcSMatthias Ringwald 
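/* Editorial usage sketch (not part of the original header): each *ADD8
   intrinsic operates on four packed bytes per register; __UHADD8 halves
   every byte-wise sum, averaging four pixel pairs in one instruction:   */
#if 0  /* illustration only */
static uint32_t average4(uint32_t pixels_a, uint32_t pixels_b)
{
  return __UHADD8(pixels_a, pixels_b);   /* (a[i] + b[i]) >> 1 per byte */
}
#endif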
1586*a8f7f3fcSMatthias Ringwald 
1587*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
1588*a8f7f3fcSMatthias Ringwald {
1589*a8f7f3fcSMatthias Ringwald   uint32_t result;
1590*a8f7f3fcSMatthias Ringwald 
1591*a8f7f3fcSMatthias Ringwald   __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1592*a8f7f3fcSMatthias Ringwald   return(result);
1593*a8f7f3fcSMatthias Ringwald }
1594*a8f7f3fcSMatthias Ringwald 
1595*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
1596*a8f7f3fcSMatthias Ringwald {
1597*a8f7f3fcSMatthias Ringwald   uint32_t result;
1598*a8f7f3fcSMatthias Ringwald 
1599*a8f7f3fcSMatthias Ringwald   __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1600*a8f7f3fcSMatthias Ringwald   return(result);
1601*a8f7f3fcSMatthias Ringwald }
1602*a8f7f3fcSMatthias Ringwald 
1603*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
1604*a8f7f3fcSMatthias Ringwald {
1605*a8f7f3fcSMatthias Ringwald   uint32_t result;
1606*a8f7f3fcSMatthias Ringwald 
1607*a8f7f3fcSMatthias Ringwald   __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1608*a8f7f3fcSMatthias Ringwald   return(result);
1609*a8f7f3fcSMatthias Ringwald }
1610*a8f7f3fcSMatthias Ringwald 
1611*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
1612*a8f7f3fcSMatthias Ringwald {
1613*a8f7f3fcSMatthias Ringwald   uint32_t result;
1614*a8f7f3fcSMatthias Ringwald 
1615*a8f7f3fcSMatthias Ringwald   __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1616*a8f7f3fcSMatthias Ringwald   return(result);
1617*a8f7f3fcSMatthias Ringwald }
1618*a8f7f3fcSMatthias Ringwald 
1619*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
1620*a8f7f3fcSMatthias Ringwald {
1621*a8f7f3fcSMatthias Ringwald   uint32_t result;
1622*a8f7f3fcSMatthias Ringwald 
1623*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1624*a8f7f3fcSMatthias Ringwald   return(result);
1625*a8f7f3fcSMatthias Ringwald }
1626*a8f7f3fcSMatthias Ringwald 
1627*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
1628*a8f7f3fcSMatthias Ringwald {
1629*a8f7f3fcSMatthias Ringwald   uint32_t result;
1630*a8f7f3fcSMatthias Ringwald 
1631*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1632*a8f7f3fcSMatthias Ringwald   return(result);
1633*a8f7f3fcSMatthias Ringwald }
1634*a8f7f3fcSMatthias Ringwald 
1635*a8f7f3fcSMatthias Ringwald 
1636*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
1637*a8f7f3fcSMatthias Ringwald {
1638*a8f7f3fcSMatthias Ringwald   uint32_t result;
1639*a8f7f3fcSMatthias Ringwald 
1640*a8f7f3fcSMatthias Ringwald   __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1641*a8f7f3fcSMatthias Ringwald   return(result);
1642*a8f7f3fcSMatthias Ringwald }
1643*a8f7f3fcSMatthias Ringwald 
1644*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
1645*a8f7f3fcSMatthias Ringwald {
1646*a8f7f3fcSMatthias Ringwald   uint32_t result;
1647*a8f7f3fcSMatthias Ringwald 
1648*a8f7f3fcSMatthias Ringwald   __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1649*a8f7f3fcSMatthias Ringwald   return(result);
1650*a8f7f3fcSMatthias Ringwald }
1651*a8f7f3fcSMatthias Ringwald 
1652*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
1653*a8f7f3fcSMatthias Ringwald {
1654*a8f7f3fcSMatthias Ringwald   uint32_t result;
1655*a8f7f3fcSMatthias Ringwald 
1656*a8f7f3fcSMatthias Ringwald   __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1657*a8f7f3fcSMatthias Ringwald   return(result);
1658*a8f7f3fcSMatthias Ringwald }
1659*a8f7f3fcSMatthias Ringwald 
1660*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
1661*a8f7f3fcSMatthias Ringwald {
1662*a8f7f3fcSMatthias Ringwald   uint32_t result;
1663*a8f7f3fcSMatthias Ringwald 
1664*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1665*a8f7f3fcSMatthias Ringwald   return(result);
1666*a8f7f3fcSMatthias Ringwald }
1667*a8f7f3fcSMatthias Ringwald 
1668*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
1669*a8f7f3fcSMatthias Ringwald {
1670*a8f7f3fcSMatthias Ringwald   uint32_t result;
1671*a8f7f3fcSMatthias Ringwald 
1672*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1673*a8f7f3fcSMatthias Ringwald   return(result);
1674*a8f7f3fcSMatthias Ringwald }
1675*a8f7f3fcSMatthias Ringwald 
1676*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
1677*a8f7f3fcSMatthias Ringwald {
1678*a8f7f3fcSMatthias Ringwald   uint32_t result;
1679*a8f7f3fcSMatthias Ringwald 
1680*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1681*a8f7f3fcSMatthias Ringwald   return(result);
1682*a8f7f3fcSMatthias Ringwald }
1683*a8f7f3fcSMatthias Ringwald 
1684*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
1685*a8f7f3fcSMatthias Ringwald {
1686*a8f7f3fcSMatthias Ringwald   uint32_t result;
1687*a8f7f3fcSMatthias Ringwald 
1688*a8f7f3fcSMatthias Ringwald   __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1689*a8f7f3fcSMatthias Ringwald   return(result);
1690*a8f7f3fcSMatthias Ringwald }
1691*a8f7f3fcSMatthias Ringwald 
1692*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
1693*a8f7f3fcSMatthias Ringwald {
1694*a8f7f3fcSMatthias Ringwald   uint32_t result;
1695*a8f7f3fcSMatthias Ringwald 
1696*a8f7f3fcSMatthias Ringwald   __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1697*a8f7f3fcSMatthias Ringwald   return(result);
1698*a8f7f3fcSMatthias Ringwald }
1699*a8f7f3fcSMatthias Ringwald 
1700*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
1701*a8f7f3fcSMatthias Ringwald {
1702*a8f7f3fcSMatthias Ringwald   uint32_t result;
1703*a8f7f3fcSMatthias Ringwald 
1704*a8f7f3fcSMatthias Ringwald   __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1705*a8f7f3fcSMatthias Ringwald   return(result);
1706*a8f7f3fcSMatthias Ringwald }
1707*a8f7f3fcSMatthias Ringwald 
1708*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
1709*a8f7f3fcSMatthias Ringwald {
1710*a8f7f3fcSMatthias Ringwald   uint32_t result;
1711*a8f7f3fcSMatthias Ringwald 
1712*a8f7f3fcSMatthias Ringwald   __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1713*a8f7f3fcSMatthias Ringwald   return(result);
1714*a8f7f3fcSMatthias Ringwald }
1715*a8f7f3fcSMatthias Ringwald 
1716*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
1717*a8f7f3fcSMatthias Ringwald {
1718*a8f7f3fcSMatthias Ringwald   uint32_t result;
1719*a8f7f3fcSMatthias Ringwald 
1720*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1721*a8f7f3fcSMatthias Ringwald   return(result);
1722*a8f7f3fcSMatthias Ringwald }
1723*a8f7f3fcSMatthias Ringwald 
1724*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
1725*a8f7f3fcSMatthias Ringwald {
1726*a8f7f3fcSMatthias Ringwald   uint32_t result;
1727*a8f7f3fcSMatthias Ringwald 
1728*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1729*a8f7f3fcSMatthias Ringwald   return(result);
1730*a8f7f3fcSMatthias Ringwald }
1731*a8f7f3fcSMatthias Ringwald 
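/* Editorial usage sketch (not part of the original header): the 16-bit
   variants treat a word as two packed halfwords; __QADD16, for example,
   saturating-adds the I and Q parts of a packed complex int16 sample:   */
#if 0  /* illustration only */
static uint32_t add_iq(uint32_t iq_a, uint32_t iq_b)
{
  return __QADD16(iq_a, iq_b);   /* per-halfword saturating add */
}
#endif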
1732*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
1733*a8f7f3fcSMatthias Ringwald {
1734*a8f7f3fcSMatthias Ringwald   uint32_t result;
1735*a8f7f3fcSMatthias Ringwald 
1736*a8f7f3fcSMatthias Ringwald   __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1737*a8f7f3fcSMatthias Ringwald   return(result);
1738*a8f7f3fcSMatthias Ringwald }
1739*a8f7f3fcSMatthias Ringwald 
1740*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
1741*a8f7f3fcSMatthias Ringwald {
1742*a8f7f3fcSMatthias Ringwald   uint32_t result;
1743*a8f7f3fcSMatthias Ringwald 
1744*a8f7f3fcSMatthias Ringwald   __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1745*a8f7f3fcSMatthias Ringwald   return(result);
1746*a8f7f3fcSMatthias Ringwald }
1747*a8f7f3fcSMatthias Ringwald 
1748*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
1749*a8f7f3fcSMatthias Ringwald {
1750*a8f7f3fcSMatthias Ringwald   uint32_t result;
1751*a8f7f3fcSMatthias Ringwald 
1752*a8f7f3fcSMatthias Ringwald   __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1753*a8f7f3fcSMatthias Ringwald   return(result);
1754*a8f7f3fcSMatthias Ringwald }
1755*a8f7f3fcSMatthias Ringwald 
1756*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
1757*a8f7f3fcSMatthias Ringwald {
1758*a8f7f3fcSMatthias Ringwald   uint32_t result;
1759*a8f7f3fcSMatthias Ringwald 
1760*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1761*a8f7f3fcSMatthias Ringwald   return(result);
1762*a8f7f3fcSMatthias Ringwald }
1763*a8f7f3fcSMatthias Ringwald 
1764*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
1765*a8f7f3fcSMatthias Ringwald {
1766*a8f7f3fcSMatthias Ringwald   uint32_t result;
1767*a8f7f3fcSMatthias Ringwald 
1768*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1769*a8f7f3fcSMatthias Ringwald   return(result);
1770*a8f7f3fcSMatthias Ringwald }
1771*a8f7f3fcSMatthias Ringwald 
1772*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
1773*a8f7f3fcSMatthias Ringwald {
1774*a8f7f3fcSMatthias Ringwald   uint32_t result;
1775*a8f7f3fcSMatthias Ringwald 
1776*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1777*a8f7f3fcSMatthias Ringwald   return(result);
1778*a8f7f3fcSMatthias Ringwald }
1779*a8f7f3fcSMatthias Ringwald 
1780*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
1781*a8f7f3fcSMatthias Ringwald {
1782*a8f7f3fcSMatthias Ringwald   uint32_t result;
1783*a8f7f3fcSMatthias Ringwald 
1784*a8f7f3fcSMatthias Ringwald   __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1785*a8f7f3fcSMatthias Ringwald   return(result);
1786*a8f7f3fcSMatthias Ringwald }
1787*a8f7f3fcSMatthias Ringwald 
1788*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
1789*a8f7f3fcSMatthias Ringwald {
1790*a8f7f3fcSMatthias Ringwald   uint32_t result;
1791*a8f7f3fcSMatthias Ringwald 
1792*a8f7f3fcSMatthias Ringwald   __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1793*a8f7f3fcSMatthias Ringwald   return(result);
1794*a8f7f3fcSMatthias Ringwald }
1795*a8f7f3fcSMatthias Ringwald 
1796*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
1797*a8f7f3fcSMatthias Ringwald {
1798*a8f7f3fcSMatthias Ringwald   uint32_t result;
1799*a8f7f3fcSMatthias Ringwald 
1800*a8f7f3fcSMatthias Ringwald   __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1801*a8f7f3fcSMatthias Ringwald   return(result);
1802*a8f7f3fcSMatthias Ringwald }
1803*a8f7f3fcSMatthias Ringwald 
1804*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
1805*a8f7f3fcSMatthias Ringwald {
1806*a8f7f3fcSMatthias Ringwald   uint32_t result;
1807*a8f7f3fcSMatthias Ringwald 
1808*a8f7f3fcSMatthias Ringwald   __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1809*a8f7f3fcSMatthias Ringwald   return(result);
1810*a8f7f3fcSMatthias Ringwald }
1811*a8f7f3fcSMatthias Ringwald 
1812*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
1813*a8f7f3fcSMatthias Ringwald {
1814*a8f7f3fcSMatthias Ringwald   uint32_t result;
1815*a8f7f3fcSMatthias Ringwald 
1816*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1817*a8f7f3fcSMatthias Ringwald   return(result);
1818*a8f7f3fcSMatthias Ringwald }
1819*a8f7f3fcSMatthias Ringwald 
1820*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
1821*a8f7f3fcSMatthias Ringwald {
1822*a8f7f3fcSMatthias Ringwald   uint32_t result;
1823*a8f7f3fcSMatthias Ringwald 
1824*a8f7f3fcSMatthias Ringwald   __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1825*a8f7f3fcSMatthias Ringwald   return(result);
1826*a8f7f3fcSMatthias Ringwald }
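
/* SAX is the mirror image of ASX (illustrative, same packed values as above):
 *
 *   uint32_t a = 0x00050002U;      // lanes {5, 2}
 *   uint32_t b = 0x00010003U;      // lanes {1, 3}
 *   uint32_t r = __SSAX(a, b);     // hi = 5 - 3 = 2, lo = 2 + 1 = 3
 *                                  // -> 0x00020003
 */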

/* Unsigned sum of absolute differences of the four byte lanes (USAD8) */
__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* USAD8 plus accumulate: result = op3 + sum(|op1.byte[i] - op2.byte[i]|) */
__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
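
/* A hypothetical SAD kernel built on __USADA8 (illustrative sketch, not part
 * of CMSIS; assumes <string.h> for memcpy and a byte count divisible by 4):
 *
 *   static inline uint32_t sad_u8(const uint8_t *p, const uint8_t *q, uint32_t n4)
 *   {
 *     uint32_t acc = 0U;
 *     while (n4--) {
 *       uint32_t a, b;
 *       memcpy(&a, p, 4U);             // packed, alignment-safe loads
 *       memcpy(&b, q, 4U);
 *       acc = __USADA8(a, b, acc);     // acc += four absolute differences
 *       p += 4; q += 4;
 *     }
 *     return acc;
 *   }
 */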

/* Saturate both 16-bit lanes to a signed bit width; ARG2 must be a
   compile-time constant in [1, 16] (required by the "I" asm constraint) */
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/* Saturate both 16-bit lanes to an unsigned bit width; ARG2 must be a
   compile-time constant in [0, 15] */
#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
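
/* Illustrative example (not part of the header):
 *
 *   uint32_t x = 0x7FFF0123U;       // lanes {32767, 291}
 *   uint32_t y = __SSAT16(x, 9);    // each lane clamped to [-256, 255]
 *                                   // -> 0x00FF00FF
 */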

/* Zero-extend bytes 0 and 2 into the two 16-bit lanes (UXTB16) */
__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

/* UXTB16 of op2, added lane-wise to op1's halfwords (UXTAB16) */
__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* Sign-extending counterparts (SXTB16 / SXTAB16) */
__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
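
/* Unpacking four 8-bit values into two halfword pairs (illustrative sketch;
 * __ROR is defined earlier in this header):
 *
 *   uint32_t px   = 0x04030201U;
 *   uint32_t even = __UXTB16(px);             // {0x0003, 0x0001}: bytes 2, 0
 *   uint32_t odd  = __UXTB16(__ROR(px, 8));   // {0x0004, 0x0002}: bytes 3, 1
 */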

/* Dual signed 16 x 16 multiply, sum of products (SMUAD):
   result = op1.lo * op2.lo + op1.hi * op2.hi */
__STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* As SMUAD, with op2's halfwords exchanged (SMUADX) */
__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* SMUAD plus 32-bit accumulate: result = op3 + both products (SMLAD) */
__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/* As SMLAD, with op2's halfwords exchanged (SMLADX) */
__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
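
/* A hypothetical Q15 dot-product kernel using __SMLAD (illustrative sketch,
 * not part of CMSIS; assumes <string.h> for memcpy, an even length n, and
 * that the 32-bit accumulator does not overflow for the given data):
 *
 *   static inline int32_t dot_q15(const int16_t *a, const int16_t *b, uint32_t n)
 *   {
 *     int32_t acc = 0;
 *     while (n >= 2U) {
 *       uint32_t va, vb;
 *       memcpy(&va, a, 4U);                             // packed {a[i+1], a[i]}
 *       memcpy(&vb, b, 4U);
 *       acc = (int32_t)__SMLAD(va, vb, (uint32_t)acc);  // two MACs per call
 *       a += 2; b += 2; n -= 2U;
 *     }
 *     return acc;
 *   }
 */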

/* SMUAD plus 64-bit accumulate (SMLALD). The 64-bit accumulator occupies a
   register pair: the union splits it into two 32-bit halves, the "0"/"1"
   constraints tie them to the asm outputs, and the __ARMEB__ test picks the
   half ordering for little- vs big-endian targets. */
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/* As SMLALD, with op2's halfwords exchanged (SMLALDX) */
__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
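
/* For long dot products, the 64-bit accumulator avoids the overflow risk of
 * __SMLAD (illustrative, following the dot_q15 sketch above):
 *
 *   uint64_t acc = 0U;
 *   acc = __SMLALD(va, vb, acc);   // acc += a.lo*b.lo + a.hi*b.hi
 */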

/* Dual signed multiply, difference of products (SMUSD):
   result = op1.lo * op2.lo - op1.hi * op2.hi */
__STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* As SMUSD, with op2's halfwords exchanged (SMUSDX) */
__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* SMUSD plus 32-bit accumulate (SMLSD) */
__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/* As SMLSD, with op2's halfwords exchanged (SMLSDX) */
__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/* SMUSD plus 64-bit accumulate (SMLSLD); register-pair handling as in __SMLALD */
__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/* As SMLSLD, with op2's halfwords exchanged (SMLSLDX) */
__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/* Byte-wise select (SEL): picks each result byte from op1 or op2 according
   to the APSR.GE flags left by a preceding SIMD add/subtract */
__STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
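
/* A hypothetical per-byte maximum built on GE-flag selection (illustrative
 * sketch, not part of CMSIS; __USUB8 is defined earlier in this header and
 * sets GE[i] when op1.byte[i] >= op2.byte[i]; the two intrinsics must stay
 * adjacent so nothing clobbers the GE flags in between):
 *
 *   static inline uint32_t max_u8x4(uint32_t a, uint32_t b)
 *   {
 *     (void)__USUB8(a, b);   // sets GE[i] = (a.byte[i] >= b.byte[i])
 *     return __SEL(a, b);    // byte-wise: GE[i] ? a.byte[i] : b.byte[i]
 *   }
 */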

/* 32-bit signed saturating add (QADD) */
__STATIC_FORCEINLINE  int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/* 32-bit signed saturating subtract (QSUB) */
__STATIC_FORCEINLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
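
/* Saturation instead of two's-complement wraparound (illustrative):
 *
 *   int32_t r = __QADD(INT32_MAX, 1);   // -> INT32_MAX, not INT32_MIN
 */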

/* Inline-assembly PKHBT/PKHTB variants, kept disabled; the plain C macros
   below are used instead */
#if 0
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
#endif

/* Pack halfwords: bottom halfword of ARG1, top halfword of (ARG2 << ARG3) */
#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

/* Pack halfwords: top halfword of ARG1, bottom halfword of (ARG2 >> ARG3) */
#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
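
/* Illustrative example (not part of the header):
 *
 *   uint32_t lo = 0x00001234U, hi = 0x00005678U;
 *   uint32_t p  = __PKHBT(lo, hi, 16);   // -> 0x56781234: low halfword from
 *                                        //    lo, high halfword from hi << 16
 */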

/* Signed most-significant-word multiply accumulate (SMMLA) */
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
  return(result);
}
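
/* Illustrative C equivalent (a sketch for reference only; relies on GCC's
 * arithmetic right shift of signed values):
 *
 *   op3 + (int32_t)(((int64_t)op1 * (int64_t)op2) >> 32)
 *
 * i.e. the upper word of the 64-bit product, accumulated into op3. */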

#endif /* (__ARM_FEATURE_DSP == 1) */
/*@} end of group CMSIS_SIMD_intrinsics */


#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_H */