1*a8f7f3fcSMatthias Ringwald /**************************************************************************//**
2*a8f7f3fcSMatthias Ringwald * @file cmsis_armclang.h
3*a8f7f3fcSMatthias Ringwald * @brief CMSIS compiler armclang (Arm Compiler 6) header file
4*a8f7f3fcSMatthias Ringwald * @version V5.0.4
5*a8f7f3fcSMatthias Ringwald * @date 10. January 2018
6*a8f7f3fcSMatthias Ringwald ******************************************************************************/
7*a8f7f3fcSMatthias Ringwald /*
8*a8f7f3fcSMatthias Ringwald * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
9*a8f7f3fcSMatthias Ringwald *
10*a8f7f3fcSMatthias Ringwald * SPDX-License-Identifier: Apache-2.0
11*a8f7f3fcSMatthias Ringwald *
12*a8f7f3fcSMatthias Ringwald * Licensed under the Apache License, Version 2.0 (the License); you may
13*a8f7f3fcSMatthias Ringwald * not use this file except in compliance with the License.
14*a8f7f3fcSMatthias Ringwald * You may obtain a copy of the License at
15*a8f7f3fcSMatthias Ringwald *
16*a8f7f3fcSMatthias Ringwald * www.apache.org/licenses/LICENSE-2.0
17*a8f7f3fcSMatthias Ringwald *
18*a8f7f3fcSMatthias Ringwald * Unless required by applicable law or agreed to in writing, software
19*a8f7f3fcSMatthias Ringwald * distributed under the License is distributed on an AS IS BASIS, WITHOUT
20*a8f7f3fcSMatthias Ringwald * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21*a8f7f3fcSMatthias Ringwald * See the License for the specific language governing permissions and
22*a8f7f3fcSMatthias Ringwald * limitations under the License.
23*a8f7f3fcSMatthias Ringwald */
24*a8f7f3fcSMatthias Ringwald
25*a8f7f3fcSMatthias Ringwald /*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */
26*a8f7f3fcSMatthias Ringwald
27*a8f7f3fcSMatthias Ringwald #ifndef __CMSIS_ARMCLANG_H
28*a8f7f3fcSMatthias Ringwald #define __CMSIS_ARMCLANG_H
29*a8f7f3fcSMatthias Ringwald
30*a8f7f3fcSMatthias Ringwald #pragma clang system_header /* treat file as system include file */
31*a8f7f3fcSMatthias Ringwald
32*a8f7f3fcSMatthias Ringwald #ifndef __ARM_COMPAT_H
33*a8f7f3fcSMatthias Ringwald #include <arm_compat.h> /* Compatibility header for Arm Compiler 5 intrinsics */
34*a8f7f3fcSMatthias Ringwald #endif
35*a8f7f3fcSMatthias Ringwald
/* CMSIS compiler specific defines */
/* Each macro is guarded so a translation unit (or another CMSIS header)
   may pre-define it with a compatible replacement. */
#ifndef __ASM
#define __ASM __asm
#endif
#ifndef __INLINE
#define __INLINE __inline
#endif
#ifndef __STATIC_INLINE
#define __STATIC_INLINE static __inline
#endif
#ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
#endif
#ifndef __NO_RETURN
#define __NO_RETURN __attribute__((__noreturn__))
#endif
#ifndef __USED
#define __USED __attribute__((used))
#endif
#ifndef __WEAK
#define __WEAK __attribute__((weak))
#endif
#ifndef __PACKED
#define __PACKED __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_STRUCT
#define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_UNION
#define __PACKED_UNION union __attribute__((packed, aligned(1)))
#endif
/* The __UNALIGNED_* helpers below wrap unaligned memory accesses in a
   packed one-member struct so the compiler emits code that is legal for
   unaligned addresses, instead of a plain (possibly faulting) load/store. */
#ifndef __UNALIGNED_UINT32 /* deprecated */
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */
struct __attribute__((packed)) T_UINT32 { uint32_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
#endif
#ifndef __UNALIGNED_UINT16_WRITE
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
__PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
__PACKED_STRUCT T_UINT16_READ { uint16_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
__PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */
__PACKED_STRUCT T_UINT32_READ { uint32_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef __ALIGNED
#define __ALIGNED(x) __attribute__((aligned(x)))
#endif
#ifndef __RESTRICT
#define __RESTRICT __restrict
#endif
113*a8f7f3fcSMatthias Ringwald
114*a8f7f3fcSMatthias Ringwald
115*a8f7f3fcSMatthias Ringwald /* ########################### Core Function Access ########################### */
116*a8f7f3fcSMatthias Ringwald /** \ingroup CMSIS_Core_FunctionInterface
117*a8f7f3fcSMatthias Ringwald \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
118*a8f7f3fcSMatthias Ringwald @{
119*a8f7f3fcSMatthias Ringwald */
120*a8f7f3fcSMatthias Ringwald
121*a8f7f3fcSMatthias Ringwald /**
122*a8f7f3fcSMatthias Ringwald \brief Enable IRQ Interrupts
123*a8f7f3fcSMatthias Ringwald \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
124*a8f7f3fcSMatthias Ringwald Can only be executed in Privileged modes.
125*a8f7f3fcSMatthias Ringwald */
126*a8f7f3fcSMatthias Ringwald /* intrinsic void __enable_irq(); see arm_compat.h */
127*a8f7f3fcSMatthias Ringwald
128*a8f7f3fcSMatthias Ringwald
129*a8f7f3fcSMatthias Ringwald /**
130*a8f7f3fcSMatthias Ringwald \brief Disable IRQ Interrupts
131*a8f7f3fcSMatthias Ringwald \details Disables IRQ interrupts by setting the I-bit in the CPSR.
132*a8f7f3fcSMatthias Ringwald Can only be executed in Privileged modes.
133*a8f7f3fcSMatthias Ringwald */
134*a8f7f3fcSMatthias Ringwald /* intrinsic void __disable_irq(); see arm_compat.h */
135*a8f7f3fcSMatthias Ringwald
136*a8f7f3fcSMatthias Ringwald
137*a8f7f3fcSMatthias Ringwald /**
138*a8f7f3fcSMatthias Ringwald \brief Get Control Register
139*a8f7f3fcSMatthias Ringwald \details Returns the content of the Control Register.
140*a8f7f3fcSMatthias Ringwald \return Control Register value
141*a8f7f3fcSMatthias Ringwald */
__get_CONTROL(void)142*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
143*a8f7f3fcSMatthias Ringwald {
144*a8f7f3fcSMatthias Ringwald uint32_t result;
145*a8f7f3fcSMatthias Ringwald
146*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, control" : "=r" (result) );
147*a8f7f3fcSMatthias Ringwald return(result);
148*a8f7f3fcSMatthias Ringwald }
149*a8f7f3fcSMatthias Ringwald
150*a8f7f3fcSMatthias Ringwald
151*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
152*a8f7f3fcSMatthias Ringwald /**
153*a8f7f3fcSMatthias Ringwald \brief Get Control Register (non-secure)
154*a8f7f3fcSMatthias Ringwald \details Returns the content of the non-secure Control Register when in secure mode.
155*a8f7f3fcSMatthias Ringwald \return non-secure Control Register value
156*a8f7f3fcSMatthias Ringwald */
__TZ_get_CONTROL_NS(void)157*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
158*a8f7f3fcSMatthias Ringwald {
159*a8f7f3fcSMatthias Ringwald uint32_t result;
160*a8f7f3fcSMatthias Ringwald
161*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
162*a8f7f3fcSMatthias Ringwald return(result);
163*a8f7f3fcSMatthias Ringwald }
164*a8f7f3fcSMatthias Ringwald #endif
165*a8f7f3fcSMatthias Ringwald
166*a8f7f3fcSMatthias Ringwald
/**
  \brief   Set Control Register
  \details Writes the given value to the Control Register.
  \param [in]    control  Control Register value to set
 */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  /* "memory" clobber prevents the compiler from moving memory accesses
     across the CONTROL write */
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}
176*a8f7f3fcSMatthias Ringwald
177*a8f7f3fcSMatthias Ringwald
178*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Control Register (non-secure)
  \details Writes the given value to the non-secure Control Register when in secure state.
  \param [in]    control  Control Register value to set
 */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  /* "memory" clobber prevents the compiler from moving memory accesses
     across the CONTROL write */
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
188*a8f7f3fcSMatthias Ringwald #endif
189*a8f7f3fcSMatthias Ringwald
190*a8f7f3fcSMatthias Ringwald
191*a8f7f3fcSMatthias Ringwald /**
192*a8f7f3fcSMatthias Ringwald \brief Get IPSR Register
193*a8f7f3fcSMatthias Ringwald \details Returns the content of the IPSR Register.
194*a8f7f3fcSMatthias Ringwald \return IPSR Register value
195*a8f7f3fcSMatthias Ringwald */
__get_IPSR(void)196*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_IPSR(void)
197*a8f7f3fcSMatthias Ringwald {
198*a8f7f3fcSMatthias Ringwald uint32_t result;
199*a8f7f3fcSMatthias Ringwald
200*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
201*a8f7f3fcSMatthias Ringwald return(result);
202*a8f7f3fcSMatthias Ringwald }
203*a8f7f3fcSMatthias Ringwald
204*a8f7f3fcSMatthias Ringwald
205*a8f7f3fcSMatthias Ringwald /**
206*a8f7f3fcSMatthias Ringwald \brief Get APSR Register
207*a8f7f3fcSMatthias Ringwald \details Returns the content of the APSR Register.
208*a8f7f3fcSMatthias Ringwald \return APSR Register value
209*a8f7f3fcSMatthias Ringwald */
__get_APSR(void)210*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_APSR(void)
211*a8f7f3fcSMatthias Ringwald {
212*a8f7f3fcSMatthias Ringwald uint32_t result;
213*a8f7f3fcSMatthias Ringwald
214*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, apsr" : "=r" (result) );
215*a8f7f3fcSMatthias Ringwald return(result);
216*a8f7f3fcSMatthias Ringwald }
217*a8f7f3fcSMatthias Ringwald
218*a8f7f3fcSMatthias Ringwald
219*a8f7f3fcSMatthias Ringwald /**
220*a8f7f3fcSMatthias Ringwald \brief Get xPSR Register
221*a8f7f3fcSMatthias Ringwald \details Returns the content of the xPSR Register.
222*a8f7f3fcSMatthias Ringwald \return xPSR Register value
223*a8f7f3fcSMatthias Ringwald */
__get_xPSR(void)224*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_xPSR(void)
225*a8f7f3fcSMatthias Ringwald {
226*a8f7f3fcSMatthias Ringwald uint32_t result;
227*a8f7f3fcSMatthias Ringwald
228*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
229*a8f7f3fcSMatthias Ringwald return(result);
230*a8f7f3fcSMatthias Ringwald }
231*a8f7f3fcSMatthias Ringwald
232*a8f7f3fcSMatthias Ringwald
233*a8f7f3fcSMatthias Ringwald /**
234*a8f7f3fcSMatthias Ringwald \brief Get Process Stack Pointer
235*a8f7f3fcSMatthias Ringwald \details Returns the current value of the Process Stack Pointer (PSP).
236*a8f7f3fcSMatthias Ringwald \return PSP Register value
237*a8f7f3fcSMatthias Ringwald */
__get_PSP(void)238*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_PSP(void)
239*a8f7f3fcSMatthias Ringwald {
240*a8f7f3fcSMatthias Ringwald uint32_t result;
241*a8f7f3fcSMatthias Ringwald
242*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, psp" : "=r" (result) );
243*a8f7f3fcSMatthias Ringwald return(result);
244*a8f7f3fcSMatthias Ringwald }
245*a8f7f3fcSMatthias Ringwald
246*a8f7f3fcSMatthias Ringwald
247*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
248*a8f7f3fcSMatthias Ringwald /**
249*a8f7f3fcSMatthias Ringwald \brief Get Process Stack Pointer (non-secure)
250*a8f7f3fcSMatthias Ringwald \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
251*a8f7f3fcSMatthias Ringwald \return PSP Register value
252*a8f7f3fcSMatthias Ringwald */
__TZ_get_PSP_NS(void)253*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
254*a8f7f3fcSMatthias Ringwald {
255*a8f7f3fcSMatthias Ringwald uint32_t result;
256*a8f7f3fcSMatthias Ringwald
257*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
258*a8f7f3fcSMatthias Ringwald return(result);
259*a8f7f3fcSMatthias Ringwald }
260*a8f7f3fcSMatthias Ringwald #endif
261*a8f7f3fcSMatthias Ringwald
262*a8f7f3fcSMatthias Ringwald
/**
  \brief   Set Process Stack Pointer
  \details Assigns the given value to the Process Stack Pointer (PSP).
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  /* NOTE(review): no "memory" clobber here, unlike the CONTROL/PRIMASK
     setters in this file — intentional in upstream CMSIS */
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}
272*a8f7f3fcSMatthias Ringwald
273*a8f7f3fcSMatthias Ringwald
274*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Process Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
284*a8f7f3fcSMatthias Ringwald #endif
285*a8f7f3fcSMatthias Ringwald
286*a8f7f3fcSMatthias Ringwald
287*a8f7f3fcSMatthias Ringwald /**
288*a8f7f3fcSMatthias Ringwald \brief Get Main Stack Pointer
289*a8f7f3fcSMatthias Ringwald \details Returns the current value of the Main Stack Pointer (MSP).
290*a8f7f3fcSMatthias Ringwald \return MSP Register value
291*a8f7f3fcSMatthias Ringwald */
__get_MSP(void)292*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_MSP(void)
293*a8f7f3fcSMatthias Ringwald {
294*a8f7f3fcSMatthias Ringwald uint32_t result;
295*a8f7f3fcSMatthias Ringwald
296*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, msp" : "=r" (result) );
297*a8f7f3fcSMatthias Ringwald return(result);
298*a8f7f3fcSMatthias Ringwald }
299*a8f7f3fcSMatthias Ringwald
300*a8f7f3fcSMatthias Ringwald
301*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
302*a8f7f3fcSMatthias Ringwald /**
303*a8f7f3fcSMatthias Ringwald \brief Get Main Stack Pointer (non-secure)
304*a8f7f3fcSMatthias Ringwald \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
305*a8f7f3fcSMatthias Ringwald \return MSP Register value
306*a8f7f3fcSMatthias Ringwald */
__TZ_get_MSP_NS(void)307*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
308*a8f7f3fcSMatthias Ringwald {
309*a8f7f3fcSMatthias Ringwald uint32_t result;
310*a8f7f3fcSMatthias Ringwald
311*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
312*a8f7f3fcSMatthias Ringwald return(result);
313*a8f7f3fcSMatthias Ringwald }
314*a8f7f3fcSMatthias Ringwald #endif
315*a8f7f3fcSMatthias Ringwald
316*a8f7f3fcSMatthias Ringwald
/**
  \brief   Set Main Stack Pointer
  \details Assigns the given value to the Main Stack Pointer (MSP).
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}
326*a8f7f3fcSMatthias Ringwald
327*a8f7f3fcSMatthias Ringwald
328*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Main Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
338*a8f7f3fcSMatthias Ringwald #endif
339*a8f7f3fcSMatthias Ringwald
340*a8f7f3fcSMatthias Ringwald
341*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
342*a8f7f3fcSMatthias Ringwald /**
343*a8f7f3fcSMatthias Ringwald \brief Get Stack Pointer (non-secure)
344*a8f7f3fcSMatthias Ringwald \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
345*a8f7f3fcSMatthias Ringwald \return SP Register value
346*a8f7f3fcSMatthias Ringwald */
__TZ_get_SP_NS(void)347*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
348*a8f7f3fcSMatthias Ringwald {
349*a8f7f3fcSMatthias Ringwald uint32_t result;
350*a8f7f3fcSMatthias Ringwald
351*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
352*a8f7f3fcSMatthias Ringwald return(result);
353*a8f7f3fcSMatthias Ringwald }
354*a8f7f3fcSMatthias Ringwald
355*a8f7f3fcSMatthias Ringwald
/**
  \brief   Set Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
  \param [in]    topOfStack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
365*a8f7f3fcSMatthias Ringwald #endif
366*a8f7f3fcSMatthias Ringwald
367*a8f7f3fcSMatthias Ringwald
368*a8f7f3fcSMatthias Ringwald /**
369*a8f7f3fcSMatthias Ringwald \brief Get Priority Mask
370*a8f7f3fcSMatthias Ringwald \details Returns the current state of the priority mask bit from the Priority Mask Register.
371*a8f7f3fcSMatthias Ringwald \return Priority Mask value
372*a8f7f3fcSMatthias Ringwald */
__get_PRIMASK(void)373*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
374*a8f7f3fcSMatthias Ringwald {
375*a8f7f3fcSMatthias Ringwald uint32_t result;
376*a8f7f3fcSMatthias Ringwald
377*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, primask" : "=r" (result) );
378*a8f7f3fcSMatthias Ringwald return(result);
379*a8f7f3fcSMatthias Ringwald }
380*a8f7f3fcSMatthias Ringwald
381*a8f7f3fcSMatthias Ringwald
382*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
383*a8f7f3fcSMatthias Ringwald /**
384*a8f7f3fcSMatthias Ringwald \brief Get Priority Mask (non-secure)
385*a8f7f3fcSMatthias Ringwald \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
386*a8f7f3fcSMatthias Ringwald \return Priority Mask value
387*a8f7f3fcSMatthias Ringwald */
__TZ_get_PRIMASK_NS(void)388*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
389*a8f7f3fcSMatthias Ringwald {
390*a8f7f3fcSMatthias Ringwald uint32_t result;
391*a8f7f3fcSMatthias Ringwald
392*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
393*a8f7f3fcSMatthias Ringwald return(result);
394*a8f7f3fcSMatthias Ringwald }
395*a8f7f3fcSMatthias Ringwald #endif
396*a8f7f3fcSMatthias Ringwald
397*a8f7f3fcSMatthias Ringwald
/**
  \brief   Set Priority Mask
  \details Assigns the given value to the Priority Mask Register.
  \param [in]    priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  /* "memory" clobber: acts as a compiler barrier so accesses are not
     reordered around the interrupt-masking change */
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
407*a8f7f3fcSMatthias Ringwald
408*a8f7f3fcSMatthias Ringwald
409*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Priority Mask (non-secure)
  \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
  \param [in]    priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  /* "memory" clobber: acts as a compiler barrier so accesses are not
     reordered around the interrupt-masking change */
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
419*a8f7f3fcSMatthias Ringwald #endif
420*a8f7f3fcSMatthias Ringwald
421*a8f7f3fcSMatthias Ringwald
422*a8f7f3fcSMatthias Ringwald #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
423*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
424*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
/* Alias onto the compiler intrinsic provided by arm_compat.h */
#define __enable_fault_irq __enable_fiq /* see arm_compat.h */


/**
  \brief   Disable FIQ
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
/* Alias onto the compiler intrinsic provided by arm_compat.h */
#define __disable_fault_irq __disable_fiq /* see arm_compat.h */
439*a8f7f3fcSMatthias Ringwald
440*a8f7f3fcSMatthias Ringwald
441*a8f7f3fcSMatthias Ringwald /**
442*a8f7f3fcSMatthias Ringwald \brief Get Base Priority
443*a8f7f3fcSMatthias Ringwald \details Returns the current value of the Base Priority register.
444*a8f7f3fcSMatthias Ringwald \return Base Priority register value
445*a8f7f3fcSMatthias Ringwald */
__get_BASEPRI(void)446*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
447*a8f7f3fcSMatthias Ringwald {
448*a8f7f3fcSMatthias Ringwald uint32_t result;
449*a8f7f3fcSMatthias Ringwald
450*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, basepri" : "=r" (result) );
451*a8f7f3fcSMatthias Ringwald return(result);
452*a8f7f3fcSMatthias Ringwald }
453*a8f7f3fcSMatthias Ringwald
454*a8f7f3fcSMatthias Ringwald
455*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
456*a8f7f3fcSMatthias Ringwald /**
457*a8f7f3fcSMatthias Ringwald \brief Get Base Priority (non-secure)
458*a8f7f3fcSMatthias Ringwald \details Returns the current value of the non-secure Base Priority register when in secure state.
459*a8f7f3fcSMatthias Ringwald \return Base Priority register value
460*a8f7f3fcSMatthias Ringwald */
__TZ_get_BASEPRI_NS(void)461*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
462*a8f7f3fcSMatthias Ringwald {
463*a8f7f3fcSMatthias Ringwald uint32_t result;
464*a8f7f3fcSMatthias Ringwald
465*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
466*a8f7f3fcSMatthias Ringwald return(result);
467*a8f7f3fcSMatthias Ringwald }
468*a8f7f3fcSMatthias Ringwald #endif
469*a8f7f3fcSMatthias Ringwald
470*a8f7f3fcSMatthias Ringwald
/**
  \brief   Set Base Priority
  \details Assigns the given value to the Base Priority register.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  /* "memory" clobber: compiler barrier around the priority-mask change */
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}
480*a8f7f3fcSMatthias Ringwald
481*a8f7f3fcSMatthias Ringwald
482*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Base Priority (non-secure)
  \details Assigns the given value to the non-secure Base Priority register when in secure state.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  /* "memory" clobber: compiler barrier around the priority-mask change */
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
492*a8f7f3fcSMatthias Ringwald #endif
493*a8f7f3fcSMatthias Ringwald
494*a8f7f3fcSMatthias Ringwald
/**
  \brief   Set Base Priority with condition
  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
           or the new value increases the BASEPRI priority level.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  /* basepri_max: hardware performs the conditional update (only raises
     the masking level), no compare needed in software */
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
505*a8f7f3fcSMatthias Ringwald
506*a8f7f3fcSMatthias Ringwald
507*a8f7f3fcSMatthias Ringwald /**
508*a8f7f3fcSMatthias Ringwald \brief Get Fault Mask
509*a8f7f3fcSMatthias Ringwald \details Returns the current value of the Fault Mask register.
510*a8f7f3fcSMatthias Ringwald \return Fault Mask register value
511*a8f7f3fcSMatthias Ringwald */
__get_FAULTMASK(void)512*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
513*a8f7f3fcSMatthias Ringwald {
514*a8f7f3fcSMatthias Ringwald uint32_t result;
515*a8f7f3fcSMatthias Ringwald
516*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
517*a8f7f3fcSMatthias Ringwald return(result);
518*a8f7f3fcSMatthias Ringwald }
519*a8f7f3fcSMatthias Ringwald
520*a8f7f3fcSMatthias Ringwald
521*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
522*a8f7f3fcSMatthias Ringwald /**
523*a8f7f3fcSMatthias Ringwald \brief Get Fault Mask (non-secure)
524*a8f7f3fcSMatthias Ringwald \details Returns the current value of the non-secure Fault Mask register when in secure state.
525*a8f7f3fcSMatthias Ringwald \return Fault Mask register value
526*a8f7f3fcSMatthias Ringwald */
__TZ_get_FAULTMASK_NS(void)527*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
528*a8f7f3fcSMatthias Ringwald {
529*a8f7f3fcSMatthias Ringwald uint32_t result;
530*a8f7f3fcSMatthias Ringwald
531*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
532*a8f7f3fcSMatthias Ringwald return(result);
533*a8f7f3fcSMatthias Ringwald }
534*a8f7f3fcSMatthias Ringwald #endif
535*a8f7f3fcSMatthias Ringwald
536*a8f7f3fcSMatthias Ringwald
/**
  \brief   Set Fault Mask
  \details Assigns the given value to the Fault Mask register.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  /* "memory" clobber: compiler barrier around the fault-masking change */
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}
546*a8f7f3fcSMatthias Ringwald
547*a8f7f3fcSMatthias Ringwald
548*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Fault Mask (non-secure)
  \details Assigns the given value to the non-secure Fault Mask register when in secure state.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  /* The "_ns" suffix selects the non-secure banked copy of FAULTMASK;
     "memory" clobber orders the write against surrounding accesses. */
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
558*a8f7f3fcSMatthias Ringwald #endif
559*a8f7f3fcSMatthias Ringwald
560*a8f7f3fcSMatthias Ringwald #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
561*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
562*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
563*a8f7f3fcSMatthias Ringwald
564*a8f7f3fcSMatthias Ringwald
565*a8f7f3fcSMatthias Ringwald #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
566*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
567*a8f7f3fcSMatthias Ringwald
568*a8f7f3fcSMatthias Ringwald /**
569*a8f7f3fcSMatthias Ringwald \brief Get Process Stack Pointer Limit
570*a8f7f3fcSMatthias Ringwald Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
571*a8f7f3fcSMatthias Ringwald Stack Pointer Limit register hence zero is returned always in non-secure
572*a8f7f3fcSMatthias Ringwald mode.
573*a8f7f3fcSMatthias Ringwald
574*a8f7f3fcSMatthias Ringwald \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
575*a8f7f3fcSMatthias Ringwald \return PSPLIM Register value
576*a8f7f3fcSMatthias Ringwald */
__get_PSPLIM(void)577*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
578*a8f7f3fcSMatthias Ringwald {
579*a8f7f3fcSMatthias Ringwald #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
580*a8f7f3fcSMatthias Ringwald (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
581*a8f7f3fcSMatthias Ringwald // without main extensions, the non-secure PSPLIM is RAZ/WI
582*a8f7f3fcSMatthias Ringwald return 0U;
583*a8f7f3fcSMatthias Ringwald #else
584*a8f7f3fcSMatthias Ringwald uint32_t result;
585*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, psplim" : "=r" (result) );
586*a8f7f3fcSMatthias Ringwald return result;
587*a8f7f3fcSMatthias Ringwald #endif
588*a8f7f3fcSMatthias Ringwald }
589*a8f7f3fcSMatthias Ringwald
590*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
591*a8f7f3fcSMatthias Ringwald /**
592*a8f7f3fcSMatthias Ringwald \brief Get Process Stack Pointer Limit (non-secure)
593*a8f7f3fcSMatthias Ringwald Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
594*a8f7f3fcSMatthias Ringwald Stack Pointer Limit register hence zero is returned always in non-secure
595*a8f7f3fcSMatthias Ringwald mode.
596*a8f7f3fcSMatthias Ringwald
597*a8f7f3fcSMatthias Ringwald \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
598*a8f7f3fcSMatthias Ringwald \return PSPLIM Register value
599*a8f7f3fcSMatthias Ringwald */
__TZ_get_PSPLIM_NS(void)600*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
601*a8f7f3fcSMatthias Ringwald {
602*a8f7f3fcSMatthias Ringwald #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
603*a8f7f3fcSMatthias Ringwald // without main extensions, the non-secure PSPLIM is RAZ/WI
604*a8f7f3fcSMatthias Ringwald return 0U;
605*a8f7f3fcSMatthias Ringwald #else
606*a8f7f3fcSMatthias Ringwald uint32_t result;
607*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
608*a8f7f3fcSMatthias Ringwald return result;
609*a8f7f3fcSMatthias Ringwald #endif
610*a8f7f3fcSMatthias Ringwald }
611*a8f7f3fcSMatthias Ringwald #endif
612*a8f7f3fcSMatthias Ringwald
613*a8f7f3fcSMatthias Ringwald
/**
  \brief   Set Process Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored in non-secure
  mode.

  \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;  /* silence unused-parameter warnings */
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}
633*a8f7f3fcSMatthias Ringwald
634*a8f7f3fcSMatthias Ringwald
635*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
636*a8f7f3fcSMatthias Ringwald /**
637*a8f7f3fcSMatthias Ringwald \brief Set Process Stack Pointer (non-secure)
638*a8f7f3fcSMatthias Ringwald Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
639*a8f7f3fcSMatthias Ringwald Stack Pointer Limit register hence the write is silently ignored in non-secure
640*a8f7f3fcSMatthias Ringwald mode.
641*a8f7f3fcSMatthias Ringwald
642*a8f7f3fcSMatthias Ringwald \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
643*a8f7f3fcSMatthias Ringwald \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
644*a8f7f3fcSMatthias Ringwald */
__TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)645*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
646*a8f7f3fcSMatthias Ringwald {
647*a8f7f3fcSMatthias Ringwald #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
648*a8f7f3fcSMatthias Ringwald // without main extensions, the non-secure PSPLIM is RAZ/WI
649*a8f7f3fcSMatthias Ringwald (void)ProcStackPtrLimit;
650*a8f7f3fcSMatthias Ringwald #else
651*a8f7f3fcSMatthias Ringwald __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
652*a8f7f3fcSMatthias Ringwald #endif
653*a8f7f3fcSMatthias Ringwald }
654*a8f7f3fcSMatthias Ringwald #endif
655*a8f7f3fcSMatthias Ringwald
656*a8f7f3fcSMatthias Ringwald
657*a8f7f3fcSMatthias Ringwald /**
658*a8f7f3fcSMatthias Ringwald \brief Get Main Stack Pointer Limit
659*a8f7f3fcSMatthias Ringwald Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
660*a8f7f3fcSMatthias Ringwald Stack Pointer Limit register hence zero is returned always.
661*a8f7f3fcSMatthias Ringwald
662*a8f7f3fcSMatthias Ringwald \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
663*a8f7f3fcSMatthias Ringwald \return MSPLIM Register value
664*a8f7f3fcSMatthias Ringwald */
__get_MSPLIM(void)665*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
666*a8f7f3fcSMatthias Ringwald {
667*a8f7f3fcSMatthias Ringwald #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
668*a8f7f3fcSMatthias Ringwald (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
669*a8f7f3fcSMatthias Ringwald // without main extensions, the non-secure MSPLIM is RAZ/WI
670*a8f7f3fcSMatthias Ringwald return 0U;
671*a8f7f3fcSMatthias Ringwald #else
672*a8f7f3fcSMatthias Ringwald uint32_t result;
673*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, msplim" : "=r" (result) );
674*a8f7f3fcSMatthias Ringwald return result;
675*a8f7f3fcSMatthias Ringwald #endif
676*a8f7f3fcSMatthias Ringwald }
677*a8f7f3fcSMatthias Ringwald
678*a8f7f3fcSMatthias Ringwald
679*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
680*a8f7f3fcSMatthias Ringwald /**
681*a8f7f3fcSMatthias Ringwald \brief Get Main Stack Pointer Limit (non-secure)
682*a8f7f3fcSMatthias Ringwald Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
683*a8f7f3fcSMatthias Ringwald Stack Pointer Limit register hence zero is returned always.
684*a8f7f3fcSMatthias Ringwald
685*a8f7f3fcSMatthias Ringwald \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state.
686*a8f7f3fcSMatthias Ringwald \return MSPLIM Register value
687*a8f7f3fcSMatthias Ringwald */
__TZ_get_MSPLIM_NS(void)688*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
689*a8f7f3fcSMatthias Ringwald {
690*a8f7f3fcSMatthias Ringwald #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
691*a8f7f3fcSMatthias Ringwald // without main extensions, the non-secure MSPLIM is RAZ/WI
692*a8f7f3fcSMatthias Ringwald return 0U;
693*a8f7f3fcSMatthias Ringwald #else
694*a8f7f3fcSMatthias Ringwald uint32_t result;
695*a8f7f3fcSMatthias Ringwald __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
696*a8f7f3fcSMatthias Ringwald return result;
697*a8f7f3fcSMatthias Ringwald #endif
698*a8f7f3fcSMatthias Ringwald }
699*a8f7f3fcSMatthias Ringwald #endif
700*a8f7f3fcSMatthias Ringwald
701*a8f7f3fcSMatthias Ringwald
/**
  \brief   Set Main Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored.

  \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;  /* silence unused-parameter warnings */
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}
720*a8f7f3fcSMatthias Ringwald
721*a8f7f3fcSMatthias Ringwald
722*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Main Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored.

  \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;  /* silence unused-parameter warnings */
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
740*a8f7f3fcSMatthias Ringwald #endif
741*a8f7f3fcSMatthias Ringwald
742*a8f7f3fcSMatthias Ringwald #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
743*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
744*a8f7f3fcSMatthias Ringwald
/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return               Floating Point Status/Control register value
 */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED   == 1U))     )
/* Object-like alias: __get_FPSCR() expands to (uint32_t)__builtin_arm_get_fpscr(). */
#define __get_FPSCR      (uint32_t)__builtin_arm_get_fpscr
#else
/* No FPU configured: reads as zero. */
#define __get_FPSCR()    ((uint32_t)0U)
#endif

/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED   == 1U))     )
#define __set_FPSCR      __builtin_arm_set_fpscr
#else
/* No FPU configured: write is discarded (argument still evaluated once). */
#define __set_FPSCR(x)   ((void)(x))
#endif
768*a8f7f3fcSMatthias Ringwald
769*a8f7f3fcSMatthias Ringwald
770*a8f7f3fcSMatthias Ringwald /*@} end of CMSIS_Core_RegAccFunctions */
771*a8f7f3fcSMatthias Ringwald
772*a8f7f3fcSMatthias Ringwald
773*a8f7f3fcSMatthias Ringwald /* ########################## Core Instruction Access ######################### */
774*a8f7f3fcSMatthias Ringwald /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
775*a8f7f3fcSMatthias Ringwald Access to dedicated instructions
776*a8f7f3fcSMatthias Ringwald @{
777*a8f7f3fcSMatthias Ringwald */
778*a8f7f3fcSMatthias Ringwald
/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP          __builtin_arm_nop

/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI          __builtin_arm_wfi


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE          __builtin_arm_wfe


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV          __builtin_arm_sev
816*a8f7f3fcSMatthias Ringwald
817*a8f7f3fcSMatthias Ringwald
/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
/* No trailing semicolon in the expansion: the caller writes __ISB(); and a
   semicolon inside the macro would create an empty statement, breaking uses
   such as "if (cond) __ISB(); else ...". */
#define __ISB()        __builtin_arm_isb(0xF)
825*a8f7f3fcSMatthias Ringwald
/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
/* Trailing semicolon removed from the expansion (same hazard as __ISB):
   it would otherwise break "if (cond) __DSB(); else ..." constructs. */
#define __DSB()        __builtin_arm_dsb(0xF)
832*a8f7f3fcSMatthias Ringwald
833*a8f7f3fcSMatthias Ringwald
/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
/* Trailing semicolon removed from the expansion (same hazard as __ISB):
   it would otherwise break "if (cond) __DMB(); else ..." constructs. */
#define __DMB()        __builtin_arm_dmb(0xF)
840*a8f7f3fcSMatthias Ringwald
841*a8f7f3fcSMatthias Ringwald
/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV(value)   __builtin_bswap32(value)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
/* Byte-swap the whole word, then rotate by 16 to restore halfword order. */
#define __REV16(value) __ROR(__REV(value), 16)


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REVSH(value) (int16_t)__builtin_bswap16(value)
867*a8f7f3fcSMatthias Ringwald
868*a8f7f3fcSMatthias Ringwald
869*a8f7f3fcSMatthias Ringwald /**
870*a8f7f3fcSMatthias Ringwald \brief Rotate Right in unsigned value (32 bit)
871*a8f7f3fcSMatthias Ringwald \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
872*a8f7f3fcSMatthias Ringwald \param [in] op1 Value to rotate
873*a8f7f3fcSMatthias Ringwald \param [in] op2 Number of Bits to rotate
874*a8f7f3fcSMatthias Ringwald \return Rotated value
875*a8f7f3fcSMatthias Ringwald */
__ROR(uint32_t op1,uint32_t op2)876*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
877*a8f7f3fcSMatthias Ringwald {
878*a8f7f3fcSMatthias Ringwald op2 %= 32U;
879*a8f7f3fcSMatthias Ringwald if (op2 == 0U)
880*a8f7f3fcSMatthias Ringwald {
881*a8f7f3fcSMatthias Ringwald return op1;
882*a8f7f3fcSMatthias Ringwald }
883*a8f7f3fcSMatthias Ringwald return (op1 >> op2) | (op1 << (32U - op2));
884*a8f7f3fcSMatthias Ringwald }
885*a8f7f3fcSMatthias Ringwald
886*a8f7f3fcSMatthias Ringwald
/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
/* Stringizes the immediate into the BKPT instruction; value must be a
   compile-time literal (it becomes part of the asm text). */
#define __BKPT(value)  __ASM volatile ("bkpt "#value)


/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __RBIT         __builtin_arm_rbit
904*a8f7f3fcSMatthias Ringwald
905*a8f7f3fcSMatthias Ringwald /**
906*a8f7f3fcSMatthias Ringwald \brief Count leading zeros
907*a8f7f3fcSMatthias Ringwald \details Counts the number of leading zeros of a data value.
908*a8f7f3fcSMatthias Ringwald \param [in] value Value to count the leading zeros
909*a8f7f3fcSMatthias Ringwald \return number of leading zeros in value
910*a8f7f3fcSMatthias Ringwald */
911*a8f7f3fcSMatthias Ringwald #define __CLZ (uint8_t)__builtin_clz
912*a8f7f3fcSMatthias Ringwald
913*a8f7f3fcSMatthias Ringwald
914*a8f7f3fcSMatthias Ringwald #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
915*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
916*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
917*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes a exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
/* __builtin_arm_ldrex infers the access width from the pointee type;
   the cast only narrows the returned value. */
#define __LDREXB        (uint8_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes a exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
#define __LDREXH        (uint16_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes a exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
#define __LDREXW        (uint32_t)__builtin_arm_ldrex


/**
  \brief   STR Exclusive (8 bit)
  \details Executes a exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
/* __builtin_arm_strex returns 0 on success, 1 if the exclusive monitor
   was lost (another agent touched the location since the LDREX). */
#define __STREXB        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (16 bit)
  \details Executes a exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXH        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (32 bit)
  \details Executes a exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXW        (uint32_t)__builtin_arm_strex


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX             __builtin_arm_clrex
983*a8f7f3fcSMatthias Ringwald
984*a8f7f3fcSMatthias Ringwald #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
985*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
986*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
987*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
988*a8f7f3fcSMatthias Ringwald
989*a8f7f3fcSMatthias Ringwald
990*a8f7f3fcSMatthias Ringwald #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
991*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
992*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
993*a8f7f3fcSMatthias Ringwald
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
/* sat must be a compile-time constant in range; the builtin maps to SSAT. */
#define __SSAT             __builtin_arm_ssat


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
/* sat must be a compile-time constant in range; the builtin maps to USAT. */
#define __USAT             __builtin_arm_usat
1012*a8f7f3fcSMatthias Ringwald
1013*a8f7f3fcSMatthias Ringwald
1014*a8f7f3fcSMatthias Ringwald /**
1015*a8f7f3fcSMatthias Ringwald \brief Rotate Right with Extend (32 bit)
1016*a8f7f3fcSMatthias Ringwald \details Moves each bit of a bitstring right by one bit.
1017*a8f7f3fcSMatthias Ringwald The carry input is shifted in at the left end of the bitstring.
1018*a8f7f3fcSMatthias Ringwald \param [in] value Value to rotate
1019*a8f7f3fcSMatthias Ringwald \return Rotated value
1020*a8f7f3fcSMatthias Ringwald */
__RRX(uint32_t value)1021*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
1022*a8f7f3fcSMatthias Ringwald {
1023*a8f7f3fcSMatthias Ringwald uint32_t result;
1024*a8f7f3fcSMatthias Ringwald
1025*a8f7f3fcSMatthias Ringwald __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
1026*a8f7f3fcSMatthias Ringwald return(result);
1027*a8f7f3fcSMatthias Ringwald }
1028*a8f7f3fcSMatthias Ringwald
1029*a8f7f3fcSMatthias Ringwald
1030*a8f7f3fcSMatthias Ringwald /**
1031*a8f7f3fcSMatthias Ringwald \brief LDRT Unprivileged (8 bit)
1032*a8f7f3fcSMatthias Ringwald \details Executes a Unprivileged LDRT instruction for 8 bit value.
1033*a8f7f3fcSMatthias Ringwald \param [in] ptr Pointer to data
1034*a8f7f3fcSMatthias Ringwald \return value of type uint8_t at (*ptr)
1035*a8f7f3fcSMatthias Ringwald */
__LDRBT(volatile uint8_t * ptr)1036*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
1037*a8f7f3fcSMatthias Ringwald {
1038*a8f7f3fcSMatthias Ringwald uint32_t result;
1039*a8f7f3fcSMatthias Ringwald
1040*a8f7f3fcSMatthias Ringwald __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
1041*a8f7f3fcSMatthias Ringwald return ((uint8_t) result); /* Add explicit type cast here */
1042*a8f7f3fcSMatthias Ringwald }
1043*a8f7f3fcSMatthias Ringwald
1044*a8f7f3fcSMatthias Ringwald
1045*a8f7f3fcSMatthias Ringwald /**
1046*a8f7f3fcSMatthias Ringwald \brief LDRT Unprivileged (16 bit)
1047*a8f7f3fcSMatthias Ringwald \details Executes a Unprivileged LDRT instruction for 16 bit values.
1048*a8f7f3fcSMatthias Ringwald \param [in] ptr Pointer to data
1049*a8f7f3fcSMatthias Ringwald \return value of type uint16_t at (*ptr)
1050*a8f7f3fcSMatthias Ringwald */
__LDRHT(volatile uint16_t * ptr)1051*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
1052*a8f7f3fcSMatthias Ringwald {
1053*a8f7f3fcSMatthias Ringwald uint32_t result;
1054*a8f7f3fcSMatthias Ringwald
1055*a8f7f3fcSMatthias Ringwald __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
1056*a8f7f3fcSMatthias Ringwald return ((uint16_t) result); /* Add explicit type cast here */
1057*a8f7f3fcSMatthias Ringwald }
1058*a8f7f3fcSMatthias Ringwald
1059*a8f7f3fcSMatthias Ringwald
/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an Unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  /* "Q" constraint: single-base-register memory operand, as LDRT requires. */
  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
1073*a8f7f3fcSMatthias Ringwald
1074*a8f7f3fcSMatthias Ringwald
/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an Unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  /* value is widened to uint32_t so the operand occupies a full core register. */
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1085*a8f7f3fcSMatthias Ringwald
1086*a8f7f3fcSMatthias Ringwald
/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an Unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  /* value is widened to uint32_t so the operand occupies a full core register. */
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1097*a8f7f3fcSMatthias Ringwald
1098*a8f7f3fcSMatthias Ringwald
/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an Unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
1109*a8f7f3fcSMatthias Ringwald
1110*a8f7f3fcSMatthias Ringwald #else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1111*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1112*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
1113*a8f7f3fcSMatthias Ringwald
1114*a8f7f3fcSMatthias Ringwald /**
1115*a8f7f3fcSMatthias Ringwald \brief Signed Saturate
1116*a8f7f3fcSMatthias Ringwald \details Saturates a signed value.
1117*a8f7f3fcSMatthias Ringwald \param [in] value Value to be saturated
1118*a8f7f3fcSMatthias Ringwald \param [in] sat Bit position to saturate to (1..32)
1119*a8f7f3fcSMatthias Ringwald \return Saturated value
1120*a8f7f3fcSMatthias Ringwald */
__SSAT(int32_t val,uint32_t sat)1121*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
1122*a8f7f3fcSMatthias Ringwald {
1123*a8f7f3fcSMatthias Ringwald if ((sat >= 1U) && (sat <= 32U))
1124*a8f7f3fcSMatthias Ringwald {
1125*a8f7f3fcSMatthias Ringwald const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
1126*a8f7f3fcSMatthias Ringwald const int32_t min = -1 - max ;
1127*a8f7f3fcSMatthias Ringwald if (val > max)
1128*a8f7f3fcSMatthias Ringwald {
1129*a8f7f3fcSMatthias Ringwald return max;
1130*a8f7f3fcSMatthias Ringwald }
1131*a8f7f3fcSMatthias Ringwald else if (val < min)
1132*a8f7f3fcSMatthias Ringwald {
1133*a8f7f3fcSMatthias Ringwald return min;
1134*a8f7f3fcSMatthias Ringwald }
1135*a8f7f3fcSMatthias Ringwald }
1136*a8f7f3fcSMatthias Ringwald return val;
1137*a8f7f3fcSMatthias Ringwald }
1138*a8f7f3fcSMatthias Ringwald
1139*a8f7f3fcSMatthias Ringwald /**
1140*a8f7f3fcSMatthias Ringwald \brief Unsigned Saturate
1141*a8f7f3fcSMatthias Ringwald \details Saturates an unsigned value.
1142*a8f7f3fcSMatthias Ringwald \param [in] value Value to be saturated
1143*a8f7f3fcSMatthias Ringwald \param [in] sat Bit position to saturate to (0..31)
1144*a8f7f3fcSMatthias Ringwald \return Saturated value
1145*a8f7f3fcSMatthias Ringwald */
__USAT(int32_t val,uint32_t sat)1146*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
1147*a8f7f3fcSMatthias Ringwald {
1148*a8f7f3fcSMatthias Ringwald if (sat <= 31U)
1149*a8f7f3fcSMatthias Ringwald {
1150*a8f7f3fcSMatthias Ringwald const uint32_t max = ((1U << sat) - 1U);
1151*a8f7f3fcSMatthias Ringwald if (val > (int32_t)max)
1152*a8f7f3fcSMatthias Ringwald {
1153*a8f7f3fcSMatthias Ringwald return max;
1154*a8f7f3fcSMatthias Ringwald }
1155*a8f7f3fcSMatthias Ringwald else if (val < 0)
1156*a8f7f3fcSMatthias Ringwald {
1157*a8f7f3fcSMatthias Ringwald return 0U;
1158*a8f7f3fcSMatthias Ringwald }
1159*a8f7f3fcSMatthias Ringwald }
1160*a8f7f3fcSMatthias Ringwald return (uint32_t)val;
1161*a8f7f3fcSMatthias Ringwald }
1162*a8f7f3fcSMatthias Ringwald
1163*a8f7f3fcSMatthias Ringwald #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1164*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1165*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
1166*a8f7f3fcSMatthias Ringwald
1167*a8f7f3fcSMatthias Ringwald
1168*a8f7f3fcSMatthias Ringwald #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1169*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
/**
  \brief   Load-Acquire (8 bit)
  \details Executes a LDAB instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return        value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}
1183*a8f7f3fcSMatthias Ringwald
1184*a8f7f3fcSMatthias Ringwald
/**
  \brief   Load-Acquire (16 bit)
  \details Executes a LDAH instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}
1198*a8f7f3fcSMatthias Ringwald
1199*a8f7f3fcSMatthias Ringwald
/**
  \brief   Load-Acquire (32 bit)
  \details Executes a LDA instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
1213*a8f7f3fcSMatthias Ringwald
1214*a8f7f3fcSMatthias Ringwald
/**
  \brief   Store-Release (8 bit)
  \details Executes a STLB instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  /* value is widened to uint32_t so the operand occupies a full core register. */
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1225*a8f7f3fcSMatthias Ringwald
1226*a8f7f3fcSMatthias Ringwald
/**
  \brief   Store-Release (16 bit)
  \details Executes a STLH instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  /* value is widened to uint32_t so the operand occupies a full core register. */
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1237*a8f7f3fcSMatthias Ringwald
1238*a8f7f3fcSMatthias Ringwald
/**
  \brief   Store-Release (32 bit)
  \details Executes a STL instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1249*a8f7f3fcSMatthias Ringwald
1250*a8f7f3fcSMatthias Ringwald
/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes a LDAB exclusive instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return        value of type uint8_t at (*ptr)
 */
/* NOTE: object-like macro — the cast binds to the builtin's result, so
   __LDAEXB(p) expands to (uint8_t)__builtin_arm_ldaex(p). The same idiom
   is used for all __LDAEX*/__STLEX* aliases below. */
#define     __LDAEXB                 (uint8_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes a LDAH exclusive instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define     __LDAEXH                 (uint16_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes a LDA exclusive instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define     __LDAEX                  (uint32_t)__builtin_arm_ldaex


/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes a STLB exclusive instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEXB                 (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes a STLH exclusive instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEXH                 (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes a STL exclusive instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEX                  (uint32_t)__builtin_arm_stlex
1309*a8f7f3fcSMatthias Ringwald
1310*a8f7f3fcSMatthias Ringwald #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1311*a8f7f3fcSMatthias Ringwald (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
1312*a8f7f3fcSMatthias Ringwald
1313*a8f7f3fcSMatthias Ringwald /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
1314*a8f7f3fcSMatthias Ringwald
1315*a8f7f3fcSMatthias Ringwald
1316*a8f7f3fcSMatthias Ringwald /* ################### Compiler specific Intrinsics ########################### */
1317*a8f7f3fcSMatthias Ringwald /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
1318*a8f7f3fcSMatthias Ringwald Access to dedicated SIMD instructions
1319*a8f7f3fcSMatthias Ringwald @{
1320*a8f7f3fcSMatthias Ringwald */
1321*a8f7f3fcSMatthias Ringwald
1322*a8f7f3fcSMatthias Ringwald #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1323*a8f7f3fcSMatthias Ringwald
/** \brief Parallel signed add of four 8-bit lanes (SADD8 instruction). */
__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel saturating signed add of four 8-bit lanes (QADD8 instruction). */
__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel signed halving add of four 8-bit lanes (SHADD8 instruction). */
__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel unsigned add of four 8-bit lanes (UADD8 instruction). */
__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel saturating unsigned add of four 8-bit lanes (UQADD8 instruction). */
__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel unsigned halving add of four 8-bit lanes (UHADD8 instruction). */
__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1371*a8f7f3fcSMatthias Ringwald
1372*a8f7f3fcSMatthias Ringwald
/** \brief Parallel signed subtract of four 8-bit lanes (SSUB8 instruction). */
__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel saturating signed subtract of four 8-bit lanes (QSUB8 instruction). */
__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel signed halving subtract of four 8-bit lanes (SHSUB8 instruction). */
__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel unsigned subtract of four 8-bit lanes (USUB8 instruction). */
__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel saturating unsigned subtract of four 8-bit lanes (UQSUB8 instruction). */
__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel unsigned halving subtract of four 8-bit lanes (UHSUB8 instruction). */
__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1420*a8f7f3fcSMatthias Ringwald
1421*a8f7f3fcSMatthias Ringwald
/** \brief Parallel signed add of two 16-bit halfwords (SADD16 instruction). */
__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel saturating signed add of two 16-bit halfwords (QADD16 instruction). */
__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel signed halving add of two 16-bit halfwords (SHADD16 instruction). */
__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel unsigned add of two 16-bit halfwords (UADD16 instruction). */
__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel saturating unsigned add of two 16-bit halfwords (UQADD16 instruction). */
__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel unsigned halving add of two 16-bit halfwords (UHADD16 instruction). */
__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1469*a8f7f3fcSMatthias Ringwald
/** \brief Parallel signed subtract of two 16-bit halfwords (SSUB16 instruction). */
__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel saturating signed subtract of two 16-bit halfwords (QSUB16 instruction). */
__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel signed halving subtract of two 16-bit halfwords (SHSUB16 instruction). */
__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel unsigned subtract of two 16-bit halfwords (USUB16 instruction). */
__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel saturating unsigned subtract of two 16-bit halfwords (UQSUB16 instruction). */
__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Parallel unsigned halving subtract of two 16-bit halfwords (UHSUB16 instruction). */
__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1517*a8f7f3fcSMatthias Ringwald
/** \brief Signed add/subtract with halfword exchange (SASX instruction). */
__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Saturating signed add/subtract with halfword exchange (QASX instruction). */
__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Signed halving add/subtract with halfword exchange (SHASX instruction). */
__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Unsigned add/subtract with halfword exchange (UASX instruction). */
__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Saturating unsigned add/subtract with halfword exchange (UQASX instruction). */
__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Unsigned halving add/subtract with halfword exchange (UHASX instruction). */
__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1565*a8f7f3fcSMatthias Ringwald
/** \brief Signed subtract/add with halfword exchange (SSAX instruction). */
__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Saturating signed subtract/add with halfword exchange (QSAX instruction). */
__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Signed halving subtract/add with halfword exchange (SHSAX instruction). */
__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Unsigned subtract/add with halfword exchange (USAX instruction). */
__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Saturating unsigned subtract/add with halfword exchange (UQSAX instruction). */
__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief Unsigned halving subtract/add with halfword exchange (UHSAX instruction). */
__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1613*a8f7f3fcSMatthias Ringwald
__USAD8(uint32_t op1,uint32_t op2)1614*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
1615*a8f7f3fcSMatthias Ringwald {
1616*a8f7f3fcSMatthias Ringwald uint32_t result;
1617*a8f7f3fcSMatthias Ringwald
1618*a8f7f3fcSMatthias Ringwald __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1619*a8f7f3fcSMatthias Ringwald return(result);
1620*a8f7f3fcSMatthias Ringwald }
1621*a8f7f3fcSMatthias Ringwald
__USADA8(uint32_t op1,uint32_t op2,uint32_t op3)1622*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
1623*a8f7f3fcSMatthias Ringwald {
1624*a8f7f3fcSMatthias Ringwald uint32_t result;
1625*a8f7f3fcSMatthias Ringwald
1626*a8f7f3fcSMatthias Ringwald __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1627*a8f7f3fcSMatthias Ringwald return(result);
1628*a8f7f3fcSMatthias Ringwald }
1629*a8f7f3fcSMatthias Ringwald
/* Dual 16-bit signed saturate (SSAT16): saturates both signed halfwords of
   ARG1 to ARG2 bits.  ARG2 must be a compile-time constant — it is bound to
   the "I" immediate constraint of the inline assembly, so a runtime value
   will not assemble. */
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
1636*a8f7f3fcSMatthias Ringwald
/* Dual 16-bit unsigned saturate (USAT16): saturates both halfwords of ARG1
   to ARG2 bits.  ARG2 must be a compile-time constant ("I" immediate
   constraint), same restriction as __SSAT16. */
#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
1643*a8f7f3fcSMatthias Ringwald
__UXTB16(uint32_t op1)1644*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
1645*a8f7f3fcSMatthias Ringwald {
1646*a8f7f3fcSMatthias Ringwald uint32_t result;
1647*a8f7f3fcSMatthias Ringwald
1648*a8f7f3fcSMatthias Ringwald __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
1649*a8f7f3fcSMatthias Ringwald return(result);
1650*a8f7f3fcSMatthias Ringwald }
1651*a8f7f3fcSMatthias Ringwald
__UXTAB16(uint32_t op1,uint32_t op2)1652*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
1653*a8f7f3fcSMatthias Ringwald {
1654*a8f7f3fcSMatthias Ringwald uint32_t result;
1655*a8f7f3fcSMatthias Ringwald
1656*a8f7f3fcSMatthias Ringwald __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1657*a8f7f3fcSMatthias Ringwald return(result);
1658*a8f7f3fcSMatthias Ringwald }
1659*a8f7f3fcSMatthias Ringwald
__SXTB16(uint32_t op1)1660*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
1661*a8f7f3fcSMatthias Ringwald {
1662*a8f7f3fcSMatthias Ringwald uint32_t result;
1663*a8f7f3fcSMatthias Ringwald
1664*a8f7f3fcSMatthias Ringwald __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
1665*a8f7f3fcSMatthias Ringwald return(result);
1666*a8f7f3fcSMatthias Ringwald }
1667*a8f7f3fcSMatthias Ringwald
__SXTAB16(uint32_t op1,uint32_t op2)1668*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
1669*a8f7f3fcSMatthias Ringwald {
1670*a8f7f3fcSMatthias Ringwald uint32_t result;
1671*a8f7f3fcSMatthias Ringwald
1672*a8f7f3fcSMatthias Ringwald __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1673*a8f7f3fcSMatthias Ringwald return(result);
1674*a8f7f3fcSMatthias Ringwald }
1675*a8f7f3fcSMatthias Ringwald
__SMUAD(uint32_t op1,uint32_t op2)1676*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
1677*a8f7f3fcSMatthias Ringwald {
1678*a8f7f3fcSMatthias Ringwald uint32_t result;
1679*a8f7f3fcSMatthias Ringwald
1680*a8f7f3fcSMatthias Ringwald __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1681*a8f7f3fcSMatthias Ringwald return(result);
1682*a8f7f3fcSMatthias Ringwald }
1683*a8f7f3fcSMatthias Ringwald
__SMUADX(uint32_t op1,uint32_t op2)1684*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
1685*a8f7f3fcSMatthias Ringwald {
1686*a8f7f3fcSMatthias Ringwald uint32_t result;
1687*a8f7f3fcSMatthias Ringwald
1688*a8f7f3fcSMatthias Ringwald __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1689*a8f7f3fcSMatthias Ringwald return(result);
1690*a8f7f3fcSMatthias Ringwald }
1691*a8f7f3fcSMatthias Ringwald
__SMLAD(uint32_t op1,uint32_t op2,uint32_t op3)1692*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
1693*a8f7f3fcSMatthias Ringwald {
1694*a8f7f3fcSMatthias Ringwald uint32_t result;
1695*a8f7f3fcSMatthias Ringwald
1696*a8f7f3fcSMatthias Ringwald __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1697*a8f7f3fcSMatthias Ringwald return(result);
1698*a8f7f3fcSMatthias Ringwald }
1699*a8f7f3fcSMatthias Ringwald
__SMLADX(uint32_t op1,uint32_t op2,uint32_t op3)1700*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
1701*a8f7f3fcSMatthias Ringwald {
1702*a8f7f3fcSMatthias Ringwald uint32_t result;
1703*a8f7f3fcSMatthias Ringwald
1704*a8f7f3fcSMatthias Ringwald __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1705*a8f7f3fcSMatthias Ringwald return(result);
1706*a8f7f3fcSMatthias Ringwald }
1707*a8f7f3fcSMatthias Ringwald
/**
  \brief   Dual 16-bit signed multiply with 64-bit accumulate (SMLALD).
  \param [in] op1  packed signed halfword multiplicands
  \param [in] op2  packed signed halfword multiplicands
  \param [in] acc  64-bit accumulator input
  \return          acc plus both halfword products
 */
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* View the 64-bit accumulator as a 32-bit register pair so each half can
     be bound to one asm operand. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian: w32[0] is the low word */
  /* Matching constraints "0"/"1" tie the accumulator inputs to the two
     output registers, as SMLALD reads and writes the same pair. */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian: word order in the union is swapped */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1724*a8f7f3fcSMatthias Ringwald
/**
  \brief   Dual 16-bit signed multiply with exchange and 64-bit accumulate (SMLALDX).
  \param [in] op1  packed signed halfword multiplicands
  \param [in] op2  packed signed halfword multiplicands (halves exchanged)
  \param [in] acc  64-bit accumulator input
  \return          acc plus both cross products
 */
__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* 64-bit accumulator viewed as a 32-bit register pair for the asm. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian: w32[0] is the low word */
  /* "0"/"1" tie accumulator inputs to the output register pair. */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian: word order swapped */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1741*a8f7f3fcSMatthias Ringwald
__SMUSD(uint32_t op1,uint32_t op2)1742*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
1743*a8f7f3fcSMatthias Ringwald {
1744*a8f7f3fcSMatthias Ringwald uint32_t result;
1745*a8f7f3fcSMatthias Ringwald
1746*a8f7f3fcSMatthias Ringwald __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1747*a8f7f3fcSMatthias Ringwald return(result);
1748*a8f7f3fcSMatthias Ringwald }
1749*a8f7f3fcSMatthias Ringwald
__SMUSDX(uint32_t op1,uint32_t op2)1750*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
1751*a8f7f3fcSMatthias Ringwald {
1752*a8f7f3fcSMatthias Ringwald uint32_t result;
1753*a8f7f3fcSMatthias Ringwald
1754*a8f7f3fcSMatthias Ringwald __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1755*a8f7f3fcSMatthias Ringwald return(result);
1756*a8f7f3fcSMatthias Ringwald }
1757*a8f7f3fcSMatthias Ringwald
__SMLSD(uint32_t op1,uint32_t op2,uint32_t op3)1758*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
1759*a8f7f3fcSMatthias Ringwald {
1760*a8f7f3fcSMatthias Ringwald uint32_t result;
1761*a8f7f3fcSMatthias Ringwald
1762*a8f7f3fcSMatthias Ringwald __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1763*a8f7f3fcSMatthias Ringwald return(result);
1764*a8f7f3fcSMatthias Ringwald }
1765*a8f7f3fcSMatthias Ringwald
__SMLSDX(uint32_t op1,uint32_t op2,uint32_t op3)1766*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
1767*a8f7f3fcSMatthias Ringwald {
1768*a8f7f3fcSMatthias Ringwald uint32_t result;
1769*a8f7f3fcSMatthias Ringwald
1770*a8f7f3fcSMatthias Ringwald __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1771*a8f7f3fcSMatthias Ringwald return(result);
1772*a8f7f3fcSMatthias Ringwald }
1773*a8f7f3fcSMatthias Ringwald
/**
  \brief   Dual 16-bit signed multiply-subtract with 64-bit accumulate (SMLSLD).
  \param [in] op1  packed signed halfword multiplicands
  \param [in] op2  packed signed halfword multiplicands
  \param [in] acc  64-bit accumulator input
  \return          acc + (bottom product - top product)
 */
__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* 64-bit accumulator viewed as a 32-bit register pair for the asm. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian: w32[0] is the low word */
  /* "0"/"1" tie accumulator inputs to the output register pair. */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian: word order swapped */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1790*a8f7f3fcSMatthias Ringwald
/**
  \brief   Dual 16-bit signed multiply-subtract with exchange and 64-bit accumulate (SMLSLDX).
  \param [in] op1  packed signed halfword multiplicands
  \param [in] op2  packed signed halfword multiplicands (halves exchanged)
  \param [in] acc  64-bit accumulator input
  \return          acc + difference of cross products
 */
__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  /* 64-bit accumulator viewed as a 32-bit register pair for the asm. */
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian: w32[0] is the low word */
  /* "0"/"1" tie accumulator inputs to the output register pair. */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian: word order swapped */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1807*a8f7f3fcSMatthias Ringwald
__SEL(uint32_t op1,uint32_t op2)1808*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
1809*a8f7f3fcSMatthias Ringwald {
1810*a8f7f3fcSMatthias Ringwald uint32_t result;
1811*a8f7f3fcSMatthias Ringwald
1812*a8f7f3fcSMatthias Ringwald __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1813*a8f7f3fcSMatthias Ringwald return(result);
1814*a8f7f3fcSMatthias Ringwald }
1815*a8f7f3fcSMatthias Ringwald
__QADD(int32_t op1,int32_t op2)1816*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
1817*a8f7f3fcSMatthias Ringwald {
1818*a8f7f3fcSMatthias Ringwald int32_t result;
1819*a8f7f3fcSMatthias Ringwald
1820*a8f7f3fcSMatthias Ringwald __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1821*a8f7f3fcSMatthias Ringwald return(result);
1822*a8f7f3fcSMatthias Ringwald }
1823*a8f7f3fcSMatthias Ringwald
__QSUB(int32_t op1,int32_t op2)1824*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2)
1825*a8f7f3fcSMatthias Ringwald {
1826*a8f7f3fcSMatthias Ringwald int32_t result;
1827*a8f7f3fcSMatthias Ringwald
1828*a8f7f3fcSMatthias Ringwald __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1829*a8f7f3fcSMatthias Ringwald return(result);
1830*a8f7f3fcSMatthias Ringwald }
1831*a8f7f3fcSMatthias Ringwald
/* NOTE(review): disabled inline-assembly variants of __PKHBT/__PKHTB, kept
   for reference only; the active C emulations follow below.  The asm PKHTB
   special-cases ARG3 == 0 because "asr #0" actually encodes a 32-bit
   arithmetic shift. */
#if 0
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })
#endif
1850*a8f7f3fcSMatthias Ringwald
/* C emulations of the pack-halfword instructions:
   __PKHBT: bottom half of ARG1 combined with (ARG2 << ARG3) as the top half.
   __PKHTB: top half of ARG1 combined with (ARG2 >> ARG3) as the bottom half.
   NOTE(review): __PKHTB uses a logical (unsigned) right shift, while the
   PKHTB instruction performs an arithmetic shift; results differ for
   ARG3 > 16 with a negative ARG2 — confirm callers stay within ARG3 <= 16.
   ARG3 >= 32 is undefined behavior in C (shift by type width). */
#define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \
((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )

#define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \
((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )
1856*a8f7f3fcSMatthias Ringwald
__SMMLA(int32_t op1,int32_t op2,int32_t op3)1857*a8f7f3fcSMatthias Ringwald __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
1858*a8f7f3fcSMatthias Ringwald {
1859*a8f7f3fcSMatthias Ringwald int32_t result;
1860*a8f7f3fcSMatthias Ringwald
1861*a8f7f3fcSMatthias Ringwald __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
1862*a8f7f3fcSMatthias Ringwald return(result);
1863*a8f7f3fcSMatthias Ringwald }
1864*a8f7f3fcSMatthias Ringwald
1865*a8f7f3fcSMatthias Ringwald #endif /* (__ARM_FEATURE_DSP == 1) */
1866*a8f7f3fcSMatthias Ringwald /*@} end of group CMSIS_SIMD_intrinsics */
1867*a8f7f3fcSMatthias Ringwald
1868*a8f7f3fcSMatthias Ringwald
1869*a8f7f3fcSMatthias Ringwald #endif /* __CMSIS_ARMCLANG_H */
1870