/**************************************************************************//**
 * @file     cmsis_armcc_V6.h
 * @brief    CMSIS Cortex-M Core Function/Instruction Header File
 * @version  V4.30
 * @date     20. October 2015
 ******************************************************************************/
/* Copyright (c) 2009 - 2015 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.
   *
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/


#ifndef __CMSIS_ARMCC_V6_H
#define __CMSIS_ARMCC_V6_H


/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__attribute__((always_inline)) __STATIC_INLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}


/**
  \brief   Disable IRQ Interrupts
  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__attribute__((always_inline)) __STATIC_INLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}
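
#if 0  /* Usage sketch (illustrative, not part of CMSIS): a simple, non-nestable
          critical section built from the pair above. Assumes privileged
          execution; a nestable variant using PRIMASK save/restore is sketched
          further below. */
static void example_critical_section(void)
{
  __disable_irq();                /* mask IRQs (sets PRIMASK) */
  /* ... access shared state ... */
  __enable_irq();                 /* unconditionally unmask IRQs again */
}
#endif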


/**
  \brief   Get Control Register
  \details Returns the content of the Control Register.
  \return               Control Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Get Control Register (non-secure)
  \details Returns the content of the non-secure Control Register when in secure mode.
  \return               non-secure Control Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Control Register
  \details Writes the given value to the Control Register.
  \param [in]    control  Control Register value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}
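
#if 0  /* Usage sketch (illustrative, not part of CMSIS): switch Thread mode to
          the Process Stack Pointer by setting CONTROL.SPSEL (bit 1). An ISB is
          required after writing CONTROL so following instructions use the new
          stack. The stack array and its size are hypothetical. */
static uint64_t example_process_stack[256];

static void example_use_psp(void)
{
  __set_PSP((uint32_t)&example_process_stack[256]);  /* top of process stack */
  __set_CONTROL(__get_CONTROL() | (1U << 1));        /* SPSEL = 1: use PSP   */
  __ISB();                                           /* flush the pipeline   */
}
#endif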


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Set Control Register (non-secure)
  \details Writes the given value to the non-secure Control Register when in secure state.
  \param [in]    control  Control Register value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif


/**
  \brief   Get IPSR Register
  \details Returns the content of the IPSR Register.
  \return               IPSR Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}
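
#if 0  /* Usage sketch (illustrative, not part of CMSIS): IPSR holds the active
          exception number, so a zero value means Thread mode. */
static uint32_t example_in_handler_mode(void)
{
  return (__get_IPSR() != 0U);  /* 1: inside an exception handler, 0: Thread mode */
}
#endif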


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Get IPSR Register (non-secure)
  \details Returns the content of the non-secure IPSR Register when in secure state.
  \return               IPSR Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_IPSR_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Get APSR Register
  \details Returns the content of the APSR Register.
  \return               APSR Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Get APSR Register (non-secure)
  \details Returns the content of the non-secure APSR Register when in secure state.
  \return               APSR Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_APSR_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Get xPSR Register
  \details Returns the content of the xPSR Register.
  \return               xPSR Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Get xPSR Register (non-secure)
  \details Returns the content of the non-secure xPSR Register when in secure state.
  \return               xPSR Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_xPSR_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Get Process Stack Pointer
  \details Returns the current value of the Process Stack Pointer (PSP).
  \return               PSP Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_PSP(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, psp"  : "=r" (result) );
  return(result);
}


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Get Process Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
  \return               PSP Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_PSP_NS(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, psp_ns"  : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Process Stack Pointer
  \details Assigns the given value to the Process Stack Pointer (PSP).
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : "sp");
}


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Set Process Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : "sp");
}
#endif


/**
  \brief   Get Main Stack Pointer
  \details Returns the current value of the Main Stack Pointer (MSP).
  \return               MSP Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_MSP(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Get Main Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
  \return               MSP Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_MSP_NS(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Main Stack Pointer
  \details Assigns the given value to the Main Stack Pointer (MSP).
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : "sp");
}


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Set Main Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : "sp");
}
#endif


/**
  \brief   Get Priority Mask
  \details Returns the current state of the priority mask bit from the Priority Mask Register.
  \return               Priority Mask value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Get Priority Mask (non-secure)
  \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
  \return               Priority Mask value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Priority Mask
  \details Assigns the given value to the Priority Mask Register.
  \param [in]    priMask  Priority Mask
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
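
#if 0  /* Usage sketch (illustrative, not part of CMSIS): a nestable critical
          section that saves and restores PRIMASK instead of unconditionally
          re-enabling interrupts on exit. */
static void example_nestable_critical_section(void)
{
  uint32_t primask = __get_PRIMASK();  /* remember current mask state */
  __disable_irq();
  /* ... access shared state ... */
  __set_PRIMASK(primask);              /* restore previous mask state */
}
#endif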


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Set Priority Mask (non-secure)
  \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
  \param [in]    priMask  Priority Mask
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif


#if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U))  /* ToDo:  ARMCC_V6: check if this is ok for cortex >=3 */

/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__attribute__((always_inline)) __STATIC_INLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}


/**
  \brief   Disable FIQ
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__attribute__((always_inline)) __STATIC_INLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}


/**
  \brief   Get Base Priority
  \details Returns the current value of the Base Priority register.
  \return               Base Priority register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Get Base Priority (non-secure)
  \details Returns the current value of the non-secure Base Priority register when in secure state.
  \return               Base Priority register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Base Priority
  \details Assigns the given value to the Base Priority register.
  \param [in]    basePri  Base Priority value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_BASEPRI(uint32_t value)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (value) : "memory");
}
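
#if 0  /* Usage sketch (illustrative, not part of CMSIS): mask only interrupts
          whose priority value is numerically greater than or equal to a given
          level, leaving higher-priority (numerically lower) interrupts enabled.
          __NVIC_PRIO_BITS is the device-specific number of implemented priority
          bits from the device header; BASEPRI holds the priority in its upper
          bits, hence the shift. */
static void example_mask_low_priority(uint32_t level)
{
  __set_BASEPRI(level << (8U - __NVIC_PRIO_BITS));
}
#endif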


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Set Base Priority (non-secure)
  \details Assigns the given value to the non-secure Base Priority register when in secure state.
  \param [in]    basePri  Base Priority value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_BASEPRI_NS(uint32_t value)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (value) : "memory");
}
#endif


/**
  \brief   Set Base Priority with condition
  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
           or the new value increases the BASEPRI priority level.
  \param [in]    basePri  Base Priority value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_BASEPRI_MAX(uint32_t value)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (value) : "memory");
}
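
#if 0  /* Usage sketch (illustrative, not part of CMSIS): unlike a plain BASEPRI
          write, a BASEPRI_MAX write only takes effect if it raises the masking
          level, which makes it safe to call from nested code paths. */
static void example_raise_masking(void)
{
  __set_BASEPRI_MAX(0x80U);  /* ignored if BASEPRI already masks more than this */
}
#endif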


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Set Base Priority with condition (non-secure)
  \details Assigns the given value to the non-secure Base Priority register when in secure state only if BASEPRI masking is disabled,
           or the new value increases the BASEPRI priority level.
  \param [in]    basePri  Base Priority value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_BASEPRI_MAX_NS(uint32_t value)
{
  __ASM volatile ("MSR basepri_max_ns, %0" : : "r" (value) : "memory");
}
#endif


/**
  \brief   Get Fault Mask
  \details Returns the current value of the Fault Mask register.
  \return               Fault Mask register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Get Fault Mask (non-secure)
  \details Returns the current value of the non-secure Fault Mask register when in secure state.
  \return               Fault Mask register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Fault Mask
  \details Assigns the given value to the Fault Mask register.
  \param [in]    faultMask  Fault Mask value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Set Fault Mask (non-secure)
  \details Assigns the given value to the non-secure Fault Mask register when in secure state.
  \param [in]    faultMask  Fault Mask value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif


#endif /* ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) */


#if (__ARM_ARCH_8M__ == 1U)

/**
  \brief   Get Process Stack Pointer Limit
  \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
  \return               PSPLIM Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_PSPLIM(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
  return(result);
}


#if  (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')     /* ToDo:  ARMCC_V6: check predefined macro for mainline */
/**
  \brief   Get Process Stack Pointer Limit (non-secure)
  \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \return               PSPLIM Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Process Stack Pointer Limit
  \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
}
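
#if 0  /* Usage sketch (illustrative, not part of CMSIS): arm the ARMv8-M stack
          limit check so a PSP push below the limit raises a fault instead of
          silently corrupting memory. The stack array and size are hypothetical. */
static uint64_t example_task_stack[128];

static void example_guard_process_stack(void)
{
  __set_PSPLIM((uint32_t)&example_task_stack[0]);   /* lowest valid address */
  __set_PSP((uint32_t)&example_task_stack[128]);    /* initial top of stack */
}
#endif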


#if  (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')     /* ToDo:  ARMCC_V6: check predefined macro for mainline */
/**
  \brief   Set Process Stack Pointer Limit (non-secure)
  \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
  __ASM volatile ("MSR psplim_ns, %0" : : "r" (ProcStackPtrLimit));
}
#endif


/**
  \brief   Get Main Stack Pointer Limit
  \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
  \return               MSPLIM Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_MSPLIM(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, msplim" : "=r" (result) );

  return(result);
}


#if  (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')     /* ToDo:  ARMCC_V6: check predefined macro for mainline */
/**
  \brief   Get Main Stack Pointer Limit (non-secure)
  \details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \return               MSPLIM Register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return(result);
}
#endif


/**
  \brief   Set Main Stack Pointer Limit
  \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
}


#if  (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M')     /* ToDo:  ARMCC_V6: check predefined macro for mainline */
/**
  \brief   Set Main Stack Pointer Limit (non-secure)
  \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
}
#endif

#endif /* (__ARM_ARCH_8M__ == 1U) */


#if ((__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U))  /* ToDo:  ARMCC_V6: check if this is ok for cortex >=4 */

/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return               Floating Point Status/Control register value
 */
#define __get_FPSCR      __builtin_arm_get_fpscr
#if 0
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  uint32_t result;

  __ASM volatile ("");                                 /* Empty asm statement works as a scheduling barrier */
  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  __ASM volatile ("");
  return(result);
#else
   return(0);
#endif
}
#endif

#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Get FPSCR (non-secure)
  \details Returns the current value of the non-secure Floating Point Status/Control register when in secure state.
  \return               Floating Point Status/Control register value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_FPSCR_NS(void)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  uint32_t result;

  __ASM volatile ("");                                 /* Empty asm statement works as a scheduling barrier */
  __ASM volatile ("VMRS %0, fpscr_ns" : "=r" (result) );
  __ASM volatile ("");
  return(result);
#else
   return(0);
#endif
}
#endif


/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
#define __set_FPSCR      __builtin_arm_set_fpscr
#if 0
__attribute__((always_inline)) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  __ASM volatile ("");                                 /* Empty asm statement works as a scheduling barrier */
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc");
  __ASM volatile ("");
#endif
}
#endif
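
#if 0  /* Usage sketch (illustrative, not part of CMSIS): read-modify-write of
          FPSCR to clear the cumulative floating-point exception flags held in
          its low bits (IOC/DZC/OFC/UFC/IXC in bits 0-4, IDC in bit 7). */
static void example_clear_fp_flags(void)
{
  __set_FPSCR(__get_FPSCR() & ~0x9FU);
}
#endif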

#if  (__ARM_FEATURE_CMSE == 3U)
/**
  \brief   Set FPSCR (non-secure)
  \details Assigns the given value to the non-secure Floating Point Status/Control register when in secure state.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_FPSCR_NS(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  __ASM volatile ("");                                 /* Empty asm statement works as a scheduling barrier */
  __ASM volatile ("VMSR fpscr_ns, %0" : : "r" (fpscr) : "vfpcc");
  __ASM volatile ("");
#endif
}
#endif

#endif /* ((__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) */



/*@} end of CMSIS_Core_RegAccFunctions */


/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP          __builtin_arm_nop

/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI          __builtin_arm_wfi

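#if 0  /* Usage sketch (illustrative, not part of CMSIS): a typical idle loop
          that sleeps until the next interrupt; the handler runs on wake-up and
          the loop then re-checks its wake condition. */
static void example_idle(void)
{
  for (;;)
  {
    __WFI();  /* enter low-power state until an interrupt occurs */
    /* ... process wake-up work ... */
  }
}
#endif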

/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE          __builtin_arm_wfe


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV          __builtin_arm_sev


/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
#define __ISB()        __builtin_arm_isb(0xF);

/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()        __builtin_arm_dsb(0xF);


/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
#define __DMB()        __builtin_arm_dmb(0xF);
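
#if 0  /* Usage sketch (illustrative, not part of CMSIS): a producer publishing
          data to an interrupt handler or another bus master uses DMB so the
          data write is observable before the flag write. The two variables are
          hypothetical. */
static volatile uint32_t example_data;
static volatile uint32_t example_ready;

static void example_publish(uint32_t value)
{
  example_data = value;
  __DMB();             /* order the data write before the flag write */
  example_ready = 1U;
}
#endif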


/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in integer value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV          __builtin_bswap32


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in two unsigned short values.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV16          __builtin_bswap16                           /* ToDo:  ARMCC_V6: check if __builtin_bswap16 could be used */
#if 0
__attribute__((always_inline)) __STATIC_INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
#endif
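
#if 0  /* Note (illustrative, not part of CMSIS): the REV16 instruction swaps
          the bytes within each halfword of a 32-bit word, e.g.
          0x12345678 -> 0x34127856, whereas __builtin_bswap16 swaps a single
          16-bit value -- hence the ToDo above. A portable equivalent: */
static uint32_t example_rev16(uint32_t value)
{
  return ((value & 0x00FF00FFU) << 8) | ((value & 0xFF00FF00U) >> 8);
}
#endif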


/**
  \brief   Reverse byte order in signed short value
  \details Reverses the byte order in a signed short value with sign extension to integer.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
                                                          /* ToDo:  ARMCC_V6: check if __builtin_bswap16 could be used */
__attribute__((always_inline)) __STATIC_INLINE int32_t __REVSH(int32_t value)
{
  int32_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of Bits to rotate
  \return               Rotated value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  return (op1 >> op2) | (op1 << (32U - op2));
}
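
#if 0  /* Usage sketch (illustrative, not part of CMSIS): rotate a word right by
          8 bits, e.g. 0x12345678 -> 0x78123456. Note that this C fallback shifts
          by (32 - op2), which is undefined behaviour for op2 == 0, so callers
          should pass rotate counts in the range 1..31. */
static uint32_t example_ror8(uint32_t value)
{
  return __ROR(value, 8U);
}
#endif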
856*e8380792SMatthias Ringwald 
857*e8380792SMatthias Ringwald 
858*e8380792SMatthias Ringwald /**
859*e8380792SMatthias Ringwald   \brief   Breakpoint
860*e8380792SMatthias Ringwald   \details Causes the processor to enter Debug state.
861*e8380792SMatthias Ringwald             Debug tools can use this to investigate system state when the instruction at a particular address is reached.
862*e8380792SMatthias Ringwald     \param [in]    value  is ignored by the processor.
863*e8380792SMatthias Ringwald                    If required, a debugger can use it to store additional information about the breakpoint.
864*e8380792SMatthias Ringwald  */
865*e8380792SMatthias Ringwald #define __BKPT(value)                       __ASM volatile ("bkpt "#value)
866*e8380792SMatthias Ringwald 
867*e8380792SMatthias Ringwald 
868*e8380792SMatthias Ringwald /**
869*e8380792SMatthias Ringwald   \brief   Reverse bit order of value
870*e8380792SMatthias Ringwald   \details Reverses the bit order of the given value.
871*e8380792SMatthias Ringwald   \param [in]    value  Value to reverse
872*e8380792SMatthias Ringwald   \return               Reversed value
873*e8380792SMatthias Ringwald  */
874*e8380792SMatthias Ringwald                                                           /* ToDo:  ARMCC_V6: check if __builtin_arm_rbit is supported */
__RBIT(uint32_t value)875*e8380792SMatthias Ringwald __attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
876*e8380792SMatthias Ringwald {
877*e8380792SMatthias Ringwald   uint32_t result;
878*e8380792SMatthias Ringwald 
879*e8380792SMatthias Ringwald #if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U))  /* ToDo:  ARMCC_V6: check if this is ok for cortex >=3 */
880*e8380792SMatthias Ringwald    __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
881*e8380792SMatthias Ringwald #else
882*e8380792SMatthias Ringwald   int32_t s = 4 /*sizeof(v)*/ * 8 - 1; /* extra shift needed at end */
883*e8380792SMatthias Ringwald 
884*e8380792SMatthias Ringwald   result = value;                      /* r will be reversed bits of v; first get LSB of v */
885*e8380792SMatthias Ringwald   for (value >>= 1U; value; value >>= 1U)
886*e8380792SMatthias Ringwald   {
887*e8380792SMatthias Ringwald     result <<= 1U;
888*e8380792SMatthias Ringwald     result |= value & 1U;
889*e8380792SMatthias Ringwald     s--;
890*e8380792SMatthias Ringwald   }
891*e8380792SMatthias Ringwald   result <<= s;                        /* shift when v's highest bits are zero */
892*e8380792SMatthias Ringwald #endif
893*e8380792SMatthias Ringwald   return(result);
894*e8380792SMatthias Ringwald }


/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
#define __CLZ             __builtin_clz
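
/* Note (illustrative, not part of the original CMSIS text): unlike the ARM CLZ
   instruction, which returns 32 for an input of 0, __builtin_clz(0) is
   undefined according to the compiler documentation, so avoid passing 0.
     __CLZ(0x00010000U) == 15
*/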


#if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U))  /* ToDo:  ARMCC_V6: check if this is ok for cortex >=3 */

/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB        (uint8_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define __LDREXH        (uint16_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define __LDREXW        (uint32_t)__builtin_arm_ldrex


/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXB        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXH        (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXW        (uint32_t)__builtin_arm_strex


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX             __builtin_arm_clrex
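
/* Usage sketch (illustrative, not part of the original CMSIS text): a typical
   lock-free increment retries the exclusive store until it reports success
   (0); `counter` and `counter_increment` are hypothetical names.

     static volatile uint32_t counter;

     void counter_increment(void)
     {
       uint32_t val;
       do {
         val = __LDREXW(&counter);                    // load, set exclusive monitor
       } while (__STREXW(val + 1U, &counter) != 0U);  // retry on failure
     }
*/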


/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
/*#define __SSAT             __builtin_arm_ssat*/
#define __SSAT(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT             __builtin_arm_usat
#if 0
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
#endif
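
/* Usage sketch (illustrative, not part of the original CMSIS text): clamping
   a wider intermediate result to a narrower range, e.g. 16-bit signed audio:
     __SSAT( 40000, 16) ==  32767    (clamped to  2^15 - 1)
     __SSAT(-40000, 16) == -32768    (clamped to -2^15)
     __USAT(   300,  8) ==    255    (clamped to  2^8  - 1)
*/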


/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an unprivileged LDRT instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an unprivileged LDRT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__attribute__((always_inline)) __STATIC_INLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__attribute__((always_inline)) __STATIC_INLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__attribute__((always_inline)) __STATIC_INLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
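
/* Note (illustrative, not part of the original CMSIS text): the *T variants
   perform the access with unprivileged permissions even when executed from a
   privileged mode, so an OS kernel can safely dereference a user-supplied
   pointer; `user_ptr` is a hypothetical application-provided pointer.
     uint8_t b = __LDRBT(user_ptr);   // faults if user code may not read it
*/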

#endif /* ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) */


#if (__ARM_ARCH_8M__ == 1U)

/**
  \brief   Load-Acquire (8 bit)
  \details Executes an LDAB instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}


/**
  \brief   Load-Acquire (16 bit)
  \details Executes an LDAH instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}


/**
  \brief   Load-Acquire (32 bit)
  \details Executes an LDA instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/**
  \brief   Store-Release (8 bit)
  \details Executes an STLB instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__attribute__((always_inline)) __STATIC_INLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (16 bit)
  \details Executes an STLH instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__attribute__((always_inline)) __STATIC_INLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (32 bit)
  \details Executes an STL instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__attribute__((always_inline)) __STATIC_INLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes an LDAB exclusive instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define     __LDAEXB                 (uint8_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes an LDAH exclusive instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define     __LDAEXH                 (uint16_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes an LDA exclusive instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define     __LDAEX                  (uint32_t)__builtin_arm_ldaex


/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes an STLB exclusive instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEXB                 (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes an STLH exclusive instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEXH                 (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes an STL exclusive instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define     __STLEX                  (uint32_t)__builtin_arm_stlex
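
/* Usage sketch (illustrative, not part of the original CMSIS text): a minimal
   spinlock built on the acquire/release exclusives; `lock`, `spin_lock` and
   `spin_unlock` are hypothetical names.

     static volatile uint32_t lock;   // 0 = free, 1 = taken

     void spin_lock(void)
     {
       // retry until the lock reads free and the exclusive store succeeds
       while ((__LDAEX(&lock) != 0U) || (__STLEX(1U, &lock) != 0U)) {}
     }

     void spin_unlock(void)
     {
       __STL(0U, &lock);              // store-release publishes the critical section
     }
*/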

#endif /* (__ARM_ARCH_8M__ == 1U) */

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */


/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

#if (__ARM_FEATURE_DSP == 1U)        /* ToDo:  ARMCC_V6: This should be ARCH >= ARMv7-M + SIMD */

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
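
/* Note (illustrative, not part of the original CMSIS text): the *ADD8/*SUB8
   intrinsics operate on four byte lanes in parallel, e.g. modulo byte-wise
   addition:
     __UADD8(0x01FF0304U, 0x01010101U) == 0x02000405U   (0xFF + 0x01 wraps)
   __UQADD8 would instead saturate that lane to 0xFF.
*/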


__attribute__((always_inline)) __STATIC_INLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__attribute__((always_inline)) __STATIC_INLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
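
/* Note (illustrative, not part of the original CMSIS text): USAD8 computes the
   sum of absolute byte-wise differences, a common motion-estimation kernel:
     __USAD8(0x05010800U, 0x01050004U) == 4 + 4 + 8 + 4 == 20
   __USADA8 additionally adds the accumulator passed in op3.
*/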

#define __SSAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
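
/* Note (illustrative, not part of the original CMSIS text): SMUAD multiplies
   the signed 16-bit halves of its operands pair-wise and sums the products,
   i.e. a two-tap dot product per instruction:
     result = (int16_t)(op1 >> 16) * (int16_t)(op2 >> 16)
            + (int16_t)(op1      ) * (int16_t)(op2      )
   The X variants (e.g. __SMUADX) swap the halfwords of op2 first.
*/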

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
1680*e8380792SMatthias Ringwald 
__SMUSD(uint32_t op1,uint32_t op2)1681*e8380792SMatthias Ringwald __attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
1682*e8380792SMatthias Ringwald {
1683*e8380792SMatthias Ringwald   uint32_t result;
1684*e8380792SMatthias Ringwald 
1685*e8380792SMatthias Ringwald   __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1686*e8380792SMatthias Ringwald   return(result);
1687*e8380792SMatthias Ringwald }
1688*e8380792SMatthias Ringwald 
__SMUSDX(uint32_t op1,uint32_t op2)1689*e8380792SMatthias Ringwald __attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
1690*e8380792SMatthias Ringwald {
1691*e8380792SMatthias Ringwald   uint32_t result;
1692*e8380792SMatthias Ringwald 
1693*e8380792SMatthias Ringwald   __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1694*e8380792SMatthias Ringwald   return(result);
1695*e8380792SMatthias Ringwald }
1696*e8380792SMatthias Ringwald 
__SMLSD(uint32_t op1,uint32_t op2,uint32_t op3)1697*e8380792SMatthias Ringwald __attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
1698*e8380792SMatthias Ringwald {
1699*e8380792SMatthias Ringwald   uint32_t result;
1700*e8380792SMatthias Ringwald 
1701*e8380792SMatthias Ringwald   __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1702*e8380792SMatthias Ringwald   return(result);
1703*e8380792SMatthias Ringwald }
1704*e8380792SMatthias Ringwald 
__SMLSDX(uint32_t op1,uint32_t op2,uint32_t op3)1705*e8380792SMatthias Ringwald __attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
1706*e8380792SMatthias Ringwald {
1707*e8380792SMatthias Ringwald   uint32_t result;
1708*e8380792SMatthias Ringwald 
1709*e8380792SMatthias Ringwald   __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1710*e8380792SMatthias Ringwald   return(result);
1711*e8380792SMatthias Ringwald }
1712*e8380792SMatthias Ringwald 
__SMLSLD(uint32_t op1,uint32_t op2,uint64_t acc)1713*e8380792SMatthias Ringwald __attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
1714*e8380792SMatthias Ringwald {
1715*e8380792SMatthias Ringwald   union llreg_u{
1716*e8380792SMatthias Ringwald     uint32_t w32[2];
1717*e8380792SMatthias Ringwald     uint64_t w64;
1718*e8380792SMatthias Ringwald   } llr;
1719*e8380792SMatthias Ringwald   llr.w64 = acc;
1720*e8380792SMatthias Ringwald 
1721*e8380792SMatthias Ringwald #ifndef __ARMEB__   /* Little endian */
1722*e8380792SMatthias Ringwald   __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
1723*e8380792SMatthias Ringwald #else               /* Big endian */
1724*e8380792SMatthias Ringwald   __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
1725*e8380792SMatthias Ringwald #endif
1726*e8380792SMatthias Ringwald 
1727*e8380792SMatthias Ringwald   return(llr.w64);
1728*e8380792SMatthias Ringwald }
1729*e8380792SMatthias Ringwald 
__SMLSLDX(uint32_t op1,uint32_t op2,uint64_t acc)1730*e8380792SMatthias Ringwald __attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
1731*e8380792SMatthias Ringwald {
1732*e8380792SMatthias Ringwald   union llreg_u{
1733*e8380792SMatthias Ringwald     uint32_t w32[2];
1734*e8380792SMatthias Ringwald     uint64_t w64;
1735*e8380792SMatthias Ringwald   } llr;
1736*e8380792SMatthias Ringwald   llr.w64 = acc;
1737*e8380792SMatthias Ringwald 
1738*e8380792SMatthias Ringwald #ifndef __ARMEB__   /* Little endian */
1739*e8380792SMatthias Ringwald   __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
1740*e8380792SMatthias Ringwald #else               /* Big endian */
1741*e8380792SMatthias Ringwald   __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
1742*e8380792SMatthias Ringwald #endif
1743*e8380792SMatthias Ringwald 
1744*e8380792SMatthias Ringwald   return(llr.w64);
1745*e8380792SMatthias Ringwald }

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
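
/* Usage sketch (illustrative, not part of the original CMSIS text): SEL picks
   each result byte from op1 or op2 according to the APSR.GE flags set by a
   preceding SIMD instruction, enabling e.g. a branch-free per-byte maximum:
     __USUB8(a, b);             // sets GE[i] where byte i of a >= byte i of b
     uint32_t m = __SEL(a, b);  // byte-wise maximum of a and b
*/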

__attribute__((always_inline)) __STATIC_INLINE  int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
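
/* Usage sketch (illustrative, not part of the original CMSIS text): the pack
   macros merge halfwords from two operands; the third argument is a constant
   shift applied to the second operand:
     __PKHBT(0x00001234U, 0x56780000U, 0U) == 0x56781234U   (bottom 16 of ARG1, top 16 of ARG2)
     __PKHTB(0x12340000U, 0x00005678U, 0U) == 0x12345678U   (top 16 of ARG1, bottom 16 of ARG2)
*/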

__attribute__((always_inline)) __STATIC_INLINE  int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1U) */
/*@} end of group CMSIS_SIMD_intrinsics */


#endif /* __CMSIS_ARMCC_V6_H */