/**************************************************************************//**
 * @file     cmsis_armcc.h
 * @brief    CMSIS Cortex-M Core Function/Instruction Header File
 * @version  V4.30
 * @date     20. October 2015
 ******************************************************************************/
/* Copyright (c) 2009 - 2015 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.
   *
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/


#ifndef __CMSIS_ARMCC_H
#define __CMSIS_ARMCC_H


#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif

/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

/* intrinsic void __enable_irq();     */
/* intrinsic void __disable_irq();    */

/**
  \brief   Get Control Register
  \details Returns the content of the Control Register.
  \return               Control Register value
 */
__STATIC_INLINE uint32_t __get_CONTROL(void)
{
  register uint32_t __regControl         __ASM("control");
  return(__regControl);
}


/**
  \brief   Set Control Register
  \details Writes the given value to the Control Register.
  \param [in]    control  Control Register value to set
 */
__STATIC_INLINE void __set_CONTROL(uint32_t control)
{
  register uint32_t __regControl         __ASM("control");
  __regControl = control;
}
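

/* Illustrative usage sketch (not part of the original CMSIS API): drop Thread mode
   to unprivileged execution by setting CONTROL.nPRIV (bit 0).  The helper name is
   an example only; an ISB (see __ISB further below) is recommended after writing
   CONTROL so the change takes effect before subsequent instructions. */
__STATIC_INLINE void example_drop_privilege(void)
{
  __set_CONTROL(__get_CONTROL() | 0x1U);         /* CONTROL.nPRIV = 1: unprivileged Thread mode */
}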


/**
  \brief   Get IPSR Register
  \details Returns the content of the IPSR Register.
  \return               IPSR Register value
 */
__STATIC_INLINE uint32_t __get_IPSR(void)
{
  register uint32_t __regIPSR          __ASM("ipsr");
  return(__regIPSR);
}
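

/* Illustrative usage sketch (not part of the original CMSIS API): IPSR holds the
   active exception number, so a non-zero value means the code is running inside an
   exception handler.  The helper name is an example only. */
__STATIC_INLINE uint32_t example_in_handler_mode(void)
{
  return (__get_IPSR() != 0U) ? 1U : 0U;         /* 0 in Thread mode, exception number otherwise */
}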


/**
  \brief   Get APSR Register
  \details Returns the content of the APSR Register.
  \return               APSR Register value
 */
__STATIC_INLINE uint32_t __get_APSR(void)
{
  register uint32_t __regAPSR          __ASM("apsr");
  return(__regAPSR);
}


/**
  \brief   Get xPSR Register
  \details Returns the content of the xPSR Register.
  \return               xPSR Register value
 */
__STATIC_INLINE uint32_t __get_xPSR(void)
{
  register uint32_t __regXPSR          __ASM("xpsr");
  return(__regXPSR);
}


/**
  \brief   Get Process Stack Pointer
  \details Returns the current value of the Process Stack Pointer (PSP).
  \return               PSP Register value
 */
__STATIC_INLINE uint32_t __get_PSP(void)
{
  register uint32_t __regProcessStackPointer  __ASM("psp");
  return(__regProcessStackPointer);
}


/**
  \brief   Set Process Stack Pointer
  \details Assigns the given value to the Process Stack Pointer (PSP).
  \param [in]    topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
{
  register uint32_t __regProcessStackPointer  __ASM("psp");
  __regProcessStackPointer = topOfProcStack;
}


/**
  \brief   Get Main Stack Pointer
  \details Returns the current value of the Main Stack Pointer (MSP).
  \return               MSP Register value
 */
__STATIC_INLINE uint32_t __get_MSP(void)
{
  register uint32_t __regMainStackPointer     __ASM("msp");
  return(__regMainStackPointer);
}


/**
  \brief   Set Main Stack Pointer
  \details Assigns the given value to the Main Stack Pointer (MSP).
  \param [in]    topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
{
  register uint32_t __regMainStackPointer     __ASM("msp");
  __regMainStackPointer = topOfMainStack;
}
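

/* Illustrative usage sketch (not part of the original CMSIS API): give Thread mode
   its own process stack, as an RTOS typically does before starting the first task.
   The helper name, and the assumption that stack_top points just past a suitably
   aligned stack area, are examples only; an ISB is recommended after writing CONTROL. */
__STATIC_INLINE void example_use_process_stack(uint32_t *stack_top)
{
  __set_PSP((uint32_t)stack_top);                /* PSP must be valid before it is selected   */
  __set_CONTROL(__get_CONTROL() | 0x2U);         /* CONTROL.SPSEL = 1: Thread mode uses PSP   */
}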


/**
  \brief   Get Priority Mask
  \details Returns the current state of the priority mask bit from the Priority Mask Register.
  \return               Priority Mask value
 */
__STATIC_INLINE uint32_t __get_PRIMASK(void)
{
  register uint32_t __regPriMask         __ASM("primask");
  return(__regPriMask);
}


/**
  \brief   Set Priority Mask
  \details Assigns the given value to the Priority Mask Register.
  \param [in]    priMask  Priority Mask
 */
__STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
{
  register uint32_t __regPriMask         __ASM("primask");
  __regPriMask = (priMask);
}
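

/* Illustrative usage sketch (not part of the original CMSIS API): a nestable
   critical section based on PRIMASK.  __disable_irq() is the ARM Compiler
   intrinsic referenced at the top of this file; the helper names are examples only. */
__STATIC_INLINE uint32_t example_enter_critical(void)
{
  uint32_t primask_state = __get_PRIMASK();      /* remember the current masking state        */
  __disable_irq();                               /* PRIMASK = 1: mask configurable interrupts */
  return primask_state;
}

__STATIC_INLINE void example_exit_critical(uint32_t primask_state)
{
  __set_PRIMASK(primask_state);                  /* restore the state saved on entry          */
}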


#if       (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)

/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
#define __enable_fault_irq                __enable_fiq


/**
  \brief   Disable FIQ
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
#define __disable_fault_irq               __disable_fiq


/**
  \brief   Get Base Priority
  \details Returns the current value of the Base Priority register.
  \return               Base Priority register value
 */
__STATIC_INLINE uint32_t __get_BASEPRI(void)
{
  register uint32_t __regBasePri         __ASM("basepri");
  return(__regBasePri);
}


/**
  \brief   Set Base Priority
  \details Assigns the given value to the Base Priority register.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_INLINE void __set_BASEPRI(uint32_t basePri)
{
  register uint32_t __regBasePri         __ASM("basepri");
  __regBasePri = (basePri & 0xFFU);
}


/**
  \brief   Set Base Priority with condition
  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
           or the new value increases the BASEPRI priority level.
  \param [in]    basePri  Base Priority value to set
 */
__STATIC_INLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  register uint32_t __regBasePriMax      __ASM("basepri_max");
  __regBasePriMax = (basePri & 0xFFU);
}
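

/* Illustrative usage sketch (not part of the original CMSIS API): a critical
   section that masks only interrupts at or below a given priority level.  The
   caller is assumed to pass a level already shifted into the device's implemented
   priority bits; the helper names are examples only. */
__STATIC_INLINE uint32_t example_raise_basepri(uint32_t new_level)
{
  uint32_t old_level = __get_BASEPRI();
  __set_BASEPRI_MAX(new_level);                  /* raises the masking level, never lowers it */
  return old_level;
}

__STATIC_INLINE void example_restore_basepri(uint32_t old_level)
{
  __set_BASEPRI(old_level);                      /* restore the level saved on entry          */
}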


/**
  \brief   Get Fault Mask
  \details Returns the current value of the Fault Mask register.
  \return               Fault Mask register value
 */
__STATIC_INLINE uint32_t __get_FAULTMASK(void)
{
  register uint32_t __regFaultMask       __ASM("faultmask");
  return(__regFaultMask);
}


/**
  \brief   Set Fault Mask
  \details Assigns the given value to the Fault Mask register.
  \param [in]    faultMask  Fault Mask value to set
 */
__STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
{
  register uint32_t __regFaultMask       __ASM("faultmask");
  __regFaultMask = (faultMask & (uint32_t)1);
}

#endif /* (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U) */


#if       (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U)

/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return               Floating Point Status/Control register value
 */
__STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  register uint32_t __regfpscr         __ASM("fpscr");
  return(__regfpscr);
#else
  return(0U);
#endif
}


/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  register uint32_t __regfpscr         __ASM("fpscr");
  __regfpscr = (fpscr);
#endif
}
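

/* Illustrative usage sketch (not part of the original CMSIS API): read and clear
   the cumulative floating-point exception flags in FPSCR (IOC, DZC, OFC, UFC, IXC
   and IDC occupy bits 0-4 and 7).  The helper name is an example only. */
__STATIC_INLINE uint32_t example_fetch_and_clear_fp_flags(void)
{
  uint32_t flags = __get_FPSCR() & 0x9FU;        /* cumulative exception flag bits            */
  __set_FPSCR(__get_FPSCR() & ~0x9FU);           /* write them back as zero                   */
  return flags;
}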

#endif /* (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U) */



/*@} end of CMSIS_Core_RegAccFunctions */


/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP                             __nop


/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI                             __wfi


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE                             __wfe


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV                             __sev


/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
#define __ISB() do {\
                   __schedule_barrier();\
                   __isb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB() do {\
                   __schedule_barrier();\
                   __dsb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
#define __DMB() do {\
                   __schedule_barrier();\
                   __dmb(0xF);\
                   __schedule_barrier();\
                } while (0U)
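
/* Illustrative usage sketch (not part of the original CMSIS API): publish a data
   item to another observer (for example an interrupt handler or a DMA-capable bus
   master) behind a "ready" flag.  The DMB orders the payload store before the flag
   store; the helper name and pointer parameters are examples only. */
__STATIC_INLINE void example_publish(volatile uint32_t *data, volatile uint32_t *ready, uint32_t value)
{
  *data  = value;                                /* 1. write the payload                      */
  __DMB();                                       /* 2. order the payload before the flag      */
  *ready = 1U;                                   /* 3. signal that the payload is valid       */
}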

/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in an integer value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV                             __rev


/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in two unsigned short values.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif

/**
  \brief   Reverse byte order in signed short value
  \details Reverses the byte order in a signed short value with sign extension to integer.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
{
  revsh r0, r0
  bx lr
}
#endif
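
/* Illustrative usage sketch (not part of the original CMSIS API): convert a 32-bit
   word between little-endian host order and big-endian network order with __REV.
   The swap is its own inverse, so the same helper serves both directions; the name
   is an example only. */
__STATIC_INLINE uint32_t example_swap_byte_order(uint32_t host_value)
{
  return __REV(host_value);                      /* ABCD -> DCBA byte swap                    */
}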


/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of Bits to rotate
  \return               Rotated value
 */
#define __ROR                             __ror


/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                     __breakpoint(value)


/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#if       (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
  #define __RBIT                          __rbit
#else
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;
  int32_t s = 4 /*sizeof(v)*/ * 8 - 1;   /* extra shift needed at end */

  result = value;                        /* r will be reversed bits of v; first get LSB of v */
  for (value >>= 1U; value; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                          /* shift when v's highest bits are zero */
  return(result);
}
#endif


/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
#define __CLZ                             __clz
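

/* Illustrative usage sketch (not part of the original CMSIS API): derive bit
   positions from __CLZ and __RBIT.  The results are only meaningful for a
   non-zero argument; the helper names are examples only. */
__STATIC_INLINE uint32_t example_highest_set_bit(uint32_t value)
{
  return 31U - __CLZ(value);                     /* index of the most significant 1 bit       */
}

__STATIC_INLINE uint32_t example_lowest_set_bit(uint32_t value)
{
  return __CLZ(__RBIT(value));                   /* index of the least significant 1 bit      */
}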

#if       (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)

/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXB(ptr)                                                        ((uint8_t ) __ldrex(ptr))
#else
  #define __LDREXB(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint8_t ) __ldrex(ptr))  _Pragma("pop")
#endif


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXH(ptr)                                                        ((uint16_t) __ldrex(ptr))
#else
  #define __LDREXH(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint16_t) __ldrex(ptr))  _Pragma("pop")
#endif


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXW(ptr)                                                        ((uint32_t ) __ldrex(ptr))
#else
  #define __LDREXW(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint32_t ) __ldrex(ptr))  _Pragma("pop")
#endif


/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXB(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXB(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif


/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXH(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXH(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif


/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXW(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXW(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX                           __clrex
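

/* Illustrative usage sketch (not part of the original CMSIS API): atomically
   increment a shared counter with an LDREX/STREX retry loop.  The helper name is
   an example only; the loop retries until the store-exclusive reports success. */
__STATIC_INLINE uint32_t example_atomic_increment(volatile uint32_t *counter)
{
  uint32_t new_value;
  uint32_t failed;
  do
  {
    new_value = __LDREXW(counter) + 1U;          /* load-exclusive and compute the new value    */
    failed    = __STREXW(new_value, counter);    /* 0 on success, 1 if the reservation was lost */
  } while (failed != 0U);
  return new_value;
}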


/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT                            __ssat


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT                            __usat
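

/* Illustrative usage sketch (not part of the original CMSIS API): clamp a 32-bit
   accumulator into the signed 16-bit range before storing it into a q15 sample
   buffer.  The helper name is an example only. */
__STATIC_INLINE int16_t example_saturate_to_q15(int32_t accumulator)
{
  return (int16_t)__SSAT(accumulator, 16);       /* saturate to the range [-32768, 32767]     */
}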


/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint32_t value)
{
  rrx r0, r0
  bx lr
}
#endif


/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an Unprivileged LDRT instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define __LDRBT(ptr)                      ((uint8_t )  __ldrt(ptr))


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an Unprivileged LDRT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#define __LDRHT(ptr)                      ((uint16_t)  __ldrt(ptr))


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an Unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#define __LDRT(ptr)                       ((uint32_t ) __ldrt(ptr))


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an Unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
#define __STRBT(value, ptr)               __strt(value, ptr)


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an Unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
#define __STRHT(value, ptr)               __strt(value, ptr)


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an Unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
#define __STRT(value, ptr)                __strt(value, ptr)

#endif /* (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U) */

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */


/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

#if (__CORTEX_M >= 0x04U)  /* only for Cortex-M4 and above */

#define __SADD8                           __sadd8
#define __QADD8                           __qadd8
#define __SHADD8                          __shadd8
#define __UADD8                           __uadd8
#define __UQADD8                          __uqadd8
#define __UHADD8                          __uhadd8
#define __SSUB8                           __ssub8
#define __QSUB8                           __qsub8
#define __SHSUB8                          __shsub8
#define __USUB8                           __usub8
#define __UQSUB8                          __uqsub8
#define __UHSUB8                          __uhsub8
#define __SADD16                          __sadd16
#define __QADD16                          __qadd16
#define __SHADD16                         __shadd16
#define __UADD16                          __uadd16
#define __UQADD16                         __uqadd16
#define __UHADD16                         __uhadd16
#define __SSUB16                          __ssub16
#define __QSUB16                          __qsub16
#define __SHSUB16                         __shsub16
#define __USUB16                          __usub16
#define __UQSUB16                         __uqsub16
#define __UHSUB16                         __uhsub16
#define __SASX                            __sasx
#define __QASX                            __qasx
#define __SHASX                           __shasx
#define __UASX                            __uasx
#define __UQASX                           __uqasx
#define __UHASX                           __uhasx
#define __SSAX                            __ssax
#define __QSAX                            __qsax
#define __SHSAX                           __shsax
#define __USAX                            __usax
#define __UQSAX                           __uqsax
#define __UHSAX                           __uhsax
#define __USAD8                           __usad8
#define __USADA8                          __usada8
#define __SSAT16                          __ssat16
#define __USAT16                          __usat16
#define __UXTB16                          __uxtb16
#define __UXTAB16                         __uxtab16
#define __SXTB16                          __sxtb16
#define __SXTAB16                         __sxtab16
#define __SMUAD                           __smuad
#define __SMUADX                          __smuadx
#define __SMLAD                           __smlad
#define __SMLADX                          __smladx
#define __SMLALD                          __smlald
#define __SMLALDX                         __smlaldx
#define __SMUSD                           __smusd
#define __SMUSDX                          __smusdx
#define __SMLSD                           __smlsd
#define __SMLSDX                          __smlsdx
#define __SMLSLD                          __smlsld
#define __SMLSLDX                         __smlsldx
#define __SEL                             __sel
#define __QADD                            __qadd
#define __QSUB                            __qsub

#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )

#define __SMMLA(ARG1,ARG2,ARG3)          ( (int32_t)((((int64_t)(ARG1) * (ARG2)) + \
                                                      ((int64_t)(ARG3) << 32U)     ) >> 32U))
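
/* Illustrative usage sketch (not part of the original header): add two pairs of
   packed q15 samples with per-halfword saturation using __QADD16.  The packing of
   two 16-bit samples per 32-bit word is an assumption made for the example. */
__STATIC_INLINE uint32_t example_add_packed_q15(uint32_t samples_a, uint32_t samples_b)
{
  return __QADD16(samples_a, samples_b);         /* two independent saturating 16-bit additions */
}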

#endif /* (__CORTEX_M >= 0x04) */
/*@} end of group CMSIS_SIMD_intrinsics */


#endif /* __CMSIS_ARMCC_H */