/**************************************************************************//**
 * @file     cmsis_armcc.h
 * @brief    CMSIS compiler specific macros, functions, instructions
 * @version  V1.0.2
 * @date     10. January 2018
 ******************************************************************************/
/*
 * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_ARMCC_H
#define __CMSIS_ARMCC_H

#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 400677)
  #error "Please use Arm Compiler Toolchain V4.0.677 or later!"
#endif

/* CMSIS compiler control architecture macros */
#if (defined (__TARGET_ARCH_7_A ) && (__TARGET_ARCH_7_A  == 1))
  #define __ARM_ARCH_7A__           1
#endif

/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               __inline
#endif
#ifndef   __FORCEINLINE
  #define __FORCEINLINE                          __forceinline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static __inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   static __forceinline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __declspec(noreturn)
#endif
#ifndef   CMSIS_DEPRECATED
  #define CMSIS_DEPRECATED                       __attribute__((deprecated))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        __packed struct
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #define __UNALIGNED_UINT16_WRITE(addr, val)    ((*((__packed uint16_t *)(addr))) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #define __UNALIGNED_UINT16_READ(addr)          (*((const __packed uint16_t *)(addr)))
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #define __UNALIGNED_UINT32_WRITE(addr, val)    ((*((__packed uint32_t *)(addr))) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #define __UNALIGNED_UINT32_READ(addr)          (*((const __packed uint32_t *)(addr)))
#endif
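/* Usage sketch (illustrative, not part of CMSIS): the __UNALIGNED_*_READ/WRITE
   macros let the compiler generate safe accesses to addresses that are not
   naturally aligned, e.g. fields packed into a byte stream. The buffer and
   offsets below are made up for the example.

     uint8_t frame[8];
     __UNALIGNED_UINT32_WRITE(&frame[1], 0x12345678U);   // store at an odd address
     uint32_t v = __UNALIGNED_UINT32_READ(&frame[1]);    // read it back
     uint16_t h = __UNALIGNED_UINT16_READ(&frame[3]);    // 16-bit variant

   A plain pointer cast would not be portable here, because the compiler may
   otherwise assume natural alignment. */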
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed))
#endif

/* ##########################  Core Instruction Access  ######################### */
/**
  \brief   No Operation
 */
#define __NOP                             __nop

/**
  \brief   Wait For Interrupt
 */
#define __WFI                             __wfi

/**
  \brief   Wait For Event
 */
#define __WFE                             __wfe

/**
  \brief   Send Event
 */
#define __SEV                             __sev

/**
  \brief   Instruction Synchronization Barrier
 */
#define __ISB() do {\
                   __schedule_barrier();\
                   __isb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/**
  \brief   Data Synchronization Barrier
 */
#define __DSB() do {\
                   __schedule_barrier();\
                   __dsb(0xF);\
                   __schedule_barrier();\
                } while (0U)

/**
  \brief   Data Memory Barrier
 */
#define __DMB() do {\
                   __schedule_barrier();\
                   __dmb(0xF);\
                   __schedule_barrier();\
                } while (0U)

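/* Example (illustrative): a typical use of the Data Memory Barrier is to make
   data written by this core observable before a "ready" flag is published.
   The shared_buffer/data_ready names are hypothetical.

     shared_buffer[0] = sample;   // produce the data
     __DMB();                     // order the data write before the flag write
     data_ready = 1U;             // publish the flag

   __DSB() additionally waits for the writes to complete (e.g. before __WFI()),
   and __ISB() flushes the pipeline, e.g. after changing system control registers. */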
/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in an unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __REV                             __rev

/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif

/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int16_t __REVSH(int16_t value)
{
  revsh r0, r0
  bx lr
}
#endif

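/* Example (illustrative): the reverse instructions are handy for endianness
   conversion, e.g. turning a big-endian (network order) word into the
   little-endian host order used here. The variable names are made up.

     uint32_t net_word  = 0x12345678U;
     uint32_t host_word = __REV(net_word);            // 0x78563412
     uint32_t swapped   = __REV16(0x11223344U);       // 0x22114433, each halfword swapped
     int16_t  s         = __REVSH((int16_t)0x0080);   // 0x8000, sign-extended result */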
/**
  \brief   Rotate Right in unsigned value (32 bit)
  \param [in]    op1  Value to rotate
  \param [in]    op2  Number of Bits to rotate
  \return               Rotated value
 */
#define __ROR                             __ror

/**
  \brief   Breakpoint
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                     __breakpoint(value)

/**
  \brief   Reverse bit order of value
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
#define __RBIT                            __rbit

/**
  \brief   Count leading zeros
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
#define __CLZ                             __clz

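/* Example (illustrative): __CLZ is commonly used to find the index of the most
   significant set bit, e.g. when picking the highest-priority bit in a pending
   mask. The names below are hypothetical; note the value must be non-zero.

     uint32_t pending = 0x00000090U;
     uint32_t highest = 31U - __CLZ(pending);   // 7: bit 7 is the highest set bit */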
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXB(ptr)                                                        ((uint8_t ) __ldrex(ptr))
#else
  #define __LDREXB(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint8_t ) __ldrex(ptr))  _Pragma("pop")
#endif

/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXH(ptr)                                                        ((uint16_t) __ldrex(ptr))
#else
  #define __LDREXH(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint16_t) __ldrex(ptr))  _Pragma("pop")
#endif

/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __LDREXW(ptr)                                                        ((uint32_t ) __ldrex(ptr))
#else
  #define __LDREXW(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint32_t ) __ldrex(ptr))  _Pragma("pop")
#endif

/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXB(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXB(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif

/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXH(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXH(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif

/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
  #define __STREXW(value, ptr)                                                 __strex(value, ptr)
#else
  #define __STREXW(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
#endif

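/* Example (illustrative): LDREX/STREX are used in a retry loop to build atomic
   read-modify-write operations. A minimal sketch of an atomic increment,
   assuming a hypothetical shared counter:

     __STATIC_INLINE uint32_t atomic_increment(volatile uint32_t *counter)
     {
       uint32_t newValue;
       do {
         newValue = __LDREXW(counter) + 1U;           // load-exclusive, compute
       } while (__STREXW(newValue, counter) != 0U);   // store failed -> retry
       return newValue;
     }

   If the exclusive monitor was touched between the load and the store (another
   core, or an interrupt), __STREXW returns 1 and the loop retries. */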
/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX                           __clrex


/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#define __SSAT                            __ssat

/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT                            __usat

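/* Example (illustrative): saturation clamps a result to a narrower range instead
   of letting it wrap, e.g. when packing a 32-bit accumulator into a signed
   16-bit sample. The variable names are made up.

     int32_t acc = 40000;                       // would overflow an int16_t
     int16_t out = (int16_t)__SSAT(acc, 16);    // clamped to 32767
     uint8_t u   = (uint8_t)__USAT(-5, 8);      // clamped to 0 */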
/* ###########################  Core Function Access  ########################### */

/**
  \brief   Get FPSCR (Floating Point Status/Control)
  \return               Floating Point Status/Control register value
 */
__STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  register uint32_t __regfpscr         __ASM("fpscr");
  return(__regfpscr);
#else
   return(0U);
#endif
}

/**
  \brief   Set FPSCR (Floating Point Status/Control)
  \param [in]    fpscr  Floating Point Status/Control value to set
 */
__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
  register uint32_t __regfpscr         __ASM("fpscr");
  __regfpscr = (fpscr);
#else
  (void)fpscr;
#endif
}

/** \brief  Get CPSR (Current Program Status Register)
    \return               CPSR Register value
 */
__STATIC_INLINE uint32_t __get_CPSR(void)
{
  register uint32_t __regCPSR          __ASM("cpsr");
  return(__regCPSR);
}


/** \brief  Set CPSR (Current Program Status Register)
    \param [in]    cpsr  CPSR value to set
 */
__STATIC_INLINE void __set_CPSR(uint32_t cpsr)
{
  register uint32_t __regCPSR          __ASM("cpsr");
  __regCPSR = cpsr;
}

/** \brief  Get Mode
    \return                Processor Mode
 */
__STATIC_INLINE uint32_t __get_mode(void)
{
  return (__get_CPSR() & 0x1FU);
}

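/* Example (illustrative): __get_mode() returns the M[4:0] field of the CPSR,
   so code can check which processor mode it is currently running in
   (0x10 User, 0x13 Supervisor/SVC, 0x1F System, per the Arm architecture).

     if (__get_mode() == 0x13U) {
       // running in SVC mode, privileged setup is allowed here
     } */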
/** \brief  Set Mode
    \param [in]    mode  Mode value to set
 */
__STATIC_INLINE __ASM void __set_mode(uint32_t mode)
{
  MOV  r1, lr
  MSR  CPSR_C, r0
  BX   r1
}

/** \brief  Get Stack Pointer
    \return Stack Pointer
 */
__STATIC_INLINE __ASM uint32_t __get_SP(void)
{
  MOV  r0, sp
  BX   lr
}

/** \brief  Set Stack Pointer
    \param [in]    stack  Stack Pointer value to set
 */
__STATIC_INLINE __ASM void __set_SP(uint32_t stack)
{
  MOV  sp, r0
  BX   lr
}


/** \brief  Get USR/SYS Stack Pointer
    \return USR/SYS Stack Pointer
 */
__STATIC_INLINE __ASM uint32_t __get_SP_usr(void)
{
  ARM
  PRESERVE8

  MRS     R1, CPSR
  CPS     #0x1F       ;no effect in USR mode
  MOV     R0, SP
  MSR     CPSR_c, R1  ;no effect in USR mode
  ISB
  BX      LR
}

/** \brief  Set USR/SYS Stack Pointer
    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
__STATIC_INLINE __ASM void __set_SP_usr(uint32_t topOfProcStack)
{
  ARM
  PRESERVE8

  MRS     R1, CPSR
  CPS     #0x1F       ;no effect in USR mode
  MOV     SP, R0
  MSR     CPSR_c, R1  ;no effect in USR mode
  ISB
  BX      LR
}

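/* Usage note (illustrative): an RTOS running in a privileged mode (e.g. SVC or
   IRQ) can use __set_SP_usr() to prepare the user/system-mode stack of the next
   task before returning to it. The task_stack array below is hypothetical.

     __set_SP_usr((uint32_t)&task_stack[TASK_STACK_WORDS]);  // stacks grow downwards */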
/** \brief  Get FPEXC (Floating Point Exception Control Register)
    \return               Floating Point Exception Control Register value
 */
__STATIC_INLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  register uint32_t __regfpexc         __ASM("fpexc");
  return(__regfpexc);
#else
  return(0);
#endif
}

/** \brief  Set FPEXC (Floating Point Exception Control Register)
    \param [in]    fpexc  Floating Point Exception Control value to set
 */
__STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  register uint32_t __regfpexc         __ASM("fpexc");
  __regfpexc = (fpexc);
#endif
}

/*
 * Include common core functions to access Coprocessor 15 registers
 */

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) do { register volatile uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); (Rt) = tmp; } while(0)
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) do { register volatile uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); tmp = (Rt); } while(0)
#define __get_CP64(cp, op1, Rt, CRm) \
  do { \
    uint32_t ltmp, htmp; \
    __ASM volatile("MRRC p" # cp ", " # op1 ", ltmp, htmp, c" # CRm); \
    (Rt) = ((((uint64_t)htmp) << 32U) | ((uint64_t)ltmp)); \
  } while(0)

#define __set_CP64(cp, op1, Rt, CRm) \
  do { \
    const uint64_t tmp = (Rt); \
    const uint32_t ltmp = (uint32_t)(tmp); \
    const uint32_t htmp = (uint32_t)(tmp >> 32U); \
    __ASM volatile("MCRR p" # cp ", " # op1 ", ltmp, htmp, c" # CRm); \
  } while(0)

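/* Example (illustrative): __get_CP/__set_CP map directly onto MRC/MCR coprocessor
   accesses. Reading the CP15 MPIDR register (MRC p15, 0, <Rt>, c0, c0, 5) to find
   the current core number might look like this; cmsis_cp15.h below already wraps
   the common CP15 registers, so direct use is rarely needed.

     uint32_t mpidr;
     __get_CP(15, 0, mpidr, 0, 0, 5);   // Multiprocessor Affinity Register
     uint32_t core_id = mpidr & 0x3U;   // affinity level 0 (core number) */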
#include "cmsis_cp15.h"

/** \brief  Enable Floating Point Unit

  Critical section, called from undef handler, so systick is disabled
 */
__STATIC_INLINE __ASM void __FPU_Enable(void)
{
        ARM

        //Permit access to VFP/NEON registers by modifying CPACR
        MRC     p15,0,R1,c1,c0,2
        ORR     R1,R1,#0x00F00000
        MCR     p15,0,R1,c1,c0,2

        //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
        ISB

        //Enable VFP/NEON
        VMRS    R1,FPEXC
        ORR     R1,R1,#0x40000000
        VMSR    FPEXC,R1

        //Initialise VFP/NEON registers to 0
        MOV     R2,#0

        //Initialise the first 16 D registers (D0-D15) to 0
        VMOV    D0, R2,R2
        VMOV    D1, R2,R2
        VMOV    D2, R2,R2
        VMOV    D3, R2,R2
        VMOV    D4, R2,R2
        VMOV    D5, R2,R2
        VMOV    D6, R2,R2
        VMOV    D7, R2,R2
        VMOV    D8, R2,R2
        VMOV    D9, R2,R2
        VMOV    D10,R2,R2
        VMOV    D11,R2,R2
        VMOV    D12,R2,R2
        VMOV    D13,R2,R2
        VMOV    D14,R2,R2
        VMOV    D15,R2,R2

  IF {TARGET_FEATURE_EXTENSION_REGISTER_COUNT} == 32
        //Initialise the remaining D registers (D16-D31) to 0
        VMOV    D16,R2,R2
        VMOV    D17,R2,R2
        VMOV    D18,R2,R2
        VMOV    D19,R2,R2
        VMOV    D20,R2,R2
        VMOV    D21,R2,R2
        VMOV    D22,R2,R2
        VMOV    D23,R2,R2
        VMOV    D24,R2,R2
        VMOV    D25,R2,R2
        VMOV    D26,R2,R2
        VMOV    D27,R2,R2
        VMOV    D28,R2,R2
        VMOV    D29,R2,R2
        VMOV    D30,R2,R2
        VMOV    D31,R2,R2
  ENDIF

        //Initialise FPSCR to a known state
        VMRS    R2,FPSCR
        LDR     R3,=0x00086060 //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
        AND     R2,R2,R3
        VMSR    FPSCR,R2

        BX      LR
}

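/* Usage note (illustrative): __FPU_Enable() is typically called once early during
   startup, or lazily from the undefined-instruction handler on the first FP/NEON
   instruction, before any other floating-point code runs:

     __FPU_Enable();   // grant CP10/CP11 access, set FPEXC.EN, clear the registers */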
#endif /* __CMSIS_ARMCC_H */