1 /**************************************************************************//**
2 * @file cmsis_armclang.h
3 * @brief CMSIS compiler armclang (Arm Compiler 6) header file
4 * @version V5.2.0
5 * @date 08. May 2019
6 ******************************************************************************/
7 /*
8 * Copyright (c) 2009-2019 Arm Limited. All rights reserved.
9 *
10 * SPDX-License-Identifier: Apache-2.0
11 *
12 * Licensed under the Apache License, Version 2.0 (the License); you may
13 * not use this file except in compliance with the License.
14 * You may obtain a copy of the License at
15 *
16 * www.apache.org/licenses/LICENSE-2.0
17 *
18 * Unless required by applicable law or agreed to in writing, software
19 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
20 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 * See the License for the specific language governing permissions and
22 * limitations under the License.
23 */
24
25 /*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */
26
27 #ifndef __CMSIS_ARMCLANG_H
28 #define __CMSIS_ARMCLANG_H
29
30 #pragma clang system_header /* treat file as system include file */
31
32 #ifndef __ARM_COMPAT_H
33 #include <arm_compat.h> /* Compatibility header for Arm Compiler 5 intrinsics */
34 #endif
35
36 /* CMSIS compiler specific defines */
37 #ifndef __ASM
38 #define __ASM __asm
39 #endif
40 #ifndef __INLINE
41 #define __INLINE __inline
42 #endif
43 #ifndef __STATIC_INLINE
44 #define __STATIC_INLINE static __inline
45 #endif
46 #ifndef __STATIC_FORCEINLINE
47 #define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
48 #endif
49 #ifndef __NO_RETURN
50 #define __NO_RETURN __attribute__((__noreturn__))
51 #endif
52 #ifndef __USED
53 #define __USED __attribute__((used))
54 #endif
55 #ifndef __WEAK
56 #define __WEAK __attribute__((weak))
57 #endif
58 #ifndef __PACKED
59 #define __PACKED __attribute__((packed, aligned(1)))
60 #endif
61 #ifndef __PACKED_STRUCT
62 #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
63 #endif
64 #ifndef __PACKED_UNION
65 #define __PACKED_UNION union __attribute__((packed, aligned(1)))
66 #endif
67 #ifndef __UNALIGNED_UINT32 /* deprecated */
68 #pragma clang diagnostic push
69 #pragma clang diagnostic ignored "-Wpacked"
70 /*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */
71 struct __attribute__((packed)) T_UINT32 { uint32_t v; };
72 #pragma clang diagnostic pop
73 #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
74 #endif
75 #ifndef __UNALIGNED_UINT16_WRITE
76 #pragma clang diagnostic push
77 #pragma clang diagnostic ignored "-Wpacked"
78 /*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
79 __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
80 #pragma clang diagnostic pop
81 #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
82 #endif
83 #ifndef __UNALIGNED_UINT16_READ
84 #pragma clang diagnostic push
85 #pragma clang diagnostic ignored "-Wpacked"
86 /*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
87 __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
88 #pragma clang diagnostic pop
89 #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
90 #endif
91 #ifndef __UNALIGNED_UINT32_WRITE
92 #pragma clang diagnostic push
93 #pragma clang diagnostic ignored "-Wpacked"
94 /*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
95 __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
96 #pragma clang diagnostic pop
97 #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
98 #endif
99 #ifndef __UNALIGNED_UINT32_READ
100 #pragma clang diagnostic push
101 #pragma clang diagnostic ignored "-Wpacked"
102 /*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */
103 __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
104 #pragma clang diagnostic pop
105 #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
106 #endif
107 #ifndef __ALIGNED
108 #define __ALIGNED(x) __attribute__((aligned(x)))
109 #endif
110 #ifndef __RESTRICT
111 #define __RESTRICT __restrict
112 #endif
113 #ifndef __COMPILER_BARRIER
114 #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
115 #endif
116
117 /* ######################### Startup and Lowlevel Init ######################## */
118
119 #ifndef __PROGRAM_START
120 #define __PROGRAM_START __main
121 #endif
122
123 #ifndef __INITIAL_SP
124 #define __INITIAL_SP Image$$ARM_LIB_STACK$$ZI$$Limit
125 #endif
126
127 #ifndef __STACK_LIMIT
128 #define __STACK_LIMIT Image$$ARM_LIB_STACK$$ZI$$Base
129 #endif
130
131 #ifndef __VECTOR_TABLE
132 #define __VECTOR_TABLE __Vectors
133 #endif
134
#ifndef __VECTOR_TABLE_ATTRIBUTE
/* Place the vector table in the "RESET" section and keep it even if unreferenced.
   Use the standard `__attribute__` spelling (the original `__attribute` is an
   undocumented alias accepted by clang but not portable/conventional). */
#define __VECTOR_TABLE_ATTRIBUTE  __attribute__((used, section("RESET")))
#endif
138
139 /* ########################### Core Function Access ########################### */
140 /** \ingroup CMSIS_Core_FunctionInterface
141 \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
142 @{
143 */
144
145 /**
146 \brief Enable IRQ Interrupts
147 \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
148 Can only be executed in Privileged modes.
149 */
150 /* intrinsic void __enable_irq(); see arm_compat.h */
151
152
153 /**
154 \brief Disable IRQ Interrupts
155 \details Disables IRQ interrupts by setting the I-bit in the CPSR.
156 Can only be executed in Privileged modes.
157 */
158 /* intrinsic void __disable_irq(); see arm_compat.h */
159
160
161 /**
162 \brief Get Control Register
163 \details Returns the content of the Control Register.
164 \return Control Register value
165 */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  /* MRS: copy the CONTROL special register into a general-purpose register. */
  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}
173
174
175 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
176 /**
177 \brief Get Control Register (non-secure)
178 \details Returns the content of the non-secure Control Register when in secure mode.
179 \return non-secure Control Register value
180 */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  /* MRS with the _ns alias reads the Non-secure banked CONTROL from Secure state. */
  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
188 #endif
189
190
191 /**
192 \brief Set Control Register
193 \details Writes the given value to the Control Register.
194 \param [in] control Control Register value to set
195 */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  /* "memory" clobber: CONTROL can switch the active stack pointer, so the
     compiler must not reorder memory accesses across this write. */
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}
200
201
202 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
203 /**
204 \brief Set Control Register (non-secure)
205 \details Writes the given value to the non-secure Control Register when in secure state.
206 \param [in] control Control Register value to set
207 */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  /* Writes the Non-secure banked CONTROL from Secure state; "memory" clobber
     prevents reordering around the stack-pointer-affecting write. */
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
212 #endif
213
214
215 /**
216 \brief Get IPSR Register
217 \details Returns the content of the IPSR Register.
218 \return IPSR Register value
219 */
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  /* IPSR holds the active exception number (0 = Thread mode). */
  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}
227
228
229 /**
230 \brief Get APSR Register
231 \details Returns the content of the APSR Register.
232 \return APSR Register value
233 */
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  /* APSR holds the condition flags (N, Z, C, V, Q). */
  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}
241
242
243 /**
244 \brief Get xPSR Register
245 \details Returns the content of the xPSR Register.
246 \return xPSR Register value
247 */
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  /* xPSR is the combined view of APSR, IPSR and EPSR. */
  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}
255
256
257 /**
258 \brief Get Process Stack Pointer
259 \details Returns the current value of the Process Stack Pointer (PSP).
260 \return PSP Register value
261 */
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  /* Read the Process Stack Pointer banked register. */
  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}
269
270
271 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
272 /**
273 \brief Get Process Stack Pointer (non-secure)
274 \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
275 \return PSP Register value
276 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  /* Read the Non-secure PSP from Secure state. */
  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
284 #endif
285
286
287 /**
288 \brief Set Process Stack Pointer
289 \details Assigns the given value to the Process Stack Pointer (PSP).
290 \param [in] topOfProcStack Process Stack Pointer value to set
291 */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  /* No "memory" clobber here: PSP is not the active stack pointer in Handler
     mode, where this is typically called (deliberate in CMSIS). */
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}
296
297
298 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
299 /**
300 \brief Set Process Stack Pointer (non-secure)
301 \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
302 \param [in] topOfProcStack Process Stack Pointer value to set
303 */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  /* Write the Non-secure PSP from Secure state. */
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
308 #endif
309
310
311 /**
312 \brief Get Main Stack Pointer
313 \details Returns the current value of the Main Stack Pointer (MSP).
314 \return MSP Register value
315 */
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  /* Read the Main Stack Pointer banked register. */
  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}
323
324
325 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
326 /**
327 \brief Get Main Stack Pointer (non-secure)
328 \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
329 \return MSP Register value
330 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  /* Read the Non-secure MSP from Secure state. */
  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
338 #endif
339
340
341 /**
342 \brief Set Main Stack Pointer
343 \details Assigns the given value to the Main Stack Pointer (MSP).
344 \param [in] topOfMainStack Main Stack Pointer value to set
345 */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  /* Caution: writing MSP while it is the active stack pointer invalidates
     local data; callers are expected to know the execution context. */
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}
350
351
352 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
353 /**
354 \brief Set Main Stack Pointer (non-secure)
355 \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
356 \param [in] topOfMainStack Main Stack Pointer value to set
357 */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  /* Write the Non-secure MSP from Secure state. */
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
362 #endif
363
364
365 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
366 /**
367 \brief Get Stack Pointer (non-secure)
368 \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
369 \return SP Register value
370 */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  /* Read the Non-secure current stack pointer (SP) from Secure state. */
  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}
378
379
380 /**
381 \brief Set Stack Pointer (non-secure)
382 \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
383 \param [in] topOfStack Stack Pointer value to set
384 */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  /* Write the Non-secure current stack pointer (SP) from Secure state. */
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
389 #endif
390
391
392 /**
393 \brief Get Priority Mask
394 \details Returns the current state of the priority mask bit from the Priority Mask Register.
395 \return Priority Mask value
396 */
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  /* PRIMASK bit 0 set means all configurable-priority exceptions are masked. */
  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}
404
405
406 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
407 /**
408 \brief Get Priority Mask (non-secure)
409 \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
410 \return Priority Mask value
411 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  /* Read the Non-secure PRIMASK from Secure state. */
  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
419 #endif
420
421
422 /**
423 \brief Set Priority Mask
424 \details Assigns the given value to the Priority Mask Register.
425 \param [in] priMask Priority Mask
426 */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  /* "memory" clobber: acts as a compiler barrier so memory accesses are not
     moved into or out of the masked critical section. */
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
431
432
433 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
434 /**
435 \brief Set Priority Mask (non-secure)
436 \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
437 \param [in] priMask Priority Mask
438 */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  /* Write the Non-secure PRIMASK from Secure state; barrier via "memory". */
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
443 #endif
444
445
446 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
447 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
448 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
449 /**
450 \brief Enable FIQ
451 \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
452 Can only be executed in Privileged modes.
453 */
454 #define __enable_fault_irq __enable_fiq /* see arm_compat.h */
455
456
457 /**
458 \brief Disable FIQ
459 \details Disables FIQ interrupts by setting the F-bit in the CPSR.
460 Can only be executed in Privileged modes.
461 */
462 #define __disable_fault_irq __disable_fiq /* see arm_compat.h */
463
464
465 /**
466 \brief Get Base Priority
467 \details Returns the current value of the Base Priority register.
468 \return Base Priority register value
469 */
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  /* BASEPRI masks exceptions with priority >= its value (0 = masking off). */
  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}
477
478
479 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
480 /**
481 \brief Get Base Priority (non-secure)
482 \details Returns the current value of the non-secure Base Priority register when in secure state.
483 \return Base Priority register value
484 */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  /* Read the Non-secure BASEPRI from Secure state. */
  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
492 #endif
493
494
495 /**
496 \brief Set Base Priority
497 \details Assigns the given value to the Base Priority register.
498 \param [in] basePri Base Priority value to set
499 */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  /* "memory" clobber keeps the write ordered against surrounding accesses. */
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}
504
505
506 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
507 /**
508 \brief Set Base Priority (non-secure)
509 \details Assigns the given value to the non-secure Base Priority register when in secure state.
510 \param [in] basePri Base Priority value to set
511 */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  /* Write the Non-secure BASEPRI from Secure state. */
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
516 #endif
517
518
519 /**
520 \brief Set Base Priority with condition
521 \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
522 or the new value increases the BASEPRI priority level.
523 \param [in] basePri Base Priority value to set
524 */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  /* BASEPRI_MAX: the hardware updates BASEPRI only if the new value raises
     the priority level (conditional write, no read-modify-write needed). */
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
529
530
531 /**
532 \brief Get Fault Mask
533 \details Returns the current value of the Fault Mask register.
534 \return Fault Mask register value
535 */
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  /* FAULTMASK bit 0 set masks all exceptions except NMI. */
  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}
543
544
545 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
546 /**
547 \brief Get Fault Mask (non-secure)
548 \details Returns the current value of the non-secure Fault Mask register when in secure state.
549 \return Fault Mask register value
550 */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  /* Read the Non-secure FAULTMASK from Secure state. */
  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
558 #endif
559
560
561 /**
562 \brief Set Fault Mask
563 \details Assigns the given value to the Fault Mask register.
564 \param [in] faultMask Fault Mask value to set
565 */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  /* "memory" clobber keeps the write ordered against surrounding accesses. */
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}
570
571
572 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
573 /**
574 \brief Set Fault Mask (non-secure)
575 \details Assigns the given value to the non-secure Fault Mask register when in secure state.
576 \param [in] faultMask Fault Mask value to set
577 */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  /* Write the Non-secure FAULTMASK from Secure state. */
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
582 #endif
583
584 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
585 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
586 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
587
588
589 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
590 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
591
592 /**
593 \brief Get Process Stack Pointer Limit
594 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
595 Stack Pointer Limit register hence zero is returned always in non-secure
596 mode.
597
598 \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
599 \return PSPLIM Register value
600 */
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure PSPLIM is RAZ/WI (reads as zero)
  return 0U;
#else
  /* Main Extension (or Secure state) present: PSPLIM is implemented. */
  uint32_t result;
  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return result;
#endif
}
613
614 #if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
615 /**
616 \brief Get Process Stack Pointer Limit (non-secure)
617 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
618 Stack Pointer Limit register hence zero is returned always in non-secure
619 mode.
620
621 \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
622 \return PSPLIM Register value
623 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI (reads as zero)
  return 0U;
#else
  /* Read the Non-secure PSPLIM from Secure state. */
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return result;
#endif
}
635 #endif
636
637
638 /**
639 \brief Set Process Stack Pointer Limit
640 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
641 Stack Pointer Limit register hence the write is silently ignored in non-secure
642 mode.
643
644 \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
645 \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
646 */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI (write ignored)
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}
657
658
659 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
660 /**
661 \brief Set Process Stack Pointer (non-secure)
662 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
663 Stack Pointer Limit register hence the write is silently ignored in non-secure
664 mode.
665
666 \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
667 \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
668 */
__TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)669 __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
670 {
671 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
672 // without main extensions, the non-secure PSPLIM is RAZ/WI
673 (void)ProcStackPtrLimit;
674 #else
675 __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
676 #endif
677 }
678 #endif
679
680
681 /**
682 \brief Get Main Stack Pointer Limit
683 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
684 Stack Pointer Limit register hence zero is returned always.
685
686 \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
687 \return MSPLIM Register value
688 */
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI (reads as zero)
  return 0U;
#else
  /* Main Extension (or Secure state) present: MSPLIM is implemented. */
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}
701
702
703 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
704 /**
705 \brief Get Main Stack Pointer Limit (non-secure)
706 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
707 Stack Pointer Limit register hence zero is returned always.
708
709 \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state.
710 \return MSPLIM Register value
711 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI (reads as zero)
  return 0U;
#else
  /* Read the Non-secure MSPLIM from Secure state. */
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
723 #endif
724
725
726 /**
727 \brief Set Main Stack Pointer Limit
728 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
729 Stack Pointer Limit register hence the write is silently ignored.
730
731 \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
732 \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set
733 */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI (write ignored)
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}
744
745
746 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
747 /**
748 \brief Set Main Stack Pointer Limit (non-secure)
749 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
750 Stack Pointer Limit register hence the write is silently ignored.
751
752 \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
753 \param [in] MainStackPtrLimit Main Stack Pointer value to set
754 */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI (write ignored)
  (void)MainStackPtrLimit;
#else
  /* Write the Non-secure MSPLIM from Secure state. */
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
764 #endif
765
766 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
767 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
768
769 /**
770 \brief Get FPSCR
771 \details Returns the current value of the Floating Point Status/Control register.
772 \return Floating Point Status/Control register value
773 */
774 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
775 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
776 #define __get_FPSCR (uint32_t)__builtin_arm_get_fpscr
777 #else
778 #define __get_FPSCR() ((uint32_t)0U)
779 #endif
780
781 /**
782 \brief Set FPSCR
783 \details Assigns the given value to the Floating Point Status/Control register.
784 \param [in] fpscr Floating Point Status/Control value to set
785 */
786 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
787 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
788 #define __set_FPSCR __builtin_arm_set_fpscr
789 #else
790 #define __set_FPSCR(x) ((void)(x))
791 #endif
792
793
794 /*@} end of CMSIS_Core_RegAccFunctions */
795
796
797 /* ########################## Core Instruction Access ######################### */
798 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
799 Access to dedicated instructions
800 @{
801 */
802
803 /* Define macros for porting to both thumb1 and thumb2.
804 * For thumb1, use low register (r0-r7), specified by constraint "l"
805 * Otherwise, use general registers, specified by constraint "r" */
806 #if defined (__thumb__) && !defined (__thumb2__)
807 #define __CMSIS_GCC_OUT_REG(r) "=l" (r)
808 #define __CMSIS_GCC_RW_REG(r) "+l" (r)
809 #define __CMSIS_GCC_USE_REG(r) "l" (r)
810 #else
811 #define __CMSIS_GCC_OUT_REG(r) "=r" (r)
812 #define __CMSIS_GCC_RW_REG(r) "+r" (r)
813 #define __CMSIS_GCC_USE_REG(r) "r" (r)
814 #endif
815
816 /**
817 \brief No Operation
818 \details No Operation does nothing. This instruction can be used for code alignment purposes.
819 */
820 #define __NOP __builtin_arm_nop
821
822 /**
823 \brief Wait For Interrupt
824 \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
825 */
826 #define __WFI __builtin_arm_wfi
827
828
829 /**
830 \brief Wait For Event
831 \details Wait For Event is a hint instruction that permits the processor to enter
832 a low-power state until one of a number of events occurs.
833 */
834 #define __WFE __builtin_arm_wfe
835
836
837 /**
838 \brief Send Event
839 \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
840 */
841 #define __SEV __builtin_arm_sev
842
843
844 /**
845 \brief Instruction Synchronization Barrier
846 \details Instruction Synchronization Barrier flushes the pipeline in the processor,
847 so that all instructions following the ISB are fetched from cache or memory,
848 after the instruction has been completed.
849 */
850 #define __ISB() __builtin_arm_isb(0xF)
851
852 /**
853 \brief Data Synchronization Barrier
854 \details Acts as a special kind of Data Memory Barrier.
855 It completes when all explicit memory accesses before this instruction complete.
856 */
857 #define __DSB() __builtin_arm_dsb(0xF)
858
859
860 /**
861 \brief Data Memory Barrier
862 \details Ensures the apparent order of the explicit memory operations before
863 and after the instruction, without ensuring their completion.
864 */
865 #define __DMB() __builtin_arm_dmb(0xF)
866
867
868 /**
869 \brief Reverse byte order (32 bit)
870 \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
871 \param [in] value Value to reverse
872 \return Reversed value
873 */
874 #define __REV(value) __builtin_bswap32(value)
875
876
877 /**
878 \brief Reverse byte order (16 bit)
879 \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
880 \param [in] value Value to reverse
881 \return Reversed value
882 */
883 #define __REV16(value) __ROR(__REV(value), 16)
884
885
886 /**
887 \brief Reverse byte order (16 bit)
888 \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
889 \param [in] value Value to reverse
890 \return Reversed value
891 */
892 #define __REVSH(value) (int16_t)__builtin_bswap16(value)
893
894
895 /**
896 \brief Rotate Right in unsigned value (32 bit)
897 \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
898 \param [in] op1 Value to rotate
899 \param [in] op2 Number of Bits to rotate
900 \return Rotated value
901 */
__ROR(uint32_t op1,uint32_t op2)902 __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
903 {
904 op2 %= 32U;
905 if (op2 == 0U)
906 {
907 return op1;
908 }
909 return (op1 >> op2) | (op1 << (32U - op2));
910 }
911
912
913 /**
914 \brief Breakpoint
915 \details Causes the processor to enter Debug state.
916 Debug tools can use this to investigate system state when the instruction at a particular address is reached.
917 \param [in] value is ignored by the processor.
918 If required, a debugger can use it to store additional information about the breakpoint.
919 */
920 #define __BKPT(value) __ASM volatile ("bkpt "#value)
921
922
923 /**
924 \brief Reverse bit order of value
925 \details Reverses the bit order of the given value.
926 \param [in] value Value to reverse
927 \return Reversed value
928 */
929 #define __RBIT __builtin_arm_rbit
930
/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return             number of leading zeros in value
 */
#ifndef __STATIC_FORCEINLINE            /* defined earlier in this header; fallback keeps the block self-contained */
#define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
#endif
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* __builtin_clz(0) is formally undefined behaviour even though CLZ on ARM
     yields 32, so the zero case is handled explicitly.  This guarantees
     ARM-compatible results when compiling for non-ARM targets and prevents
     the compiler from later assuming "value is non-zero" just because it was
     passed to __builtin_clz.  ARM Compiler 6.10 (and possibly earlier)
     optimises the test away, leaving a single CLZ instruction. */
  return (value == 0U) ? 32U : (uint8_t)__builtin_clz(value);
}
954
955
#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )

/* NOTE(review): these are object-like macros, so e.g. __LDREXB(p) expands to
   (uint8_t)__builtin_arm_ldrex(p) - the cast applies to the builtin's result. */

/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for an 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB (uint8_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
#define __LDREXH (uint16_t)__builtin_arm_ldrex


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
#define __LDREXW (uint32_t)__builtin_arm_ldrex


/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXB (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXH (uint32_t)__builtin_arm_strex


/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STREXW (uint32_t)__builtin_arm_strex


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
#define __CLREX __builtin_arm_clrex

#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
           (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
1030
1031
1032 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1033 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1034 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
1035
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
/* NOTE(review): 'sat' is encoded as an immediate in the SSAT/USAT instruction,
   so it is presumably required to be a compile-time constant - confirm against
   the Arm Compiler builtin documentation. */
#define __SSAT __builtin_arm_ssat


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#define __USAT __builtin_arm_usat
1054
1055
/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
/* NOTE(review): the result's top bit is the current carry flag, so the value
   returned depends on the flag state produced by preceding instructions. */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
1070
1071
/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an unprivileged LDRBT instruction for an 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);    /* loaded via a 32-bit register; explicit cast truncates to the declared width */
}


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an unprivileged LDRHT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);   /* loaded via a 32-bit register; explicit cast truncates to the declared width */
}


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
1115
1116
/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an unprivileged STRBT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  /* value is widened to 32 bits for the register operand; STRBT stores the low byte */
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an unprivileged STRHT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  /* value is widened to 32 bits for the register operand; STRHT stores the low halfword */
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
1151
1152 #else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1153 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1154 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
1155
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (1..32)
  \return             Saturated value
 */
#ifndef __STATIC_FORCEINLINE            /* defined earlier in this header; fallback keeps the block self-contained */
#define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
#endif
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  /* An out-of-range saturation width leaves the value unchanged. */
  if ((sat < 1U) || (sat > 32U))
  {
    return val;
  }
  const int32_t upper = (int32_t)((1U << (sat - 1U)) - 1U);  /*  2^(sat-1) - 1 */
  const int32_t lower = -upper - 1;                          /* -2^(sat-1)     */
  if (val > upper)
  {
    return upper;
  }
  if (val < lower)
  {
    return lower;
  }
  return val;
}
1180
/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]    sat  Bit position to saturate to (0..31)
  \return             Saturated value
 */
#ifndef __STATIC_FORCEINLINE            /* defined earlier in this header; fallback keeps the block self-contained */
#define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
#endif
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  /* A width above 31 cannot saturate a 32-bit input: pass it through. */
  if (sat > 31U)
  {
    return (uint32_t)val;
  }
  if (val < 0)
  {
    return 0U;                           /* negative input clamps to zero */
  }
  const uint32_t limit = (1U << sat) - 1U;  /* 2^sat - 1 */
  return ((uint32_t)val > limit) ? limit : (uint32_t)val;
}
1204
1205 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1206 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1207 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
1208
1209
1210 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1211 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
/**
  \brief   Load-Acquire (8 bit)
  \details Executes a LDAB instruction for an 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);    /* loaded via a 32-bit register; explicit cast truncates to the declared width */
}


/**
  \brief   Load-Acquire (16 bit)
  \details Executes a LDAH instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);   /* loaded via a 32-bit register; explicit cast truncates to the declared width */
}


/**
  \brief   Load-Acquire (32 bit)
  \details Executes a LDA instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
1255
1256
/**
  \brief   Store-Release (8 bit)
  \details Executes a STLB instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  /* value is widened to 32 bits for the register operand; STLB stores the low byte */
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (16 bit)
  \details Executes a STLH instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  /* value is widened to 32 bits for the register operand; STLH stores the low halfword */
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   Store-Release (32 bit)
  \details Executes a STL instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
1291
1292
/* NOTE(review): like the LDREX/STREX macros above, these are object-like
   macros that forward to a compiler builtin; the leading cast applies to the
   builtin's result. */

/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes a LDAB exclusive instruction for an 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
#define __LDAEXB (uint8_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes a LDAH exclusive instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint16_t at (*ptr)
 */
#define __LDAEXH (uint16_t)__builtin_arm_ldaex


/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes a LDA exclusive instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return             value of type uint32_t at (*ptr)
 */
#define __LDAEX (uint32_t)__builtin_arm_ldaex


/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes a STLB exclusive instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STLEXB (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes a STLH exclusive instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STLEXH (uint32_t)__builtin_arm_stlex


/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes a STL exclusive instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
#define __STLEX (uint32_t)__builtin_arm_stlex
1351
1352 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1353 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
1354
1355 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
1356
1357
1358 /* ################### Compiler specific Intrinsics ########################### */
1359 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
1360 Access to dedicated SIMD instructions
1361 @{
1362 */
1363
1364 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1365
/* Each SIMD intrinsic below maps one-to-one onto the corresponding Arm
   Compiler builtin; argument and result types follow the builtin (ACLE)
   definitions for these DSP instructions. */
#define __SADD8 __builtin_arm_sadd8
#define __QADD8 __builtin_arm_qadd8
#define __SHADD8 __builtin_arm_shadd8
#define __UADD8 __builtin_arm_uadd8
#define __UQADD8 __builtin_arm_uqadd8
#define __UHADD8 __builtin_arm_uhadd8
#define __SSUB8 __builtin_arm_ssub8
#define __QSUB8 __builtin_arm_qsub8
#define __SHSUB8 __builtin_arm_shsub8
#define __USUB8 __builtin_arm_usub8
#define __UQSUB8 __builtin_arm_uqsub8
#define __UHSUB8 __builtin_arm_uhsub8
#define __SADD16 __builtin_arm_sadd16
#define __QADD16 __builtin_arm_qadd16
#define __SHADD16 __builtin_arm_shadd16
#define __UADD16 __builtin_arm_uadd16
#define __UQADD16 __builtin_arm_uqadd16
#define __UHADD16 __builtin_arm_uhadd16
#define __SSUB16 __builtin_arm_ssub16
#define __QSUB16 __builtin_arm_qsub16
#define __SHSUB16 __builtin_arm_shsub16
#define __USUB16 __builtin_arm_usub16
#define __UQSUB16 __builtin_arm_uqsub16
#define __UHSUB16 __builtin_arm_uhsub16
#define __SASX __builtin_arm_sasx
#define __QASX __builtin_arm_qasx
#define __SHASX __builtin_arm_shasx
#define __UASX __builtin_arm_uasx
#define __UQASX __builtin_arm_uqasx
#define __UHASX __builtin_arm_uhasx
#define __SSAX __builtin_arm_ssax
#define __QSAX __builtin_arm_qsax
#define __SHSAX __builtin_arm_shsax
#define __USAX __builtin_arm_usax
#define __UQSAX __builtin_arm_uqsax
#define __UHSAX __builtin_arm_uhsax
#define __USAD8 __builtin_arm_usad8
#define __USADA8 __builtin_arm_usada8
#define __SSAT16 __builtin_arm_ssat16
#define __USAT16 __builtin_arm_usat16
#define __UXTB16 __builtin_arm_uxtb16
#define __UXTAB16 __builtin_arm_uxtab16
#define __SXTB16 __builtin_arm_sxtb16
#define __SXTAB16 __builtin_arm_sxtab16
#define __SMUAD __builtin_arm_smuad
#define __SMUADX __builtin_arm_smuadx
#define __SMLAD __builtin_arm_smlad
#define __SMLADX __builtin_arm_smladx
#define __SMLALD __builtin_arm_smlald
#define __SMLALDX __builtin_arm_smlaldx
#define __SMUSD __builtin_arm_smusd
#define __SMUSDX __builtin_arm_smusdx
#define __SMLSD __builtin_arm_smlsd
#define __SMLSDX __builtin_arm_smlsdx
#define __SMLSLD __builtin_arm_smlsld
#define __SMLSLDX __builtin_arm_smlsldx
#define __SEL __builtin_arm_sel
#define __QADD __builtin_arm_qadd
#define __QSUB __builtin_arm_qsub

/* Pack Halfword Bottom-Top: low halfword of ARG1 combined with (ARG2 << ARG3).
   Note ARG2 and ARG3 are expanded more than once; avoid side effects. */
#define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \
                                  ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )

/* Pack Halfword Top-Bottom: high halfword of ARG1 combined with (ARG2 >> ARG3).
   Note ARG2 and ARG3 are expanded more than once; avoid side effects. */
#define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \
                                  ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )
1431
/**
  \brief   Signed Most significant word Multiply Accumulate
  \details Executes the SMMLA instruction: adds op3 to the most significant
           32 bits of the 64-bit signed product op1 * op2.
  \param [in]  op1  first multiplicand
  \param [in]  op2  second multiplicand
  \param [in]  op3  accumulate value
  \return           op3 + (((int64_t)op1 * op2) >> 32)
 */
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1439
1440 #endif /* (__ARM_FEATURE_DSP == 1) */
1441 /*@} end of group CMSIS_SIMD_intrinsics */
1442
1443
1444 #endif /* __CMSIS_ARMCLANG_H */
1445