RTEMS 6.1-rc5
cmsis_gcc.h
/*
 * The file was modified by RTEMS contributors.
 */
/**************************************************************************/
/*
 * Copyright (c) 2009-2021 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_GCC_H
#define __CMSIS_GCC_H

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"

/* Fallback for __has_builtin */
#ifndef __has_builtin
  #define __has_builtin(x) (0)
#endif

/* CMSIS compiler specific defines */
#ifndef __ASM
  #define __ASM __asm
#endif
#ifndef __INLINE
  #define __INLINE inline
#endif
#ifndef __STATIC_INLINE
  #define __STATIC_INLINE static inline
#endif
#ifndef __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline
#endif
#ifndef __NO_RETURN
  #define __NO_RETURN __attribute__((__noreturn__))
#endif
#ifndef __USED
  #define __USED __attribute__((used))
#endif
#ifndef __WEAK
  #define __WEAK __attribute__((weak))
#endif
#ifndef __PACKED
  #define __PACKED __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_STRUCT
  #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_UNION
  #define __PACKED_UNION union __attribute__((packed, aligned(1)))
#endif
#ifndef __UNALIGNED_UINT32 /* deprecated */
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
#endif
#ifndef __UNALIGNED_UINT16_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
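/*
 * Illustrative sketch (not part of the original header): the unaligned
 * access macros above let code parse byte buffers without risking
 * alignment faults on strict-alignment targets. The buffer and values
 * below are hypothetical; the result of the reads depends on the target
 * endianness.
 *
 *   uint8_t frame[6] = { 0x11U, 0x22U, 0x33U, 0x44U, 0x55U, 0x66U };
 *   uint16_t id  = __UNALIGNED_UINT16_READ(&frame[1]);   2-byte read at an odd address
 *   uint32_t len = __UNALIGNED_UINT32_READ(&frame[2]);   4-byte read, any alignment
 *   __UNALIGNED_UINT16_WRITE(&frame[3], 0xBEEFU);        unaligned 2-byte store
 */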
#ifndef __ALIGNED
  #define __ALIGNED(x) __attribute__((aligned(x)))
#endif
#ifndef __RESTRICT
  #define __RESTRICT __restrict
#endif
#ifndef __COMPILER_BARRIER
  #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
#endif
#ifndef __NO_INIT
  #define __NO_INIT __attribute__ ((section (".bss.noinit")))
#endif
#ifndef __ALIAS
  #define __ALIAS(x) __attribute__ ((alias(x)))
#endif

/* ######################### Startup and Lowlevel Init ######################## */

#ifndef __PROGRAM_START

__STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
{
#ifdef __rtems__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wnested-externs"
#endif /* __rtems__ */
  extern void _start(void) __NO_RETURN;

  typedef struct __copy_table {
    uint32_t const* src;
    uint32_t* dest;
    uint32_t wlen;
  } __copy_table_t;

  typedef struct __zero_table {
    uint32_t* dest;
    uint32_t wlen;
  } __zero_table_t;

  extern const __copy_table_t __copy_table_start__;
  extern const __copy_table_t __copy_table_end__;
  extern const __zero_table_t __zero_table_start__;
  extern const __zero_table_t __zero_table_end__;

#ifdef __rtems__
#pragma GCC diagnostic pop
#endif /* __rtems__ */

  for (__copy_table_t const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
    for(uint32_t i=0u; i<pTable->wlen; ++i) {
      pTable->dest[i] = pTable->src[i];
    }
  }

  for (__zero_table_t const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
    for(uint32_t i=0u; i<pTable->wlen; ++i) {
      pTable->dest[i] = 0u;
    }
  }

  _start();
}

#define __PROGRAM_START __cmsis_start
#endif
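/*
 * Usage sketch (an assumption-labelled illustration, not from this file):
 * a CMSIS-style startup file typically tail-calls __PROGRAM_START from its
 * reset handler after basic core setup. Reset_Handler and SystemInit are
 * conventional CMSIS names, shown here only for illustration.
 *
 *   extern uint32_t __INITIAL_SP;
 *   extern void SystemInit(void);
 *
 *   void Reset_Handler(void)
 *   {
 *     __set_PSP((uint32_t)(&__INITIAL_SP));
 *     SystemInit();        device-specific low-level init
 *     __PROGRAM_START();   copies .data, zeroes .bss, then calls _start()
 *   }
 */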

#ifndef __INITIAL_SP
#define __INITIAL_SP __StackTop
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT __StackLimit
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE __Vectors
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".vectors")))
#endif

#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U)
#ifndef __STACK_SEAL
#define __STACK_SEAL __StackSeal
#endif

#ifndef __TZ_STACK_SEAL_SIZE
#define __TZ_STACK_SEAL_SIZE 8U
#endif

#ifndef __TZ_STACK_SEAL_VALUE
#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL
#endif


__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
  *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
}
#endif
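/*
 * Usage sketch (hedged; the exact call site is an assumption, not taken
 * from this file): on an Armv8-M secure image the stack seal value is
 * written at the sealing location before any non-secure code runs.
 * __STACK_SEAL resolves to the __StackSeal linker symbol defined above.
 *
 *   extern uint64_t __STACK_SEAL;
 *   __TZ_set_STACKSEAL_S((uint32_t *)&__STACK_SEAL);   writes 0xFEF5EDA5FEF5EDA5
 */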


/* ########################## Core Instruction Access ######################### */
/* Define macros for porting to both Thumb-1 and Thumb-2.
 * For Thumb-1, use low registers (r0-r7), specified by constraint "l".
 * Otherwise, use general registers, specified by constraint "r". */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif
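/*
 * Expansion example (illustration only): the same source line expands to
 * different operand constraints depending on the instruction set, which is
 * all the porting these macros have to do:
 *
 *   __ASM ("rev %0, %1" : __CMSIS_GCC_OUT_REG (r) : __CMSIS_GCC_USE_REG (v));
 *     Thumb-1:  : "=l" (r) : "l" (v)    restricted to r0-r7
 *     Thumb-2:  : "=r" (r) : "r" (v)    any general register
 */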

#define __NOP() __ASM volatile ("nop")

#define __WFI() __ASM volatile ("wfi":::"memory")


#define __WFE() __ASM volatile ("wfe":::"memory")


#define __SEV() __ASM volatile ("sev")


__STATIC_FORCEINLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}


__STATIC_FORCEINLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}


__STATIC_FORCEINLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}


__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}


__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
}


__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (int16_t)__builtin_bswap16(value);
#else
  int16_t result;

  __ASM ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}
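/*
 * Worked values (illustrative, easy to check by hand):
 *   __REV  (0x12345678U)     -> 0x78563412U   full 32-bit byte swap
 *   __REV16(0x12345678U)     -> 0x34127856U   byte swap within each halfword
 *   __REVSH((int16_t)0x0080) -> (int16_t)0x8000, i.e. a negative value
 */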


__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
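/*
 * Worked value (illustrative): rotating right by 8 moves the low byte to
 * the top. op2 is reduced modulo 32 first, so __ROR(x, 32U) == x.
 *   __ROR(0x12345678U, 8U) -> 0x78123456U
 */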


#define __BKPT(value) __ASM volatile ("bkpt "#value)


__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
  __ASM ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */

  result = value; /* r will be reversed bits of v; first get LSB of v */
  for (value >>= 1U; value != 0U; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s; /* shift when v's highest bits are zero */
#endif
  return result;
}


__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
     __builtin_clz(0) is undefined behaviour, so handle this case specially.
     This guarantees ARM-compatible results if the code happens to be
     compiled on a non-ARM target, and ensures the compiler does not apply
     any optimisations based on the logic "value was passed to
     __builtin_clz, so it is non-zero".
     ARM GCC 7.3 and possibly earlier will optimise this test away, leaving
     a single CLZ instruction.
   */
  if (value == 0U)
  {
    return 32U;
  }
  return __builtin_clz(value);
}
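/*
 * Worked values (illustrative):
 *   __RBIT(0x00000001U) -> 0x80000000U   bit 0 becomes bit 31
 *   __CLZ (0x00000001U) -> 31U
 *   __CLZ (0U)          -> 32U           special-cased above, not undefined
 */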


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used instead. */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result); /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used instead. */
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result); /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}


__STATIC_FORCEINLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}

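/*
 * Usage sketch (illustrative, not from the original source): LDREX/STREX
 * pair into a retry loop to build a lock-free read-modify-write. __STREXW
 * returns 0 on success and 1 if exclusivity was lost, in which case the
 * loop retries. The counter variable is hypothetical.
 *
 *   static volatile uint32_t counter;
 *
 *   static inline void atomic_increment(void)
 *   {
 *     uint32_t old;
 *     do {
 *       old = __LDREXW(&counter);                       exclusive load
 *     } while (__STREXW(old + 1U, &counter) != 0U);     retry on contention
 *     __DMB();                                          order against later accesses
 *   }
 */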
#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
#define __SSAT(ARG1, ARG2) \
__extension__ \
({ \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })


#define __USAT(ARG1, ARG2) \
__extension__ \
({ \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })


__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used instead. */
  __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint8_t) result); /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0], which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used instead. */
  __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint16_t) result); /* Add explicit type cast here */
}


__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}

#else /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
          (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
          (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */

__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max ;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}

__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}

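/*
 * Worked values (illustrative): for sat == 8 the signed range is
 * [-2^7, 2^7 - 1] = [-128, 127] and the unsigned range is [0, 2^8 - 1] = [0, 255].
 *   __SSAT( 200, 8U) ->  127
 *   __SSAT(-200, 8U) -> -128
 *   __USAT( 300, 8U) ->  255U
 *   __USAT(  -5, 8U) ->    0U
 */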
#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint8_t) result);
}


__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint16_t) result);
}


__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return(result);
}


__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
}


__STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint8_t) result);
}


__STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return ((uint16_t) result);
}


__STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
  return(result);
}

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
/* end of group CMSIS_Core_InstructionInterface */


/* ########################### Core Function Access ########################### */
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}


__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}


__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
  __ISB();
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
  __ISB();
}
#endif


__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif


__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}


__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif


__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}

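/*
 * Usage sketch (a common idiom, shown as an assumption rather than part of
 * this header): save PRIMASK, mask interrupts, and restore the previous
 * state afterwards so that critical sections nest correctly.
 *
 *   uint32_t primask = __get_PRIMASK();
 *   __disable_irq();
 *   ... critical section ...
 *   __set_PRIMASK(primask);   restores the caller's interrupt state
 */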

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}


__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}


__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif


__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}


__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif


__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif


__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
     (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}


#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */


__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))    )
#if __has_builtin(__builtin_arm_get_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  return __builtin_arm_get_fpscr();
#else
  uint32_t result;

  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  return(result);
#endif
#else
  return(0U);
#endif
}


__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))    )
#if __has_builtin(__builtin_arm_set_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  __builtin_arm_set_fpscr(fpscr);
#else
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
#endif
#else
  (void)fpscr;
#endif
}

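/*
 * Usage sketch (illustrative; the bit position is an assumption taken from
 * the Arm architecture manuals, not from this file): FPSCR.FZ
 * (flush-to-zero) is bit 24, so enabling it is a read-modify-write:
 *
 *   __set_FPSCR(__get_FPSCR() | (1UL << 24));   enable flush-to-zero mode
 */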

/* ################### Compiler specific Intrinsics ########################### */
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#define __SSAT16(ARG1, ARG2) \
__extension__ \
({ \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })

#define __USAT16(ARG1, ARG2) \
__extension__ \
({ \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })

__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate)
{
  uint32_t result;
  if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) {
    __ASM volatile ("sxtb16 %0, %1, ROR %2" : "=r" (result) : "r" (op1), "i" (rotate) );
  } else {
    result = __SXTB16(__ROR(op1, rotate));
  }
  return result;
}

__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate)
{
  uint32_t result;
  if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) {
    __ASM volatile ("sxtab16 %0, %1, %2, ROR %3" : "=r" (result) : "r" (op1), "r" (op2), "i" (rotate));
  } else {
    result = __SXTAB16(op1, __ROR(op2, rotate));
  }
  return result;
}


__STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


#define __PKHBT(ARG1,ARG2,ARG3) \
__extension__ \
({ \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
__extension__ \
({ \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

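/*
 * Worked value (illustrative): __PKHBT keeps the bottom halfword of its
 * first operand and takes the top halfword from the (shifted) second
 * operand:
 *   __PKHBT(0x0000AAAAU, 0xBBBB0000U, 0) -> 0xBBBBAAAAU
 */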

__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1) */

#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_H */