32#pragma GCC diagnostic push
33#pragma GCC diagnostic ignored "-Wsign-conversion"
34#pragma GCC diagnostic ignored "-Wconversion"
35#pragma GCC diagnostic ignored "-Wunused-parameter"
39 #define __has_builtin(x) (0)
47 #define __INLINE inline
49#ifndef __STATIC_INLINE
50 #define __STATIC_INLINE static inline
52#ifndef __STATIC_FORCEINLINE
53 #define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline
56 #define __NO_RETURN __attribute__((__noreturn__))
59 #define __USED __attribute__((used))
62 #define __WEAK __attribute__((weak))
65 #define __PACKED __attribute__((packed, aligned(1)))
67#ifndef __PACKED_STRUCT
68 #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
71 #define __PACKED_UNION union __attribute__((packed, aligned(1)))
73#ifndef __UNALIGNED_UINT32
74 #pragma GCC diagnostic push
75 #pragma GCC diagnostic ignored "-Wpacked"
76 #pragma GCC diagnostic ignored "-Wattributes"
78 #pragma GCC diagnostic pop
79 #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
81#ifndef __UNALIGNED_UINT16_WRITE
82 #pragma GCC diagnostic push
83 #pragma GCC diagnostic ignored "-Wpacked"
84 #pragma GCC diagnostic ignored "-Wattributes"
85 __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
86 #pragma GCC diagnostic pop
87 #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
89#ifndef __UNALIGNED_UINT16_READ
90 #pragma GCC diagnostic push
91 #pragma GCC diagnostic ignored "-Wpacked"
92 #pragma GCC diagnostic ignored "-Wattributes"
93 __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
94 #pragma GCC diagnostic pop
95 #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
97#ifndef __UNALIGNED_UINT32_WRITE
98 #pragma GCC diagnostic push
99 #pragma GCC diagnostic ignored "-Wpacked"
100 #pragma GCC diagnostic ignored "-Wattributes"
101 __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
102 #pragma GCC diagnostic pop
103 #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
105#ifndef __UNALIGNED_UINT32_READ
106 #pragma GCC diagnostic push
107 #pragma GCC diagnostic ignored "-Wpacked"
108 #pragma GCC diagnostic ignored "-Wattributes"
109 __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
110 #pragma GCC diagnostic pop
111 #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
114 #define __ALIGNED(x) __attribute__((aligned(x)))
117 #define __RESTRICT __restrict
119#ifndef __COMPILER_BARRIER
120 #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
123 #define __NO_INIT __attribute__ ((section (".bss.noinit")))
126 #define __ALIAS(x) __attribute__ ((alias(x)))
131#ifndef __PROGRAM_START
143#pragma GCC diagnostic push
144#pragma GCC diagnostic ignored "-Wnested-externs"
146 extern void _start(
void) __NO_RETURN;
148 typedef struct __copy_table {
154 typedef struct __zero_table {
159 extern const __copy_table_t __copy_table_start__;
160 extern const __copy_table_t __copy_table_end__;
161 extern const __zero_table_t __zero_table_start__;
162 extern const __zero_table_t __zero_table_end__;
165#pragma GCC diagnostic pop
168 for (__copy_table_t
const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
169 for(uint32_t i=0u; i<pTable->wlen; ++i) {
170 pTable->dest[i] = pTable->src[i];
174 for (__zero_table_t
const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
175 for(uint32_t i=0u; i<pTable->wlen; ++i) {
176 pTable->dest[i] = 0u;
183#define __PROGRAM_START __cmsis_start
187#define __INITIAL_SP __StackTop
191#define __STACK_LIMIT __StackLimit
194#ifndef __VECTOR_TABLE
195#define __VECTOR_TABLE __Vectors
198#ifndef __VECTOR_TABLE_ATTRIBUTE
199#define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".vectors")))
202#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U)
204#define __STACK_SEAL __StackSeal
207#ifndef __TZ_STACK_SEAL_SIZE
208#define __TZ_STACK_SEAL_SIZE 8U
211#ifndef __TZ_STACK_SEAL_VALUE
212#define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL
216__STATIC_FORCEINLINE
void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
217 *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
231#if defined (__thumb__) && !defined (__thumb2__)
232#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
233#define __CMSIS_GCC_RW_REG(r) "+l" (r)
234#define __CMSIS_GCC_USE_REG(r) "l" (r)
236#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
237#define __CMSIS_GCC_RW_REG(r) "+r" (r)
238#define __CMSIS_GCC_USE_REG(r) "r" (r)
245#define __NOP() __ASM volatile ("nop")
251#define __WFI() __ASM volatile ("wfi":::"memory")
259#define __WFE() __ASM volatile ("wfe":::"memory")
266#define __SEV() __ASM volatile ("sev")
275__STATIC_FORCEINLINE
void __ISB(
void)
277 __ASM
volatile (
"isb 0xF":::
"memory");
286__STATIC_FORCEINLINE
void __DSB(
void)
288 __ASM
volatile (
"dsb 0xF":::
"memory");
297__STATIC_FORCEINLINE
void __DMB(
void)
299 __ASM
volatile (
"dmb 0xF":::
"memory");
309__STATIC_FORCEINLINE uint32_t
__REV(uint32_t value)
311#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
312 return __builtin_bswap32(value);
316 __ASM (
"rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
328__STATIC_FORCEINLINE uint32_t
__REV16(uint32_t value)
332 __ASM (
"rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
343__STATIC_FORCEINLINE int16_t
__REVSH(int16_t value)
345#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
346 return (int16_t)__builtin_bswap16(value);
350 __ASM (
"revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
363__STATIC_FORCEINLINE uint32_t
__ROR(uint32_t op1, uint32_t op2)
370 return (op1 >> op2) | (op1 << (32U - op2));
381#define __BKPT(value) __ASM volatile ("bkpt "#value)
390__STATIC_FORCEINLINE uint32_t
__RBIT(uint32_t value)
394#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
395 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
396 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
397 __ASM (
"rbit %0, %1" :
"=r" (result) :
"r" (value) );
399 uint32_t s = (4U * 8U) - 1U;
402 for (value >>= 1U; value != 0U; value >>= 1U)
405 result |= value & 1U;
420__STATIC_FORCEINLINE uint8_t
__CLZ(uint32_t value)
435 return __builtin_clz(value);
439#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
440 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
441 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
442 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
449__STATIC_FORCEINLINE uint8_t __LDREXB(
volatile uint8_t *addr)
453#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
454 __ASM
volatile (
"ldrexb %0, %1" :
"=r" (result) :
"Q" (*addr) );
459 __ASM
volatile (
"ldrexb %0, [%1]" :
"=r" (result) :
"r" (addr) :
"memory" );
461 return ((uint8_t) result);
471__STATIC_FORCEINLINE uint16_t __LDREXH(
volatile uint16_t *addr)
475#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
476 __ASM
volatile (
"ldrexh %0, %1" :
"=r" (result) :
"Q" (*addr) );
481 __ASM
volatile (
"ldrexh %0, [%1]" :
"=r" (result) :
"r" (addr) :
"memory" );
483 return ((uint16_t) result);
493__STATIC_FORCEINLINE uint32_t __LDREXW(
volatile uint32_t *addr)
497 __ASM
volatile (
"ldrex %0, %1" :
"=r" (result) :
"Q" (*addr) );
510__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value,
volatile uint8_t *addr)
514 __ASM
volatile (
"strexb %0, %2, %1" :
"=&r" (result),
"=Q" (*addr) :
"r" ((uint32_t)value) );
527__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value,
volatile uint16_t *addr)
531 __ASM
volatile (
"strexh %0, %2, %1" :
"=&r" (result),
"=Q" (*addr) :
"r" ((uint32_t)value) );
544__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value,
volatile uint32_t *addr)
548 __ASM
volatile (
"strex %0, %2, %1" :
"=&r" (result),
"=Q" (*addr) :
"r" (value) );
557__STATIC_FORCEINLINE
void __CLREX(
void)
559 __ASM
volatile (
"clrex" :::
"memory");
568#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
569 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
570 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
578#define __SSAT(ARG1, ARG2) \
581 int32_t __RES, __ARG1 = (ARG1); \
582 __ASM volatile ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
594#define __USAT(ARG1, ARG2) \
597 uint32_t __RES, __ARG1 = (ARG1); \
598 __ASM volatile ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
610__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
614 __ASM
volatile (
"rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
625__STATIC_FORCEINLINE uint8_t __LDRBT(
volatile uint8_t *ptr)
629#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
630 __ASM
volatile (
"ldrbt %0, %1" :
"=r" (result) :
"Q" (*ptr) );
635 __ASM
volatile (
"ldrbt %0, [%1]" :
"=r" (result) :
"r" (ptr) :
"memory" );
637 return ((uint8_t) result);
647__STATIC_FORCEINLINE uint16_t __LDRHT(
volatile uint16_t *ptr)
651#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
652 __ASM
volatile (
"ldrht %0, %1" :
"=r" (result) :
"Q" (*ptr) );
657 __ASM
volatile (
"ldrht %0, [%1]" :
"=r" (result) :
"r" (ptr) :
"memory" );
659 return ((uint16_t) result);
669__STATIC_FORCEINLINE uint32_t __LDRT(
volatile uint32_t *ptr)
673 __ASM
volatile (
"ldrt %0, %1" :
"=r" (result) :
"Q" (*ptr) );
684__STATIC_FORCEINLINE
void __STRBT(uint8_t value,
volatile uint8_t *ptr)
686 __ASM
volatile (
"strbt %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
696__STATIC_FORCEINLINE
void __STRHT(uint16_t value,
volatile uint16_t *ptr)
698 __ASM
volatile (
"strht %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) );
708__STATIC_FORCEINLINE
void __STRT(uint32_t value,
volatile uint32_t *ptr)
710 __ASM
volatile (
"strt %1, %0" :
"=Q" (*ptr) :
"r" (value) );
724__STATIC_FORCEINLINE int32_t
__SSAT(int32_t val, uint32_t sat)
726 if ((sat >= 1U) && (sat <= 32U))
728 const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
729 const int32_t min = -1 - max ;
749__STATIC_FORCEINLINE uint32_t
__USAT(int32_t val, uint32_t sat)
753 const uint32_t max = ((1U << sat) - 1U);
754 if (val > (int32_t)max)
763 return (uint32_t)val;
771#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
772 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
779__STATIC_FORCEINLINE uint8_t __LDAB(
volatile uint8_t *ptr)
783 __ASM
volatile (
"ldab %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
784 return ((uint8_t) result);
794__STATIC_FORCEINLINE uint16_t __LDAH(
volatile uint16_t *ptr)
798 __ASM
volatile (
"ldah %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
799 return ((uint16_t) result);
809__STATIC_FORCEINLINE uint32_t __LDA(
volatile uint32_t *ptr)
813 __ASM
volatile (
"lda %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
824__STATIC_FORCEINLINE
void __STLB(uint8_t value,
volatile uint8_t *ptr)
826 __ASM
volatile (
"stlb %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
836__STATIC_FORCEINLINE
void __STLH(uint16_t value,
volatile uint16_t *ptr)
838 __ASM
volatile (
"stlh %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
848__STATIC_FORCEINLINE
void __STL(uint32_t value,
volatile uint32_t *ptr)
850 __ASM
volatile (
"stl %1, %0" :
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
860__STATIC_FORCEINLINE uint8_t __LDAEXB(
volatile uint8_t *ptr)
864 __ASM
volatile (
"ldaexb %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
865 return ((uint8_t) result);
875__STATIC_FORCEINLINE uint16_t __LDAEXH(
volatile uint16_t *ptr)
879 __ASM
volatile (
"ldaexh %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
880 return ((uint16_t) result);
890__STATIC_FORCEINLINE uint32_t __LDAEX(
volatile uint32_t *ptr)
894 __ASM
volatile (
"ldaex %0, %1" :
"=r" (result) :
"Q" (*ptr) :
"memory" );
907__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value,
volatile uint8_t *ptr)
911 __ASM
volatile (
"stlexb %0, %2, %1" :
"=&r" (result),
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
924__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value,
volatile uint16_t *ptr)
928 __ASM
volatile (
"stlexh %0, %2, %1" :
"=&r" (result),
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
941__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value,
volatile uint32_t *ptr)
945 __ASM
volatile (
"stlex %0, %2, %1" :
"=&r" (result),
"=Q" (*ptr) :
"r" ((uint32_t)value) :
"memory" );
968 __ASM
volatile (
"cpsie i" : : :
"memory");
979 __ASM
volatile (
"cpsid i" : : :
"memory");
992 __ASM
volatile (
"MRS %0, control" :
"=r" (result) );
997#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1003__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(
void)
1007 __ASM
volatile (
"MRS %0, control_ns" :
"=r" (result) );
1020 __ASM
volatile (
"MSR control, %0" : :
"r" (
control) :
"memory");
1025#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1031__STATIC_FORCEINLINE
void __TZ_set_CONTROL_NS(uint32_t
control)
1033 __ASM
volatile (
"MSR control_ns, %0" : :
"r" (
control) :
"memory");
1048 __ASM
volatile (
"MRS %0, ipsr" :
"=r" (result) );
1062 __ASM
volatile (
"MRS %0, apsr" :
"=r" (result) );
1076 __ASM
volatile (
"MRS %0, xpsr" :
"=r" (result) );
1090 __ASM
volatile (
"MRS %0, psp" :
"=r" (result) );
1095#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1101__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(
void)
1105 __ASM
volatile (
"MRS %0, psp_ns" :
"=r" (result) );
1118 __ASM
volatile (
"MSR psp, %0" : :
"r" (topOfProcStack) : );
1122#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1128__STATIC_FORCEINLINE
void __TZ_set_PSP_NS(uint32_t topOfProcStack)
1130 __ASM
volatile (
"MSR psp_ns, %0" : :
"r" (topOfProcStack) : );
1144 __ASM
volatile (
"MRS %0, msp" :
"=r" (result) );
1149#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1155__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(
void)
1159 __ASM
volatile (
"MRS %0, msp_ns" :
"=r" (result) );
1172 __ASM
volatile (
"MSR msp, %0" : :
"r" (topOfMainStack) : );
1176#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1182__STATIC_FORCEINLINE
void __TZ_set_MSP_NS(uint32_t topOfMainStack)
1184 __ASM
volatile (
"MSR msp_ns, %0" : :
"r" (topOfMainStack) : );
1189#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1195__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(
void)
1199 __ASM
volatile (
"MRS %0, sp_ns" :
"=r" (result) );
1209__STATIC_FORCEINLINE
void __TZ_set_SP_NS(uint32_t topOfStack)
1211 __ASM
volatile (
"MSR sp_ns, %0" : :
"r" (topOfStack) : );
1225 __ASM
volatile (
"MRS %0, primask" :
"=r" (result) );
1230#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1236__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(
void)
1240 __ASM
volatile (
"MRS %0, primask_ns" :
"=r" (result) );
1253 __ASM
volatile (
"MSR primask, %0" : :
"r" (priMask) :
"memory");
1257#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1263__STATIC_FORCEINLINE
void __TZ_set_PRIMASK_NS(uint32_t priMask)
1265 __ASM
volatile (
"MSR primask_ns, %0" : :
"r" (priMask) :
"memory");
1270#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1271 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1272 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
1278__STATIC_FORCEINLINE
void __enable_fault_irq(
void)
1280 __ASM
volatile (
"cpsie f" : : :
"memory");
1289__STATIC_FORCEINLINE
void __disable_fault_irq(
void)
1291 __ASM
volatile (
"cpsid f" : : :
"memory");
1300__STATIC_FORCEINLINE uint32_t __get_BASEPRI(
void)
1304 __ASM
volatile (
"MRS %0, basepri" :
"=r" (result) );
1309#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1315__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(
void)
1319 __ASM
volatile (
"MRS %0, basepri_ns" :
"=r" (result) );
1330__STATIC_FORCEINLINE
void __set_BASEPRI(uint32_t basePri)
1332 __ASM
volatile (
"MSR basepri, %0" : :
"r" (basePri) :
"memory");
1336#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1342__STATIC_FORCEINLINE
void __TZ_set_BASEPRI_NS(uint32_t basePri)
1344 __ASM
volatile (
"MSR basepri_ns, %0" : :
"r" (basePri) :
"memory");
1355__STATIC_FORCEINLINE
void __set_BASEPRI_MAX(uint32_t basePri)
1357 __ASM
volatile (
"MSR basepri_max, %0" : :
"r" (basePri) :
"memory");
1366__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(
void)
1370 __ASM
volatile (
"MRS %0, faultmask" :
"=r" (result) );
1375#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1381__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(
void)
1385 __ASM
volatile (
"MRS %0, faultmask_ns" :
"=r" (result) );
1396__STATIC_FORCEINLINE
void __set_FAULTMASK(uint32_t faultMask)
1398 __ASM
volatile (
"MSR faultmask, %0" : :
"r" (faultMask) :
"memory");
1402#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1408__STATIC_FORCEINLINE
void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
1410 __ASM
volatile (
"MSR faultmask_ns, %0" : :
"r" (faultMask) :
"memory");
1419#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1420 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
1431__STATIC_FORCEINLINE uint32_t __get_PSPLIM(
void)
1433#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1434 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1439 __ASM
volatile (
"MRS %0, psplim" :
"=r" (result) );
1444#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
1453__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(
void)
1455#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1460 __ASM
volatile (
"MRS %0, psplim_ns" :
"=r" (result) );
1476__STATIC_FORCEINLINE
void __set_PSPLIM(uint32_t ProcStackPtrLimit)
1478#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1479 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1481 (void)ProcStackPtrLimit;
1483 __ASM
volatile (
"MSR psplim, %0" : :
"r" (ProcStackPtrLimit));
1488#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1497__STATIC_FORCEINLINE
void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
1499#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1501 (void)ProcStackPtrLimit;
1503 __ASM
volatile (
"MSR psplim_ns, %0\n" : :
"r" (ProcStackPtrLimit));
1518__STATIC_FORCEINLINE uint32_t __get_MSPLIM(
void)
1520#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1521 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1526 __ASM
volatile (
"MRS %0, msplim" :
"=r" (result) );
1532#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1541__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(
void)
1543#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1548 __ASM
volatile (
"MRS %0, msplim_ns" :
"=r" (result) );
1564__STATIC_FORCEINLINE
void __set_MSPLIM(uint32_t MainStackPtrLimit)
1566#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1567 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1569 (void)MainStackPtrLimit;
1571 __ASM
volatile (
"MSR msplim, %0" : :
"r" (MainStackPtrLimit));
1576#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1585__STATIC_FORCEINLINE
void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
1587#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1589 (void)MainStackPtrLimit;
1591 __ASM
volatile (
"MSR msplim_ns, %0" : :
"r" (MainStackPtrLimit));
1607#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
1608 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
1609#if __has_builtin(__builtin_arm_get_fpscr)
1613 return __builtin_arm_get_fpscr();
1617 __ASM
volatile (
"VMRS %0, fpscr" :
"=r" (result) );
1633#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
1634 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
1635#if __has_builtin(__builtin_arm_set_fpscr)
1639 __builtin_arm_set_fpscr(fpscr);
1641 __ASM
volatile (
"VMSR fpscr, %0" : :
"r" (fpscr) :
"vfpcc",
"memory");
1658#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1660__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
1664 __ASM
volatile (
"sadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1668__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
1672 __ASM (
"qadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1676__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
1680 __ASM (
"shadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1684__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
1688 __ASM
volatile (
"uadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1692__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
1696 __ASM (
"uqadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1700__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
1704 __ASM (
"uhadd8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1709__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
1713 __ASM
volatile (
"ssub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1717__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
1721 __ASM (
"qsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1725__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
1729 __ASM (
"shsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1733__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
1737 __ASM
volatile (
"usub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1741__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
1745 __ASM (
"uqsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1749__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
1753 __ASM (
"uhsub8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1758__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
1762 __ASM
volatile (
"sadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1766__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
1770 __ASM (
"qadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1774__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
1778 __ASM (
"shadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1782__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
1786 __ASM
volatile (
"uadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1790__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
1794 __ASM (
"uqadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1798__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
1802 __ASM (
"uhadd16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1806__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
1810 __ASM
volatile (
"ssub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1814__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
1818 __ASM (
"qsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1822__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
1826 __ASM (
"shsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1830__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
1834 __ASM
volatile (
"usub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1838__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
1842 __ASM (
"uqsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1846__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
1850 __ASM (
"uhsub16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1854__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
1858 __ASM
volatile (
"sasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1862__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
1866 __ASM (
"qasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1870__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
1874 __ASM (
"shasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1878__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
1882 __ASM
volatile (
"uasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1886__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
1890 __ASM (
"uqasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1894__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
1898 __ASM (
"uhasx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1902__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
1906 __ASM
volatile (
"ssax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1910__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
1914 __ASM (
"qsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1918__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
1922 __ASM (
"shsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1926__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
1930 __ASM
volatile (
"usax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1934__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
1938 __ASM (
"uqsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1942__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
1946 __ASM (
"uhsax %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1950__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
1954 __ASM (
"usad8 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1958__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
1962 __ASM (
"usada8 %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
1966#define __SSAT16(ARG1, ARG2) \
1969 int32_t __RES, __ARG1 = (ARG1); \
1970 __ASM volatile ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
1974#define __USAT16(ARG1, ARG2) \
1977 uint32_t __RES, __ARG1 = (ARG1); \
1978 __ASM volatile ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
1982__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
1986 __ASM (
"uxtb16 %0, %1" :
"=r" (result) :
"r" (op1));
1990__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
1994 __ASM (
"uxtab16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
1998__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
2002 __ASM (
"sxtb16 %0, %1" :
"=r" (result) :
"r" (op1));
2006__STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate)
2009 if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) {
2010 __ASM
volatile (
"sxtb16 %0, %1, ROR %2" :
"=r" (result) :
"r" (op1),
"i" (rotate) );
2012 result = __SXTB16(
__ROR(op1, rotate)) ;
2017__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
2021 __ASM (
"sxtab16 %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2025__STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate)
2028 if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) {
2029 __ASM
volatile (
"sxtab16 %0, %1, %2, ROR %3" :
"=r" (result) :
"r" (op1) ,
"r" (op2) ,
"i" (rotate));
2031 result = __SXTAB16(op1,
__ROR(op2, rotate));
2037__STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
2041 __ASM
volatile (
"smuad %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2045__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
2049 __ASM
volatile (
"smuadx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2053__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
2057 __ASM
volatile (
"smlad %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2061__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
2065 __ASM
volatile (
"smladx %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2069__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
2078 __ASM
volatile (
"smlald %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2080 __ASM
volatile (
"smlald %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2086__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
2095 __ASM
volatile (
"smlaldx %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2097 __ASM
volatile (
"smlaldx %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2103__STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
2107 __ASM
volatile (
"smusd %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2111__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
2115 __ASM
volatile (
"smusdx %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2119__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
2123 __ASM
volatile (
"smlsd %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2127__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
2131 __ASM
volatile (
"smlsdx %0, %1, %2, %3" :
"=r" (result) :
"r" (op1),
"r" (op2),
"r" (op3) );
2135__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
2144 __ASM
volatile (
"smlsld %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2146 __ASM
volatile (
"smlsld %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2152__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
2161 __ASM
volatile (
"smlsldx %0, %1, %2, %3" :
"=r" (llr.w32[0]),
"=r" (llr.w32[1]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[0]),
"1" (llr.w32[1]) );
2163 __ASM
volatile (
"smlsldx %0, %1, %2, %3" :
"=r" (llr.w32[1]),
"=r" (llr.w32[0]):
"r" (op1),
"r" (op2) ,
"0" (llr.w32[1]),
"1" (llr.w32[0]) );
2169__STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
2173 __ASM
volatile (
"sel %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2177__STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
2181 __ASM
volatile (
"qadd %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2185__STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2)
2189 __ASM
volatile (
"qsub %0, %1, %2" :
"=r" (result) :
"r" (op1),
"r" (op2) );
2194#define __PKHBT(ARG1,ARG2,ARG3) \
2197 uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
2198 __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
2202#define __PKHTB(ARG1,ARG2,ARG3) \
2205 uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
2207 __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
2209 __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
2214__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
2218 __ASM (
"smmla %0, %1, %2, %3" :
"=r" (result):
"r" (op1),
"r" (op2),
"r" (op3) );
2226#pragma GCC diagnostic pop
__STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
Initializes data and bss sections.
Definition: cmsis_gcc.h:140
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
Signed Saturate.
Definition: cmsis_gcc.h:724
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
Unsigned Saturate.
Definition: cmsis_gcc.h:749
__STATIC_FORCEINLINE void __DSB(void)
Data Synchronization Barrier.
Definition: cmsis_gcc.h:286
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
Reverse byte order (16 bit)
Definition: cmsis_gcc.h:328
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
Rotate Right in unsigned value (32 bit)
Definition: cmsis_gcc.h:363
__STATIC_FORCEINLINE void __DMB(void)
Data Memory Barrier.
Definition: cmsis_gcc.h:297
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
Reverse byte order (16 bit) with sign extension.
Definition: cmsis_gcc.h:343
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
Reverse byte order (32 bit)
Definition: cmsis_gcc.h:309
__STATIC_FORCEINLINE void __ISB(void)
Instruction Synchronization Barrier.
Definition: cmsis_gcc.h:275
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
Count leading zeros.
Definition: cmsis_gcc.h:420
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
Reverse bit order of value.
Definition: cmsis_gcc.h:390
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
Set Control Register.
Definition: cmsis_gcc.h:1018
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
Set Main Stack Pointer.
Definition: cmsis_gcc.h:1170
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
Set Process Stack Pointer.
Definition: cmsis_gcc.h:1116
__STATIC_FORCEINLINE void __disable_irq(void)
Disable IRQ Interrupts.
Definition: cmsis_gcc.h:977
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
Get FPSCR.
Definition: cmsis_gcc.h:1605
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
Get Priority Mask.
Definition: cmsis_gcc.h:1221
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
Get xPSR Register.
Definition: cmsis_gcc.h:1072
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
Set FPSCR.
Definition: cmsis_gcc.h:1631
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
Get Main Stack Pointer.
Definition: cmsis_gcc.h:1140
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
Get Control Register.
Definition: cmsis_gcc.h:988
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
Get Process Stack Pointer.
Definition: cmsis_gcc.h:1086
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
Get APSR Register.
Definition: cmsis_gcc.h:1058
__STATIC_FORCEINLINE void __enable_irq(void)
Enable IRQ Interrupts.
Definition: cmsis_gcc.h:966
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
Get IPSR Register.
Definition: cmsis_gcc.h:1044
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
Set Priority Mask.
Definition: cmsis_gcc.h:1251
void _start(void)
System start entry.
Definition: xnandpsu_onfi.h:185
Definition: intercom.c:87