/* ignore some GCC warnings */
#if defined ( __GNUC__ )
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"
#endif
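/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */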
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_PSP(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, psp\n"  : "=r" (result) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0\n" : : "r" (topOfProcStack) : "sp");
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_MSP(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, msp\n" : "=r" (result) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0\n" : : "r" (topOfMainStack) : "sp");
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
#if (__CORTEX_M >= 0x03U)
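/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */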
__attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}
__attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_BASEPRI(uint32_t value)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (value) : "memory");
}
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_BASEPRI_MAX(uint32_t value)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (value) : "memory");
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}
#endif /* (__CORTEX_M >= 0x03U) */

#if (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U)
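/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return               Floating Point Status/Control register value
 */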
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  uint32_t result;

  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  return(result);
#else
  return(0U);
#endif
}
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc");
#endif
}

#endif /* (__CORTEX_M == 0x04U) || (__CORTEX_M == 0x07U) */

/*@} end of CMSIS_Core_RegAccFunctions */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif
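/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */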
__attribute__((always_inline)) __STATIC_INLINE void __NOP(void)
{
  __ASM volatile ("nop");
}
__attribute__((always_inline)) __STATIC_INLINE void __WFI(void)
{
  __ASM volatile ("wfi");
}
__attribute__((always_inline)) __STATIC_INLINE void __WFE(void)
{
  __ASM volatile ("wfe");
}
__attribute__((always_inline)) __STATIC_INLINE void __SEV(void)
{
  __ASM volatile ("sev");
}
__attribute__((always_inline)) __STATIC_INLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}
__attribute__((always_inline)) __STATIC_INLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}
__attribute__((always_inline)) __STATIC_INLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}
__attribute__((always_inline)) __STATIC_INLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}
__attribute__((always_inline)) __STATIC_INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
__attribute__((always_inline)) __STATIC_INLINE int32_t __REVSH(int32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (short)__builtin_bswap16(value);
#else
  int32_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}
__attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  /* note: op2 must be in the range 1..31; a shift by (32U - 0U) is undefined in C */
  return (op1 >> op2) | (op1 << (32U - op2));
}
#define __BKPT(value)                       __ASM volatile ("bkpt "#value)
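/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */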
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

#if (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  int32_t s = 4 /*sizeof(v)*/ * 8 - 1;  /* extra shift needed at end */

  result = value;                       /* result will hold the reversed bits */
  for (value >>= 1U; value; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                         /* shift when value's highest bits are zero */
#endif
  return(result);
}
#define __CLZ             __builtin_clz
#if (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U)
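/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */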
__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint8_t) result);    /* Add explicit type cast here */
}
__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint16_t) result);    /* Add explicit type cast here */
}
__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
    uint32_t result;

   __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
   return(result);
}
__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
   uint32_t result;

   __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
   return(result);
}
__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
   uint32_t result;

   __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
   return(result);
}
__attribute__((always_inline)) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
   uint32_t result;

   __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
   return(result);
}
__attribute__((always_inline)) __STATIC_INLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDRBT(volatile uint8_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint8_t) result);    /* Add explicit type cast here */
}
__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDRHT(volatile uint16_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint16_t) result);    /* Add explicit type cast here */
}
__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDRT(volatile uint32_t *addr)
{
    uint32_t result;

   __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*addr) );
   return(result);
}
__attribute__((always_inline)) __STATIC_INLINE void __STRBT(uint8_t value, volatile uint8_t *addr)
{
   __ASM volatile ("strbt %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );
}
__attribute__((always_inline)) __STATIC_INLINE void __STRHT(uint16_t value, volatile uint16_t *addr)
{
   __ASM volatile ("strht %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );
}
__attribute__((always_inline)) __STATIC_INLINE void __STRT(uint32_t value, volatile uint32_t *addr)
{
   __ASM volatile ("strt %1, %0" : "=Q" (*addr) : "r" (value) );
}

#endif /* (__CORTEX_M >= 0x03U) || (__CORTEX_SC >= 300U) */

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */
#if (__CORTEX_M >= 0x04U)  /* only for Cortex-M4 and above */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__CORTEX_M >= 0x04) */

/*@} end of group CMSIS_SIMD_intrinsics */
#if defined ( __GNUC__ )
#pragma GCC diagnostic pop
#endif