/* cmsis_gcc.h - CMSIS compiler-specific (GCC) intrinsics header (stray indexing banner replaced with a valid C comment) */
0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028 #ifndef __CMSIS_GCC_H
0029 #define __CMSIS_GCC_H
0030
0031
0032 #pragma GCC diagnostic push
0033 #pragma GCC diagnostic ignored "-Wsign-conversion"
0034 #pragma GCC diagnostic ignored "-Wconversion"
0035 #pragma GCC diagnostic ignored "-Wunused-parameter"
0036
0037
0038 #ifndef __has_builtin
0039 #define __has_builtin(x) (0)
0040 #endif
0041
0042
0043 #ifndef __ASM
0044 #define __ASM __asm
0045 #endif
0046 #ifndef __INLINE
0047 #define __INLINE inline
0048 #endif
0049 #ifndef __STATIC_INLINE
0050 #define __STATIC_INLINE static inline
0051 #endif
0052 #ifndef __STATIC_FORCEINLINE
0053 #define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline
0054 #endif
0055 #ifndef __NO_RETURN
0056 #define __NO_RETURN __attribute__((__noreturn__))
0057 #endif
0058 #ifndef __USED
0059 #define __USED __attribute__((used))
0060 #endif
0061 #ifndef __WEAK
0062 #define __WEAK __attribute__((weak))
0063 #endif
0064 #ifndef __PACKED
0065 #define __PACKED __attribute__((packed, aligned(1)))
0066 #endif
0067 #ifndef __PACKED_STRUCT
0068 #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
0069 #endif
0070 #ifndef __PACKED_UNION
0071 #define __PACKED_UNION union __attribute__((packed, aligned(1)))
0072 #endif
0073 #ifndef __UNALIGNED_UINT32
0074 #pragma GCC diagnostic push
0075 #pragma GCC diagnostic ignored "-Wpacked"
0076 #pragma GCC diagnostic ignored "-Wattributes"
0077 struct __attribute__((packed)) T_UINT32 { uint32_t v; };
0078 #pragma GCC diagnostic pop
0079 #define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
0080 #endif
0081 #ifndef __UNALIGNED_UINT16_WRITE
0082 #pragma GCC diagnostic push
0083 #pragma GCC diagnostic ignored "-Wpacked"
0084 #pragma GCC diagnostic ignored "-Wattributes"
0085 __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
0086 #pragma GCC diagnostic pop
0087 #define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
0088 #endif
0089 #ifndef __UNALIGNED_UINT16_READ
0090 #pragma GCC diagnostic push
0091 #pragma GCC diagnostic ignored "-Wpacked"
0092 #pragma GCC diagnostic ignored "-Wattributes"
0093 __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
0094 #pragma GCC diagnostic pop
0095 #define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
0096 #endif
0097 #ifndef __UNALIGNED_UINT32_WRITE
0098 #pragma GCC diagnostic push
0099 #pragma GCC diagnostic ignored "-Wpacked"
0100 #pragma GCC diagnostic ignored "-Wattributes"
0101 __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
0102 #pragma GCC diagnostic pop
0103 #define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
0104 #endif
0105 #ifndef __UNALIGNED_UINT32_READ
0106 #pragma GCC diagnostic push
0107 #pragma GCC diagnostic ignored "-Wpacked"
0108 #pragma GCC diagnostic ignored "-Wattributes"
0109 __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
0110 #pragma GCC diagnostic pop
0111 #define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
0112 #endif
0113 #ifndef __ALIGNED
0114 #define __ALIGNED(x) __attribute__((aligned(x)))
0115 #endif
0116 #ifndef __RESTRICT
0117 #define __RESTRICT __restrict
0118 #endif
0119 #ifndef __COMPILER_BARRIER
0120 #define __COMPILER_BARRIER() __ASM volatile("":::"memory")
0121 #endif
0122 #ifndef __NO_INIT
0123 #define __NO_INIT __attribute__ ((section (".bss.noinit")))
0124 #endif
0125 #ifndef __ALIAS
0126 #define __ALIAS(x) __attribute__ ((alias(x)))
0127 #endif
0128
0129
0130
0131 #ifndef __PROGRAM_START
0132
0133
0134
0135
0136
0137
0138
0139
/**
  \brief   Initializes data and bss sections, then transfers control to _start.
  \details Walks the linker-provided copy table (copies each region word-by-word
           from flash to RAM) and zero table (clears each region), then calls the
           C library entry point _start, which must not return.
           The table symbols (__copy_table_start__ etc.) are defined by the
           linker script, not by this file.
 */
0140 __STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
0141 {
0142 #ifdef __rtems__
0143 #pragma GCC diagnostic push
/* RTEMS builds warn on nested extern declarations; suppress locally. */
0144 #pragma GCC diagnostic ignored "-Wnested-externs"
0145 #endif
0146 extern void _start(void) __NO_RETURN;
0147
/* One entry per initialized-data region: source (flash), destination (RAM), length in 32-bit words. */
0148 typedef struct __copy_table {
0149 uint32_t const* src;
0150 uint32_t* dest;
0151 uint32_t wlen;
0152 } __copy_table_t;
0153
/* One entry per zero-initialized region: destination and length in 32-bit words. */
0154 typedef struct __zero_table {
0155 uint32_t* dest;
0156 uint32_t wlen;
0157 } __zero_table_t;
0158
0159 extern const __copy_table_t __copy_table_start__;
0160 extern const __copy_table_t __copy_table_end__;
0161 extern const __zero_table_t __zero_table_start__;
0162 extern const __zero_table_t __zero_table_end__;
0163
0164 #ifdef __rtems__
0165 #pragma GCC diagnostic pop
0166 #endif
0167
/* Copy initialized data from load address to run address, one word at a time. */
0168 for (__copy_table_t const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
0169 for(uint32_t i=0u; i<pTable->wlen; ++i) {
0170 pTable->dest[i] = pTable->src[i];
0171 }
0172 }
0173
/* Zero the bss-style regions. */
0174 for (__zero_table_t const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
0175 for(uint32_t i=0u; i<pTable->wlen; ++i) {
0176 pTable->dest[i] = 0u;
0177 }
0178 }
0179
/* Hand off to the C runtime entry point; declared __NO_RETURN above. */
0180 _start();
0181 }
0182
0183 #define __PROGRAM_START __cmsis_start
0184 #endif
0185
0186 #ifndef __INITIAL_SP
0187 #define __INITIAL_SP __StackTop
0188 #endif
0189
0190 #ifndef __STACK_LIMIT
0191 #define __STACK_LIMIT __StackLimit
0192 #endif
0193
0194 #ifndef __VECTOR_TABLE
0195 #define __VECTOR_TABLE __Vectors
0196 #endif
0197
0198 #ifndef __VECTOR_TABLE_ATTRIBUTE
0199 #define __VECTOR_TABLE_ATTRIBUTE __attribute__((used, section(".vectors")))
0200 #endif
0201
0202 #if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U)
0203 #ifndef __STACK_SEAL
0204 #define __STACK_SEAL __StackSeal
0205 #endif
0206
0207 #ifndef __TZ_STACK_SEAL_SIZE
0208 #define __TZ_STACK_SEAL_SIZE 8U
0209 #endif
0210
0211 #ifndef __TZ_STACK_SEAL_VALUE
0212 #define __TZ_STACK_SEAL_VALUE 0xFEF5EDA5FEF5EDA5ULL
0213 #endif
0214
0215
/**
  \brief   Write the TrustZone stack-seal value at the given stack top.
  \details Stores the 64-bit __TZ_STACK_SEAL_VALUE through a uint64_t cast of
           stackTop; the caller must supply an 8-byte-aligned location reserved
           for the seal (see __TZ_STACK_SEAL_SIZE above).
  \param [in] stackTop  Pointer to the stack-seal location.
 */
0216 __STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
0217 *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
0218 }
0219 #endif
0220
0221
0222
0223
0224
0225
0226
0227
0228
0229
0230
/* Inline-asm register constraints: Thumb-1 ("lo" registers only) needs the "l"
   constraint; Thumb-2/ARM can use any core register via "r". */
0231 #if defined (__thumb__) && !defined (__thumb2__)
0232 #define __CMSIS_GCC_OUT_REG(r) "=l" (r)
0233 #define __CMSIS_GCC_RW_REG(r) "+l" (r)
0234 #define __CMSIS_GCC_USE_REG(r) "l" (r)
0235 #else
0236 #define __CMSIS_GCC_OUT_REG(r) "=r" (r)
0237 #define __CMSIS_GCC_RW_REG(r) "+r" (r)
0238 #define __CMSIS_GCC_USE_REG(r) "r" (r)
0239 #endif
0240
0241
0242
0243
0244
/** \brief No Operation - does nothing; useful for code alignment or short delays. */
0245 #define __NOP() __ASM volatile ("nop")
0246
0247
0248
0249
0250
/** \brief Wait For Interrupt - suspends execution until an interrupt/event occurs ("memory" clobber orders surrounding accesses). */
0251 #define __WFI() __ASM volatile ("wfi":::"memory")
0252
0253
0254
0255
0256
0257
0258
/** \brief Wait For Event - suspends execution until one of a number of events occurs. */
0259 #define __WFE() __ASM volatile ("wfe":::"memory")
0260
0261
0262
0263
0264
0265
/** \brief Send Event - signals an event to all cores in a multiprocessor system. */
0266 #define __SEV() __ASM volatile ("sev")
0267
0268
0269
0270
0271
0272
0273
0274
/**
  \brief   Instruction Synchronization Barrier.
  \details Executes "isb 0xF" (full-system ISB), flushing the pipeline so that
           all following instructions are re-fetched after the barrier.
 */
0275 __STATIC_FORCEINLINE void __ISB(void)
0276 {
0277 __ASM volatile ("isb 0xF":::"memory");
0278 }
0279
0280
0281
0282
0283
0284
0285
/**
  \brief   Data Synchronization Barrier.
  \details Executes "dsb 0xF" (full-system DSB); completes when all preceding
           explicit memory accesses have completed.
 */
0286 __STATIC_FORCEINLINE void __DSB(void)
0287 {
0288 __ASM volatile ("dsb 0xF":::"memory");
0289 }
0290
0291
0292
0293
0294
0295
0296
/**
  \brief   Data Memory Barrier.
  \details Executes "dmb 0xF" (full-system DMB); orders memory accesses before
           and after the barrier.
 */
0297 __STATIC_FORCEINLINE void __DMB(void)
0298 {
0299 __ASM volatile ("dmb 0xF":::"memory");
0300 }
0301
0302
0303
0304
0305
0306
0307
0308
/**
  \brief   Reverse byte order (32 bit).
  \details Returns value with its four bytes reversed (0x12345678 -> 0x78563412).
           Uses the GCC builtin where available (GCC >= 4.5), otherwise the REV
           instruction via inline assembly.
  \param [in] value  Value to reverse.
  \return            Reversed value.
 */
0309 __STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
0310 {
0311 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
0312 return __builtin_bswap32(value);
0313 #else
0314 uint32_t result;
0315
0316 __ASM ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
0317 return result;
0318 #endif
0319 }
0320
0321
0322
0323
0324
0325
0326
0327
/**
  \brief   Reverse byte order within each 16-bit halfword (REV16 instruction).
  \param [in] value  Value to reverse.
  \return            Value with bytes swapped inside each halfword.
 */
0328 __STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
0329 {
0330 uint32_t result;
0331
0332 __ASM ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
0333 return result;
0334 }
0335
0336
0337
0338
0339
0340
0341
0342
/**
  \brief   Reverse byte order of a signed 16-bit value.
  \details Uses __builtin_bswap16 where available (GCC >= 4.8), otherwise the
           REVSH instruction.
  \param [in] value  Value to reverse.
  \return            Byte-swapped value, sign-extended semantics per REVSH.
 */
0343 __STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
0344 {
0345 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
0346 return (int16_t)__builtin_bswap16(value);
0347 #else
0348 int16_t result;
0349
0350 __ASM ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
0351 return result;
0352 #endif
0353 }
0354
0355
0356
0357
0358
0359
0360
0361
0362
/**
  \brief   Rotate Right in unsigned value (32 bit).
  \details Rotates op1 right by op2 bit positions; rotation counts are taken
           modulo 32, and a (reduced) count of zero returns op1 unchanged,
           avoiding the undefined 32-bit shift.
  \param [in] op1  Value to rotate.
  \param [in] op2  Number of bits to rotate.
  \return          Rotated value.
 */
__attribute__((always_inline)) static inline uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  const uint32_t shift = op2 & 0x1FU;   /* equivalent to op2 % 32 for unsigned op2 */

  return (shift == 0U) ? op1
                       : ((op1 >> shift) | (op1 << (32U - shift)));
}
0372
0373
0374
0375
0376
0377
0378
0379
0380
0381 #define __BKPT(value) __ASM volatile ("bkpt "#value)
0382
0383
0384
0385
0386
0387
0388
0389
/**
  \brief   Reverse bit order of a 32-bit value.
  \details On Armv7-M/Armv7E-M/Armv8-M Mainline this maps to the single RBIT
           instruction; on other targets a portable software loop reverses the
           32 bits one at a time.
  \param [in] value  Value whose bits are to be reversed.
  \return            Bit-reversed value.
 */
__attribute__((always_inline)) static inline uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
     (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
  __ASM ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  /* Software fallback: shift bits out of 'value' LSB-first and into
     'result' MSB-first; 32 iterations reverse the full word. */
  result = 0U;
  for (uint32_t bit = 0U; bit < 32U; ++bit)
  {
    result = (result << 1) | (value & 1U);
    value >>= 1;
  }
#endif
  return result;
}
0412
0413
0414
0415
0416
0417
0418
0419
/**
  \brief   Count leading zeros of a 32-bit value.
  \details Wraps __builtin_clz; the zero case is handled explicitly because
           __builtin_clz(0) is undefined behavior, while the CLZ instruction
           (and this function) define the result for 0 as 32.
  \param [in] value  Value to examine.
  \return            Number of leading zero bits (0..32).
 */
__attribute__((always_inline)) static inline uint8_t __CLZ(uint32_t value)
{
  if (value != 0U)
  {
    return (uint8_t)__builtin_clz(value);
  }
  return 32U;  /* __builtin_clz(0) is undefined; CLZ defines it as 32 */
}
0437
0438
0439 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
0440 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
0441 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
0442 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
0443
0444
0445
0446
0447
0448
0449 __STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
0450 {
0451 uint32_t result;
0452
0453 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
0454 __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
0455 #else
0456
0457
0458
0459 __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
0460 #endif
0461 return ((uint8_t) result);
0462 }
0463
0464
0465
0466
0467
0468
0469
0470
0471 __STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
0472 {
0473 uint32_t result;
0474
0475 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
0476 __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
0477 #else
0478
0479
0480
0481 __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
0482 #endif
0483 return ((uint16_t) result);
0484 }
0485
0486
0487
0488
0489
0490
0491
0492
0493 __STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
0494 {
0495 uint32_t result;
0496
0497 __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
0498 return(result);
0499 }
0500
0501
0502
0503
0504
0505
0506
0507
0508
0509
0510 __STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
0511 {
0512 uint32_t result;
0513
0514 __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
0515 return(result);
0516 }
0517
0518
0519
0520
0521
0522
0523
0524
0525
0526
0527 __STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
0528 {
0529 uint32_t result;
0530
0531 __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
0532 return(result);
0533 }
0534
0535
0536
0537
0538
0539
0540
0541
0542
0543
0544 __STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
0545 {
0546 uint32_t result;
0547
0548 __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
0549 return(result);
0550 }
0551
0552
0553
0554
0555
0556
0557 __STATIC_FORCEINLINE void __CLREX(void)
0558 {
0559 __ASM volatile ("clrex" ::: "memory");
0560 }
0561
0562 #endif
0563
0564
0565
0566
0567
0568 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
0569 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
0570 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
0571
0572
0573
0574
0575
0576
0577
/**
  \brief   Signed Saturate (SSAT instruction).
  \details Saturates a signed value to a signed range of ARG2 bits.
           ARG2 must be a compile-time constant in 1..32 (the "I" constraint
           requires an immediate); "cc" is clobbered because SSAT sets the
           Q (saturation) flag.
 */
0578 #define __SSAT(ARG1, ARG2) \
0579 __extension__ \
0580 ({ \
0581 int32_t __RES, __ARG1 = (ARG1); \
0582 __ASM volatile ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
0583 __RES; \
0584 })
0585
0586
0587
0588
0589
0590
0591
0592
0593
/**
  \brief   Unsigned Saturate (USAT instruction).
  \details Saturates a signed value to an unsigned range of ARG2 bits.
           ARG2 must be a compile-time constant immediate; "cc" is clobbered
           because USAT sets the Q (saturation) flag.
 */
0594 #define __USAT(ARG1, ARG2) \
0595 __extension__ \
0596 ({ \
0597 uint32_t __RES, __ARG1 = (ARG1); \
0598 __ASM volatile ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \
0599 __RES; \
0600 })
0601
0602
0603
0604
0605
0606
0607
0608
0609
0610 __STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
0611 {
0612 uint32_t result;
0613
0614 __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
0615 return(result);
0616 }
0617
0618
0619
0620
0621
0622
0623
0624
0625 __STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
0626 {
0627 uint32_t result;
0628
0629 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
0630 __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
0631 #else
0632
0633
0634
0635 __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
0636 #endif
0637 return ((uint8_t) result);
0638 }
0639
0640
0641
0642
0643
0644
0645
0646
0647 __STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
0648 {
0649 uint32_t result;
0650
0651 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
0652 __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
0653 #else
0654
0655
0656
0657 __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
0658 #endif
0659 return ((uint16_t) result);
0660 }
0661
0662
0663
0664
0665
0666
0667
0668
0669 __STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
0670 {
0671 uint32_t result;
0672
0673 __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
0674 return(result);
0675 }
0676
0677
0678
0679
0680
0681
0682
0683
0684 __STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
0685 {
0686 __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
0687 }
0688
0689
0690
0691
0692
0693
0694
0695
0696 __STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
0697 {
0698 __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
0699 }
0700
0701
0702
0703
0704
0705
0706
0707
0708 __STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
0709 {
0710 __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
0711 }
0712
0713 #else
0714
0715
0716
0717
0718
0719
0720
0721
0722
0723
/**
  \brief   Signed Saturate (software fallback for cores without SSAT).
  \details Clamps val to the signed range representable in sat bits, i.e.
           [-(2^(sat-1)), 2^(sat-1) - 1]. For sat outside 1..32 the value is
           returned unchanged (matching the behavior of the original
           implementation for out-of-range bit counts).
  \param [in] val  Value to saturate.
  \param [in] sat  Bit position to saturate to (1..32).
  \return          Saturated value.
 */
__attribute__((always_inline)) static inline int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat < 1U) || (sat > 32U))
  {
    return val;  /* out-of-range bit count: pass through unchanged */
  }

  const int32_t upper = (int32_t)((1U << (sat - 1U)) - 1U);
  const int32_t lower = -1 - upper;

  if (val > upper)
  {
    return upper;
  }
  if (val < lower)
  {
    return lower;
  }
  return val;
}
0741
0742
0743
0744
0745
0746
0747
0748
/**
  \brief   Unsigned Saturate (software fallback for cores without USAT).
  \details Clamps val to the unsigned range representable in sat bits, i.e.
           [0, 2^sat - 1]. For sat > 31 the value is simply converted to
           uint32_t (matching the original implementation, where any 32-bit
           signed value fits after conversion).
  \param [in] val  Value to saturate.
  \param [in] sat  Bit position to saturate to (0..31).
  \return          Saturated value.
 */
__attribute__((always_inline)) static inline uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat > 31U)
  {
    return (uint32_t)val;  /* full-width saturation: nothing to clamp */
  }

  if (val < 0)
  {
    return 0U;
  }

  const uint32_t limit = (1U << sat) - 1U;
  return ((uint32_t)val > limit) ? limit : (uint32_t)val;
}
0765
0766 #endif
0767
0768
0769
0770
0771 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
0772 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
0773
0774
0775
0776
0777
0778
0779 __STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
0780 {
0781 uint32_t result;
0782
0783 __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
0784 return ((uint8_t) result);
0785 }
0786
0787
0788
0789
0790
0791
0792
0793
0794 __STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
0795 {
0796 uint32_t result;
0797
0798 __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
0799 return ((uint16_t) result);
0800 }
0801
0802
0803
0804
0805
0806
0807
0808
0809 __STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
0810 {
0811 uint32_t result;
0812
0813 __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
0814 return(result);
0815 }
0816
0817
0818
0819
0820
0821
0822
0823
0824 __STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
0825 {
0826 __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
0827 }
0828
0829
0830
0831
0832
0833
0834
0835
0836 __STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
0837 {
0838 __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
0839 }
0840
0841
0842
0843
0844
0845
0846
0847
0848 __STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
0849 {
0850 __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
0851 }
0852
0853
0854
0855
0856
0857
0858
0859
0860 __STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
0861 {
0862 uint32_t result;
0863
0864 __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
0865 return ((uint8_t) result);
0866 }
0867
0868
0869
0870
0871
0872
0873
0874
0875 __STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
0876 {
0877 uint32_t result;
0878
0879 __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
0880 return ((uint16_t) result);
0881 }
0882
0883
0884
0885
0886
0887
0888
0889
0890 __STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
0891 {
0892 uint32_t result;
0893
0894 __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
0895 return(result);
0896 }
0897
0898
0899
0900
0901
0902
0903
0904
0905
0906
0907 __STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
0908 {
0909 uint32_t result;
0910
0911 __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
0912 return(result);
0913 }
0914
0915
0916
0917
0918
0919
0920
0921
0922
0923
0924 __STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
0925 {
0926 uint32_t result;
0927
0928 __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
0929 return(result);
0930 }
0931
0932
0933
0934
0935
0936
0937
0938
0939
0940
0941 __STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
0942 {
0943 uint32_t result;
0944
0945 __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
0946 return(result);
0947 }
0948
0949 #endif
0950
0951
0952
0953
0954
0955
0956
0957
0958
0959
0960
0961
0962
0963
0964
0965
/**
  \brief   Enable IRQ interrupts.
  \details Clears PRIMASK via "cpsie i"; the "memory" clobber prevents the
           compiler from moving memory accesses across the enable point.
 */
0966 __STATIC_FORCEINLINE void __enable_irq(void)
0967 {
0968 __ASM volatile ("cpsie i" : : : "memory");
0969 }
0970
0971
0972
0973
0974
0975
0976
/**
  \brief   Disable IRQ interrupts.
  \details Sets PRIMASK via "cpsid i"; the "memory" clobber prevents the
           compiler from moving memory accesses across the disable point.
 */
0977 __STATIC_FORCEINLINE void __disable_irq(void)
0978 {
0979 __ASM volatile ("cpsid i" : : : "memory");
0980 }
0981
0982
0983
0984
0985
0986
0987
/**
  \brief   Get Control Register.
  \details Reads the CONTROL special register via MRS.
  \return  CONTROL register value.
 */
0988 __STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
0989 {
0990 uint32_t result;
0991
0992 __ASM volatile ("MRS %0, control" : "=r" (result) );
0993 return(result);
0994 }
0995
0996
0997 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
0998
0999
1000
1001
1002
1003 __STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
1004 {
1005 uint32_t result;
1006
1007 __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
1008 return(result);
1009 }
1010 #endif
1011
1012
1013
1014
1015
1016
1017
1018 __STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
1019 {
1020 __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
1021 __ISB();
1022 }
1023
1024
1025 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1026
1027
1028
1029
1030
1031 __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
1032 {
1033 __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
1034 __ISB();
1035 }
1036 #endif
1037
1038
1039
1040
1041
1042
1043
1044 __STATIC_FORCEINLINE uint32_t __get_IPSR(void)
1045 {
1046 uint32_t result;
1047
1048 __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
1049 return(result);
1050 }
1051
1052
1053
1054
1055
1056
1057
1058 __STATIC_FORCEINLINE uint32_t __get_APSR(void)
1059 {
1060 uint32_t result;
1061
1062 __ASM volatile ("MRS %0, apsr" : "=r" (result) );
1063 return(result);
1064 }
1065
1066
1067
1068
1069
1070
1071
1072 __STATIC_FORCEINLINE uint32_t __get_xPSR(void)
1073 {
1074 uint32_t result;
1075
1076 __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
1077 return(result);
1078 }
1079
1080
1081
1082
1083
1084
1085
1086 __STATIC_FORCEINLINE uint32_t __get_PSP(void)
1087 {
1088 uint32_t result;
1089
1090 __ASM volatile ("MRS %0, psp" : "=r" (result) );
1091 return(result);
1092 }
1093
1094
1095 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1096
1097
1098
1099
1100
1101 __STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
1102 {
1103 uint32_t result;
1104
1105 __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
1106 return(result);
1107 }
1108 #endif
1109
1110
1111
1112
1113
1114
1115
1116 __STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
1117 {
1118 __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
1119 }
1120
1121
1122 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1123
1124
1125
1126
1127
1128 __STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
1129 {
1130 __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
1131 }
1132 #endif
1133
1134
1135
1136
1137
1138
1139
1140 __STATIC_FORCEINLINE uint32_t __get_MSP(void)
1141 {
1142 uint32_t result;
1143
1144 __ASM volatile ("MRS %0, msp" : "=r" (result) );
1145 return(result);
1146 }
1147
1148
1149 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1150
1151
1152
1153
1154
1155 __STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
1156 {
1157 uint32_t result;
1158
1159 __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
1160 return(result);
1161 }
1162 #endif
1163
1164
1165
1166
1167
1168
1169
1170 __STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
1171 {
1172 __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
1173 }
1174
1175
1176 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1177
1178
1179
1180
1181
1182 __STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
1183 {
1184 __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
1185 }
1186 #endif
1187
1188
1189 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1190
1191
1192
1193
1194
1195 __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
1196 {
1197 uint32_t result;
1198
1199 __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
1200 return(result);
1201 }
1202
1203
1204
1205
1206
1207
1208
1209 __STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
1210 {
1211 __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
1212 }
1213 #endif
1214
1215
1216
1217
1218
1219
1220
1221 __STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
1222 {
1223 uint32_t result;
1224
1225 __ASM volatile ("MRS %0, primask" : "=r" (result) );
1226 return(result);
1227 }
1228
1229
1230 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1231
1232
1233
1234
1235
1236 __STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
1237 {
1238 uint32_t result;
1239
1240 __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
1241 return(result);
1242 }
1243 #endif
1244
1245
1246
1247
1248
1249
1250
1251 __STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
1252 {
1253 __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
1254 }
1255
1256
1257 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1258
1259
1260
1261
1262
1263 __STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
1264 {
1265 __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
1266 }
1267 #endif
1268
1269
1270 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
1271 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
1272 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
1273
1274
1275
1276
1277
1278 __STATIC_FORCEINLINE void __enable_fault_irq(void)
1279 {
1280 __ASM volatile ("cpsie f" : : : "memory");
1281 }
1282
1283
1284
1285
1286
1287
1288
1289 __STATIC_FORCEINLINE void __disable_fault_irq(void)
1290 {
1291 __ASM volatile ("cpsid f" : : : "memory");
1292 }
1293
1294
1295
1296
1297
1298
1299
1300 __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
1301 {
1302 uint32_t result;
1303
1304 __ASM volatile ("MRS %0, basepri" : "=r" (result) );
1305 return(result);
1306 }
1307
1308
1309 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1310
1311
1312
1313
1314
1315 __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
1316 {
1317 uint32_t result;
1318
1319 __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
1320 return(result);
1321 }
1322 #endif
1323
1324
1325
1326
1327
1328
1329
1330 __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
1331 {
1332 __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
1333 }
1334
1335
1336 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1337
1338
1339
1340
1341
1342 __STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
1343 {
1344 __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
1345 }
1346 #endif
1347
1348
1349
1350
1351
1352
1353
1354
1355 __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
1356 {
1357 __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
1358 }
1359
1360
1361
1362
1363
1364
1365
1366 __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
1367 {
1368 uint32_t result;
1369
1370 __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
1371 return(result);
1372 }
1373
1374
1375 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1376
1377
1378
1379
1380
1381 __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
1382 {
1383 uint32_t result;
1384
1385 __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
1386 return(result);
1387 }
1388 #endif
1389
1390
1391
1392
1393
1394
1395
1396 __STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
1397 {
1398 __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
1399 }
1400
1401
1402 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1403
1404
1405
1406
1407
1408 __STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
1409 {
1410 __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
1411 }
1412 #endif
1413
1414 #endif
1415
1416
1417
1418
1419 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1420 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431 __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
1432 {
1433 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1434 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1435
1436 return 0U;
1437 #else
1438 uint32_t result;
1439 __ASM volatile ("MRS %0, psplim" : "=r" (result) );
1440 return result;
1441 #endif
1442 }
1443
1444 #if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
1445
1446
1447
1448
1449
1450
1451
1452
1453 __STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
1454 {
1455 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1456
1457 return 0U;
1458 #else
1459 uint32_t result;
1460 __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
1461 return result;
1462 #endif
1463 }
1464 #endif
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476 __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
1477 {
1478 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1479 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1480
1481 (void)ProcStackPtrLimit;
1482 #else
1483 __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
1484 #endif
1485 }
1486
1487
1488 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1489
1490
1491
1492
1493
1494
1495
1496
1497 __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
1498 {
1499 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1500
1501 (void)ProcStackPtrLimit;
1502 #else
1503 __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
1504 #endif
1505 }
1506 #endif
1507
1508
1509
1510
1511
1512
1513
1514
1515
1516
1517
1518 __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
1519 {
1520 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1521 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1522
1523 return 0U;
1524 #else
1525 uint32_t result;
1526 __ASM volatile ("MRS %0, msplim" : "=r" (result) );
1527 return result;
1528 #endif
1529 }
1530
1531
1532 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1533
1534
1535
1536
1537
1538
1539
1540
1541 __STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
1542 {
1543 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1544
1545 return 0U;
1546 #else
1547 uint32_t result;
1548 __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
1549 return result;
1550 #endif
1551 }
1552 #endif
1553
1554
1555
1556
1557
1558
1559
1560
1561
1562
1563
1564 __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
1565 {
1566 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
1567 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
1568
1569 (void)MainStackPtrLimit;
1570 #else
1571 __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
1572 #endif
1573 }
1574
1575
1576 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1577
1578
1579
1580
1581
1582
1583
1584
1585 __STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
1586 {
1587 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
1588
1589 (void)MainStackPtrLimit;
1590 #else
1591 __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
1592 #endif
1593 }
1594 #endif
1595
1596 #endif
1597
1598
1599
1600
1601
1602
1603
1604
/**
  \brief   Get FPSCR (Floating-Point Status and Control Register).
  \details Returns FPSCR when an FPU is present and used
           (__FPU_PRESENT && __FPU_USED); otherwise returns 0.
           Prefers the compiler builtin when available, else uses VMRS.
  \return  FPSCR value, or 0 if no FPU is configured.
 */
1605 __STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
1606 {
1607 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
1608 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
1609 #if __has_builtin(__builtin_arm_get_fpscr)
1610
1611
1612
1613 return __builtin_arm_get_fpscr();
1614 #else
1615 uint32_t result;
1616
1617 __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
1618 return(result);
1619 #endif
1620 #else
/* No FPU configured: FPSCR does not exist, report 0. */
1621 return(0U);
1622 #endif
1623 }
1624
1625
1626
1627
1628
1629
1630
/**
  \brief   Set FPSCR (Floating-Point Status and Control Register).
  \details Writes FPSCR when an FPU is present and used; otherwise the
           argument is ignored. Prefers the compiler builtin when available,
           else uses VMSR ("vfpcc" and "memory" clobbered because FPSCR holds
           the FP condition/status flags).
  \param [in] fpscr  Value to write.
 */
1631 __STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
1632 {
1633 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
1634 (defined (__FPU_USED ) && (__FPU_USED == 1U)) )
1635 #if __has_builtin(__builtin_arm_set_fpscr)
1636
1637
1638
1639 __builtin_arm_set_fpscr(fpscr);
1640 #else
1641 __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
1642 #endif
1643 #else
/* No FPU configured: silently ignore the write. */
1644 (void)fpscr;
1645 #endif
1646 }
1647
1648
1649
1650
1651
1652
1653
1654
1655
1656
1657
1658 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1659
/*
 * SIMD byte-wise add/subtract intrinsics (four 8-bit lanes packed in a
 * uint32_t), wrapping the corresponding DSP-extension instructions.
 * Naming convention: S = signed, U = unsigned, Q/UQ = saturating,
 * SH/UH = halving (result of each lane shifted right by one).
 * NOTE(review): per the Arm ARM, the plain S- and U-prefixed forms also
 * update the APSR.GE flags (consumed by __SEL); the saturating and halving
 * forms do not.
 */

/** \brief SADD8: signed addition of each byte lane of op1 and op2. */
__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief QADD8: saturating signed addition of each byte lane. */
__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SHADD8: signed addition of each byte lane, result halved. */
__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UADD8: unsigned addition of each byte lane. */
__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UQADD8: saturating unsigned addition of each byte lane. */
__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UHADD8: unsigned addition of each byte lane, result halved. */
__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SSUB8: signed subtraction of each byte lane (op1 - op2). */
__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief QSUB8: saturating signed subtraction of each byte lane. */
__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SHSUB8: signed subtraction of each byte lane, result halved. */
__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief USUB8: unsigned subtraction of each byte lane. */
__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UQSUB8: saturating unsigned subtraction of each byte lane. */
__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UHSUB8: unsigned subtraction of each byte lane, result halved. */
__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1756
1757
/*
 * SIMD halfword-wise add/subtract intrinsics (two 16-bit lanes packed in a
 * uint32_t).  Same naming scheme as the 8-bit group: S = signed,
 * U = unsigned, Q/UQ = saturating, SH/UH = halving.
 * NOTE(review): per the Arm ARM, the plain S- and U-prefixed forms also
 * update the APSR.GE flags (consumed by __SEL).
 */

/** \brief SADD16: signed addition of each halfword lane. */
__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief QADD16: saturating signed addition of each halfword lane. */
__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SHADD16: signed addition of each halfword lane, result halved. */
__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UADD16: unsigned addition of each halfword lane. */
__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UQADD16: saturating unsigned addition of each halfword lane. */
__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UHADD16: unsigned addition of each halfword lane, result halved. */
__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SSUB16: signed subtraction of each halfword lane (op1 - op2). */
__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief QSUB16: saturating signed subtraction of each halfword lane. */
__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SHSUB16: signed subtraction of each halfword lane, result halved. */
__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief USUB16: unsigned subtraction of each halfword lane. */
__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UQSUB16: saturating unsigned subtraction of each halfword lane. */
__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UHSUB16: unsigned subtraction of each halfword lane, result halved. */
__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1853
/*
 * SIMD halfword add/subtract "with exchange" intrinsics.  Per the Arm ARM:
 * ASX forms exchange the halfwords of op2, then add the top halfwords and
 * subtract the bottom halfwords; SAX forms do the converse (subtract top,
 * add bottom).  Prefix meanings as above (S/U plain, Q/UQ saturating,
 * SH/UH halving); the plain S and U forms update APSR.GE.
 */

/** \brief SASX: signed add-and-subtract with halfword exchange. */
__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief QASX: saturating signed add-and-subtract with exchange. */
__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SHASX: halving signed add-and-subtract with exchange. */
__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UASX: unsigned add-and-subtract with exchange. */
__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UQASX: saturating unsigned add-and-subtract with exchange. */
__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UHASX: halving unsigned add-and-subtract with exchange. */
__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SSAX: signed subtract-and-add with halfword exchange. */
__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief QSAX: saturating signed subtract-and-add with exchange. */
__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SHSAX: halving signed subtract-and-add with exchange. */
__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief USAX: unsigned subtract-and-add with exchange. */
__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UQSAX: saturating unsigned subtract-and-add with exchange. */
__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief UHSAX: halving unsigned subtract-and-add with exchange. */
__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1949
/** \brief USAD8: sum of absolute differences of the four unsigned byte
           lanes of op1 and op2. */
__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief USADA8: as USAD8, with the sum accumulated onto op3. */
__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
1965
/* __SSAT16(ARG1, ARG2): saturate each signed halfword lane of ARG1 to an
 * ARG2-bit signed range.  ARG2 must be a compile-time constant (it feeds
 * the asm "I" immediate constraint); per the Arm ARM the valid range is
 * 1..16.  "cc" clobber: the instruction sets the Q (saturation) flag. */
#define __SSAT16(ARG1, ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })

/* __USAT16(ARG1, ARG2): saturate each signed halfword lane of ARG1 to an
 * unsigned ARG2-bit range.  ARG2 must be a compile-time constant; per the
 * Arm ARM the valid range is 0..15. */
#define __USAT16(ARG1, ARG2) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })
1981
/** \brief UXTB16: zero-extend bytes 0 and 2 of op1 into the two halfword
           lanes of the result. */
__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

/** \brief UXTAB16: zero-extend bytes 0 and 2 of op2 and add each to the
           corresponding halfword lane of op1. */
__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SXTB16: sign-extend bytes 0 and 2 of op1 into the two halfword
           lanes of the result. */
__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

/** \brief SXTB16_RORn: SXTB16 applied to op1 rotated right by \p rotate bits.
    \details When \p rotate is a compile-time constant of 8, 16 or 24 the
             instruction's own ROR operand form is used (only these rotate
             amounts are encodable); otherwise it falls back to an explicit
             __ROR followed by __SXTB16, which yields the same result. */
__STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate)
{
  uint32_t result;
  if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) {
    __ASM volatile ("sxtb16 %0, %1, ROR %2" : "=r" (result) : "r" (op1), "i" (rotate) );
  } else {
    result = __SXTB16(__ROR(op1, rotate)) ;
  }
  return result;
}

/** \brief SXTAB16: sign-extend bytes 0 and 2 of op2 and add each to the
           corresponding halfword lane of op1. */
__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SXTAB16_RORn: SXTAB16 with op2 rotated right by \p rotate bits.
    \details Same constant-rotate strategy as __SXTB16_RORn: encodable
             constant rotates (8/16/24) use the instruction's ROR form,
             anything else falls back to __ROR + __SXTAB16. */
__STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate)
{
  uint32_t result;
  if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) {
    __ASM volatile ("sxtab16 %0, %1, %2, ROR %3" : "=r" (result) : "r" (op1) , "r" (op2) , "i" (rotate));
  } else {
    result = __SXTAB16(op1, __ROR(op2, rotate));
  }
  return result;
}
2035
2036
/*
 * Dual signed 16x16 multiply with addition of the two products.
 * The X-suffixed forms exchange the halfwords of op2 before multiplying;
 * the SMLA* forms additionally accumulate onto op3.  Per the Arm ARM
 * these instructions can set the Q flag on accumulator overflow.
 */

/** \brief SMUAD: (op1.lo * op2.lo) + (op1.hi * op2.hi). */
__STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SMUADX: as SMUAD with op2 halfwords exchanged. */
__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SMLAD: dual multiply-add accumulated onto op3. */
__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/** \brief SMLADX: as SMLAD with op2 halfwords exchanged. */
__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
2068
/** \brief SMLALD: dual signed 16x16 multiply-add, accumulated onto a 64-bit value.
    \details The union splits the 64-bit accumulator into the two 32-bit
             registers the instruction reads and writes (RdLo, RdHi).  The
             "0"/"1" input constraints tie them to the outputs so the
             accumulator is both consumed and produced in place.  The
             __ARMEB__ branch swaps which array index is the low word for
             big-endian targets. */
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/** \brief SMLALDX: as SMLALD with op2 halfwords exchanged before multiplying. */
__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
2102
/*
 * Dual signed 16x16 multiply with subtraction of the two products
 * (bottom product minus top product).  X-suffixed forms exchange the
 * halfwords of op2; SMLS* forms accumulate onto op3.
 */

/** \brief SMUSD: (op1.lo * op2.lo) - (op1.hi * op2.hi). */
__STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SMUSDX: as SMUSD with op2 halfwords exchanged. */
__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief SMLSD: dual multiply-subtract accumulated onto op3. */
__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

/** \brief SMLSDX: as SMLSD with op2 halfwords exchanged. */
__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
2134
/** \brief SMLSLD: dual signed 16x16 multiply-subtract, accumulated onto a
           64-bit value.  Register-pair handling is identical to __SMLALD
           (union splits the accumulator; __ARMEB__ picks the word order). */
__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

/** \brief SMLSLDX: as SMLSLD with op2 halfwords exchanged before multiplying. */
__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
2168
/** \brief SEL: select each result byte from op1 or op2 according to the
           APSR.GE flags (set by the plain S- and U-form SIMD intrinsics
           above).  `volatile` keeps this ordered after the GE-setting
           instruction. */
__STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief QADD: saturating signed 32-bit addition (sets the Q flag on
           saturation). */
__STATIC_FORCEINLINE  int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

/** \brief QSUB: saturating signed 32-bit subtraction (sets the Q flag on
           saturation). */
__STATIC_FORCEINLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
2192
2193
/* __PKHBT(ARG1, ARG2, ARG3): pack halfwords, bottom-then-top — result is
 * the bottom halfword of ARG1 combined with the top halfword of
 * (ARG2 << ARG3).  ARG3 must be a compile-time constant ("I" constraint). */
#define __PKHBT(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

/* __PKHTB(ARG1, ARG2, ARG3): pack halfwords, top-then-bottom — result is
 * the top halfword of ARG1 combined with the bottom halfword of
 * (ARG2 asr ARG3).  ARG3 must be a compile-time constant.  A separate
 * no-shift form is emitted for ARG3 == 0 because an ASR amount of 0 is not
 * encodable (that encoding means a shift of 32). */
#define __PKHTB(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
2212
2213
/** \brief SMMLA: signed most-significant-word multiply-accumulate —
           op3 + the top 32 bits of the 64-bit product op1 * op2. */
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
2221
2222 #endif
2223
2224
2225
2226 #pragma GCC diagnostic pop
2227
2228 #endif