/*
 * NOTE: imported from an LXR cross-reference export
 * (file indexing completed 2025-05-11 08:23:02).
 */
0001 /*
0002  * The file was modified by RTEMS contributors.
0003  */
0004 /**************************************************************************//**
0005  * @file     cmsis_gcc.h
0006  * @brief    CMSIS compiler GCC header file
0007  * @version  V5.4.2
0008  * @date     17. December 2022
0009  ******************************************************************************/
0010 /*
0011  * Copyright (c) 2009-2021 Arm Limited. All rights reserved.
0012  *
0013  * SPDX-License-Identifier: Apache-2.0
0014  *
0015  * Licensed under the Apache License, Version 2.0 (the License); you may
0016  * not use this file except in compliance with the License.
0017  * You may obtain a copy of the License at
0018  *
0019  * www.apache.org/licenses/LICENSE-2.0
0020  *
0021  * Unless required by applicable law or agreed to in writing, software
0022  * distributed under the License is distributed on an AS IS BASIS, WITHOUT
0023  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
0024  * See the License for the specific language governing permissions and
0025  * limitations under the License.
0026  */
0027 
#ifndef __CMSIS_GCC_H
#define __CMSIS_GCC_H

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"

/* Fallback for __has_builtin on compilers that do not provide it */
#ifndef __has_builtin
  #define __has_builtin(x) (0)
#endif

/* CMSIS compiler specific defines.
   Each macro is only defined if not already provided, so a device or
   toolchain header may override any of them before including this file. */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
/* Unaligned access helpers: a packed single-member struct lets the compiler
   generate accesses that are legal for unaligned addresses.
   NOTE(review): uint16_t/uint32_t here assume <stdint.h> has been included
   by the core header before this file — confirm against the includer. */
#ifndef   __UNALIGNED_UINT32        /* deprecated */
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif
#ifndef   __COMPILER_BARRIER
  /* Pure compiler barrier: forbids reordering of memory accesses across
     this point; emits no instruction. */
  #define __COMPILER_BARRIER()                   __ASM volatile("":::"memory")
#endif
#ifndef __NO_INIT
  /* Place a variable in .bss.noinit so startup code leaves it uninitialized. */
  #define __NO_INIT                              __attribute__ ((section (".bss.noinit")))
#endif
#ifndef __ALIAS
  #define __ALIAS(x)                             __attribute__ ((alias(x)))
#endif
0128 
0129 /* #########################  Startup and Lowlevel Init  ######################## */
0130 
#ifndef __PROGRAM_START

/**
  \brief   Initializes data and bss sections
  \details This default implementation initializes all data and additional bss
           sections relying on .copy.table and .zero.table specified properly
           in the used linker script.

 */
__STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
{
#ifdef __rtems__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wnested-externs"
#endif /* __rtems__ */
  extern void _start(void) __NO_RETURN;

  /* One entry per initialized-data region: copy wlen 32-bit words from
     src (load address) to dest (run address). */
  typedef struct __copy_table {
    uint32_t const* src;
    uint32_t* dest;
    uint32_t  wlen;
  } __copy_table_t;

  /* One entry per zero-init region: clear wlen 32-bit words at dest. */
  typedef struct __zero_table {
    uint32_t* dest;
    uint32_t  wlen;
  } __zero_table_t;

  /* Table boundary symbols; the linker script must provide these. */
  extern const __copy_table_t __copy_table_start__;
  extern const __copy_table_t __copy_table_end__;
  extern const __zero_table_t __zero_table_start__;
  extern const __zero_table_t __zero_table_end__;

#ifdef __rtems__
#pragma GCC diagnostic pop
#endif /* __rtems__ */

  /* Copy each initialized data region from load address to run address. */
  for (__copy_table_t const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
    for(uint32_t i=0u; i<pTable->wlen; ++i) {
      pTable->dest[i] = pTable->src[i];
    }
  }

  /* Zero-fill every region listed in the zero table. */
  for (__zero_table_t const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
    for(uint32_t i=0u; i<pTable->wlen; ++i) {
      pTable->dest[i] = 0u;
    }
  }

  /* Hand over to the runtime entry point; declared __NO_RETURN above. */
  _start();
}

#define __PROGRAM_START           __cmsis_start
#endif
0185 
/* Default symbol names for initial stack pointer, stack limit and vector
   table; a device startup file may override any of them before inclusion. */
#ifndef __INITIAL_SP
#define __INITIAL_SP              __StackTop
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT             __StackLimit
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE            __Vectors
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE  __attribute__((used, section(".vectors")))
#endif

/* Stack sealing support, only when compiling with CMSE (__ARM_FEATURE_CMSE == 3U) */
#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U)
#ifndef __STACK_SEAL
#define __STACK_SEAL              __StackSeal
#endif

#ifndef __TZ_STACK_SEAL_SIZE
#define __TZ_STACK_SEAL_SIZE      8U
#endif

#ifndef __TZ_STACK_SEAL_VALUE
#define __TZ_STACK_SEAL_VALUE     0xFEF5EDA5FEF5EDA5ULL
#endif


/**
  \brief   Writes the 64-bit stack seal value at the given stack top.
  \param [in] stackTop  Location to receive the seal value.
  NOTE(review): performs a 64-bit store through a uint32_t pointer — assumes
  stackTop is 8-byte aligned; confirm with the startup code that calls this.
 */
__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
  *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
}
#endif
0220 
0221 
0222 /* ##########################  Core Instruction Access  ######################### */
0223 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
0224   Access to dedicated instructions
0225   @{
0226 */
0227 
/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r".
 * OUT = write-only operand, RW = read-write operand, USE = input operand. */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif
0240 
/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP()                             __ASM volatile ("nop")

/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
/* "memory" clobber: the compiler must not cache memory values across the wait */
#define __WFI()                             __ASM volatile ("wfi":::"memory")


/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
/* "memory" clobber: the compiler must not cache memory values across the wait */
#define __WFE()                             __ASM volatile ("wfe":::"memory")


/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV()                             __ASM volatile ("sev")
0267 
0268 
/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
__STATIC_FORCEINLINE void __ISB(void)
{
  /* 0xF encodes the full-system (SY) barrier option */
  __ASM volatile ("isb 0xF":::"memory");
}


/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
__STATIC_FORCEINLINE void __DSB(void)
{
  /* 0xF encodes the full-system (SY) barrier option */
  __ASM volatile ("dsb 0xF":::"memory");
}


/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
__STATIC_FORCEINLINE void __DMB(void)
{
  /* 0xF encodes the full-system (SY) barrier option */
  __ASM volatile ("dmb 0xF":::"memory");
}
0301 
0302 
/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
{
/* __builtin_bswap32 is available from GCC 4.5 onward */
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}
0320 
0321 
/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  /* rev16 swaps the two bytes inside each 16-bit halfword independently */
  __ASM ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
}
0335 
0336 
/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
{
/* __builtin_bswap16 is available from GCC 4.8 onward */
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (int16_t)__builtin_bswap16(value);
#else
  int16_t result;

  __ASM ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}
0354 
0355 
0356 /**
0357   \brief   Rotate Right in unsigned value (32 bit)
0358   \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
0359   \param [in]    op1  Value to rotate
0360   \param [in]    op2  Number of Bits to rotate
0361   \return               Rotated value
0362  */
0363 __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
0364 {
0365   op2 %= 32U;
0366   if (op2 == 0U)
0367   {
0368     return op1;
0369   }
0370   return (op1 >> op2) | (op1 << (32U - op2));
0371 }
0372 
0373 
/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
/* value is stringized into the instruction's immediate field */
#define __BKPT(value)                       __ASM volatile ("bkpt "#value)
0382 
0383 
/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

/* The RBIT instruction only exists on Armv7-M/Armv7E-M/Armv8-M Mainline;
   other targets fall back to a generic software bit reversal. */
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
   __ASM ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */

  result = value;                      /* r will be reversed bits of v; first get LSB of v */
  for (value >>= 1U; value != 0U; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                        /* shift when v's highest bits are zero */
#endif
  return result;
}
0412 
0413 
0414 /**
0415   \brief   Count leading zeros
0416   \details Counts the number of leading zeros of a data value.
0417   \param [in]  value  Value to count the leading zeros
0418   \return             number of leading zeros in value
0419  */
0420 __STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
0421 {
0422   /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
0423      __builtin_clz(0) is undefined behaviour, so handle this case specially.
0424      This guarantees ARM-compatible results if happening to compile on a non-ARM
0425      target, and ensures the compiler doesn't decide to activate any
0426      optimisations using the logic "value was passed to __builtin_clz, so it
0427      is non-zero".
0428      ARM GCC 7.3 and possibly earlier will optimise this test away, leaving a
0429      single CLZ instruction.
0430    */
0431   if (value == 0U)
0432   {
0433     return 32U;
0434   }
0435   return __builtin_clz(value);
0436 }
0437 
0438 
/* Exclusive load/store intrinsics: only on architectures with LDREX/STREX
   (Armv7-M, Armv7E-M, Armv8-M Mainline and Baseline). */
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes an exclusive LDR instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint8_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDR Exclusive (16 bit)
  \details Executes an exclusive LDR instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
   return ((uint16_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
    uint32_t result;

   __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
   return(result);
}


/**
  \brief   STR Exclusive (8 bit)
  \details Executes an exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
   uint32_t result;

   /* "=&r": early-clobber, result must not share a register with the inputs */
   __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
   return(result);
}


/**
  \brief   STR Exclusive (16 bit)
  \details Executes an exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
   uint32_t result;

   __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
   return(result);
}


/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
   uint32_t result;

   __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
   return(result);
}


/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
__STATIC_FORCEINLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
0566 
0567 
/* SSAT/USAT/RRX and unprivileged load/store instructions exist only on
   Armv7-M, Armv7E-M and Armv8-M Mainline; other targets use the C
   fallbacks in the #else branch below. */
#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (1..32)
  \return             Saturated value
 */
/* ARG2 uses the "I" constraint: it must be a compile-time immediate */
#define __SSAT(ARG1, ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })


/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (0..31)
  \return             Saturated value
 */
/* ARG2 uses the "I" constraint: it must be a compile-time immediate */
#define __USAT(ARG1, ARG2) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })


/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]    value  Value to rotate
  \return               Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
0617 
0618 
/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes an unprivileged LDRT instruction for 8 bit value.
  \param [in]    ptr  Pointer to data
  \return             value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
   return ((uint8_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes an unprivileged LDRT instruction for 16 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
    uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
   __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
    /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
       accepted by assembler. So has to use following less efficient pattern.
    */
   __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
   return ((uint16_t) result);    /* Add explicit type cast here */
}


/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes an unprivileged LDRT instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
    uint32_t result;

   __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
   return(result);
}


/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes an unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
   __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes an unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
   __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes an unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]    ptr  Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
   __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
0712 
0713 #else  /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
0714            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
0715            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
0716 
0717 /**
0718   \brief   Signed Saturate
0719   \details Saturates a signed value.
0720   \param [in]  value  Value to be saturated
0721   \param [in]    sat  Bit position to saturate to (1..32)
0722   \return             Saturated value
0723  */
0724 __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
0725 {
0726   if ((sat >= 1U) && (sat <= 32U))
0727   {
0728     const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
0729     const int32_t min = -1 - max ;
0730     if (val > max)
0731     {
0732       return max;
0733     }
0734     else if (val < min)
0735     {
0736       return min;
0737     }
0738   }
0739   return val;
0740 }
0741 
0742 /**
0743   \brief   Unsigned Saturate
0744   \details Saturates an unsigned value.
0745   \param [in]  value  Value to be saturated
0746   \param [in]    sat  Bit position to saturate to (0..31)
0747   \return             Saturated value
0748  */
0749 __STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
0750 {
0751   if (sat <= 31U)
0752   {
0753     const uint32_t max = ((1U << sat) - 1U);
0754     if (val > (int32_t)max)
0755     {
0756       return max;
0757     }
0758     else if (val < 0)
0759     {
0760       return 0U;
0761     }
0762   }
0763   return (uint32_t)val;
0764 }
0765 
0766 #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
0767            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
0768            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
0769 
0770 
0771 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
0772      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
0773 /**
0774   \brief   Load-Acquire (8 bit)
0775   \details Executes a LDAB instruction for 8 bit value.
0776   \param [in]    ptr  Pointer to data
0777   \return             value of type uint8_t at (*ptr)
0778  */
0779 __STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
0780 {
0781     uint32_t result;
0782 
0783    __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
0784    return ((uint8_t) result);
0785 }
0786 
0787 
0788 /**
0789   \brief   Load-Acquire (16 bit)
0790   \details Executes a LDAH instruction for 16 bit values.
0791   \param [in]    ptr  Pointer to data
0792   \return        value of type uint16_t at (*ptr)
0793  */
0794 __STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
0795 {
0796     uint32_t result;
0797 
0798    __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
0799    return ((uint16_t) result);
0800 }
0801 
0802 
0803 /**
0804   \brief   Load-Acquire (32 bit)
0805   \details Executes a LDA instruction for 32 bit values.
0806   \param [in]    ptr  Pointer to data
0807   \return        value of type uint32_t at (*ptr)
0808  */
0809 __STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
0810 {
0811     uint32_t result;
0812 
0813    __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
0814    return(result);
0815 }
0816 
0817 
0818 /**
0819   \brief   Store-Release (8 bit)
0820   \details Executes a STLB instruction for 8 bit values.
0821   \param [in]  value  Value to store
0822   \param [in]    ptr  Pointer to location
0823  */
0824 __STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
0825 {
0826    __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
0827 }
0828 
0829 
0830 /**
0831   \brief   Store-Release (16 bit)
0832   \details Executes a STLH instruction for 16 bit values.
0833   \param [in]  value  Value to store
0834   \param [in]    ptr  Pointer to location
0835  */
0836 __STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
0837 {
0838    __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
0839 }
0840 
0841 
0842 /**
0843   \brief   Store-Release (32 bit)
0844   \details Executes a STL instruction for 32 bit values.
0845   \param [in]  value  Value to store
0846   \param [in]    ptr  Pointer to location
0847  */
0848 __STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
0849 {
0850    __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
0851 }
0852 
0853 
0854 /**
0855   \brief   Load-Acquire Exclusive (8 bit)
0856   \details Executes a LDAB exclusive instruction for 8 bit value.
0857   \param [in]    ptr  Pointer to data
0858   \return             value of type uint8_t at (*ptr)
0859  */
0860 __STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
0861 {
0862     uint32_t result;
0863 
0864    __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
0865    return ((uint8_t) result);
0866 }
0867 
0868 
0869 /**
0870   \brief   Load-Acquire Exclusive (16 bit)
0871   \details Executes a LDAH exclusive instruction for 16 bit values.
0872   \param [in]    ptr  Pointer to data
0873   \return        value of type uint16_t at (*ptr)
0874  */
0875 __STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
0876 {
0877     uint32_t result;
0878 
0879    __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
0880    return ((uint16_t) result);
0881 }
0882 
0883 
0884 /**
0885   \brief   Load-Acquire Exclusive (32 bit)
0886   \details Executes a LDA exclusive instruction for 32 bit values.
0887   \param [in]    ptr  Pointer to data
0888   \return        value of type uint32_t at (*ptr)
0889  */
0890 __STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
0891 {
0892     uint32_t result;
0893 
0894    __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" );
0895    return(result);
0896 }
0897 
0898 
0899 /**
0900   \brief   Store-Release Exclusive (8 bit)
0901   \details Executes a STLB exclusive instruction for 8 bit values.
0902   \param [in]  value  Value to store
0903   \param [in]    ptr  Pointer to location
0904   \return          0  Function succeeded
0905   \return          1  Function failed
0906  */
__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
{
   uint32_t result;

   /* result: 0 on success, 1 if the exclusive monitor was lost.
      "=&r" is early-clobber so it cannot alias an input register. */
   __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
   return(result);
}
0914 
0915 
0916 /**
0917   \brief   Store-Release Exclusive (16 bit)
0918   \details Executes a STLH exclusive instruction for 16 bit values.
0919   \param [in]  value  Value to store
0920   \param [in]    ptr  Pointer to location
0921   \return          0  Function succeeded
0922   \return          1  Function failed
0923  */
__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
{
   uint32_t result;

   /* result: 0 on success, 1 if the exclusive monitor was lost ("=&r" early-clobber). */
   __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
   return(result);
}
0931 
0932 
0933 /**
0934   \brief   Store-Release Exclusive (32 bit)
0935   \details Executes a STL exclusive instruction for 32 bit values.
0936   \param [in]  value  Value to store
0937   \param [in]    ptr  Pointer to location
0938   \return          0  Function succeeded
0939   \return          1  Function failed
0940  */
__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
{
   uint32_t result;

   /* result: 0 on success, 1 if the exclusive monitor was lost ("=&r" early-clobber). */
   __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" );
   return(result);
}
0948 
0949 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
0950            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
0951 
0952 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
0953 
0954 
0955 /* ###########################  Core Function Access  ########################### */
0956 /** \ingroup  CMSIS_Core_FunctionInterface
0957     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
0958   @{
0959  */
0960 
0961 /**
0962   \brief   Enable IRQ Interrupts
0963   \details Enables IRQ interrupts by clearing special-purpose register PRIMASK.
0964            Can only be executed in Privileged modes.
0965  */
__STATIC_FORCEINLINE void __enable_irq(void)
{
  /* CPSIE i clears PRIMASK; "memory" stops the compiler moving memory
     accesses across the interrupt-enable point.                       */
  __ASM volatile ("cpsie i" : : : "memory");
}
0970 
0971 
0972 /**
0973   \brief   Disable IRQ Interrupts
0974   \details Disables IRQ interrupts by setting special-purpose register PRIMASK.
0975            Can only be executed in Privileged modes.
0976  */
__STATIC_FORCEINLINE void __disable_irq(void)
{
  /* CPSID i sets PRIMASK; "memory" stops the compiler moving memory
     accesses across the interrupt-disable point.                     */
  __ASM volatile ("cpsid i" : : : "memory");
}
0981 
0982 
0983 /**
0984   \brief   Get Control Register
0985   \details Returns the content of the Control Register.
0986   \return               Control Register value
0987  */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  /* MRS: read the CONTROL special register. */
  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}
0995 
0996 
0997 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
0998 /**
0999   \brief   Get Control Register (non-secure)
1000   \details Returns the content of the non-secure Control Register when in secure mode.
1001   \return               non-secure Control Register value
1002  */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  /* MRS: read the non-secure banked CONTROL register (from secure state). */
  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
1010 #endif
1011 
1012 
1013 /**
1014   \brief   Set Control Register
1015   \details Writes the given value to the Control Register.
1016   \param [in]    control  Control Register value to set
1017  */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
  /* Barrier: flush the pipeline so the new CONTROL value (stack/privilege
     selection) is in effect for all following instructions.              */
  __ISB();
}
1023 
1024 
1025 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1026 /**
1027   \brief   Set Control Register (non-secure)
1028   \details Writes the given value to the non-secure Control Register when in secure state.
1029   \param [in]    control  Control Register value to set
1030  */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  /* MSR: write the non-secure banked CONTROL register (from secure state). */
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
  /* Barrier so the new CONTROL value is in effect for following code. */
  __ISB();
}
1036 #endif
1037 
1038 
1039 /**
1040   \brief   Get IPSR Register
1041   \details Returns the content of the IPSR Register.
1042   \return               IPSR Register value
1043  */
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  /* MRS: read the IPSR special register (active exception number). */
  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}
1051 
1052 
1053 /**
1054   \brief   Get APSR Register
1055   \details Returns the content of the APSR Register.
1056   \return               APSR Register value
1057  */
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  /* MRS: read the APSR special register (condition flags). */
  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}
1065 
1066 
1067 /**
1068   \brief   Get xPSR Register
1069   \details Returns the content of the xPSR Register.
1070   \return               xPSR Register value
1071  */
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  /* MRS: read the combined xPSR special register. */
  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}
1079 
1080 
1081 /**
1082   \brief   Get Process Stack Pointer
1083   \details Returns the current value of the Process Stack Pointer (PSP).
1084   \return               PSP Register value
1085  */
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;

  /* MRS: read the Process Stack Pointer. */
  __ASM volatile ("MRS %0, psp"  : "=r" (result) );
  return(result);
}
1093 
1094 
1095 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1096 /**
1097   \brief   Get Process Stack Pointer (non-secure)
1098   \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
1099   \return               PSP Register value
1100  */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;

  /* MRS: read the non-secure banked PSP (from secure state). */
  __ASM volatile ("MRS %0, psp_ns"  : "=r" (result) );
  return(result);
}
1108 #endif
1109 
1110 
1111 /**
1112   \brief   Set Process Stack Pointer
1113   \details Assigns the given value to the Process Stack Pointer (PSP).
1114   \param [in]    topOfProcStack  Process Stack Pointer value to set
1115  */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  /* MSR: set the Process Stack Pointer. NOTE(review): no clobbers are
     declared — presumably PSP is not the stack in active use here
     (e.g. caller runs on MSP); confirm for callers executing on PSP.  */
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}
1120 
1121 
1122 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1123 /**
1124   \brief   Set Process Stack Pointer (non-secure)
1125   \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
1126   \param [in]    topOfProcStack  Process Stack Pointer value to set
1127  */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  /* MSR: write the non-secure banked PSP (from secure state). */
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
1132 #endif
1133 
1134 
1135 /**
1136   \brief   Get Main Stack Pointer
1137   \details Returns the current value of the Main Stack Pointer (MSP).
1138   \return               MSP Register value
1139  */
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;

  /* MRS: read the Main Stack Pointer. */
  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}
1147 
1148 
1149 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1150 /**
1151   \brief   Get Main Stack Pointer (non-secure)
1152   \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
1153   \return               MSP Register value
1154  */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;

  /* MRS: read the non-secure banked MSP (from secure state). */
  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
1162 #endif
1163 
1164 
1165 /**
1166   \brief   Set Main Stack Pointer
1167   \details Assigns the given value to the Main Stack Pointer (MSP).
1168   \param [in]    topOfMainStack  Main Stack Pointer value to set
1169  */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  /* MSR: set the Main Stack Pointer. NOTE(review): no clobbers declared —
     assumes MSP is not the stack currently in use; confirm at call sites. */
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}
1174 
1175 
1176 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1177 /**
1178   \brief   Set Main Stack Pointer (non-secure)
1179   \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
1180   \param [in]    topOfMainStack  Main Stack Pointer value to set
1181  */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  /* MSR: write the non-secure banked MSP (from secure state). */
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
1186 #endif
1187 
1188 
1189 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1190 /**
1191   \brief   Get Stack Pointer (non-secure)
1192   \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
1193   \return               SP Register value
1194  */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;

  /* MRS: read the non-secure stack pointer alias (from secure state). */
  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}
1202 
1203 
1204 /**
1205   \brief   Set Stack Pointer (non-secure)
1206   \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
1207   \param [in]    topOfStack  Stack Pointer value to set
1208  */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  /* MSR: write the non-secure stack pointer alias (from secure state). */
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
1213 #endif
1214 
1215 
1216 /**
1217   \brief   Get Priority Mask
1218   \details Returns the current state of the priority mask bit from the Priority Mask Register.
1219   \return               Priority Mask value
1220  */
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  /* MRS: read the PRIMASK special register. */
  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}
1228 
1229 
1230 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1231 /**
1232   \brief   Get Priority Mask (non-secure)
1233   \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
1234   \return               Priority Mask value
1235  */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  /* MRS: read the non-secure banked PRIMASK (from secure state). */
  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
1243 #endif
1244 
1245 
1246 /**
1247   \brief   Set Priority Mask
1248   \details Assigns the given value to the Priority Mask Register.
1249   \param [in]    priMask  Priority Mask
1250  */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  /* MSR: write PRIMASK; "memory" orders accesses around the mask change. */
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
1255 
1256 
1257 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1258 /**
1259   \brief   Set Priority Mask (non-secure)
1260   \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
1261   \param [in]    priMask  Priority Mask
1262  */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  /* MSR: write the non-secure banked PRIMASK (from secure state). */
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
1267 #endif
1268 
1269 
1270 #if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1271      (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1272      (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
1273 /**
1274   \brief   Enable FIQ
1275   \details Enables FIQ interrupts by clearing special-purpose register FAULTMASK.
1276            Can only be executed in Privileged modes.
1277  */
__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
  /* CPSIE f clears FAULTMASK; "memory" orders accesses across the change. */
  __ASM volatile ("cpsie f" : : : "memory");
}
1282 
1283 
1284 /**
1285   \brief   Disable FIQ
1286   \details Disables FIQ interrupts by setting special-purpose register FAULTMASK.
1287            Can only be executed in Privileged modes.
1288  */
__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
  /* CPSID f sets FAULTMASK; "memory" orders accesses across the change. */
  __ASM volatile ("cpsid f" : : : "memory");
}
1293 
1294 
1295 /**
1296   \brief   Get Base Priority
1297   \details Returns the current value of the Base Priority register.
1298   \return               Base Priority register value
1299  */
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  /* MRS: read the BASEPRI special register. */
  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}
1307 
1308 
1309 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1310 /**
1311   \brief   Get Base Priority (non-secure)
1312   \details Returns the current value of the non-secure Base Priority register when in secure state.
1313   \return               Base Priority register value
1314  */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  /* MRS: read the non-secure banked BASEPRI (from secure state). */
  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
1322 #endif
1323 
1324 
1325 /**
1326   \brief   Set Base Priority
1327   \details Assigns the given value to the Base Priority register.
1328   \param [in]    basePri  Base Priority value to set
1329  */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  /* MSR: write BASEPRI; "memory" orders accesses around the mask change. */
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}
1334 
1335 
1336 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1337 /**
1338   \brief   Set Base Priority (non-secure)
1339   \details Assigns the given value to the non-secure Base Priority register when in secure state.
1340   \param [in]    basePri  Base Priority value to set
1341  */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  /* MSR: write the non-secure banked BASEPRI (from secure state). */
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
1346 #endif
1347 
1348 
1349 /**
1350   \brief   Set Base Priority with condition
1351   \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
1352            or the new value increases the BASEPRI priority level.
1353   \param [in]    basePri  Base Priority value to set
1354  */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  /* basepri_max: the write only takes effect if it raises the masking
     priority (or BASEPRI was disabled), per the doc comment above.    */
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
1359 
1360 
1361 /**
1362   \brief   Get Fault Mask
1363   \details Returns the current value of the Fault Mask register.
1364   \return               Fault Mask register value
1365  */
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  /* MRS: read the FAULTMASK special register. */
  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}
1373 
1374 
1375 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1376 /**
1377   \brief   Get Fault Mask (non-secure)
1378   \details Returns the current value of the non-secure Fault Mask register when in secure state.
1379   \return               Fault Mask register value
1380  */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  /* MRS: read the non-secure banked FAULTMASK (from secure state). */
  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
1388 #endif
1389 
1390 
1391 /**
1392   \brief   Set Fault Mask
1393   \details Assigns the given value to the Fault Mask register.
1394   \param [in]    faultMask  Fault Mask value to set
1395  */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  /* MSR: write FAULTMASK; "memory" orders accesses around the mask change. */
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}
1400 
1401 
1402 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
1403 /**
1404   \brief   Set Fault Mask (non-secure)
1405   \details Assigns the given value to the non-secure Fault Mask register when in secure state.
1406   \param [in]    faultMask  Fault Mask value to set
1407  */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  /* MSR: write the non-secure banked FAULTMASK (from secure state). */
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
1412 #endif
1413 
1414 #endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
1415            (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
1416            (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */
1417 
1418 
1419 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1420      (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
1421 
1422 /**
1423   \brief   Get Process Stack Pointer Limit
1424   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1425   Stack Pointer Limit register hence zero is returned always in non-secure
1426   mode.
1427 
1428   \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
1429   \return               PSPLIM Register value
1430  */
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
/* Condition: no Main Extensions AND not compiled for the secure state. */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  /* MRS: PSPLIM is implemented here; read it. */
  __ASM volatile ("MRS %0, psplim"  : "=r" (result) );
  return result;
#endif
}
1443 
1444 #if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
1445 /**
1446   \brief   Get Process Stack Pointer Limit (non-secure)
1447   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1448   Stack Pointer Limit register hence zero is returned always.
1449 
1450   \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
1451   \return               PSPLIM Register value
1452  */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  /* MRS: read the non-secure banked PSPLIM (from secure state). */
  __ASM volatile ("MRS %0, psplim_ns"  : "=r" (result) );
  return result;
#endif
}
1464 #endif
1465 
1466 
1467 /**
1468   \brief   Set Process Stack Pointer Limit
1469   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1470   Stack Pointer Limit register hence the write is silently ignored in non-secure
1471   mode.
1472 
1473   \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
1474   \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
1475  */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
/* Condition: no Main Extensions AND not compiled for the secure state. */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  /* MSR: PSPLIM is implemented here; write it. */
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}
1486 
1487 
1488 #if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
1489 /**
  \brief   Set Process Stack Pointer Limit (non-secure)
1491   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1492   Stack Pointer Limit register hence the write is silently ignored.
1493 
1494   \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
1495   \param [in]    ProcStackPtrLimit  Process Stack Pointer Limit value to set
1496  */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  /* MSR: write the non-secure banked PSPLIM (from secure state). */
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
1506 #endif
1507 
1508 
1509 /**
1510   \brief   Get Main Stack Pointer Limit
1511   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1512   Stack Pointer Limit register hence zero is returned always in non-secure
1513   mode.
1514 
1515   \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
1516   \return               MSPLIM Register value
1517  */
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
/* Condition: no Main Extensions AND not compiled for the secure state. */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  /* MRS: MSPLIM is implemented here; read it. */
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}
1530 
1531 
1532 #if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
1533 /**
1534   \brief   Get Main Stack Pointer Limit (non-secure)
1535   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1536   Stack Pointer Limit register hence zero is returned always.
1537 
  \details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
1539   \return               MSPLIM Register value
1540  */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  /* MRS: read the non-secure banked MSPLIM (from secure state). */
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
1552 #endif
1553 
1554 
1555 /**
1556   \brief   Set Main Stack Pointer Limit
1557   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1558   Stack Pointer Limit register hence the write is silently ignored in non-secure
1559   mode.
1560 
1561   \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
1562   \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
1563  */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
/* Condition: no Main Extensions AND not compiled for the secure state. */
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  /* MSR: MSPLIM is implemented here; write it. */
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}
1574 
1575 
1576 #if (defined (__ARM_FEATURE_CMSE  ) && (__ARM_FEATURE_CMSE   == 3))
1577 /**
1578   \brief   Set Main Stack Pointer Limit (non-secure)
1579   Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
1580   Stack Pointer Limit register hence the write is silently ignored.
1581 
1582   \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \param [in]    MainStackPtrLimit  Main Stack Pointer Limit value to set
1584  */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  /* MSR: write the non-secure banked MSPLIM (from secure state). */
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
1594 #endif
1595 
1596 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
1597            (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */
1598 
1599 
1600 /**
1601   \brief   Get FPSCR
1602   \details Returns the current value of the Floating Point Status/Control register.
1603   \return               Floating Point Status/Control register value
1604  */
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
/* FPSCR only exists when an FPU is present AND in use; otherwise return 0. */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#if __has_builtin(__builtin_arm_get_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  return __builtin_arm_get_fpscr();
#else
  uint32_t result;

  /* VMRS: read FPSCR into a core register. */
  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  return(result);
#endif
#else
  return(0U);
#endif
}
1624 
1625 
1626 /**
1627   \brief   Set FPSCR
1628   \details Assigns the given value to the Floating Point Status/Control register.
1629   \param [in]    fpscr  Floating Point Status/Control value to set
1630  */
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
/* FPSCR only exists when an FPU is present AND in use; otherwise a no-op. */
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#if __has_builtin(__builtin_arm_set_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  __builtin_arm_set_fpscr(fpscr);
#else
  /* VMSR: write FPSCR; "vfpcc" clobber covers the FP condition flags. */
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
#endif
#else
  (void)fpscr;
#endif
}
1647 
1648 
1649 /*@} end of CMSIS_Core_RegAccFunctions */
1650 
1651 
1652 /* ###################  Compiler specific Intrinsics  ########################### */
1653 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
1654   Access to dedicated SIMD instructions
1655   @{
1656 */
1657 
1658 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
1659 
/** \brief __SADD8: lane-wise signed addition of four 8-bit values; SADD8 also updates APSR.GE (hence volatile asm). */
__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1667 
/** \brief __QADD8: lane-wise signed saturating addition of four 8-bit values. */
__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1675 
/** \brief __SHADD8: lane-wise signed addition of four 8-bit values, each result halved. */
__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1683 
/** \brief __UADD8: lane-wise unsigned addition of four 8-bit values; UADD8 also updates APSR.GE (hence volatile asm). */
__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1691 
/** \brief __UQADD8: lane-wise unsigned saturating addition of four 8-bit values. */
__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1699 
/** \brief __UHADD8: lane-wise unsigned addition of four 8-bit values, each result halved. */
__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1707 
1708 
/** \brief __SSUB8: lane-wise signed subtraction of four 8-bit values; SSUB8 also updates APSR.GE (hence volatile asm). */
__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1716 
/** \brief __QSUB8: lane-wise signed saturating subtraction of four 8-bit values. */
__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1724 
/** \brief __SHSUB8: lane-wise signed subtraction of four 8-bit values, each result halved. */
__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1732 
/** \brief __USUB8: lane-wise unsigned subtraction of four 8-bit values; USUB8 also updates APSR.GE (hence volatile asm). */
__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1740 
/** \brief __UQSUB8: lane-wise unsigned saturating subtraction of four 8-bit values. */
__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1748 
/** \brief __UHSUB8: lane-wise unsigned subtraction of four 8-bit values, each result halved. */
__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1756 
1757 
/** \brief __SADD16: lane-wise signed addition of two 16-bit values; SADD16 also updates APSR.GE (hence volatile asm). */
__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1765 
/** \brief __QADD16: lane-wise signed saturating addition of two 16-bit values. */
__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1773 
/** \brief __SHADD16: lane-wise signed addition of two 16-bit values, each result halved. */
__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1781 
/** \brief __UADD16: lane-wise unsigned addition of two 16-bit values; UADD16 also updates APSR.GE (hence volatile asm). */
__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1789 
/** \brief __UQADD16: lane-wise unsigned saturating addition of two 16-bit values. */
__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1797 
/** \brief __UHADD16: lane-wise unsigned addition of two 16-bit values, each result halved. */
__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1805 
/** \brief __SSUB16: lane-wise signed subtraction of two 16-bit values; SSUB16 also updates APSR.GE (hence volatile asm). */
__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1813 
/** \brief __QSUB16: lane-wise signed saturating subtraction of two 16-bit values. */
__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1821 
/** \brief __SHSUB16: lane-wise signed subtraction of two 16-bit values, each result halved. */
__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1829 
/** \brief __USUB16: lane-wise unsigned subtraction of two 16-bit values; USUB16 also updates APSR.GE (hence volatile asm). */
__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1837 
/** \brief __UQSUB16: lane-wise unsigned saturating subtraction of two 16-bit values. */
__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1845 
/** \brief __UHSUB16: lane-wise unsigned subtraction of two 16-bit values, each result halved. */
__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1853 
/** \brief __SASX: signed add/subtract with halfword exchange (add high, subtract low); SASX also updates APSR.GE (hence volatile asm). */
__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1861 
/** \brief __QASX: saturating signed add/subtract with halfword exchange. */
__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1869 
/** \brief __SHASX: halving signed add/subtract with halfword exchange. */
__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
1877 
1878 __STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
1879 {
1880   uint32_t result;
1881 
1882   __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1883   return(result);
1884 }
1885 
1886 __STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
1887 {
1888   uint32_t result;
1889 
1890   __ASM ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1891   return(result);
1892 }
1893 
1894 __STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
1895 {
1896   uint32_t result;
1897 
1898   __ASM ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1899   return(result);
1900 }
1901 
1902 __STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
1903 {
1904   uint32_t result;
1905 
1906   __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1907   return(result);
1908 }
1909 
1910 __STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
1911 {
1912   uint32_t result;
1913 
1914   __ASM ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1915   return(result);
1916 }
1917 
1918 __STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
1919 {
1920   uint32_t result;
1921 
1922   __ASM ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1923   return(result);
1924 }
1925 
1926 __STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
1927 {
1928   uint32_t result;
1929 
1930   __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1931   return(result);
1932 }
1933 
1934 __STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
1935 {
1936   uint32_t result;
1937 
1938   __ASM ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1939   return(result);
1940 }
1941 
1942 __STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
1943 {
1944   uint32_t result;
1945 
1946   __ASM ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1947   return(result);
1948 }
1949 
1950 __STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
1951 {
1952   uint32_t result;
1953 
1954   __ASM ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1955   return(result);
1956 }
1957 
1958 __STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
1959 {
1960   uint32_t result;
1961 
1962   __ASM ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
1963   return(result);
1964 }
1965 
/* Dual 16-bit signed saturate (SSAT16): saturate each halfword of ARG1 to ARG2 bits.
   ARG2 must be a compile-time constant (asm 'I' immediate constraint); 'cc' is
   clobbered because the instruction can set the Q/saturation status. */
#define __SSAT16(ARG1, ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })
1973 
/* Dual 16-bit unsigned saturate (USAT16): saturate each halfword of ARG1 to ARG2 bits.
   ARG2 must be a compile-time constant (asm 'I' immediate constraint). */
#define __USAT16(ARG1, ARG2) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })
1981 
1982 __STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
1983 {
1984   uint32_t result;
1985 
1986   __ASM ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
1987   return(result);
1988 }
1989 
1990 __STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
1991 {
1992   uint32_t result;
1993 
1994   __ASM ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
1995   return(result);
1996 }
1997 
1998 __STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
1999 {
2000   uint32_t result;
2001 
2002   __ASM ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
2003   return(result);
2004 }
2005 
2006 __STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate)
2007 {
2008   uint32_t result;
2009   if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) {
2010     __ASM volatile ("sxtb16 %0, %1, ROR %2" : "=r" (result) : "r" (op1), "i" (rotate) );
2011   } else {
2012     result = __SXTB16(__ROR(op1, rotate)) ;
2013   }
2014   return result;
2015 }
2016 
2017 __STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
2018 {
2019   uint32_t result;
2020 
2021   __ASM ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
2022   return(result);
2023 }
2024 
2025 __STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate)
2026 {
2027   uint32_t result;
2028   if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) {
2029     __ASM volatile ("sxtab16 %0, %1, %2, ROR %3" : "=r" (result) : "r" (op1) , "r" (op2) , "i" (rotate));
2030   } else {
2031     result = __SXTAB16(op1, __ROR(op2, rotate));
2032   }
2033   return result;
2034 }
2035 
2036 
2037 __STATIC_FORCEINLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
2038 {
2039   uint32_t result;
2040 
2041   __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
2042   return(result);
2043 }
2044 
2045 __STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
2046 {
2047   uint32_t result;
2048 
2049   __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
2050   return(result);
2051 }
2052 
2053 __STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
2054 {
2055   uint32_t result;
2056 
2057   __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
2058   return(result);
2059 }
2060 
2061 __STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
2062 {
2063   uint32_t result;
2064 
2065   __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
2066   return(result);
2067 }
2068 
2069 __STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
2070 {
2071   union llreg_u{
2072     uint32_t w32[2];
2073     uint64_t w64;
2074   } llr;
2075   llr.w64 = acc;
2076 
2077 #ifndef __ARMEB__   /* Little endian */
2078   __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
2079 #else               /* Big endian */
2080   __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
2081 #endif
2082 
2083   return(llr.w64);
2084 }
2085 
2086 __STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
2087 {
2088   union llreg_u{
2089     uint32_t w32[2];
2090     uint64_t w64;
2091   } llr;
2092   llr.w64 = acc;
2093 
2094 #ifndef __ARMEB__   /* Little endian */
2095   __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
2096 #else               /* Big endian */
2097   __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
2098 #endif
2099 
2100   return(llr.w64);
2101 }
2102 
2103 __STATIC_FORCEINLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
2104 {
2105   uint32_t result;
2106 
2107   __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
2108   return(result);
2109 }
2110 
2111 __STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
2112 {
2113   uint32_t result;
2114 
2115   __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
2116   return(result);
2117 }
2118 
2119 __STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
2120 {
2121   uint32_t result;
2122 
2123   __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
2124   return(result);
2125 }
2126 
2127 __STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
2128 {
2129   uint32_t result;
2130 
2131   __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
2132   return(result);
2133 }
2134 
2135 __STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
2136 {
2137   union llreg_u{
2138     uint32_t w32[2];
2139     uint64_t w64;
2140   } llr;
2141   llr.w64 = acc;
2142 
2143 #ifndef __ARMEB__   /* Little endian */
2144   __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
2145 #else               /* Big endian */
2146   __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
2147 #endif
2148 
2149   return(llr.w64);
2150 }
2151 
2152 __STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
2153 {
2154   union llreg_u{
2155     uint32_t w32[2];
2156     uint64_t w64;
2157   } llr;
2158   llr.w64 = acc;
2159 
2160 #ifndef __ARMEB__   /* Little endian */
2161   __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
2162 #else               /* Big endian */
2163   __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
2164 #endif
2165 
2166   return(llr.w64);
2167 }
2168 
2169 __STATIC_FORCEINLINE uint32_t __SEL  (uint32_t op1, uint32_t op2)
2170 {
2171   uint32_t result;
2172 
2173   __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
2174   return(result);
2175 }
2176 
2177 __STATIC_FORCEINLINE  int32_t __QADD( int32_t op1,  int32_t op2)
2178 {
2179   int32_t result;
2180 
2181   __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
2182   return(result);
2183 }
2184 
2185 __STATIC_FORCEINLINE  int32_t __QSUB( int32_t op1,  int32_t op2)
2186 {
2187   int32_t result;
2188 
2189   __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
2190   return(result);
2191 }
2192 
2193 
/* Pack halfwords, bottom-top (PKHBT): result[15:0] from ARG1, result[31:16]
   from ARG2 shifted left by ARG3. ARG3 must be a compile-time constant
   (asm 'I' immediate constraint). */
#define __PKHBT(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
2201 
/* Pack halfwords, top-bottom (PKHTB): result[31:16] from ARG1, result[15:0]
   from ARG2 shifted right (arithmetic) by ARG3. ARG3 must be a compile-time
   constant (asm 'I' immediate constraint); "asr #0" is not encodable, so a
   zero shift selects the plain two-operand form.
   Fix: ARG3 is parenthesized in the comparison — an expression argument such
   as `x & 1` previously parsed as `x & (1 == 0)` due to operator precedence. */
#define __PKHTB(ARG1,ARG2,ARG3) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if ((ARG3) == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
2212 
2213 
2214 __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
2215 {
2216  int32_t result;
2217 
2218  __ASM ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
2219  return(result);
2220 }
2221 
2222 #endif /* (__ARM_FEATURE_DSP == 1) */
2223 /*@} end of group CMSIS_SIMD_intrinsics */
2224 
2225 
2226 #pragma GCC diagnostic pop
2227 
2228 #endif /* __CMSIS_GCC_H */