#ifndef _FSL_COMMON_ARM_H_
#define _FSL_COMMON_ARM_H_

#ifdef _RTE_
#include "RTE_Components.h"
#endif

/*
 * Atomic modification helpers.
 *
 * On ARMv7-M and ARMv8-M cores the operations are built from LDREX/STREX
 * exclusive accesses; on other cores they fall back to masking interrupts
 * around a plain read-modify-write.
 */
#if ((defined(__ARM_ARCH_7M__) && (__ARM_ARCH_7M__ == 1)) ||           \
     (defined(__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1)) ||         \
     (defined(__ARM_ARCH_8M_MAIN__) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined(__ARM_ARCH_8M_BASE__) && (__ARM_ARCH_8M_BASE__ == 1)))

/* Load-exclusive / modify / store-exclusive loop; retried until the store succeeds. */
#define _SDK_ATOMIC_LOCAL_OPS_1BYTE(addr, val, ops) \
    do                                              \
    {                                               \
        (val) = __LDREXB(addr);                     \
        (ops);                                      \
    } while (0UL != __STREXB((val), (addr)))

#define _SDK_ATOMIC_LOCAL_OPS_2BYTE(addr, val, ops) \
    do                                              \
    {                                               \
        (val) = __LDREXH(addr);                     \
        (ops);                                      \
    } while (0UL != __STREXH((val), (addr)))

#define _SDK_ATOMIC_LOCAL_OPS_4BYTE(addr, val, ops) \
    do                                              \
    {                                               \
        (val) = __LDREXW(addr);                     \
        (ops);                                      \
    } while (0UL != __STREXW((val), (addr)))

static inline void _SDK_AtomicLocalAdd1Byte(volatile uint8_t *addr, uint8_t val)
{
    uint8_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_1BYTE(addr, s_val, s_val += val);
}

static inline void _SDK_AtomicLocalAdd2Byte(volatile uint16_t *addr, uint16_t val)
{
    uint16_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_2BYTE(addr, s_val, s_val += val);
}

static inline void _SDK_AtomicLocalAdd4Byte(volatile uint32_t *addr, uint32_t val)
{
    uint32_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_4BYTE(addr, s_val, s_val += val);
}

static inline void _SDK_AtomicLocalSub1Byte(volatile uint8_t *addr, uint8_t val)
{
    uint8_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_1BYTE(addr, s_val, s_val -= val);
}

static inline void _SDK_AtomicLocalSub2Byte(volatile uint16_t *addr, uint16_t val)
{
    uint16_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_2BYTE(addr, s_val, s_val -= val);
}

static inline void _SDK_AtomicLocalSub4Byte(volatile uint32_t *addr, uint32_t val)
{
    uint32_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_4BYTE(addr, s_val, s_val -= val);
}

static inline void _SDK_AtomicLocalSet1Byte(volatile uint8_t *addr, uint8_t bits)
{
    uint8_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_1BYTE(addr, s_val, s_val |= bits);
}

static inline void _SDK_AtomicLocalSet2Byte(volatile uint16_t *addr, uint16_t bits)
{
    uint16_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_2BYTE(addr, s_val, s_val |= bits);
}

static inline void _SDK_AtomicLocalSet4Byte(volatile uint32_t *addr, uint32_t bits)
{
    uint32_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_4BYTE(addr, s_val, s_val |= bits);
}

static inline void _SDK_AtomicLocalClear1Byte(volatile uint8_t *addr, uint8_t bits)
{
    uint8_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_1BYTE(addr, s_val, s_val &= ~bits);
}

static inline void _SDK_AtomicLocalClear2Byte(volatile uint16_t *addr, uint16_t bits)
{
    uint16_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_2BYTE(addr, s_val, s_val &= ~bits);
}

static inline void _SDK_AtomicLocalClear4Byte(volatile uint32_t *addr, uint32_t bits)
{
    uint32_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_4BYTE(addr, s_val, s_val &= ~bits);
}

static inline void _SDK_AtomicLocalToggle1Byte(volatile uint8_t *addr, uint8_t bits)
{
    uint8_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_1BYTE(addr, s_val, s_val ^= bits);
}

static inline void _SDK_AtomicLocalToggle2Byte(volatile uint16_t *addr, uint16_t bits)
{
    uint16_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_2BYTE(addr, s_val, s_val ^= bits);
}

static inline void _SDK_AtomicLocalToggle4Byte(volatile uint32_t *addr, uint32_t bits)
{
    uint32_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_4BYTE(addr, s_val, s_val ^= bits);
}

static inline void _SDK_AtomicLocalClearAndSet1Byte(volatile uint8_t *addr, uint8_t clearBits, uint8_t setBits)
{
    uint8_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_1BYTE(addr, s_val, s_val = (s_val & ~clearBits) | setBits);
}

static inline void _SDK_AtomicLocalClearAndSet2Byte(volatile uint16_t *addr, uint16_t clearBits, uint16_t setBits)
{
    uint16_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_2BYTE(addr, s_val, s_val = (s_val & ~clearBits) | setBits);
}

static inline void _SDK_AtomicLocalClearAndSet4Byte(volatile uint32_t *addr, uint32_t clearBits, uint32_t setBits)
{
    uint32_t s_val;

    _SDK_ATOMIC_LOCAL_OPS_4BYTE(addr, s_val, s_val = (s_val & ~clearBits) | setBits);
}

/* Dispatch to the 1-, 2- or 4-byte implementation based on the operand size. */
#define SDK_ATOMIC_LOCAL_ADD(addr, val)                                                                                        \
    ((1UL == sizeof(*(addr))) ?                                                                                                \
         _SDK_AtomicLocalAdd1Byte((volatile uint8_t *)(volatile void *)(addr), (uint8_t)(val)) :                               \
         ((2UL == sizeof(*(addr))) ? _SDK_AtomicLocalAdd2Byte((volatile uint16_t *)(volatile void *)(addr), (uint16_t)(val)) : \
                                     _SDK_AtomicLocalAdd4Byte((volatile uint32_t *)(volatile void *)(addr), (uint32_t)(val))))

#define SDK_ATOMIC_LOCAL_SET(addr, bits)                                                                                        \
    ((1UL == sizeof(*(addr))) ?                                                                                                 \
         _SDK_AtomicLocalSet1Byte((volatile uint8_t *)(volatile void *)(addr), (uint8_t)(bits)) :                               \
         ((2UL == sizeof(*(addr))) ? _SDK_AtomicLocalSet2Byte((volatile uint16_t *)(volatile void *)(addr), (uint16_t)(bits)) : \
                                     _SDK_AtomicLocalSet4Byte((volatile uint32_t *)(volatile void *)(addr), (uint32_t)(bits))))

#define SDK_ATOMIC_LOCAL_CLEAR(addr, bits)                                                                  \
    ((1UL == sizeof(*(addr))) ?                                                                             \
         _SDK_AtomicLocalClear1Byte((volatile uint8_t *)(volatile void *)(addr), (uint8_t)(bits)) :         \
         ((2UL == sizeof(*(addr))) ?                                                                        \
              _SDK_AtomicLocalClear2Byte((volatile uint16_t *)(volatile void *)(addr), (uint16_t)(bits)) :  \
              _SDK_AtomicLocalClear4Byte((volatile uint32_t *)(volatile void *)(addr), (uint32_t)(bits))))

#define SDK_ATOMIC_LOCAL_TOGGLE(addr, bits)                                                                  \
    ((1UL == sizeof(*(addr))) ?                                                                              \
         _SDK_AtomicLocalToggle1Byte((volatile uint8_t *)(volatile void *)(addr), (uint8_t)(bits)) :         \
         ((2UL == sizeof(*(addr))) ?                                                                         \
              _SDK_AtomicLocalToggle2Byte((volatile uint16_t *)(volatile void *)(addr), (uint16_t)(bits)) :  \
              _SDK_AtomicLocalToggle4Byte((volatile uint32_t *)(volatile void *)(addr), (uint32_t)(bits))))

#define SDK_ATOMIC_LOCAL_CLEAR_AND_SET(addr, clearBits, setBits)                                                                           \
    ((1UL == sizeof(*(addr))) ?                                                                                                            \
         _SDK_AtomicLocalClearAndSet1Byte((volatile uint8_t *)(volatile void *)(addr), (uint8_t)(clearBits), (uint8_t)(setBits)) :         \
         ((2UL == sizeof(*(addr))) ?                                                                                                       \
              _SDK_AtomicLocalClearAndSet2Byte((volatile uint16_t *)(volatile void *)(addr), (uint16_t)(clearBits), (uint16_t)(setBits)) : \
              _SDK_AtomicLocalClearAndSet4Byte((volatile uint32_t *)(volatile void *)(addr), (uint32_t)(clearBits), (uint32_t)(setBits))))
#else

/* Fallback for cores without exclusive-access instructions: mask interrupts
   around a plain read-modify-write. */
#define SDK_ATOMIC_LOCAL_ADD(addr, val)      \
    do                                       \
    {                                        \
        uint32_t s_atomicOldInt;             \
        s_atomicOldInt = DisableGlobalIRQ(); \
        *(addr) += (val);                    \
        EnableGlobalIRQ(s_atomicOldInt);     \
    } while (0)

#define SDK_ATOMIC_LOCAL_SET(addr, bits)     \
    do                                       \
    {                                        \
        uint32_t s_atomicOldInt;             \
        s_atomicOldInt = DisableGlobalIRQ(); \
        *(addr) |= (bits);                   \
        EnableGlobalIRQ(s_atomicOldInt);     \
    } while (0)

#define SDK_ATOMIC_LOCAL_CLEAR(addr, bits)   \
    do                                       \
    {                                        \
        uint32_t s_atomicOldInt;             \
        s_atomicOldInt = DisableGlobalIRQ(); \
        *(addr) &= ~(bits);                  \
        EnableGlobalIRQ(s_atomicOldInt);     \
    } while (0)

#define SDK_ATOMIC_LOCAL_TOGGLE(addr, bits)  \
    do                                       \
    {                                        \
        uint32_t s_atomicOldInt;             \
        s_atomicOldInt = DisableGlobalIRQ(); \
        *(addr) ^= (bits);                   \
        EnableGlobalIRQ(s_atomicOldInt);     \
    } while (0)

#define SDK_ATOMIC_LOCAL_CLEAR_AND_SET(addr, clearBits, setBits) \
    do                                                           \
    {                                                            \
        uint32_t s_atomicOldInt;                                 \
        s_atomicOldInt = DisableGlobalIRQ();                     \
        *(addr) = (*(addr) & ~(clearBits)) | (setBits);          \
        EnableGlobalIRQ(s_atomicOldInt);                         \
    } while (0)

#endif
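
/*
 * Example (illustrative sketch, not part of the SDK API; the flag variable and
 * bit masks are hypothetical): atomically update a status word shared with an
 * ISR, without writing an explicit critical section.
 *
 *   static volatile uint32_t s_eventFlags;
 *
 *   SDK_ATOMIC_LOCAL_SET(&s_eventFlags, 1UL << 3);                  // set bit 3
 *   SDK_ATOMIC_LOCAL_CLEAR(&s_eventFlags, 1UL << 3);                // clear bit 3
 *   SDK_ATOMIC_LOCAL_CLEAR_AND_SET(&s_eventFlags, 0x0FUL, 0x05UL);  // replace the low nibble
 */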

/* Convert a period in microseconds/milliseconds to a raw timer count, and back,
   for a timer clocked at clockFreqInHz. */
#define USEC_TO_COUNT(us, clockFreqInHz) (uint64_t)(((uint64_t)(us) * (clockFreqInHz)) / 1000000U)

#define COUNT_TO_USEC(count, clockFreqInHz) (uint64_t)((uint64_t)(count) * 1000000U / (clockFreqInHz))

#define MSEC_TO_COUNT(ms, clockFreqInHz) (uint64_t)((uint64_t)(ms) * (clockFreqInHz) / 1000U)

#define COUNT_TO_MSEC(count, clockFreqInHz) (uint64_t)((uint64_t)(count) * 1000U / (clockFreqInHz))
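
/*
 * Worked example (illustrative, assuming a 96 MHz timer clock):
 *   USEC_TO_COUNT(10U, 96000000U)      -> (10 * 96000000) / 1000000 = 960 counts
 *   COUNT_TO_MSEC(480000U, 96000000U)  -> (480000 * 1000) / 96000000 = 5 ms
 * The math is integer, so periods that do not divide evenly are truncated.
 */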

/* Barrier to execute at ISR exit: on Cortex-M4/M7 a DSB ensures that memory
   writes (such as clearing a peripheral interrupt flag) complete before the
   exception return, avoiding a spurious re-entry of the ISR. */
#if (defined __CORTEX_M) && ((__CORTEX_M == 4U) || (__CORTEX_M == 7U))
#define SDK_ISR_EXIT_BARRIER __DSB()
#else
#define SDK_ISR_EXIT_BARRIER
#endif
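
/*
 * Example (illustrative sketch; the handler name and the flag-clear call are
 * hypothetical):
 *
 *   void DEMO_TIMER_IRQHandler(void)
 *   {
 *       DEMO_TIMER_ClearStatusFlags();  // write that clears the interrupt source
 *       SDK_ISR_EXIT_BARRIER;           // make the write take effect before returning
 *   }
 */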

/* Macro to define a variable with the given byte alignment. */
#if (defined(__ICCARM__))
/* Allow the '#' operator in SDK_PRAGMA; the diagnostic is suppressed around the
   definition and restored to an error afterwards. */
_Pragma("diag_suppress=Pm120")
#define SDK_PRAGMA(x) _Pragma(#x)
_Pragma("diag_error=Pm120")

#define SDK_ALIGN(var, alignbytes) SDK_PRAGMA(data_alignment = alignbytes) var
#elif defined(__CC_ARM) || defined(__ARMCC_VERSION)
#define SDK_ALIGN(var, alignbytes) __attribute__((aligned(alignbytes))) var
#elif defined(__GNUC__)
#define SDK_ALIGN(var, alignbytes) var __attribute__((aligned(alignbytes)))
#else
#error Toolchain not supported
#endif

/* Align a variable to the L1/L2 data-cache line size, when the line size is known. */
#if defined(FSL_FEATURE_L1DCACHE_LINESIZE_BYTE)
#define SDK_L1DCACHE_ALIGN(var) SDK_ALIGN(var, FSL_FEATURE_L1DCACHE_LINESIZE_BYTE)
#endif

#if defined(FSL_FEATURE_L2CACHE_LINESIZE_BYTE)
#define SDK_L2CACHE_ALIGN(var) SDK_ALIGN(var, FSL_FEATURE_L2CACHE_LINESIZE_BYTE)
#endif

/* Round a size up to the next multiple of alignbytes (alignbytes must be a power of two). */
#define SDK_SIZEALIGN(var, alignbytes) \
    ((unsigned int)((var) + ((alignbytes)-1U)) & (unsigned int)(~(unsigned int)((alignbytes)-1U)))
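
/*
 * Example (illustrative; the buffer name and sizes are hypothetical): define a
 * DMA buffer aligned to 32 bytes and compute its padded size.
 *
 *   SDK_ALIGN(static uint8_t s_dmaBuffer[70], 32U);
 *   #define DMA_BUFFER_SIZE SDK_SIZEALIGN(70U, 32U)  // rounds 70 up to 96
 */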

/* Place data in the non-cacheable section ("NonCacheable" / "NonCacheable.init"),
   typically used for DMA descriptors and buffers. When the device has no
   non-cacheable region, the macros reduce to plain (aligned) definitions. */
#if ((!(defined(FSL_FEATURE_HAS_NO_NONCACHEABLE_SECTION) && FSL_FEATURE_HAS_NO_NONCACHEABLE_SECTION)) && \
     defined(FSL_FEATURE_L1ICACHE_LINESIZE_BYTE))

#if (defined(__ICCARM__))
#define AT_NONCACHEABLE_SECTION(var) var @"NonCacheable"
#define AT_NONCACHEABLE_SECTION_ALIGN(var, alignbytes) SDK_PRAGMA(data_alignment = alignbytes) var @"NonCacheable"
#define AT_NONCACHEABLE_SECTION_INIT(var) var @"NonCacheable.init"
#define AT_NONCACHEABLE_SECTION_ALIGN_INIT(var, alignbytes) \
    SDK_PRAGMA(data_alignment = alignbytes) var @"NonCacheable.init"

#elif (defined(__CC_ARM) || defined(__ARMCC_VERSION))
#define AT_NONCACHEABLE_SECTION_INIT(var) __attribute__((section("NonCacheable.init"))) var
#define AT_NONCACHEABLE_SECTION_ALIGN_INIT(var, alignbytes) \
    __attribute__((section("NonCacheable.init"))) __attribute__((aligned(alignbytes))) var
#if (defined(__CC_ARM))
#define AT_NONCACHEABLE_SECTION(var) __attribute__((section("NonCacheable"), zero_init)) var
#define AT_NONCACHEABLE_SECTION_ALIGN(var, alignbytes) \
    __attribute__((section("NonCacheable"), zero_init)) __attribute__((aligned(alignbytes))) var
#else
#define AT_NONCACHEABLE_SECTION(var) __attribute__((section(".bss.NonCacheable"))) var
#define AT_NONCACHEABLE_SECTION_ALIGN(var, alignbytes) \
    __attribute__((section(".bss.NonCacheable"))) __attribute__((aligned(alignbytes))) var
#endif

#elif (defined(__GNUC__))
/* GCC emits the section string verbatim, so the architecture's assembler comment
   character is appended to drop the flags GCC would otherwise add after the
   "aw",%nobits section definition. */
#if defined(__ARM_ARCH_8A__)
#define __CS "//"
#else
#define __CS "@"
#endif

#define AT_NONCACHEABLE_SECTION_INIT(var) __attribute__((section("NonCacheable.init"))) var
#define AT_NONCACHEABLE_SECTION_ALIGN_INIT(var, alignbytes) \
    __attribute__((section("NonCacheable.init"))) var __attribute__((aligned(alignbytes)))
#define AT_NONCACHEABLE_SECTION(var) __attribute__((section("NonCacheable,\"aw\",%nobits " __CS))) var
#define AT_NONCACHEABLE_SECTION_ALIGN(var, alignbytes) \
    __attribute__((section("NonCacheable,\"aw\",%nobits " __CS))) var __attribute__((aligned(alignbytes)))
#else
#error Toolchain not supported.
#endif

#else

#define AT_NONCACHEABLE_SECTION(var) var
#define AT_NONCACHEABLE_SECTION_ALIGN(var, alignbytes) SDK_ALIGN(var, alignbytes)
#define AT_NONCACHEABLE_SECTION_INIT(var) var
#define AT_NONCACHEABLE_SECTION_ALIGN_INIT(var, alignbytes) SDK_ALIGN(var, alignbytes)

#endif
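
/*
 * Example (illustrative; the buffer name and size are hypothetical): place a
 * DMA receive buffer in the non-cacheable section so the DMA engine and the
 * CPU see consistent data without explicit cache maintenance.
 *
 *   AT_NONCACHEABLE_SECTION_ALIGN(static uint8_t s_rxDmaBuffer[512], 4U);
 */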

/* Place code/data in the quick-access sections ("CodeQuickAccess" /
   "DataQuickAccess"), typically mapped to TCM or other fast RAM by the
   linker file. */
#if (defined(__ICCARM__))
#define AT_QUICKACCESS_SECTION_CODE(func) func @"CodeQuickAccess"
#define AT_QUICKACCESS_SECTION_DATA(var) var @"DataQuickAccess"
#define AT_QUICKACCESS_SECTION_DATA_ALIGN(var, alignbytes) \
    SDK_PRAGMA(data_alignment = alignbytes) var @"DataQuickAccess"
#elif (defined(__CC_ARM) || defined(__ARMCC_VERSION))
#define AT_QUICKACCESS_SECTION_CODE(func) __attribute__((section("CodeQuickAccess"), __noinline__)) func
#define AT_QUICKACCESS_SECTION_DATA(var) __attribute__((section("DataQuickAccess"))) var
#define AT_QUICKACCESS_SECTION_DATA_ALIGN(var, alignbytes) \
    __attribute__((section("DataQuickAccess"))) __attribute__((aligned(alignbytes))) var
#elif (defined(__GNUC__))
#define AT_QUICKACCESS_SECTION_CODE(func) __attribute__((section("CodeQuickAccess"), __noinline__)) func
#define AT_QUICKACCESS_SECTION_DATA(var) __attribute__((section("DataQuickAccess"))) var
#define AT_QUICKACCESS_SECTION_DATA_ALIGN(var, alignbytes) \
    __attribute__((section("DataQuickAccess"))) var __attribute__((aligned(alignbytes)))
#else
#error Toolchain not supported.
#endif

/* Place a function in the "RamFunction" section so it executes from RAM. */
#if (defined(__ICCARM__))
#define RAMFUNCTION_SECTION_CODE(func) func @"RamFunction"
#elif (defined(__CC_ARM) || defined(__ARMCC_VERSION))
#define RAMFUNCTION_SECTION_CODE(func) __attribute__((section("RamFunction"))) func
#elif (defined(__GNUC__))
#define RAMFUNCTION_SECTION_CODE(func) __attribute__((section("RamFunction"))) func
#else
#error Toolchain not supported.
#endif
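
/*
 * Example (illustrative; the function names are hypothetical): declare routines
 * that must run from RAM instead of flash, e.g. while the flash controller is
 * busy.
 *
 *   AT_QUICKACCESS_SECTION_CODE(void DEMO_FlashCriticalOp(uint32_t addr));
 *   RAMFUNCTION_SECTION_CODE(static void DEMO_RamHelper(void));
 */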

/* Arm Compiler 6: declaration of the default interrupt handler. */
#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)
void DefaultISR(void);
#endif

#include "fsl_clock.h"

#if ((defined(FSL_FEATURE_SOC_SYSCON_COUNT) && (FSL_FEATURE_SOC_SYSCON_COUNT > 0)) || \
     (defined(FSL_FEATURE_SOC_ASYNC_SYSCON_COUNT) && (FSL_FEATURE_SOC_ASYNC_SYSCON_COUNT > 0)))
#include "fsl_reset.h"
#endif

#if defined(__cplusplus)
extern "C" {
#endif

/*!
 * @brief Enable the given interrupt in the NVIC (or GIC on cores that use one).
 *
 * Returns kStatus_Fail if the interrupt is not available on this device,
 * kStatus_Success otherwise.
 */
static inline status_t EnableIRQ(IRQn_Type interrupt)
{
    status_t status = kStatus_Success;

    if (NotAvail_IRQn == interrupt)
    {
        status = kStatus_Fail;
    }

#if defined(FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS) && (FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS > 0)
    else if ((int32_t)interrupt >= (int32_t)FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS)
    {
        status = kStatus_Fail;
    }
#endif

    else
    {
#if defined(__GIC_PRIO_BITS)
        GIC_EnableIRQ(interrupt);
#else
        NVIC_EnableIRQ(interrupt);
#endif
    }

    return status;
}

/*!
 * @brief Disable the given interrupt in the NVIC (or GIC on cores that use one).
 *
 * Returns kStatus_Fail if the interrupt is not available on this device,
 * kStatus_Success otherwise.
 */
static inline status_t DisableIRQ(IRQn_Type interrupt)
{
    status_t status = kStatus_Success;

    if (NotAvail_IRQn == interrupt)
    {
        status = kStatus_Fail;
    }

#if defined(FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS) && (FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS > 0)
    else if ((int32_t)interrupt >= (int32_t)FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS)
    {
        status = kStatus_Fail;
    }
#endif

    else
    {
#if defined(__GIC_PRIO_BITS)
        GIC_DisableIRQ(interrupt);
#else
        NVIC_DisableIRQ(interrupt);
#endif
    }

    return status;
}

/* On GIC-based cores there is no NVIC; stub out direct NVIC_SetPriority calls. */
#if defined(__GIC_PRIO_BITS)
#define NVIC_SetPriority(irq, prio) do {} while (0)
#endif

/*!
 * @brief Set the priority of the given interrupt and then enable it.
 *
 * Returns kStatus_Fail if the interrupt is not available on this device,
 * kStatus_Success otherwise.
 */
static inline status_t EnableIRQWithPriority(IRQn_Type interrupt, uint8_t priNum)
{
    status_t status = kStatus_Success;

    if (NotAvail_IRQn == interrupt)
    {
        status = kStatus_Fail;
    }

#if defined(FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS) && (FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS > 0)
    else if ((int32_t)interrupt >= (int32_t)FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS)
    {
        status = kStatus_Fail;
    }
#endif

    else
    {
#if defined(__GIC_PRIO_BITS)
        GIC_SetPriority(interrupt, priNum);
        GIC_EnableIRQ(interrupt);
#else
        NVIC_SetPriority(interrupt, priNum);
        NVIC_EnableIRQ(interrupt);
#endif
    }

    return status;
}
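
/*
 * Example (illustrative; the IRQ number and priority value are hypothetical
 * and device dependent):
 *
 *   if (kStatus_Success != EnableIRQWithPriority(DMA0_IRQn, 3U))
 *   {
 *       // the interrupt is not available on this device
 *   }
 */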

/*!
 * @brief Set the priority of the given interrupt without changing its enable state.
 *
 * Returns kStatus_Fail if the interrupt is not available on this device,
 * kStatus_Success otherwise.
 */
static inline status_t IRQ_SetPriority(IRQn_Type interrupt, uint8_t priNum)
{
    status_t status = kStatus_Success;

    if (NotAvail_IRQn == interrupt)
    {
        status = kStatus_Fail;
    }

#if defined(FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS) && (FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS > 0)
    else if ((int32_t)interrupt >= (int32_t)FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS)
    {
        status = kStatus_Fail;
    }
#endif

    else
    {
#if defined(__GIC_PRIO_BITS)
        GIC_SetPriority(interrupt, priNum);
#else
        NVIC_SetPriority(interrupt, priNum);
#endif
    }

    return status;
}

/*!
 * @brief Clear the pending flag of the given interrupt.
 *
 * Returns kStatus_Fail if the interrupt is not available on this device,
 * kStatus_Success otherwise.
 */
static inline status_t IRQ_ClearPendingIRQ(IRQn_Type interrupt)
{
    status_t status = kStatus_Success;

    if (NotAvail_IRQn == interrupt)
    {
        status = kStatus_Fail;
    }

#if defined(FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS) && (FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS > 0)
    else if ((int32_t)interrupt >= (int32_t)FSL_FEATURE_NUMBER_OF_LEVEL1_INT_VECTORS)
    {
        status = kStatus_Fail;
    }
#endif

    else
    {
#if defined(__GIC_PRIO_BITS)
        GIC_ClearPendingIRQ(interrupt);
#else
        NVIC_ClearPendingIRQ(interrupt);
#endif
    }

    return status;
}

/*!
 * @brief Disable interrupts globally and return the previous interrupt mask state,
 * to be passed to EnableGlobalIRQ() to restore it.
 */
static inline uint32_t DisableGlobalIRQ(void)
{
    uint32_t mask;

#if defined(CPSR_I_Msk)
    mask = __get_CPSR() & CPSR_I_Msk;
#elif defined(DAIF_I_BIT)
    mask = __get_DAIF() & DAIF_I_BIT;
#else
    mask = __get_PRIMASK();
#endif
    __disable_irq();

    return mask;
}

/*!
 * @brief Restore the interrupt mask state previously returned by DisableGlobalIRQ().
 */
static inline void EnableGlobalIRQ(uint32_t primask)
{
#if defined(CPSR_I_Msk)
    __set_CPSR((__get_CPSR() & ~CPSR_I_Msk) | primask);
#elif defined(DAIF_I_BIT)
    if (0UL == primask)
    {
        __enable_irq();
    }
#else
    __set_PRIMASK(primask);
#endif
}
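
/*
 * Example (illustrative; s_sharedCounter is a hypothetical variable shared
 * with an ISR): a short critical section that preserves the caller's interrupt
 * state, so it is safe even when interrupts were already disabled.
 *
 *   uint32_t primask = DisableGlobalIRQ();
 *   s_sharedCounter++;
 *   EnableGlobalIRQ(primask);
 */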

#if defined(ENABLE_RAM_VECTOR_TABLE)
/*!
 * @brief Install an interrupt handler into the RAM vector table.
 *
 * Returns the address of the previously installed handler for the given IRQ.
 */
uint32_t InstallIRQHandler(IRQn_Type irq, uint32_t irqHandler);
#endif
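
/*
 * Example (illustrative; the IRQ number and handler name are hypothetical, and
 * ENABLE_RAM_VECTOR_TABLE must be defined for the project):
 *
 *   void APP_Uart0Handler(void);
 *   (void)InstallIRQHandler(UART0_IRQn, (uint32_t)APP_Uart0Handler);
 */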

#if (defined(FSL_FEATURE_SOC_SYSCON_COUNT) && (FSL_FEATURE_SOC_SYSCON_COUNT > 0))
#if !(defined(FSL_FEATURE_POWERLIB_EXTEND) && (FSL_FEATURE_POWERLIB_EXTEND != 0))
/*!
 * @brief Enable the given interrupt as a wake-up source from deep-sleep mode.
 */
void EnableDeepSleepIRQ(IRQn_Type interrupt);

/*!
 * @brief Disable the given interrupt as a wake-up source from deep-sleep mode.
 */
void DisableDeepSleepIRQ(IRQn_Type interrupt);
#endif
#endif

#if defined(DWT)
/*!
 * @brief Enable the DWT CPU cycle counter.
 */
void MSDK_EnableCpuCycleCounter(void);

/*!
 * @brief Read the current DWT CPU cycle count.
 */
uint32_t MSDK_GetCpuCycleCount(void);
#endif
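
/*
 * Example (illustrative sketch; DoWork is a hypothetical workload): measure
 * the CPU cycles spent in a code block. The 32-bit counter wraps, so the
 * unsigned subtraction is only valid for intervals shorter than 2^32 cycles.
 *
 *   MSDK_EnableCpuCycleCounter();
 *   uint32_t start = MSDK_GetCpuCycleCount();
 *   DoWork();
 *   uint32_t cycles = MSDK_GetCpuCycleCount() - start;
 */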

#if defined(__cplusplus)
}
#endif

#endif /* _FSL_COMMON_ARM_H_ */