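/*
 * AArch64 cache support for the RTEMS Cache Manager.  The data caches are
 * maintained with DC operations by virtual address and by set/way, the
 * instruction caches with IC operations; the generic Cache Manager glue is
 * pulled in via the shared cacheimpl.h include at the end of this file.
 */
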
#include <rtems.h>
#include <bsp.h>
#include <rtems/score/aarch64-system-registers.h>
#include <rtems/score/interr.h> /* _Internal_error() */

#define CPU_DATA_CACHE_ALIGNMENT 64

#define CPU_INSTRUCTION_CACHE_ALIGNMENT 64

#define CPU_CACHE_SUPPORT_PROVIDES_RANGE_FUNCTIONS

#define CPU_CACHE_SUPPORT_PROVIDES_CACHE_SIZE_FUNCTIONS

#define AARCH64_CACHE_L1_CPU_DATA_ALIGNMENT ( (size_t) 64 )
#define AARCH64_CACHE_PREPARE_MVA(mva) (const void *) \
  RTEMS_ALIGN_DOWN ( (size_t) mva, AARCH64_CACHE_L1_CPU_DATA_ALIGNMENT )

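/*
 * Clean and invalidate the data cache line containing d_addr to the Point
 * of Coherency (DC CIVAC).  The address is aligned down to the cache line
 * boundary first.
 */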
static inline
void AArch64_data_cache_clean_and_invalidate_line(const void *d_addr)
{
  d_addr = AARCH64_CACHE_PREPARE_MVA(d_addr);

  __asm__ volatile (
    "dc civac, %[d_addr]"
    :
    : [d_addr] "r" (d_addr)
    : "memory"
  );
}

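/*
 * Clean and invalidate (flush) the data cache lines covering the range
 * [d_addr, d_addr + n_bytes).  Data synchronization barriers order the
 * maintenance operations against surrounding memory accesses.
 */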
static inline void
_CPU_cache_flush_data_range(
  const void *d_addr,
  size_t n_bytes
)
{
  _AARCH64_Data_synchronization_barrier();
  if ( n_bytes != 0 ) {
    size_t adx = (size_t) AARCH64_CACHE_PREPARE_MVA ( d_addr );
    const size_t ADDR_LAST = (size_t) d_addr + n_bytes - 1;

    for (; adx <= ADDR_LAST; adx += AARCH64_CACHE_L1_CPU_DATA_ALIGNMENT ) {
      AArch64_data_cache_clean_and_invalidate_line( (void*)adx );
    }

    _AARCH64_Data_synchronization_barrier();
  }
  _AARCH64_Data_synchronization_barrier();
}

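/*
 * Invalidate the data cache line containing d_addr to the Point of
 * Coherency (DC IVAC) without cleaning it first.
 */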
static inline void AArch64_data_cache_invalidate_line(const void *d_addr)
{
  d_addr = AARCH64_CACHE_PREPARE_MVA(d_addr);

  __asm__ volatile (
    "dc ivac, %[d_addr]"
    :
    : [d_addr] "r" (d_addr)
    : "memory"
  );
}

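/*
 * Invalidate the data cache lines covering the range
 * [d_addr, d_addr + n_bytes) without writing dirty lines back to memory.
 */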
static inline void
_CPU_cache_invalidate_data_range(
  const void *d_addr,
  size_t n_bytes
)
{
  if ( n_bytes != 0 ) {
    size_t adx = (size_t) AARCH64_CACHE_PREPARE_MVA ( d_addr );
    const size_t end = (size_t) d_addr + n_bytes - 1;

    for (; adx <= end; adx += AARCH64_CACHE_L1_CPU_DATA_ALIGNMENT ) {
      AArch64_data_cache_invalidate_line( (void*)adx );
    }

    _AARCH64_Data_synchronization_barrier();
  }
}

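/*
 * No cache freeze support is implemented for this port; the freeze and
 * unfreeze operations are intentionally empty.
 */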
static inline void _CPU_cache_freeze_data(void)
{
}

static inline void _CPU_cache_unfreeze_data(void)
{
}

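/*
 * Invalidate the instruction cache for the line containing i_addr.  The
 * compiler builtin __builtin___clear_cache() performs the data cache clean
 * and instruction cache invalidation needed to make code modifications
 * visible to instruction fetch.
 */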
static inline void AArch64_instruction_cache_invalidate_line(const void *i_addr)
{
  __builtin___clear_cache((void *)i_addr, ((char *)i_addr) + sizeof(void*) - 1);
}

static inline void
_CPU_cache_invalidate_instruction_range( const void *i_addr, size_t n_bytes)
{
  if ( n_bytes != 0 ) {
    __builtin___clear_cache((void *)i_addr, ((char *)i_addr) + n_bytes - 1);
  }
  _AARCH64_Instruction_synchronization_barrier();
}

static inline void _CPU_cache_freeze_instruction(void)
{
}

static inline void _CPU_cache_unfreeze_instruction(void)
{
}

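/*
 * Select a cache level and type (instruction or data/unified) in
 * CSSELR_EL1 and read back the corresponding geometry from CCSIDR_EL1.
 * The ISB ensures the new selection takes effect before CCSIDR_EL1 is
 * read.  The level argument is 1-based, matching CLIDR_EL1 numbering.
 */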
static inline uint64_t AArch64_get_ccsidr_for_level(
  uint64_t level, bool instruction
)
{
  uint64_t csselr = AARCH64_CSSELR_EL1_LEVEL(level - 1);

  csselr |= instruction ? AARCH64_CSSELR_EL1_IND : 0;

  _AArch64_Write_csselr_el1(csselr);
  _AARCH64_Instruction_synchronization_barrier();
  return _AArch64_Read_ccsidr_el1();
}

/* Log2 of the cache line size in bytes (CCSIDR_EL1.LineSize encodes
   log2(line size in words) - 2) */
static inline uint64_t
AArch64_ccsidr_get_line_power(uint64_t ccsidr)
{
  return AARCH64_CCSIDR_EL1_LINESIZE_GET(ccsidr) + 4;
}

/* Number of ways (CCSIDR_EL1.Associativity is the way count minus one) */
static inline uint64_t
AArch64_ccsidr_get_associativity(uint64_t ccsidr)
{
  return AARCH64_CCSIDR_EL1_ASSOCIATIVITY_GET_0(ccsidr) + 1;
}

/* Number of sets (CCSIDR_EL1.NumSets is the set count minus one) */
static inline uint64_t
AArch64_ccsidr_get_num_sets(uint64_t ccsidr)
{
  return AARCH64_CCSIDR_EL1_NUMSETS_GET_0(ccsidr) + 1;
}

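/*
 * Clean and invalidate every line of the selected data/unified cache level
 * by set/way (DC CISW).  The set/way operand packs the cache level into
 * bits [3:1], the set index starting at bit log2(line size in bytes), and
 * the way index into the uppermost bits (shifted by 32 minus the ceiling
 * of log2 of the associativity).
 */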
static inline void
AArch64_data_cache_clean_and_invalidate_level(uint64_t level)
{
  uint64_t ccsidr;
  uint64_t line_power;
  uint64_t associativity;
  uint64_t way;
  uint64_t way_shift;

  ccsidr = AArch64_get_ccsidr_for_level(level, false);

  line_power = AArch64_ccsidr_get_line_power(ccsidr);
  associativity = AArch64_ccsidr_get_associativity(ccsidr);
  way_shift = __builtin_clz(associativity - 1);

  for (way = 0; way < associativity; ++way) {
    uint64_t num_sets = AArch64_ccsidr_get_num_sets(ccsidr);
    uint64_t set;

    for (set = 0; set < num_sets; ++set) {
      uint64_t set_and_way = (way << way_shift)
        | (set << line_power)
        | ((level - 1) << 1);

      __asm__ volatile (
        "dc cisw, %[set_and_way]"
        :
        : [set_and_way] "r" (set_and_way)
        : "memory"
      );
    }
  }
}

/* Cache type field of CLIDR_EL1 for the given cache level (0-based) */
static inline
uint64_t AArch64_clidr_get_cache_type(uint64_t clidr, uint64_t level)
{
  return (clidr >> (3 * level)) & 0x7;
}

/* Level of Coherency: the last cache level that must be cleaned or
   invalidated for coherency at the Point of Coherency */
static inline uint64_t AArch64_clidr_get_level_of_coherency(uint64_t clidr)
{
  return AARCH64_CLIDR_EL1_LOC_GET(clidr);
}

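/*
 * Clean and invalidate all data/unified cache levels up to the Level of
 * Coherency reported by CLIDR_EL1.
 */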
static inline void AArch64_data_cache_clean_and_invalidate_all_levels(void)
{
  uint64_t clidr = _AArch64_Read_clidr_el1();
  uint64_t loc = AArch64_clidr_get_level_of_coherency(clidr);
  uint64_t level;

  for (level = 1; level <= loc; ++level) {
    AArch64_data_cache_clean_and_invalidate_level(level);
  }
}

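/*
 * Flush the entire data cache.  Interrupts are disabled for the duration
 * of the set/way walk.
 */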
static inline void _CPU_cache_flush_entire_data(void)
{
  rtems_interrupt_level isr_level;

  rtems_interrupt_local_disable(isr_level);
  _AARCH64_Data_synchronization_barrier();
  AArch64_data_cache_clean_and_invalidate_all_levels();
  _AARCH64_Data_synchronization_barrier();
  rtems_interrupt_local_enable(isr_level);
}

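/*
 * Invalidate every line of the selected data/unified cache level by
 * set/way (DC ISW) without cleaning dirty lines first.
 */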
static inline void AArch64_cache_invalidate_level(uint64_t level)
{
  uint64_t ccsidr;
  uint64_t line_power;
  uint64_t associativity;
  uint64_t way;
  uint64_t way_shift;

  ccsidr = AArch64_get_ccsidr_for_level(level, false);

  line_power = AArch64_ccsidr_get_line_power(ccsidr);
  associativity = AArch64_ccsidr_get_associativity(ccsidr);
  way_shift = __builtin_clz(associativity - 1);

  for (way = 0; way < associativity; ++way) {
    uint64_t num_sets = AArch64_ccsidr_get_num_sets(ccsidr);
    uint64_t set;

    for (set = 0; set < num_sets; ++set) {
      uint64_t set_and_way = (way << way_shift)
        | (set << line_power)
        | ((level - 1) << 1);

      __asm__ volatile (
        "dc isw, %[set_and_way]"
        :
        : [set_and_way] "r" (set_and_way)
        : "memory"
      );
    }
  }
}

static inline void AArch64_data_cache_invalidate_all_levels(void)
{
  uint64_t clidr = _AArch64_Read_clidr_el1();
  uint64_t loc = AArch64_clidr_get_level_of_coherency(clidr);
  uint64_t level;

  for (level = 1; level <= loc; ++level) {
    AArch64_cache_invalidate_level(level);
  }
}

static inline void _CPU_cache_invalidate_entire_data(void)
{
  rtems_interrupt_level isr_level;

  rtems_interrupt_local_disable(isr_level);
  _AARCH64_Data_synchronization_barrier();
  AArch64_data_cache_invalidate_all_levels();
  _AARCH64_Data_synchronization_barrier();
  rtems_interrupt_local_enable(isr_level);
}

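/*
 * Enable the data and unified caches by setting SCTLR_EL1.C.
 */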
static inline void _CPU_cache_enable_data(void)
{
  rtems_interrupt_level isr_level;
  uint64_t sctlr;

  rtems_interrupt_local_disable(isr_level);
  sctlr = _AArch64_Read_sctlr_el1();
  sctlr |= AARCH64_SCTLR_EL1_C;
  _AArch64_Write_sctlr_el1(sctlr);
  rtems_interrupt_local_enable(isr_level);
}

/*
 * Disabling the data cache is not supported by this port; a fatal internal
 * error is reported instead of silently continuing.
 */
static RTEMS_NO_RETURN inline void _CPU_cache_disable_data(void)
{
  _Internal_error( INTERNAL_ERROR_CANNOT_DISABLE_DATA_CACHE );
}

static inline void _CPU_cache_invalidate_entire_instruction(void)
{
  /*
   * The A64 instruction set provides no branch predictor maintenance
   * operations, so only the instruction caches are invalidated here.
   */
  __asm__ volatile (
#ifdef RTEMS_SMP
    /* Invalidate all instruction caches to the Point of Unification,
       Inner Shareable (broadcast to all processors in the domain) */
    "ic ialluis\n"
#else
    /* Invalidate the instruction caches of this processor to the Point of
       Unification */
    "ic iallu\n"
#endif
    "isb"
    :
    :
    : "memory"
  );
}

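/*
 * Enable or disable the instruction caches via SCTLR_EL1.I.
 */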
static inline void _CPU_cache_enable_instruction(void)
{
  rtems_interrupt_level isr_level;
  uint64_t sctlr;

  rtems_interrupt_local_disable(isr_level);
  sctlr = _AArch64_Read_sctlr_el1();
  sctlr |= AARCH64_SCTLR_EL1_I;
  _AArch64_Write_sctlr_el1(sctlr);
  rtems_interrupt_local_enable(isr_level);
}

static inline void _CPU_cache_disable_instruction(void)
{
  rtems_interrupt_level isr_level;
  uint64_t sctlr;

  rtems_interrupt_local_disable(isr_level);
  sctlr = _AArch64_Read_sctlr_el1();
  sctlr &= ~AARCH64_SCTLR_EL1_I;
  _AArch64_Write_sctlr_el1(sctlr);
  rtems_interrupt_local_enable(isr_level);
}

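/*
 * Size in bytes of the given cache level: line size * associativity *
 * number of sets, all taken from CCSIDR_EL1.  Returns 0 if the level is
 * beyond the Level of Coherency.
 */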
static inline size_t AArch64_get_cache_size(
  uint64_t level,
  bool instruction
)
{
  rtems_interrupt_level isr_level;
  uint64_t clidr;
  uint64_t loc;
  uint64_t ccsidr;

  clidr = _AArch64_Read_clidr_el1();
  loc = AArch64_clidr_get_level_of_coherency(clidr);

  if (level > loc) {
    return 0;
  }

  rtems_interrupt_local_disable(isr_level);
  ccsidr = AArch64_get_ccsidr_for_level(level, instruction);
  rtems_interrupt_local_enable(isr_level);

  /* AArch64_ccsidr_get_line_power() already yields log2 of the line size
     in bytes, so no further adjustment is applied here */
  return ( (size_t) 1 << AArch64_ccsidr_get_line_power(ccsidr) )
    * AArch64_ccsidr_get_associativity(ccsidr)
    * AArch64_ccsidr_get_num_sets(ccsidr);
}

static inline size_t _CPU_cache_get_data_cache_size(uint64_t level)
{
  return AArch64_get_cache_size(level, false);
}

static inline size_t _CPU_cache_get_instruction_cache_size(uint64_t level)
{
  return AArch64_get_cache_size(level, true);
}

#include "../../shared/cache/cacheimpl.h"