/* File indexing completed on 2025-05-11 08:23:48 */
0001
0002
0003
0004
/*
 * Cache line alignment for the m68k family members that have caches.
 *
 * Which macros get defined mirrors the hardware: the 68020 (but not
 * CPU32 derivatives) has only an instruction cache; the 68030, 68040
 * and 68060 have both instruction and data caches.  On ColdFire, the
 * MCF5200 family has an instruction cache (the MCF528x parts also have
 * a data cache), and the MCF5300 family and the V4e core have both.
 * Every variant selected here uses a 16-byte line size.
 */
#if (defined(__mc68020__) && !defined(__mcpu32__))
# define M68K_INSTRUCTION_CACHE_ALIGNMENT 16
#elif defined(__mc68030__)
# define M68K_INSTRUCTION_CACHE_ALIGNMENT 16
# define M68K_DATA_CACHE_ALIGNMENT 16
#elif ( defined(__mc68040__) || defined (__mc68060__) )
# define M68K_INSTRUCTION_CACHE_ALIGNMENT 16
# define M68K_DATA_CACHE_ALIGNMENT 16
#elif ( defined(__mcf5200__) )
# define M68K_INSTRUCTION_CACHE_ALIGNMENT 16
# if ( defined(__mcf528x__) )
# define M68K_DATA_CACHE_ALIGNMENT 16
# endif
#elif ( defined(__mcf5300__) )
# define M68K_INSTRUCTION_CACHE_ALIGNMENT 16
# define M68K_DATA_CACHE_ALIGNMENT 16
#elif defined(__mcfv4e__)
# define M68K_INSTRUCTION_CACHE_ALIGNMENT 16
# define M68K_DATA_CACHE_ALIGNMENT 16
#endif
0025
/*
 * Re-export the alignments under the CPU-neutral names, but only when
 * the corresponding m68k value was selected above (so a cache-less CPU
 * defines neither).  NOTE(review): presumably consumed by the generic,
 * CPU-independent cache manager -- confirm against that code.
 */
#if defined(M68K_DATA_CACHE_ALIGNMENT)
#define CPU_DATA_CACHE_ALIGNMENT M68K_DATA_CACHE_ALIGNMENT
#endif

#if defined(M68K_INSTRUCTION_CACHE_ALIGNMENT)
#define CPU_INSTRUCTION_CACHE_ALIGNMENT M68K_INSTRUCTION_CACHE_ALIGNMENT
#endif
0033
0034
0035
0036
0037
0038
0039
0040
0041
/*
 * _CPU_CACR_AND(mask)
 *
 * Read-modify-write of the CACR: reads the cache control register,
 * ANDs it with @a mask (i.e. clears the bits that are zero in the
 * mask), and writes the result back.  The CACR can only be moved to
 * and from a general register with movec, hence the temporary.
 *
 * NOTE: movec is privileged; this must run in supervisor mode.  The
 * sequence is not atomic with respect to interrupts.
 *
 * Changes vs. previous revision: do/while(0) wrapper (safe inside
 * if/else), parenthesized macro argument, standard "cc" clobber
 * (the old "%%cc" is not a valid clobber name), "memory" clobber so
 * memory accesses are not reordered across the cache-control write,
 * and a "+d" read-write constraint instead of the "=d"/"0" pair.
 */
#define _CPU_CACR_AND(mask)                                       \
  do {                                                            \
    register unsigned long _value = (mask);                       \
    register unsigned long _ctl = 0;                              \
    __asm__ volatile (                                            \
      "movec %%cacr, %0\n\t"   /* read the cacr  */               \
      "andl %1, %0\n\t"        /* and with mask  */               \
      "movec %0, %%cacr"       /* write the cacr */               \
      : "+d" (_ctl) : "d" (_value) : "cc", "memory" );            \
  } while (0)
0051
0052
0053
0054
0055
/*
 * _CPU_CACR_OR(mask)
 *
 * Read-modify-write of the CACR: reads the cache control register,
 * ORs in the bits set in @a mask, and writes the result back.  The
 * CACR can only be moved to and from a general register with movec,
 * hence the temporary.
 *
 * NOTE: movec is privileged; this must run in supervisor mode.  The
 * sequence is not atomic with respect to interrupts.
 *
 * Changes vs. previous revision: do/while(0) wrapper (safe inside
 * if/else), parenthesized macro argument, standard "cc" clobber
 * (the old "%%cc" is not a valid clobber name), "memory" clobber so
 * memory accesses are not reordered across the cache-control write,
 * and a "+d" read-write constraint instead of the "=d"/"0" pair.
 */
#define _CPU_CACR_OR(mask)                                        \
  do {                                                            \
    register unsigned long _value = (mask);                       \
    register unsigned long _ctl = 0;                              \
    __asm__ volatile (                                            \
      "movec %%cacr, %0\n\t"   /* read the cacr  */               \
      "orl %1, %0\n\t"         /* or with mask   */               \
      "movec %0, %%cacr"       /* write the cacr */               \
      : "+d" (_ctl) : "d" (_value) : "cc", "memory" );            \
  } while (0)
0065
0066
0067
0068
0069
0070
0071
0072
0073 #if ( (defined(__mc68020__) && !defined(__mcpu32__)) || defined(__mc68030__) )
0074
0075 #if defined(__mc68030__)
0076
0077
0078
/* Flushing is a no-op on the 68030: its data cache is write-through,
   so there is never dirty data to push to memory.  (NOTE(review):
   write-through behavior is assumed from the empty bodies -- confirm
   against the 68030 user's manual.) */
static inline void _CPU_cache_flush_1_data_line(const void * d_addr) {}
static inline void _CPU_cache_flush_entire_data(void) {}
0081
/*
 * Invalidate (discard) the single data cache line containing d_addr.
 *
 * The cache works on physical addresses, so the virtual address is
 * first translated (by the externally-defined _CPU_virtual_to_physical
 * helper), loaded into the CAAR (cache address register), and then the
 * 0x400 bit -- "clear entry in data cache" (CED) on the 68030 -- is
 * set in the CACR to invalidate that one entry.
 */
static inline void _CPU_cache_invalidate_1_data_line(
  const void * d_addr
)
{
  void * p_address = (void *) _CPU_virtual_to_physical( d_addr );
  __asm__ volatile ( "movec %0, %%caar" :: "a" (p_address) );
  _CPU_CACR_OR(0x00000400);
}

/* Invalidate the entire data cache: CACR bit 0x800 is the 68030
   "clear data cache" (CD) bit. */
static inline void _CPU_cache_invalidate_entire_data(void)
{
  _CPU_CACR_OR( 0x00000800 );
}
0095
/* Freeze the data cache: set CACR bit 0x200 (68030 FD -- entries stay
   valid but are no longer replaced). */
static inline void _CPU_cache_freeze_data(void)
{
  _CPU_CACR_OR( 0x00000200 );
}

/* Unfreeze the data cache: clear CACR bit 0x200 (FD). */
static inline void _CPU_cache_unfreeze_data(void)
{
  _CPU_CACR_AND( 0xFFFFFDFF );
}

/* Enable the data cache: set CACR bit 0x100 (68030 ED). */
static inline void _CPU_cache_enable_data(void)
{
  _CPU_CACR_OR( 0x00000100 );
}

/* Disable the data cache: clear CACR bit 0x100 (ED). */
static inline void _CPU_cache_disable_data(void)
{
  _CPU_CACR_AND( 0xFFFFFEFF );
}
0115 #endif
0116
0117
0118
0119
/*
 * Invalidate the single instruction cache line containing d_addr.
 *
 * As for the data side: translate the virtual address to physical,
 * load it into the CAAR, then set CACR bit 0x004 -- "clear entry in
 * instruction cache" (CEI) on the 68020/68030.
 */
static inline void _CPU_cache_invalidate_1_instruction_line(
  const void * d_addr
)
{
  void * p_address = (void *) _CPU_virtual_to_physical( d_addr );
  __asm__ volatile ( "movec %0, %%caar" :: "a" (p_address) );
  _CPU_CACR_OR( 0x00000004 );
}

/* Invalidate the entire instruction cache: CACR bit 0x008 is the
   "clear instruction cache" (CI) bit. */
static inline void _CPU_cache_invalidate_entire_instruction(void)
{
  _CPU_CACR_OR( 0x00000008 );
}
0133
/* Freeze the instruction cache: set CACR bit 0x002 (FI -- entries stay
   valid but are no longer replaced). */
static inline void _CPU_cache_freeze_instruction(void)
{
  _CPU_CACR_OR( 0x00000002);
}

/* Unfreeze the instruction cache: clear CACR bit 0x002 (FI). */
static inline void _CPU_cache_unfreeze_instruction(void)
{
  _CPU_CACR_AND( 0xFFFFFFFD );
}

/* Enable the instruction cache: set CACR bit 0x001 (EI). */
static inline void _CPU_cache_enable_instruction(void)
{
  _CPU_CACR_OR( 0x00000001 );
}

/* Disable the instruction cache: clear CACR bit 0x001 (EI). */
static inline void _CPU_cache_disable_instruction(void)
{
  _CPU_CACR_AND( 0xFFFFFFFE );
}
0153
0154
0155 #elif ( defined(__mc68040__) || defined (__mc68060__) )
0156
0157
/* The 68040/68060 CACR has no cache-freeze capability, so the freeze
   and unfreeze operations are no-ops on these CPUs.  (NOTE(review):
   inferred from the empty bodies -- confirm against the 68040 UM.) */
static inline void _CPU_cache_freeze_data(void) {}
static inline void _CPU_cache_unfreeze_data(void) {}
static inline void _CPU_cache_freeze_instruction(void) {}
static inline void _CPU_cache_unfreeze_instruction(void) {}
0162
/*
 * Flush (push to memory) the data cache line containing d_addr.
 *
 * The 68040/68060 cache instructions take a physical address, so the
 * virtual address is translated first; cpushl pushes the matching
 * data-cache line.
 */
static inline void _CPU_cache_flush_1_data_line(
  const void * d_addr
)
{
  void * p_address = (void *) _CPU_virtual_to_physical( d_addr );
  __asm__ volatile ( "cpushl %%dc,(%0)" :: "a" (p_address) );
}

/* Invalidate (discard without pushing) the data cache line containing
   d_addr, using cinvl on the translated physical address. */
static inline void _CPU_cache_invalidate_1_data_line(
  const void * d_addr
)
{
  void * p_address = (void *) _CPU_virtual_to_physical( d_addr );
  __asm__ volatile ( "cinvl %%dc,(%0)" :: "a" (p_address) );
}
0178
/* Flush (push to memory) every line of the data cache (cpusha). */
static inline void _CPU_cache_flush_entire_data(void)
{
  __asm__ volatile ( "cpusha %%dc" :: );
}

/* Invalidate every line of the data cache without pushing (cinva). */
static inline void _CPU_cache_invalidate_entire_data(void)
{
  __asm__ volatile ( "cinva %%dc" :: );
}
0188
/* Enable the data cache: set CACR bit 31 (0x80000000, the 68040/68060
   EDC "enable data cache" bit). */
static inline void _CPU_cache_enable_data(void)
{
  _CPU_CACR_OR( 0x80000000 );
}

/* Disable the data cache: clear CACR bit 31 (EDC). */
static inline void _CPU_cache_disable_data(void)
{
  _CPU_CACR_AND( 0x7FFFFFFF );
}
0198
/* Invalidate the instruction cache line containing i_addr: translate
   the virtual address to physical, then cinvl the matching line. */
static inline void _CPU_cache_invalidate_1_instruction_line(
  const void * i_addr )
{
  void * p_address = (void *) _CPU_virtual_to_physical( i_addr );
  __asm__ volatile ( "cinvl %%ic,(%0)" :: "a" (p_address) );
}

/* Invalidate every line of the instruction cache (cinva). */
static inline void _CPU_cache_invalidate_entire_instruction(void)
{
  __asm__ volatile ( "cinva %%ic" :: );
}
0210
/* Enable the instruction cache: set CACR bit 0x8000 (the 68040/68060
   EIC "enable instruction cache" bit). */
static inline void _CPU_cache_enable_instruction(void)
{
  _CPU_CACR_OR( 0x00008000 );
}

/* Disable the instruction cache: clear CACR bit 0x8000 (EIC). */
static inline void _CPU_cache_disable_instruction(void)
{
  _CPU_CACR_AND( 0xFFFF7FFF );
}
0220 #endif