3 * Texas Instruments, <www.ti.com>
4 * Aneesh V <aneesh@ti.com>
6 * SPDX-License-Identifier: GPL-2.0+
8 #include <linux/types.h>
10 #include <asm/armv7.h>
11 #include <asm/utils.h>
/*
 * Operation selectors passed to the D-cache maintenance helpers below;
 * they choose between invalidate-only and clean+invalidate, over the
 * whole cache (set/way) or an address range (MVA).
 */
#define ARMV7_DCACHE_INVAL_ALL 1
#define ARMV7_DCACHE_CLEAN_INVAL_ALL 2
#define ARMV7_DCACHE_INVAL_RANGE 3
#define ARMV7_DCACHE_CLEAN_INVAL_RANGE 4
18 #ifndef CONFIG_SYS_DCACHE_OFF
/* Asm functions from cache_v7_asm.S */
/* Clean+invalidate the entire D-cache hierarchy (implemented in assembly) */
void v7_flush_dcache_all(void);
23 static int check_cache_range(unsigned long start, unsigned long stop)
27 if (start & (CONFIG_SYS_CACHELINE_SIZE - 1))
30 if (stop & (CONFIG_SYS_CACHELINE_SIZE - 1))
34 debug("CACHE: Misaligned operation at range [%08lx, %08lx]\n",
41 * Write the level and type you want to Cache Size Selection Register(CSSELR)
42 * to get size details from Current Cache Size ID Register(CCSIDR)
44 static void set_csselr(u32 level, u32 type)
46 u32 csselr = level << 1 | type;
48 /* Write to Cache Size Selection Register(CSSELR) */
49 asm volatile ("mcr p15, 2, %0, c0, c0, 0" : : "r" (csselr));
52 static u32 get_ccsidr(void)
56 /* Read current CP15 Cache Size ID Register */
57 asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));
61 static u32 get_clidr(void)
65 /* Read current CP15 Cache Level ID Register */
66 asm volatile ("mrc p15,1,%0,c0,c0,1" : "=r" (clidr));
70 static void v7_inval_dcache_level_setway(u32 level, u32 num_sets,
71 u32 num_ways, u32 way_shift,
78 * For optimal assembly code:
80 * b. have bigger loop inside
82 for (way = num_ways - 1; way >= 0 ; way--) {
83 for (set = num_sets - 1; set >= 0; set--) {
84 setway = (level << 1) | (set << log2_line_len) |
86 /* Invalidate data/unified cache line by set/way */
87 asm volatile (" mcr p15, 0, %0, c7, c6, 2"
91 /* DSB to make sure the operation is complete */
95 static void v7_maint_dcache_level_setway(u32 level, u32 operation)
98 u32 num_sets, num_ways, log2_line_len, log2_num_ways;
101 set_csselr(level, ARMV7_CSSELR_IND_DATA_UNIFIED);
103 ccsidr = get_ccsidr();
105 log2_line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
106 CCSIDR_LINE_SIZE_OFFSET) + 2;
107 /* Converting from words to bytes */
110 num_ways = ((ccsidr & CCSIDR_ASSOCIATIVITY_MASK) >>
111 CCSIDR_ASSOCIATIVITY_OFFSET) + 1;
112 num_sets = ((ccsidr & CCSIDR_NUM_SETS_MASK) >>
113 CCSIDR_NUM_SETS_OFFSET) + 1;
115 * According to ARMv7 ARM number of sets and number of ways need
116 * not be a power of 2
118 log2_num_ways = log_2_n_round_up(num_ways);
120 way_shift = (32 - log2_num_ways);
121 v7_inval_dcache_level_setway(level, num_sets, num_ways,
122 way_shift, log2_line_len);
125 static void v7_maint_dcache_all(u32 operation)
127 u32 level, cache_type, level_start_bit = 0;
128 u32 clidr = get_clidr();
130 for (level = 0; level < 7; level++) {
131 cache_type = (clidr >> level_start_bit) & 0x7;
132 if ((cache_type == ARMV7_CLIDR_CTYPE_DATA_ONLY) ||
133 (cache_type == ARMV7_CLIDR_CTYPE_INSTRUCTION_DATA) ||
134 (cache_type == ARMV7_CLIDR_CTYPE_UNIFIED))
135 v7_maint_dcache_level_setway(level, operation);
136 level_start_bit += 3;
140 static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
144 /* Align start to cache line boundary */
145 start &= ~(line_len - 1);
146 for (mva = start; mva < stop; mva = mva + line_len) {
147 /* DCCIMVAC - Clean & Invalidate data cache by MVA to PoC */
148 asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva));
152 static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
157 * If start address is not aligned to cache-line do not
158 * invalidate the first cache-line
160 if (start & (line_len - 1)) {
161 printf("ERROR: %s - start address is not aligned - 0x%08x\n",
163 /* move to next cache line */
164 start = (start + line_len - 1) & ~(line_len - 1);
168 * If stop address is not aligned to cache-line do not
169 * invalidate the last cache-line
171 if (stop & (line_len - 1)) {
172 printf("ERROR: %s - stop address is not aligned - 0x%08x\n",
174 /* align to the beginning of this cache line */
175 stop &= ~(line_len - 1);
178 for (mva = start; mva < stop; mva = mva + line_len) {
179 /* DCIMVAC - Invalidate data cache by MVA to PoC */
180 asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva));
184 static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
186 u32 line_len, ccsidr;
188 ccsidr = get_ccsidr();
189 line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
190 CCSIDR_LINE_SIZE_OFFSET) + 2;
191 /* Converting from words to bytes */
193 /* converting from log2(linelen) to linelen */
194 line_len = 1 << line_len;
197 case ARMV7_DCACHE_CLEAN_INVAL_RANGE:
198 v7_dcache_clean_inval_range(start, stop, line_len);
200 case ARMV7_DCACHE_INVAL_RANGE:
201 v7_dcache_inval_range(start, stop, line_len);
205 /* DSB to make sure the operation is complete */
210 static void v7_inval_tlb(void)
212 /* Invalidate entire unified TLB */
213 asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
214 /* Invalidate entire data TLB */
215 asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
216 /* Invalidate entire instruction TLB */
217 asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
218 /* Full system DSB - make sure that the invalidation is complete */
220 /* Full system ISB - make sure the instruction stream sees it */
224 void invalidate_dcache_all(void)
226 v7_maint_dcache_all(ARMV7_DCACHE_INVAL_ALL);
228 v7_outer_cache_inval_all();
/*
 * Performs a clean & invalidation of the entire data cache
 * at all levels: inner caches via the assembly set/way routine,
 * then the outer cache if the platform provides one.
 */
void flush_dcache_all(void)
{
	v7_flush_dcache_all();

	v7_outer_cache_flush_all();
}
243 * Invalidates range in all levels of D-cache/unified cache used:
244 * Affects the range [start, stop - 1]
246 void invalidate_dcache_range(unsigned long start, unsigned long stop)
248 check_cache_range(start, stop);
250 v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);
252 v7_outer_cache_inval_range(start, stop);
256 * Flush range(clean & invalidate) from all levels of D-cache/unified
258 * Affects the range [start, stop - 1]
260 void flush_dcache_range(unsigned long start, unsigned long stop)
262 check_cache_range(start, stop);
264 v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);
266 v7_outer_cache_flush_range(start, stop);
/*
 * Prepare caches/TLBs before the MMU is enabled: enable the outer cache
 * and make sure no stale cache or TLB contents survive.
 */
void arm_init_before_mmu(void)
{
	v7_outer_cache_enable();
	invalidate_dcache_all();
	/* NOTE(review): TLB invalidate restored from upstream — confirm */
	v7_inval_tlb();
}
/*
 * Push page-table updates in [start, stop) to memory so the MMU's table
 * walks see them, then drop any stale TLB entries.
 */
void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
	flush_dcache_range(start, stop);
	/* NOTE(review): TLB invalidate restored from upstream — confirm */
	v7_inval_tlb();
}
281 #else /* #ifndef CONFIG_SYS_DCACHE_OFF */
/*
 * No-op stubs used when D-cache support is compiled out
 * (CONFIG_SYS_DCACHE_OFF): keep the public cache API callable.
 */
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void arm_init_before_mmu(void)
{
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
}

void arm_init_domains(void)
{
}
301 #endif /* #ifndef CONFIG_SYS_DCACHE_OFF */
303 #ifndef CONFIG_SYS_ICACHE_OFF
304 /* Invalidate entire I-cache and branch predictor array */
305 void invalidate_icache_all(void)
308 * Invalidate all instruction caches to PoU.
309 * Also flushes branch target cache.
311 asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
313 /* Invalidate entire branch predictor array */
314 asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
316 /* Full system DSB - make sure that the invalidation is complete */
319 /* ISB - make sure the instruction stream sees it */
323 void invalidate_icache_all(void)
/* Stub implementations for outer cache operations */
/*
 * Declared __weak so platforms with an outer cache (e.g. an L2
 * controller) can override them; by default every hook is a no-op.
 */
__weak void v7_outer_cache_enable(void) {}
__weak void v7_outer_cache_disable(void) {}
__weak void v7_outer_cache_flush_all(void) {}
__weak void v7_outer_cache_inval_all(void) {}
__weak void v7_outer_cache_flush_range(u32 start, u32 end) {}
__weak void v7_outer_cache_inval_range(u32 start, u32 end) {}