3 * Texas Instruments, <www.ti.com>
4 * Aneesh V <aneesh@ti.com>
6 * SPDX-License-Identifier: GPL-2.0+
8 #include <linux/types.h>
10 #include <asm/armv7.h>
11 #include <asm/utils.h>
13 #define ARMV7_DCACHE_INVAL_RANGE 1
14 #define ARMV7_DCACHE_CLEAN_INVAL_RANGE 2
16 #ifndef CONFIG_SYS_DCACHE_OFF
/* Asm functions from cache_v7_asm.S */
void v7_flush_dcache_all(void);		/* clean & invalidate entire D-cache */
void v7_invalidate_dcache_all(void);	/* invalidate entire D-cache */
22 static int check_cache_range(unsigned long start, unsigned long stop)
26 if (start & (CONFIG_SYS_CACHELINE_SIZE - 1))
29 if (stop & (CONFIG_SYS_CACHELINE_SIZE - 1))
33 debug("CACHE: Misaligned operation at range [%08lx, %08lx]\n",
39 static u32 get_ccsidr(void)
43 /* Read current CP15 Cache Size ID Register */
44 asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));
48 static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
52 /* Align start to cache line boundary */
53 start &= ~(line_len - 1);
54 for (mva = start; mva < stop; mva = mva + line_len) {
55 /* DCCIMVAC - Clean & Invalidate data cache by MVA to PoC */
56 asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva));
60 static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
65 * If start address is not aligned to cache-line do not
66 * invalidate the first cache-line
68 if (start & (line_len - 1)) {
69 printf("ERROR: %s - start address is not aligned - 0x%08x\n",
71 /* move to next cache line */
72 start = (start + line_len - 1) & ~(line_len - 1);
76 * If stop address is not aligned to cache-line do not
77 * invalidate the last cache-line
79 if (stop & (line_len - 1)) {
80 printf("ERROR: %s - stop address is not aligned - 0x%08x\n",
82 /* align to the beginning of this cache line */
83 stop &= ~(line_len - 1);
86 for (mva = start; mva < stop; mva = mva + line_len) {
87 /* DCIMVAC - Invalidate data cache by MVA to PoC */
88 asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva));
92 static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
96 ccsidr = get_ccsidr();
97 line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
98 CCSIDR_LINE_SIZE_OFFSET) + 2;
99 /* Converting from words to bytes */
101 /* converting from log2(linelen) to linelen */
102 line_len = 1 << line_len;
105 case ARMV7_DCACHE_CLEAN_INVAL_RANGE:
106 v7_dcache_clean_inval_range(start, stop, line_len);
108 case ARMV7_DCACHE_INVAL_RANGE:
109 v7_dcache_inval_range(start, stop, line_len);
113 /* DSB to make sure the operation is complete */
118 static void v7_inval_tlb(void)
120 /* Invalidate entire unified TLB */
121 asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
122 /* Invalidate entire data TLB */
123 asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
124 /* Invalidate entire instruction TLB */
125 asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
126 /* Full system DSB - make sure that the invalidation is complete */
128 /* Full system ISB - make sure the instruction stream sees it */
/* Invalidate the entire data cache: architected caches first, then outer */
void invalidate_dcache_all(void)
{
	v7_invalidate_dcache_all();

	v7_outer_cache_inval_all();
}
/*
 * Performs a clean & invalidation of the entire data cache
 * at all levels
 */
void flush_dcache_all(void)
{
	v7_flush_dcache_all();

	v7_outer_cache_flush_all();
}
151 * Invalidates range in all levels of D-cache/unified cache used:
152 * Affects the range [start, stop - 1]
154 void invalidate_dcache_range(unsigned long start, unsigned long stop)
156 check_cache_range(start, stop);
158 v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);
160 v7_outer_cache_inval_range(start, stop);
164 * Flush range(clean & invalidate) from all levels of D-cache/unified
166 * Affects the range [start, stop - 1]
168 void flush_dcache_range(unsigned long start, unsigned long stop)
170 check_cache_range(start, stop);
172 v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);
174 v7_outer_cache_flush_range(start, stop);
/*
 * Prepare caches/TLB for MMU setup: enable the outer cache, then make
 * sure no stale D-cache or TLB contents survive into the new mapping.
 */
void arm_init_before_mmu(void)
{
	v7_outer_cache_enable();
	invalidate_dcache_all();
	v7_inval_tlb();
}
/*
 * Push modified page-table entries out to memory where the MMU table
 * walker can see them, and drop any TLB entries cached from the old
 * tables.
 */
void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
	flush_dcache_range(start, stop);
	v7_inval_tlb();
}
189 #else /* #ifndef CONFIG_SYS_DCACHE_OFF */
190 void invalidate_dcache_all(void)
194 void flush_dcache_all(void)
198 void arm_init_before_mmu(void)
202 void mmu_page_table_flush(unsigned long start, unsigned long stop)
206 void arm_init_domains(void)
209 #endif /* #ifndef CONFIG_SYS_DCACHE_OFF */
211 #ifndef CONFIG_SYS_ICACHE_OFF
212 /* Invalidate entire I-cache and branch predictor array */
213 void invalidate_icache_all(void)
216 * Invalidate all instruction caches to PoU.
217 * Also flushes branch target cache.
219 asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
221 /* Invalidate entire branch predictor array */
222 asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
224 /* Full system DSB - make sure that the invalidation is complete */
227 /* ISB - make sure the instruction stream sees it */
231 void invalidate_icache_all(void)
/* Stub implementations for outer cache operations */
/*
 * Boards with an outer (e.g. L2) cache controller provide strong
 * definitions of these hooks; everywhere else these no-ops are linked.
 */
__weak void v7_outer_cache_enable(void) {}
__weak void v7_outer_cache_disable(void) {}
__weak void v7_outer_cache_flush_all(void) {}
__weak void v7_outer_cache_inval_all(void) {}
__weak void v7_outer_cache_flush_range(u32 start, u32 end) {}
__weak void v7_outer_cache_inval_range(u32 start, u32 end) {}