4 #include <ppc_asm.tmpl>
10 #ifndef CACHE_LINE_SIZE
11 # define CACHE_LINE_SIZE L1_CACHE_BYTES
14 #if CACHE_LINE_SIZE == 128
15 #define LG_CACHE_LINE_SIZE 7
16 #elif CACHE_LINE_SIZE == 32
17 #define LG_CACHE_LINE_SIZE 5
18 #elif CACHE_LINE_SIZE == 16
19 #define LG_CACHE_LINE_SIZE 4
20 #elif CACHE_LINE_SIZE == 8
21 #define LG_CACHE_LINE_SIZE 3
23 # error "Invalid cache line size!"
27 * Most of this code is taken from 74xx_7xx/cache.S
28 * and then cleaned up a bit
32 * Invalidate L1 instruction cache.
/*
 * invalidate_l1_instruction_cache -- invalidate the whole L1 I-cache.
 * Uses the HID0 invalidate-all bit (ICFI); the mfspr/mtspr HID0 sequence
 * is elided in this excerpt -- TODO confirm against the full file.
 * Presumably clobbers r3 (see icache_disable's "uses r3" note below).
 */
34 _GLOBAL(invalidate_l1_instruction_cache)
35 /* use invalidate-all bit in HID0 */
43 * Invalidate L1 data cache.
/*
 * invalidate_l1_data_cache -- invalidate the whole L1 D-cache.
 * Body largely elided in this excerpt.
 * NOTE(review): "lis r5,CACHE_LINE_SIZE" puts CACHE_LINE_SIZE in the
 * UPPER 16 bits of r5 (i.e. CACHE_LINE_SIZE << 16); presumably this forms
 * a cache-size/loop bound rather than a line size -- verify intent against
 * the elided surrounding instructions.
 */
45 _GLOBAL(invalidate_l1_data_cache)
57 lis r5,CACHE_LINE_SIZE
62 lis r5,CACHE_LINE_SIZE
68 * Write any modified data cache blocks out to memory
69 * and invalidate the corresponding instruction cache blocks.
70 * This is a no-op on the 601.
72 * flush_icache_range(unsigned long start, unsigned long stop)
/*
 * flush_icache_range(unsigned long start, unsigned long stop)
 * PPC32 ABI: r3 = start, r4 = stop.  Writes modified D-cache blocks back
 * to memory (dcbst, per the sync comment below) and invalidates the
 * matching I-cache blocks so freshly written code becomes fetchable.
 * NOTE(review): align/count math, the dcbst and icbi loop bodies
 * (mtctr/bdnz) and any final isync are elided in this excerpt.
 */
74 _GLOBAL(flush_icache_range)
/* r5 = CACHE_LINE_SIZE-1: mask material for aligning start to a line */
75 li r5,CACHE_LINE_SIZE-1
/* r4 = number of cache lines spanned; '.' form sets CR0 so an (elided)
 * beqlr can return early on an empty range */
79 srwi. r4,r4,LG_CACHE_LINE_SIZE
/* advance to the next line inside the writeback (dcbst) loop */
84 addi r3,r3,CACHE_LINE_SIZE
86 sync /* wait for dcbst's to get to ram */
/* advance to the next line inside the I-cache invalidate (icbi) loop;
 * r6 presumably re-walks the same range -- confirm in full file */
89 addi r6,r6,CACHE_LINE_SIZE
91 sync /* additional sync needed on g4 */
95 * Write any modified data cache blocks out to memory.
96 * Does not invalidate the corresponding cache lines (especially for
97 * any corresponding instruction cache).
99 * clean_dcache_range(unsigned long start, unsigned long stop)
/*
 * clean_dcache_range(unsigned long start, unsigned long stop)
 * PPC32 ABI: r3 = start, r4 = stop.  Writes modified D-cache blocks in
 * [start, stop) back to memory without invalidating them (see header
 * comment above).  The dcbst/bdnz loop body is elided in this excerpt.
 */
101 _GLOBAL(clean_dcache_range)
102 li r5,CACHE_LINE_SIZE-1
103 andc r3,r3,r5 /* align r3 down to cache line */
104 subf r4,r3,r4 /* r4 = offset of stop from start of cache line */
105 add r4,r4,r5 /* r4 += cache_line_size-1 */
106 srwi. r4,r4,LG_CACHE_LINE_SIZE /* r4 = number of cache lines to flush */
107 beqlr /* if r4 == 0 return */
108 mtctr r4 /* ctr = r4 */
/* advance to the next line inside the (elided) dcbst loop */
112 addi r3,r3,CACHE_LINE_SIZE
114 sync /* wait for dcbst's to get to ram */
118 * Write any modified data cache blocks out to memory
119 * and invalidate the corresponding instruction cache blocks.
121 * flush_dcache_range(unsigned long start, unsigned long stop)
/*
 * flush_dcache_range(unsigned long start, unsigned long stop)
 * PPC32 ABI: r3 = start, r4 = stop.  Flushes (writes back AND
 * invalidates, via dcbf per the sync comment) each D-cache line in
 * [start, stop).  Same align/count prologue as clean_dcache_range;
 * the andc/subf/add math, beqlr/mtctr and the dcbf/bdnz loop are
 * elided in this excerpt.
 */
123 _GLOBAL(flush_dcache_range)
/* r5 = CACHE_LINE_SIZE-1: line-alignment mask */
124 li r5,CACHE_LINE_SIZE-1
/* r4 = number of lines; sets CR0 for an (elided) early return */
128 srwi. r4,r4,LG_CACHE_LINE_SIZE
/* advance to the next line inside the (elided) dcbf loop */
134 addi r3,r3,CACHE_LINE_SIZE
136 sync /* wait for dcbf's to get to ram */
140 * Like above, but invalidate the D-cache. This is used by the 8xx
141 * to invalidate the cache so the PPC core doesn't get stale data
142 * from the CPM (no cache snooping here :-).
144 * invalidate_dcache_range(unsigned long start, unsigned long stop)
/*
 * invalidate_dcache_range(unsigned long start, unsigned long stop)
 * PPC32 ABI: r3 = start, r4 = stop.  Invalidates (discards, via dcbi per
 * the sync comment) each D-cache line in the range WITHOUT writing dirty
 * data back -- used on 8xx to drop stale lines after CPM DMA (see header
 * comment above).  Align/count math and the dcbi/bdnz loop are elided.
 */
146 _GLOBAL(invalidate_dcache_range)
/* r5 = CACHE_LINE_SIZE-1: line-alignment mask */
147 li r5,CACHE_LINE_SIZE-1
/* r4 = number of lines; sets CR0 for an (elided) early return */
151 srwi. r4,r4,LG_CACHE_LINE_SIZE
/* advance to the next line inside the (elided) dcbi loop */
157 addi r3,r3,CACHE_LINE_SIZE
159 sync /* wait for dcbi's to get to ram */
163 * Flush a particular page from the data cache to RAM.
164 * Note: this is necessary because the instruction cache does *not*
165 * snoop from the data cache.
167 * void __flush_page_to_ram(void *page)
/*
 * void __flush_page_to_ram(void *page)
 * PPC32 ABI: r3 = any address within the page.  Writes the whole 4 KiB
 * page back to RAM line by line (dcbst) and -- judging by the second loop
 * counter r6 -- presumably also invalidates the matching I-cache lines;
 * the loop control (mtctr/bdnz), sync and icbi are elided in this excerpt.
 */
169 _GLOBAL(__flush_page_to_ram)
/* mask off the low 12 bits: r3 = page-aligned base (4 KiB pages) */
170 rlwinm r3,r3,0,0,19 /* Get page base address */
171 li r4,4096/CACHE_LINE_SIZE /* Number of lines in a page */
174 0: dcbst 0,r3 /* Write line to ram */
/* advance to the next line of the writeback loop */
175 addi r3,r3,CACHE_LINE_SIZE
/* advance r6 in a second (elided) per-line loop -- presumably icbi */
180 addi r6,r6,CACHE_LINE_SIZE
187 * Flush a particular page from the instruction cache.
188 * Note: this is necessary because the instruction cache does *not*
189 * snoop from the data cache.
191 * void __flush_icache_page(void *page)
/*
 * void __flush_icache_page(void *page)
 * PPC32 ABI: r3 = page base.  Invalidates every I-cache line of a 4 KiB
 * page (needed because the I-cache does not snoop the D-cache -- see
 * header comment above).  The icbi/bdnz loop body is elided here.
 */
193 _GLOBAL(__flush_icache_page)
194 li r4,4096/CACHE_LINE_SIZE /* Number of lines in a page */
/* advance to the next line inside the (elided) icbi loop */
197 addi r3,r3,CACHE_LINE_SIZE
204 * Clear a page using the dcbz instruction, which doesn't cause any
205 * memory traffic (except to write out any cache lines which get
206 * displaced). This only works on cacheable memory.
/* Body of the dcbz page-clear routine described in the comment above
 * (its _GLOBAL label is elided in this excerpt).
 * r0 = loop count: cache lines per 4 KiB page. */
209 li r0,4096/CACHE_LINE_SIZE
/* advance r3 to the next line after the (elided) dcbz 0,r3 */
212 addi r3,r3,CACHE_LINE_SIZE
217 * Enable L1 Instruction cache
/*
 * icache_enable -- enable the L1 I-cache via HID0.
 * r3 presumably holds the current HID0 value (mfspr elided); r5 is used
 * as scratch for the flash-invalidate/lock masks.  The andc/mtspr/isync
 * sequence is elided in this excerpt -- confirm against the full file.
 */
219 _GLOBAL(icache_enable)
/* r5 = ICFI|ILOCK mask -- presumably cleared from r3 by an elided andc */
221 li r5, HID0_ICFI|HID0_ILOCK
/* r5 = HID0 value with flash-invalidate set, for a one-shot invalidate */
224 ori r5, r3, HID0_ICFI
231 * Disable L1 Instruction cache
/*
 * icache_disable -- disable the L1 I-cache.
 * Invalidates the I-cache first, then (in elided lines) presumably
 * clears HID0_ICE.  Note: calls a sibling routine, so LR must be saved
 * in an elided mflr -- TODO confirm.
 */
233 _GLOBAL(icache_disable)
235 bl invalidate_l1_instruction_cache /* uses r3 */
247 * Is instruction cache enabled?
/*
 * icache_status -- return nonzero in r3 iff the I-cache is enabled.
 * r3 presumably holds HID0 from an elided mfspr; the andi. leaves just
 * the ICE bit in r3 as the return value.
 */
249 _GLOBAL(icache_status)
251 andi. r3, r3, HID0_ICE
/*
 * l1dcache_enable -- enable the L1 D-cache via HID0.
 * r3 presumably holds HID0 (mfspr elided) with DCFI/DLOCK cleared by an
 * elided andc using the r5 mask; the routine then writes HID0 three
 * times: unlock, enable+flash-invalidate, then enable (invalidate bit
 * self-clearing is not assumed -- it is explicitly cleared).
 */
255 _GLOBAL(l1dcache_enable)
/* r5 = flash-invalidate | lock mask */
257 li r5, HID0_DCFI|HID0_DLOCK
259 mtspr HID0, r3 /* no invalidate, unlock */
/* r5 = enabled HID0 value with one-shot flash invalidate set */
261 ori r5, r3, HID0_DCFI
262 mtspr HID0, r5 /* enable + invalidate */
263 mtspr HID0, r3 /* enable */
268 * Enable data cache(s) - L1 and optionally L2
269 * Calls l2cache_enable. LR saved in r5
/*
 * dcache_enable -- enable the L1 D-cache, then the L2 via l2cache_enable.
 * Same HID0 unlock / enable+invalidate / enable dance as l1dcache_enable.
 * Header comment above says LR is saved in r5, yet r5 is reused for HID0
 * masks here -- NOTE(review): the mflr/mtlr pair is elided; verify which
 * register actually holds LR in the full file.
 */
271 _GLOBAL(dcache_enable)
/* r5 = flash-invalidate | lock mask */
273 li r5, HID0_DCFI|HID0_DLOCK
275 mtspr HID0, r3 /* no invalidate, unlock */
/* r5 = enabled HID0 value with one-shot flash invalidate set */
277 ori r5, r3, HID0_DCFI
278 mtspr HID0, r5 /* enable + invalidate */
279 mtspr HID0, r3 /* enable */
283 bl l2cache_enable /* uses r3 and r4 */
291 * Disable data cache(s) - L1 and optionally L2
292 * Calls flush_dcache and l2cache_disable_no_flush.
/*
 * dcache_disable -- flush then disable the L1 D-cache, and disable the
 * L2 without flushing (already flushed via flush_dcache).  LR is kept in
 * r4 across both calls, so r4 must not be clobbered by the callees.
 */
295 _GLOBAL(dcache_disable)
296 mflr r4 /* save link register */
297 bl flush_dcache /* uses r3 and r5 */
/* r5 = flash-invalidate | lock mask (applied to r3 by an elided andc) */
300 li r5, HID0_DCFI|HID0_DLOCK
302 mtspr HID0, r3 /* no invalidate, unlock */
303 li r5, HID0_DCE|HID0_DCFI
304 andc r3, r3, r5 /* no enable, no invalidate */
308 bl l2cache_disable_no_flush /* uses r3 */
310 mtlr r4 /* restore link register */
314 * Is data cache enabled?
/*
 * dcache_status -- return nonzero in r3 iff the D-cache is enabled.
 * r3 presumably holds HID0 from an elided mfspr; the andi. leaves just
 * the DCE bit in r3 as the return value.
 */
316 _GLOBAL(dcache_status)
318 andi. r3, r3, HID0_DCE
322 * Invalidate L2 cache using L2I, assume L2 is enabled
/*
 * l2cache_invalidate -- invalidate the L2 via the L2CR L2I bit.
 * Assumes the L2 is enabled (see header comment).  r3 presumably holds
 * L2CR from an elided mfspr.  After setting L2I, elided code polls until
 * hardware clears it (the andis. test below is that poll).
 */
324 _GLOBAL(l2cache_invalidate)
/* isolate L2CR bit 0 (L2E, L2 enable) -- '.' sets CR0 for an elided test */
326 rlwinm. r3, r3, 0, 0, 0
/* clear bit 0: operate on L2CR with L2 disabled-state bits only */
330 rlwinm r3, r3, 0, 1, 31
332 #ifdef CONFIG_ALTIVEC
/* request global invalidate: set L2I (high half of L2CR) */
339 oris r3, r3, L2CR_L2I@h
/* poll: loop (elided bne) until hardware clears L2I */
344 andis. r3, r3, L2CR_L2I@h
350 * Calls l2cache_invalidate. LR is saved in r4
/*
 * l2cache_enable -- invalidate then enable the L2 cache.
 * LR is kept in r4 across the l2cache_invalidate call; the mfspr/mtspr
 * L2CR and the L2_ENABLE@h half of the constant load are elided in this
 * excerpt.
 */
352 _GLOBAL(l2cache_enable)
353 mflr r4 /* save link register */
354 bl l2cache_invalidate /* uses r3 */
/* merge low half of the L2_ENABLE configuration value into r3 */
357 ori r3, r3, L2_ENABLE@l
360 mtlr r4 /* restore link register */
365 * Calls flush_dcache. LR is saved in r4
/*
 * l2cache_disable -- flush the D-cache, then fall through to
 * l2cache_disable_no_flush (defined just below) to actually turn the
 * L2 off.  LR is kept in r4 across the flush_dcache call.
 */
367 _GLOBAL(l2cache_disable)
368 mflr r4 /* save link register */
369 bl flush_dcache /* uses r3 and r5 */
371 mtlr r4 /* restore link register */
372 l2cache_disable_no_flush: /* provide way to disable L2 w/o flushing */
374 ori r3, r3, L2_INIT@l