#include <ppc_asm.tmpl>
#ifndef CACHE_LINE_SIZE
# define CACHE_LINE_SIZE L1_CACHE_BYTES
#endif

#if CACHE_LINE_SIZE == 128
#define LG_CACHE_LINE_SIZE 7
#elif CACHE_LINE_SIZE == 32
#define LG_CACHE_LINE_SIZE 5
#elif CACHE_LINE_SIZE == 16
#define LG_CACHE_LINE_SIZE 4
#elif CACHE_LINE_SIZE == 8
#define LG_CACHE_LINE_SIZE 3
#else
# error "Invalid cache line size!"
#endif
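/*
 * Illustrative arithmetic (not from the original file; values are
 * arbitrary) for the range-to-line-count conversion that the flush/clean/
 * invalidate routines below all perform:
 *
 *   CACHE_LINE_SIZE = 32, LG_CACHE_LINE_SIZE = 5
 *   start = 0x1008, stop = 0x1068
 *   start & ~(CACHE_LINE_SIZE - 1)          = 0x1000  (aligned start)
 *   stop - 0x1000 + (CACHE_LINE_SIZE - 1)   = 0x87    (135)
 *   135 >> LG_CACHE_LINE_SIZE               = 4       cache lines
 *
 * i.e. the four lines at 0x1000, 0x1020, 0x1040 and 0x1060 are touched.
 */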
/*
 * Most of this code is taken from 74xx_7xx/cache.S
 * and then cleaned up a bit.
 */
/*
 * Invalidate L1 instruction cache.
 */
_GLOBAL(invalidate_l1_instruction_cache)
	/* use invalidate-all bit in HID0 */
/*
 * Invalidate L1 data cache.
 */
_GLOBAL(invalidate_l1_data_cache)
	lis	r5,CACHE_LINE_SIZE
	lis	r5,CACHE_LINE_SIZE
/*
 * Write any modified data cache blocks out to memory
 * and invalidate the corresponding instruction cache blocks.
 * This is a no-op on the 601.
 *
 * flush_icache_range(unsigned long start, unsigned long stop)
 */
_GLOBAL(flush_icache_range)
	li	r5,CACHE_LINE_SIZE-1
	srwi.	r4,r4,LG_CACHE_LINE_SIZE	/* r4 = number of cache lines to flush */
	addi	r3,r3,CACHE_LINE_SIZE
	sync				/* wait for dcbst's to get to ram */
	addi	r6,r6,CACHE_LINE_SIZE
	sync				/* additional sync needed on g4 */
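/*
 * Illustrative caller sketch; not part of the original file, so it is
 * kept under "#if 0".  After copying code into RAM, push it out of the
 * data cache and invalidate the stale instruction cache lines before
 * jumping to it.  The choice of r8/r9 for the buffer address and length
 * is an arbitrary assumption.
 */
#if 0
	mflr	r10			/* the bl below clobbers LR */
	mr	r3, r8			/* r3 = start of the copied code */
	add	r4, r8, r9		/* r4 = stop = start + length */
	bl	flush_icache_range
	mtlr	r10
#endif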
/*
 * Write any modified data cache blocks out to memory.
 * Does not invalidate the corresponding cache lines (especially for
 * any corresponding instruction cache).
 *
 * clean_dcache_range(unsigned long start, unsigned long stop)
 */
_GLOBAL(clean_dcache_range)
	li	r5,CACHE_LINE_SIZE-1
	andc	r3,r3,r5		/* align r3 down to cache line */
	subf	r4,r3,r4		/* r4 = offset of stop from start of cache line */
	add	r4,r4,r5		/* r4 += cache_line_size-1 */
	srwi.	r4,r4,LG_CACHE_LINE_SIZE /* r4 = number of cache lines to flush */
	beqlr				/* if r4 == 0 return */
	mtctr	r4			/* ctr = r4 */
	addi	r3,r3,CACHE_LINE_SIZE
	sync				/* wait for dcbst's to get to ram */
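/*
 * A compact sketch of the same clean loop, spelled out in one place for
 * reference; the routine above is the authoritative version and this
 * copy is kept under "#if 0".  Each cache line in [start, stop) is
 * written back with dcbst, then a sync waits for the stores to reach
 * memory.
 */
#if 0
	li	r5,CACHE_LINE_SIZE-1
	andc	r3,r3,r5		/* align start down to a line boundary */
	subf	r4,r3,r4
	add	r4,r4,r5
	srwi.	r4,r4,LG_CACHE_LINE_SIZE /* number of lines to clean */
	beqlr
	mtctr	r4
1:	dcbst	0,r3			/* write this line back to memory */
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	1b
	sync
	blr
#endif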
/*
 * Write any modified data cache blocks out to memory and invalidate them.
 * Does not invalidate the corresponding instruction cache blocks.
 *
 * flush_dcache_range(unsigned long start, unsigned long stop)
 */
_GLOBAL(flush_dcache_range)
	li	r5,CACHE_LINE_SIZE-1
	srwi.	r4,r4,LG_CACHE_LINE_SIZE	/* r4 = number of cache lines to flush */
	addi	r3,r3,CACHE_LINE_SIZE
	sync				/* wait for dcbf's to get to ram */
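/*
 * The flush variant differs from the clean loop sketched above only in
 * the cache op: dcbf writes a modified line back *and* invalidates it,
 * while dcbst leaves the line valid.  Sketch only, kept under "#if 0".
 */
#if 0
1:	dcbf	0,r3			/* write back + invalidate this line */
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	1b
	sync				/* wait for the dcbf's to reach memory */
#endif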
/*
 * Like above, but invalidate the D-cache.  This is used by the 8xx
 * to invalidate the cache so the PPC core doesn't get stale data
 * from the CPM (no cache snooping here :-).
 *
 * invalidate_dcache_range(unsigned long start, unsigned long stop)
 */
_GLOBAL(invalidate_dcache_range)
	li	r5,CACHE_LINE_SIZE-1
	srwi.	r4,r4,LG_CACHE_LINE_SIZE	/* r4 = number of cache lines to invalidate */
	addi	r3,r3,CACHE_LINE_SIZE
	sync				/* wait for dcbi's to complete */
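/*
 * Illustrative caller sketch (assumption, kept under "#if 0"): after a
 * non-snooping DMA engine such as the CPM has filled a receive buffer,
 * invalidate that range so subsequent loads fetch the new data from
 * memory instead of stale cached copies.  Note that dcbi simply discards
 * lines, so the buffer must not also hold unwritten CPU data.  r8/r9 as
 * buffer address/length are arbitrary assumptions.
 */
#if 0
	mflr	r10
	mr	r3, r8			/* r3 = receive buffer start */
	add	r4, r8, r9		/* r4 = receive buffer end */
	bl	invalidate_dcache_range
	mtlr	r10
#endif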
/*
 * Flush a particular page from the data cache to RAM.
 * Note: this is necessary because the instruction cache does *not*
 * snoop from the data cache.
 *
 * void __flush_page_to_ram(void *page)
 */
_GLOBAL(__flush_page_to_ram)
	rlwinm	r3,r3,0,0,19		/* Get page base address */
	li	r4,4096/CACHE_LINE_SIZE	/* Number of lines in a page */
0:	dcbst	0,r3			/* Write line to ram */
	addi	r3,r3,CACHE_LINE_SIZE
	addi	r6,r6,CACHE_LINE_SIZE
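/*
 * Sketch of the whole page-flush sequence for reference (the fragment
 * above elides part of it); kept under "#if 0" and based on the usual
 * dcbst-then-icbi pattern, so the exact elided lines are an assumption.
 */
#if 0
	rlwinm	r3,r3,0,0,19		/* page base (4 KB aligned) */
	li	r4,4096/CACHE_LINE_SIZE
	mtctr	r4
	mr	r6,r3			/* keep a second copy for the icbi pass */
0:	dcbst	0,r3			/* write every line of the page to ram */
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	0b
	sync
	mtctr	r4
1:	icbi	0,r6			/* then invalidate the i-cache copies */
	addi	r6,r6,CACHE_LINE_SIZE
	bdnz	1b
	sync
	isync
	blr
#endif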
/*
 * Flush a particular page from the instruction cache.
 * Note: this is necessary because the instruction cache does *not*
 * snoop from the data cache.
 *
 * void __flush_icache_page(void *page)
 */
_GLOBAL(__flush_icache_page)
	li	r4,4096/CACHE_LINE_SIZE	/* Number of lines in a page */
	addi	r3,r3,CACHE_LINE_SIZE
/*
 * Clear a page using the dcbz instruction, which doesn't cause any
 * memory traffic (except to write out any cache lines which get
 * displaced).  This only works on cacheable memory.
 */
	li	r0,4096/CACHE_LINE_SIZE
	addi	r3,r3,CACHE_LINE_SIZE
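/*
 * Sketch of the full dcbz clear loop (kept under "#if 0"; the entry
 * label is elided in the fragment above, so no name is assumed here).
 * dcbz zeroes an entire cache line without reading it from memory,
 * which is why the target page must be cacheable.
 */
#if 0
	li	r0,4096/CACHE_LINE_SIZE
	mtctr	r0
1:	dcbz	0,r3			/* zero one whole cache line */
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	1b
	blr
#endif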
/*
 * Enable L1 Instruction cache
 */
_GLOBAL(icache_enable)
	li	r5, HID0_ICFI|HID0_ILOCK	/* mask of flash-invalidate + lock bits */
	ori	r5, r3, HID0_ICFI		/* copy of HID0 with the invalidate bit set */
/*
 * Disable L1 Instruction cache
 */
_GLOBAL(icache_disable)
	bl	invalidate_l1_instruction_cache	/* uses r3 */
/*
 * Is instruction cache enabled?
 */
_GLOBAL(icache_status)
	andi.	r3, r3, HID0_ICE
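/*
 * Illustrative status-check-then-enable sequence; an assumption, not
 * part of the original file, so it is kept under "#if 0".  icache_status
 * returns a non-zero r3 when HID0[ICE] is already set.
 */
#if 0
	mflr	r10
	bl	icache_status
	cmpwi	r3, 0
	bne	1f			/* already enabled, nothing to do */
	bl	icache_enable
1:	mtlr	r10
#endif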
/*
 * Enable L1 Data cache
 */
_GLOBAL(l1dcache_enable)
	li	r5, HID0_DCFI|HID0_DLOCK
	mtspr	HID0, r3		/* no invalidate, unlock */
	ori	r5, r3, HID0_DCFI
	mtspr	HID0, r5		/* enable + invalidate */
	mtspr	HID0, r3		/* enable */
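/*
 * The enable sequence above pulses the flash-invalidate bit: HID0 is
 * first written with DCFI/DLOCK cleared, then with DCFI set, then with
 * DCFI cleared again so the cache comes up enabled and empty.  A
 * self-contained sketch of that technique is kept under "#if 0"; the
 * lines elided from the fragment above are assumptions, not the
 * original code.
 */
#if 0
	mfspr	r3, HID0
	li	r5, HID0_DCFI|HID0_DLOCK
	andc	r3, r3, r5		/* clear invalidate + lock bits */
	mtspr	HID0, r3		/* no invalidate, unlock */
	ori	r3, r3, HID0_DCE	/* request data cache enable */
	ori	r5, r3, HID0_DCFI	/* same value, plus flash invalidate */
	mtspr	HID0, r5		/* enable + invalidate */
	mtspr	HID0, r3		/* enable, invalidate bit dropped */
	sync
	blr
#endif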
/*
 * Enable data cache(s) - L1 and optionally L2
 * Calls l2cache_enable. LR saved in r5
 */
_GLOBAL(dcache_enable)
	li	r5, HID0_DCFI|HID0_DLOCK
	mtspr	HID0, r3		/* no invalidate, unlock */
	ori	r5, r3, HID0_DCFI
	mtspr	HID0, r5		/* enable + invalidate */
	mtspr	HID0, r3		/* enable */
	bl	l2cache_enable		/* uses r3 and r4 */
/*
 * Disable data cache(s) - L1 and optionally L2
 * Calls flush_dcache and l2cache_disable_no_flush.
 */
_GLOBAL(dcache_disable)
	mflr	r4			/* save link register */
	bl	flush_dcache		/* uses r3 and r5 */
	li	r5, HID0_DCFI|HID0_DLOCK
	mtspr	HID0, r3		/* no invalidate, unlock */
	li	r5, HID0_DCE|HID0_DCFI
	andc	r3, r3, r5		/* no enable, no invalidate */
	bl	l2cache_disable_no_flush /* uses r3 */
	mtlr	r4			/* restore link register */
/*
 * Is data cache enabled?
 */
_GLOBAL(dcache_status)
	andi.	r3, r3, HID0_DCE
/*
 * Invalidate L2 cache using L2I, assume L2 is enabled
 */
_GLOBAL(l2cache_invalidate)
	rlwinm.	r3, r3, 0, 0, 0		/* test L2CR[L2E] - is the L2 enabled? */
	rlwinm	r3, r3, 0, 1, 31	/* clear L2E before invalidating */
#ifdef CONFIG_ALTIVEC
	dssall				/* stop data streams before touching L2 */
#endif
	oris	r3, r3, L2CR_L2I@h	/* request a global L2 invalidate */
	andis.	r3, r3, L2CR_L2I@h	/* poll until hardware clears L2I */
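/*
 * Sketch of the set-and-poll invalidate idiom used above, kept under
 * "#if 0".  The mfspr/mtspr plumbing elided from the fragment and the
 * "l2cr" SPR name are assumptions: set L2CR[L2I], then spin until the
 * hardware clears it.
 */
#if 0
	mfspr	r3, l2cr
	oris	r3, r3, L2CR_L2I@h	/* start the global invalidate */
	mtspr	l2cr, r3
	sync
1:	mfspr	r3, l2cr
	andis.	r3, r3, L2CR_L2I@h	/* L2I stays set while it runs */
	bne	1b
	sync
#endif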
/*
 * Enable L2 cache
 * Calls l2cache_invalidate. LR is saved in r4
 */
_GLOBAL(l2cache_enable)
	mflr	r4			/* save link register */
	bl	l2cache_invalidate	/* uses r3 */
	ori	r3, r3, L2_ENABLE@l
	mtlr	r4			/* restore link register */
/*
 * Disable L2 cache
 * Calls flush_dcache. LR is saved in r4
 */
_GLOBAL(l2cache_disable)
	mflr	r4			/* save link register */
	bl	flush_dcache		/* uses r3 and r5 */
	mtlr	r4			/* restore link register */
l2cache_disable_no_flush:		/* provide way to disable L2 w/o flushing */
	ori	r3, r3, L2_INIT@l
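/*
 * Illustrative bring-up/tear-down order; an assumption, not part of the
 * original file, so it is kept under "#if 0".  dcache_enable chains into
 * l2cache_enable, and dcache_disable flushes before chaining into
 * l2cache_disable_no_flush, so callers normally only touch the L1
 * entry points.
 */
#if 0
	mflr	r10
	bl	icache_enable
	bl	dcache_enable		/* also invalidates and enables the L2 */
	/* ... run with caches on ... */
	bl	dcache_disable		/* flush, then disable L1 data + L2 */
	bl	icache_disable
	mtlr	r10
#endif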