/*
 * This file contains low-level functions for performing various
 * types of TLB invalidations on various processors with no hash
 * table.
 *
 * This file implements the following functions for all no-hash
 * processors. Some aren't implemented for some variants. Some
 * are inline in tlbflush.h
 *
 *	- tlbil_all
 *	- tlbil_pid
 *	- tlbil_va
 *	- tlbivax_bcast
 *
 * Code mostly moved over from misc_32.S
 *
 * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *
 * Partially rewritten by Cort Dougan (cort@cs.nmt.edu),
 * Paul Mackerras, Kumar Gala and Benjamin Herrenschmidt.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
#include <asm/cputable.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/processor.h>
#include <asm/asm-compat.h>
#if defined(CONFIG_40x)

/*
 * 40x implementation needs only tlbil_va
 */
	/* We run the search with interrupts disabled because we have to change
	 * the PID and I don't want to be preempted while that happens.
	 */
	/* There are only 64 TLB entries, so r3 < 64, which means bit 25 is
	 * clear. Since 25 is the V bit in the TLB_TAG, loading this value
	 * will invalidate the TLB entry. */
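	/*
	 * A minimal sketch of the invalidate the comment above describes
	 * (the exact code is elided here; register use is an assumption):
	 * write the entry's own index back through its tag, leaving V clear.
	 */
	tlbwe	r3,r3,TLB_TAG		/* r3 < 64, so V (bit 25) ends up 0 */
	isync				/* flush shadow state before returning */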
#elif defined(CONFIG_PPC_8xx)

/*
 * Nothing to do for 8xx, everything is inline
 */
#elif defined(CONFIG_44x) /* Includes 47x */

/*
 * 440 implementation uses tlbsx/we for tlbil_va and a full sweep
 * of the TLB for everything else.
 */
	/*
	 * We write 16 bits of STID since 47x supports that much; we
	 * will never be passed out-of-bounds values on 440 (hopefully).
	 */
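	/*
	 * A sketch of that STID insert (assuming, per the surrounding
	 * comments, that r4 holds the PID and r5 a copy of MMUCR):
	 */
	rlwimi	r5,r4,0,16,31		/* merge PID into MMUCR[STID] */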
	/* We have to run the search with interrupts disabled, otherwise
	 * an interrupt which causes a TLB miss can clobber the MMUCR
	 * between the mtspr and the tlbsx.
	 *
	 * Critical and Machine Check interrupts take care of saving
	 * and restoring MMUCR, so only normal interrupts have to be
	 * disabled.
	 */
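	/*
	 * A sketch of the interrupts-off window described above (register
	 * choice is an assumption, not the elided code verbatim):
	 */
	wrteei	0			/* mask external interrupts */
	mtspr	SPRN_MMUCR,r5		/* MMUCR holds our STID while... */
	tlbsx.	r6,0,r3			/* ...the search runs */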
END_MMU_FTR_SECTION_IFSET(MMU_FTR_TYPE_47x)
	/* On 440 there are only 64 TLB entries, so r3 < 64, which means bit
	 * 22 is clear. Since 22 is the V bit in the TLB_PAGEID, loading this
	 * value will invalidate the TLB entry.
	 */
	tlbwe	r6,r6,PPC44x_TLB_PAGEID
#ifdef CONFIG_PPC_47x
	oris	r7,r6,0x8000		/* specify way explicitly */
	clrrwi	r4,r3,12		/* get an EPN for the hashing with V = 0 */
	ori	r4,r4,PPC47x_TLBE_SIZE
	tlbwe	r4,r7,0			/* write it */
#else /* CONFIG_PPC_47x */
1:	trap
	EMIT_BUG_ENTRY 1b,__FILE__,__LINE__,0;
#endif /* !CONFIG_PPC_47x */
BEGIN_MMU_FTR_SECTION
	b	2f			/* 47x variant below */
END_MMU_FTR_SECTION_IFSET(MMU_FTR_TYPE_47x)
	/* Load high watermark */
	lis	r4,tlb_44x_hwater@ha
	lwz	r5,tlb_44x_hwater@l(r4)

1:	tlbwe	r3,r3,PPC44x_TLB_PAGEID
2:
#ifdef CONFIG_PPC_47x
	/* 476 variant. There's no simple way to do this; hopefully we'll
	 * manage to limit the number of such full invalidates.
	 */
	mfmsr	r11			/* Interrupts off */
	wrteei	0
	li	r3,-1			/* Current set */
	lis	r10,tlb_47x_boltmap@h
	ori	r10,r10,tlb_47x_boltmap@l
	lis	r7,0x8000		/* Specify way explicitly */

	b	9f			/* For each set */
1:	li	r9,4			/* Number of ways */
	li	r4,0			/* Current way */
	li	r6,0			/* Default entry value 0 */
	andi.	r0,r8,1			/* Check if way 0 is bolted */
	mtctr	r9			/* Load way counter */
	bne-	3f			/* Bolted, skip loading it */

2:	/* For each way */
	or	r5,r3,r4		/* Make way|index for tlbre */
	rlwimi	r5,r5,16,8,15		/* Copy index into position */
	tlbre	r6,r5,0			/* Read entry */
3:	addis	r4,r4,0x2000		/* Next way */
	andi.	r0,r6,PPC47x_TLB0_VALID	/* Valid entry ? */
	beq	4f			/* Nope, skip it */
	rlwimi	r7,r5,0,1,2		/* Insert way number */
	rlwinm	r6,r6,0,21,19		/* Clear V */
	tlbwe	r6,r7,0			/* Write it */
4:	bdnz	2b			/* Loop for each way */
	srwi	r8,r8,1			/* Next boltmap bit */
9:	cmpwi	cr1,r3,255		/* Last set done ? */
	addi	r3,r3,1			/* Next set */
	beq	cr1,1f			/* End of loop */
	andi.	r0,r3,0x1f		/* Need to load a new boltmap word ? */
	bne	1b			/* No, loop */
	lwz	r8,0(r10)		/* Load boltmap entry */
	addi	r10,r10,4		/* Next word */
	b	1b			/* Then loop */
1:	isync				/* Sync shadows */
	wrtee	r11			/* Interrupts back on */
#else /* CONFIG_PPC_47x */
1:	trap
	EMIT_BUG_ENTRY 1b,__FILE__,__LINE__,0;
#endif /* !CONFIG_PPC_47x */
#ifdef CONFIG_PPC_47x

/*
 * _tlbivax_bcast is only on 47x. We don't bother doing a runtime
 * check though, it will blow up soon enough if we mistakenly try
 * to use it on a 440.
 */
_GLOBAL(_tlbivax_bcast)
END_FTR_SECTION_IFSET(CPU_FTR_476_DD2)
	/*
	 * DD2 HW could hang if an instruction fetch happens before msync
	 * completes. Touch enough instruction cache lines to ensure cache hits.
	 */
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	add	r6,r6,r7
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	add	r6,r6,r7
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
#endif /* CONFIG_PPC_47x */
#elif defined(CONFIG_FSL_BOOKE)

/*
 * FSL BookE implementations.
 *
 * Since feature sections use _SECTION_ELSE, we need to have the
 * larger code path before the _SECTION_ELSE.
 */
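/*
 * For illustration, the shape such alternate sections take (a sketch of
 * the macro skeleton only, not code assembled in this file):
 *
 *	BEGIN_MMU_FTR_SECTION
 *		<larger code path, e.g. the MMUCSR0 flush-and-poll>
 *	MMU_FTR_SECTION_ELSE
 *		<smaller code path, e.g. a single tlbilx>
 *	ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
 */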
/*
 * Flush MMU TLB on the local processor
 */
_GLOBAL(_tlbil_all)
BEGIN_MMU_FTR_SECTION
	li	r3,(MMUCSR0_TLBFI)@l
	mtspr	SPRN_MMUCSR0, r3
4:	mfspr	r3,SPRN_MMUCSR0		/* poll until the flush completes */
	andi.	r3,r3,MMUCSR0_TLBFI@l
	bne	4b
MMU_FTR_SECTION_ELSE
	PPC_TLBILX_ALL(0,R0)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
BEGIN_MMU_FTR_SECTION
	mfspr	r4,SPRN_MAS6		/* save MAS6 */
	mtspr	SPRN_MAS6,r3		/* PID assumed already shifted into SPID */
	PPC_TLBILX_PID(0,R0)
	mtspr	SPRN_MAS6,r4		/* restore MAS6 */
MMU_FTR_SECTION_ELSE
	li	r3,(MMUCSR0_TLBFI)@l
	mtspr	SPRN_MMUCSR0, r3
4:	mfspr	r3,SPRN_MMUCSR0		/* poll until the flush completes */
	andi.	r3,r3,MMUCSR0_TLBFI@l
	bne	4b
ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBILX)
/*
 * Flush MMU TLB for a particular address, but only on the local processor
 */
_GLOBAL(__tlbil_va)
	ori	r4,r4,(MAS6_ISIZE(BOOK3E_PAGESZ_4K))@l
	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
BEGIN_MMU_FTR_SECTION
	tlbsx	0,r3			/* look the address up */
	mfspr	r4,SPRN_MAS1		/* check valid */
	andis.	r3,r4,MAS1_VALID@h
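	/*
	 * A sketch of the invalidate path that presumably follows the valid
	 * check (register usage is an assumption): clear MAS1[V] and write
	 * the entry back.
	 */
	beq	1f			/* not found: nothing to invalidate */
	rlwinm	r4,r4,0,1,31		/* clear MAS1[V] */
	mtspr	SPRN_MAS1,r4
	tlbwe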
MMU_FTR_SECTION_ELSE
	PPC_TLBILX_VA(0,R3)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
#elif defined(CONFIG_PPC_BOOK3E)

/*
 * New Book3E (>= 2.06) implementation
 *
 * Note: We may be able to get away without the interrupt masking stuff
 * if we save/restore MAS6 on exceptions that might modify it
 */
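/*
 * For illustration, the masking pattern the note refers to looks like
 * this (a sketch; the register choice and PPC_TLBILX_PID form are
 * assumptions, not code quoted from below):
 *
 *	mfmsr	r10
 *	wrteei	0			 interrupts off while MAS6 is live
 *	mtspr	SPRN_MAS6,r4
 *	PPC_TLBILX_PID(0,R0)
 *	wrtee	r10
 */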
_GLOBAL(_tlbil_pid)
	slwi	r4,r3,MAS6_SPID_SHIFT
_GLOBAL(_tlbil_pid_noind)
	slwi	r4,r3,MAS6_SPID_SHIFT
	slwi	r4,r4,MAS6_SPID_SHIFT
	rlwimi	r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
	beq	cr0,1f			/* no indirect bit to merge in */
	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
_GLOBAL(_tlbivax_bcast)
	slwi	r4,r4,MAS6_SPID_SHIFT
	rlwimi	r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
	beq	cr0,1f			/* no indirect bit to merge in */
	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
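	/*
	 * Presumably followed by the broadcast proper; a sketch under the
	 * assumption that r3 still holds the effective address:
	 */
	PPC_TLBIVAX(0,R3)		/* broadcast the invalidate */
	eieio
	tlbsync
	sync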
#ifdef CONFIG_BDI_SWITCH
	/* Context switch the PTE pointer for the Abatron BDI2000.
	 * The PGDIR is the second parameter.
	 */
	lis	r5, abatron_pteptrs@h
	ori	r5, r5, abatron_pteptrs@l
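	/*
	 * The pgdir (r4) is then presumably stored where the BDI firmware
	 * expects it; the 0x4 offset is an assumption carried over from the
	 * 32-bit head code:
	 */
	stw	r4, 0x4(r5)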
#endif /* CONFIG_BDI_SWITCH */
	mtspr	SPRN_PID,r3
	isync				/* Force context change */
#else
#error Unsupported processor type!
#endif
#if defined(CONFIG_PPC_FSL_BOOK3E)
/*
 * extern void loadcam_entry(unsigned int index)
 *
 * Load TLBCAM[index] entry into the L2 CAM MMU
 * Must preserve r7, r8, r9, and r10
 */
_GLOBAL(loadcam_entry)
	mflr	r5			/* LOAD_REG_ADDR_PIC clobbers LR */
	LOAD_REG_ADDR_PIC(r4, TLBCAM)
	mtlr	r5
	mulli	r5,r3,TLBCAM_SIZE
	add	r3,r5,r4		/* r3 = &TLBCAM[index] */
	lwz	r4,TLBCAM_MAS0(r3)
	mtspr	SPRN_MAS0,r4
	lwz	r4,TLBCAM_MAS1(r3)
	mtspr	SPRN_MAS1,r4
	PPC_LL	r4,TLBCAM_MAS2(r3)
	mtspr	SPRN_MAS2,r4
	lwz	r4,TLBCAM_MAS3(r3)
	mtspr	SPRN_MAS3,r4
BEGIN_MMU_FTR_SECTION
	lwz	r4,TLBCAM_MAS7(r3)
	mtspr	SPRN_MAS7,r4
END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
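	/*
	 * A sketch of the commit that presumably ends the function (elided
	 * here): context-synchronize, write the entry, synchronize again.
	 */
	isync
	tlbwe
	isync
	blr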
/*
 * Load multiple TLB entries at once, using an alternate-space
 * trampoline so that we don't have to care about whether the same
 * TLB entry maps us before and after.
 *
 * r3 = first entry to write
 * r4 = number of entries to write
 * r5 = temporary tlb entry
 */
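/*
 * A hedged usage sketch from C (the prototype is assumed, not quoted
 * from this tree's headers): loadcam_multi(0, n, n) would rewrite
 * entries 0..n-1 while borrowing entry n as the AS=1 trampoline.
 */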
_GLOBAL(loadcam_multi)

	/*
	 * Set up temporary TLB entry that is the same as what we're
	 * running from, but in AS=1.
	 */
	rlwimi	r6,r5,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK	/* select the temp entry slot */
	ori	r6,r6,MSR_IS|MSR_DS	/* AS=1 for both instruction and data */
	/* Return to AS=0 and clear the temporary entry */
	rlwinm.	r6,r6,0,~(MSR_IS|MSR_DS)
	rlwinm	r6,r7,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
	oris	r6,r6,MAS0_TLBSEL(1)@h