/* $Id: dtlb_backend.S,v 1.16 2001/10/09 04:02:11 davem Exp $
 * dtlb_backend.S: Back end to DTLB miss replacement strategy.
 *                 This is included directly into the trap table.
 *
 * Copyright (C) 1996,1998 David S. Miller (davem@redhat.com)
 * Copyright (C) 1997,1998 Jakub Jelinek (jj@ultra.linux.cz)
 */
#include <asm/pgtable.h>
#include <asm/mmu_context.h>
/* FILL_VALID_SZ_BITS1/2 build the _PAGE_VALID + page-size bits of the
 * VPTE in r1, split into (at most) two instructions so one half can sit
 * in the branch delay slot of ICACHE line 3 below.  Exactly one of
 * {FILL_VALID_SZ_BITS2, FILL_VALID_SZ_BITS_NOP} is non-empty so that
 * ICACHE line 4 always packs to eight instructions.
 */
#if PAGE_SHIFT == 13
/* 8K pages: _PAGE_SZ8K is zero, so only _PAGE_VALID (bit 63) is needed.
 * NOTE(review): this one-instruction form relies on the low two bits of
 * the miss-time value of %g2 being 2, so that (%g2 << 62) == _PAGE_VALID
 * -- confirm against the MMU global-register conventions documented in
 * the dtlb_base.S front end.
 */
#define FILL_VALID_SZ_BITS1(r1) \
	 sllx		%g2, 62, r1
#define FILL_VALID_SZ_BITS2(r1)
#define FILL_VALID_SZ_BITS_NOP nop
#elif PAGE_SHIFT == 16
/* 64K pages: _PAGE_VALID | _PAGE_SZ64K == 0xa000000000000000 == 5 << 61 */
#define FILL_VALID_SZ_BITS1(r1) \
	or		%g0, 5, r1
#define FILL_VALID_SZ_BITS2(r1) \
	sllx		r1, 61, r1
#define FILL_VALID_SZ_BITS_NOP
#else
#error unsupported PAGE_SIZE
#endif /* PAGE_SHIFT */
/* Protection/cacheability attributes OR'ed into every VPTE loaded below:
 * cacheable (physical + virtual) and privileged.
 */
#define VPTE_BITS		(_PAGE_CP | _PAGE_CV | _PAGE_P )
/* A VPTE maps PAGE_SIZE/8 pages' worth of PTEs (8 bytes per PTE). */
#define VPTE_SHIFT		(PAGE_SHIFT - 3)
/* Shift from the miss index in %g6 down to a PMD-level index. */
#define TLB_PMD_SHIFT		(PAGE_SHIFT - 3 + 3)
/* Shift from the miss index in %g6 down to a PGD-level index. */
#define TLB_PGD_SHIFT		(PMD_BITS + PAGE_SHIFT - 3 + 3)
/* PMD index mask, pre-positioned at bit 1: ICACHE line 2 shifts by
 * (TLB_PMD_SHIFT - 1) and then doubles, yielding a 4-byte entry offset.
 */
#define TLB_PMD_MASK		(((1 << PMD_BITS) - 1) << 1)
/* PGD index mask, pre-positioned at bit 2: ICACHE line 3 shifts by
 * (TLB_PGD_SHIFT - 2), yielding a 4-byte entry offset directly.
 */
#define TLB_PGD_MASK		(((1 << (VA_BITS - PAGE_SHIFT - (PAGE_SHIFT - 3) - PMD_BITS)) - 1) << 2)
/* Ways we can get here:
 *
 * 1) Nucleus loads and stores to/from PA-->VA direct mappings at tl>1.
 * 2) Nucleus loads and stores to/from user/kernel window save areas.
 * 3) VPTE misses from dtlb_base and itlb_base.
 */
/* Quick VPTE miss handler.  Register state on entry (DMMU miss
 * alternate globals):
 *   %g1 = TLB_SFSR                (so %g1 + %g1 == TLB_TAG_ACCESS)
 *   %g3 = one half of the VPTE base (doubled below to form the base)
 *   %g4, %g5 = scratch
 *   %g6 = miss index: shifted left by VPTE_SHIFT to rebuild TAG_ACCESS,
 *         shifted right to form PMD/PGD table offsets
 *         (NOTE(review): presumably derived from the faulting VA by the
 *         dtlb_base.S/itlb_base.S front ends -- confirm there)
 *   %g7 = physical address of the current PGD (walked below with
 *         ASI_PHYS_USE_EC loads)
 *
 * Each group below must stay at eight instructions so the handler packs
 * into four 32-byte I-cache lines; do not reorder or insert code.
 */
/* TLB1 ** ICACHE line 1: tl1 DTLB and quick VPTE miss	*/
	ldxa		[%g1 + %g1] ASI_DMMU, %g4	! Get TAG_ACCESS
	add		%g3, %g3, %g5			! Compute VPTE base
	cmp		%g4, %g5			! VPTE miss?
	bgeu,pt		%xcc, 1f			! Continue here
	 andcc		%g4, TAG_CONTEXT_BITS, %g5	! From Nucleus? (for tl0 miss)
	ba,pt		%xcc, from_tl1_trap		! Fall to tl0 miss
	 rdpr		%tl, %g5			! For tl0 miss TL==3 test
1:	sllx		%g6, VPTE_SHIFT, %g4		! Position TAG_ACCESS

/* TLB1 ** ICACHE line 2: Quick VPTE miss		*/
	or		%g4, %g5, %g4			! Prepare TAG_ACCESS
	mov		TSB_REG, %g1			! Grab TSB reg
	ldxa		[%g1] ASI_DMMU, %g5		! Doing PGD caching?
	srlx		%g6, (TLB_PMD_SHIFT - 1), %g1	! Position PMD offset
	be,pn		%xcc, sparc64_vpte_nucleus	! Is it from Nucleus?
							! (icc still holds the
							!  andcc result from
							!  ICACHE line 1)
	 and		%g1, TLB_PMD_MASK, %g1		! Mask PMD offset bits
	brnz,pt		%g5, sparc64_vpte_continue	! Yep, go like smoke
	 add		%g1, %g1, %g1			! Position PMD offset some more

/* TLB1 ** ICACHE line 3: Quick VPTE miss		*/
	srlx		%g6, (TLB_PGD_SHIFT - 2), %g5	! Position PGD offset
	and		%g5, TLB_PGD_MASK, %g5		! Mask PGD offset
	lduwa		[%g7 + %g5] ASI_PHYS_USE_EC, %g5! Load PGD
	brz,pn		%g5, vpte_noent			! Valid?
sparc64_kpte_continue:
	 sllx		%g5, 11, %g5			! Shift into place
sparc64_vpte_continue:
	lduwa		[%g5 + %g1] ASI_PHYS_USE_EC, %g5! Load PMD
	sllx		%g5, 11, %g5			! Shift into place
	brz,pn		%g5, vpte_noent			! Valid?

/* TLB1 ** ICACHE line 4: Quick VPTE miss		*/
	 FILL_VALID_SZ_BITS1(%g1)			! Put _PAGE_VALID into %g1
	FILL_VALID_SZ_BITS2(%g1)			! Put _PAGE_VALID into %g1
	or		%g5, VPTE_BITS, %g5		! Prepare VPTE data
	or		%g5, %g1, %g5			! ...
	mov		TLB_SFSR, %g1			! Restore %g1 value
	stxa		%g5, [%g0] ASI_DTLB_DATA_IN	! Load VPTE into TLB
	stxa		%g4, [%g1 + %g1] ASI_DMMU	! Restore previous TAG_ACCESS
							! (after DATA_IN, which
							!  consumed the VPTE tag)
	retry						! Load PTE once again
	FILL_VALID_SZ_BITS_NOP
/* These helper macros are local to this trap-table include; drop them
 * so later files included into the trap table cannot see them.
 */
#undef FILL_VALID_SZ_BITS1
#undef FILL_VALID_SZ_BITS2
#undef FILL_VALID_SZ_BITS_NOP