/* $Id: spitfire.h,v 1.18 2001/11/29 16:42:10 kanoj Exp $
 * spitfire.h: SpitFire/BlackBird/Cheetah inline MMU operations.
 *
 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
 */

#ifndef _SPARC64_SPITFIRE_H
#define _SPARC64_SPITFIRE_H

/* NOTE(review): the ASI_* constants used throughout this header come from
 * <asm/asi.h>; the include appears to have been lost in extraction — confirm.
 */
#include <asm/asi.h>
/* The following register addresses are accessible via ASI_DMMU
 * and ASI_IMMU, that is there is a distinct and unique copy of
 * each these registers for each TLB.
 */
#define TSB_TAG_TARGET		0x0000000000000000 /* All chips				*/
#define TLB_SFSR		0x0000000000000018 /* All chips				*/
#define TSB_REG			0x0000000000000028 /* All chips				*/
#define TLB_TAG_ACCESS		0x0000000000000030 /* All chips				*/
#define VIRT_WATCHPOINT		0x0000000000000038 /* All chips				*/
#define PHYS_WATCHPOINT		0x0000000000000040 /* All chips				*/
#define TSB_EXTENSION_P		0x0000000000000048 /* Ultra-III and later		*/
#define TSB_EXTENSION_S		0x0000000000000050 /* Ultra-III and later, D-TLB only	*/
#define TSB_EXTENSION_N		0x0000000000000058 /* Ultra-III and later		*/
#define TLB_TAG_ACCESS_EXT	0x0000000000000060 /* Ultra-III+ and later		*/
/* These registers only exist as one entity, and are accessed
 * via ASI_DMMU only.
 */
#define PRIMARY_CONTEXT		0x0000000000000008
#define SECONDARY_CONTEXT	0x0000000000000010
#define DMMU_SFAR		0x0000000000000020
#define VIRT_WATCHPOINT		0x0000000000000038
#define PHYS_WATCHPOINT		0x0000000000000040

/* Spitfire TLBs hold 64 entries; the highest entry index is used for
 * locked translations.
 */
#define SPITFIRE_HIGHEST_LOCKED_TLBENT	(64 - 1)
/* NOTE(review): an "#ifndef __ASSEMBLY__" guard opening appears to have been
 * lost here in extraction (its matching #endif survives at the bottom of the
 * file) — confirm against the original header.
 */

/* Which TLB flavor this machine has; set once at boot. */
enum ultra_tlb_layout {
	spitfire = 0,	/* NOTE(review): member names/values reconstructed — confirm */
	cheetah = 1,
};

extern enum ultra_tlb_layout tlb_type;

/* Cheetah's fully-associative locked TLBs hold 16 entries. */
#define CHEETAH_HIGHEST_LOCKED_TLBENT	(16 - 1)

#define L1DCACHE_SIZE		0x4000

/* Highest locked TLB entry index for the chip we are running on. */
#define sparc64_highest_locked_tlbent()	\
	(tlb_type == spitfire ? \
	 SPITFIRE_HIGHEST_LOCKED_TLBENT : \
	 CHEETAH_HIGHEST_LOCKED_TLBENT)
57 static __inline__ unsigned long spitfire_get_isfsr(void)
61 __asm__ __volatile__("ldxa [%1] %2, %0"
63 : "r" (TLB_SFSR), "i" (ASI_IMMU));
67 static __inline__ unsigned long spitfire_get_dsfsr(void)
71 __asm__ __volatile__("ldxa [%1] %2, %0"
73 : "r" (TLB_SFSR), "i" (ASI_DMMU));
77 static __inline__ unsigned long spitfire_get_sfar(void)
81 __asm__ __volatile__("ldxa [%1] %2, %0"
83 : "r" (DMMU_SFAR), "i" (ASI_DMMU));
87 static __inline__ void spitfire_put_isfsr(unsigned long sfsr)
89 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
92 : "r" (sfsr), "r" (TLB_SFSR), "i" (ASI_IMMU));
95 static __inline__ void spitfire_put_dsfsr(unsigned long sfsr)
97 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
100 : "r" (sfsr), "r" (TLB_SFSR), "i" (ASI_DMMU));
103 static __inline__ unsigned long spitfire_get_primary_context(void)
107 __asm__ __volatile__("ldxa [%1] %2, %0"
109 : "r" (PRIMARY_CONTEXT), "i" (ASI_DMMU));
113 static __inline__ void spitfire_set_primary_context(unsigned long ctx)
115 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
119 "r" (PRIMARY_CONTEXT), "i" (ASI_DMMU));
120 __asm__ __volatile__ ("membar #Sync" : : : "memory");
123 static __inline__ unsigned long spitfire_get_secondary_context(void)
127 __asm__ __volatile__("ldxa [%1] %2, %0"
129 : "r" (SECONDARY_CONTEXT), "i" (ASI_DMMU));
133 static __inline__ void spitfire_set_secondary_context(unsigned long ctx)
135 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
139 "r" (SECONDARY_CONTEXT), "i" (ASI_DMMU));
140 __asm__ __volatile__ ("membar #Sync" : : : "memory");
143 /* The data cache is write through, so this just invalidates the
146 static __inline__ void spitfire_put_dcache_tag(unsigned long addr, unsigned long tag)
148 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
151 : "r" (tag), "r" (addr), "i" (ASI_DCACHE_TAG));
152 __asm__ __volatile__ ("membar #Sync" : : : "memory");
155 /* The instruction cache lines are flushed with this, but note that
156 * this does not flush the pipeline. It is possible for a line to
157 * get flushed but stale instructions to still be in the pipeline,
158 * a flush instruction (to any address) is sufficient to handle
159 * this issue after the line is invalidated.
161 static __inline__ void spitfire_put_icache_tag(unsigned long addr, unsigned long tag)
163 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
166 : "r" (tag), "r" (addr), "i" (ASI_IC_TAG));
169 static __inline__ unsigned long spitfire_get_dtlb_data(int entry)
173 __asm__ __volatile__("ldxa [%1] %2, %0"
175 : "r" (entry << 3), "i" (ASI_DTLB_DATA_ACCESS));
177 /* Clear TTE diag bits. */
178 data &= ~0x0003fe0000000000UL;
183 static __inline__ unsigned long spitfire_get_dtlb_tag(int entry)
187 __asm__ __volatile__("ldxa [%1] %2, %0"
189 : "r" (entry << 3), "i" (ASI_DTLB_TAG_READ));
193 static __inline__ void spitfire_put_dtlb_data(int entry, unsigned long data)
195 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
198 : "r" (data), "r" (entry << 3),
199 "i" (ASI_DTLB_DATA_ACCESS));
202 static __inline__ unsigned long spitfire_get_itlb_data(int entry)
206 __asm__ __volatile__("ldxa [%1] %2, %0"
208 : "r" (entry << 3), "i" (ASI_ITLB_DATA_ACCESS));
210 /* Clear TTE diag bits. */
211 data &= ~0x0003fe0000000000UL;
216 static __inline__ unsigned long spitfire_get_itlb_tag(int entry)
220 __asm__ __volatile__("ldxa [%1] %2, %0"
222 : "r" (entry << 3), "i" (ASI_ITLB_TAG_READ));
226 static __inline__ void spitfire_put_itlb_data(int entry, unsigned long data)
228 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
231 : "r" (data), "r" (entry << 3),
232 "i" (ASI_ITLB_DATA_ACCESS));
235 /* Spitfire hardware assisted TLB flushes. */
237 /* Context level flushes. */
238 static __inline__ void spitfire_flush_dtlb_primary_context(void)
240 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
243 : "r" (0x40), "i" (ASI_DMMU_DEMAP));
246 static __inline__ void spitfire_flush_itlb_primary_context(void)
248 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
251 : "r" (0x40), "i" (ASI_IMMU_DEMAP));
254 static __inline__ void spitfire_flush_dtlb_secondary_context(void)
256 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
259 : "r" (0x50), "i" (ASI_DMMU_DEMAP));
262 static __inline__ void spitfire_flush_itlb_secondary_context(void)
264 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
267 : "r" (0x50), "i" (ASI_IMMU_DEMAP));
270 static __inline__ void spitfire_flush_dtlb_nucleus_context(void)
272 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
275 : "r" (0x60), "i" (ASI_DMMU_DEMAP));
278 static __inline__ void spitfire_flush_itlb_nucleus_context(void)
280 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
283 : "r" (0x60), "i" (ASI_IMMU_DEMAP));
286 /* Page level flushes. */
287 static __inline__ void spitfire_flush_dtlb_primary_page(unsigned long page)
289 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
292 : "r" (page), "i" (ASI_DMMU_DEMAP));
295 static __inline__ void spitfire_flush_itlb_primary_page(unsigned long page)
297 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
300 : "r" (page), "i" (ASI_IMMU_DEMAP));
303 static __inline__ void spitfire_flush_dtlb_secondary_page(unsigned long page)
305 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
308 : "r" (page | 0x10), "i" (ASI_DMMU_DEMAP));
311 static __inline__ void spitfire_flush_itlb_secondary_page(unsigned long page)
313 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
316 : "r" (page | 0x10), "i" (ASI_IMMU_DEMAP));
319 static __inline__ void spitfire_flush_dtlb_nucleus_page(unsigned long page)
321 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
324 : "r" (page | 0x20), "i" (ASI_DMMU_DEMAP));
327 static __inline__ void spitfire_flush_itlb_nucleus_page(unsigned long page)
329 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
332 : "r" (page | 0x20), "i" (ASI_IMMU_DEMAP));
335 /* Cheetah has "all non-locked" tlb flushes. */
336 static __inline__ void cheetah_flush_dtlb_all(void)
338 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
341 : "r" (0x80), "i" (ASI_DMMU_DEMAP));
344 static __inline__ void cheetah_flush_itlb_all(void)
346 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
349 : "r" (0x80), "i" (ASI_IMMU_DEMAP));
352 /* Cheetah has a 4-tlb layout so direct access is a bit different.
353 * The first two TLBs are fully assosciative, hold 16 entries, and are
354 * used only for locked and >8K sized translations. One exists for
355 * data accesses and one for instruction accesses.
357 * The third TLB is for data accesses to 8K non-locked translations, is
358 * 2 way assosciative, and holds 512 entries. The fourth TLB is for
359 * instruction accesses to 8K non-locked translations, is 2 way
360 * assosciative, and holds 128 entries.
362 * Cheetah has some bug where bogus data can be returned from
363 * ASI_{D,I}TLB_DATA_ACCESS loads, doing the load twice fixes
364 * the problem for me. -DaveM
366 static __inline__ unsigned long cheetah_get_ldtlb_data(int entry)
370 __asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
373 : "r" ((0 << 16) | (entry << 3)),
374 "i" (ASI_DTLB_DATA_ACCESS));
379 static __inline__ unsigned long cheetah_get_litlb_data(int entry)
383 __asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
386 : "r" ((0 << 16) | (entry << 3)),
387 "i" (ASI_ITLB_DATA_ACCESS));
392 static __inline__ unsigned long cheetah_get_ldtlb_tag(int entry)
396 __asm__ __volatile__("ldxa [%1] %2, %0"
398 : "r" ((0 << 16) | (entry << 3)),
399 "i" (ASI_DTLB_TAG_READ));
404 static __inline__ unsigned long cheetah_get_litlb_tag(int entry)
408 __asm__ __volatile__("ldxa [%1] %2, %0"
410 : "r" ((0 << 16) | (entry << 3)),
411 "i" (ASI_ITLB_TAG_READ));
416 static __inline__ void cheetah_put_ldtlb_data(int entry, unsigned long data)
418 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
422 "r" ((0 << 16) | (entry << 3)),
423 "i" (ASI_DTLB_DATA_ACCESS));
426 static __inline__ void cheetah_put_litlb_data(int entry, unsigned long data)
428 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
432 "r" ((0 << 16) | (entry << 3)),
433 "i" (ASI_ITLB_DATA_ACCESS));
436 static __inline__ unsigned long cheetah_get_dtlb_data(int entry, int tlb)
440 __asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
443 : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_DATA_ACCESS));
448 static __inline__ unsigned long cheetah_get_dtlb_tag(int entry, int tlb)
452 __asm__ __volatile__("ldxa [%1] %2, %0"
454 : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_TAG_READ));
458 static __inline__ void cheetah_put_dtlb_data(int entry, unsigned long data, int tlb)
460 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
464 "r" ((tlb << 16) | (entry << 3)),
465 "i" (ASI_DTLB_DATA_ACCESS));
468 static __inline__ unsigned long cheetah_get_itlb_data(int entry)
472 __asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
475 : "r" ((2 << 16) | (entry << 3)),
476 "i" (ASI_ITLB_DATA_ACCESS));
481 static __inline__ unsigned long cheetah_get_itlb_tag(int entry)
485 __asm__ __volatile__("ldxa [%1] %2, %0"
487 : "r" ((2 << 16) | (entry << 3)), "i" (ASI_ITLB_TAG_READ));
491 static __inline__ void cheetah_put_itlb_data(int entry, unsigned long data)
493 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
496 : "r" (data), "r" ((2 << 16) | (entry << 3)),
497 "i" (ASI_ITLB_DATA_ACCESS));
#endif /* !(__ASSEMBLY__) */

#endif /* !(_SPARC64_SPITFIRE_H) */