/* spitfire.h: SpitFire/BlackBird/Cheetah inline MMU operations.
 *
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 */

#ifndef _SPARC64_SPITFIRE_H
#define _SPARC64_SPITFIRE_H

#ifdef CONFIG_SPARC64

#include <asm/asi.h>

/* The following register addresses are accessible via ASI_DMMU
 * and ASI_IMMU, that is, there is a distinct and unique copy of
 * each of these registers for each TLB.  (An illustrative read
 * sketch appears below, in the !__ASSEMBLY__ section.)
 */
#define TSB_TAG_TARGET		0x0000000000000000 /* All chips */
#define TLB_SFSR		0x0000000000000018 /* All chips */
#define TSB_REG			0x0000000000000028 /* All chips */
#define TLB_TAG_ACCESS		0x0000000000000030 /* All chips */
#define VIRT_WATCHPOINT		0x0000000000000038 /* All chips */
#define PHYS_WATCHPOINT		0x0000000000000040 /* All chips */
#define TSB_EXTENSION_P		0x0000000000000048 /* Ultra-III and later */
#define TSB_EXTENSION_S		0x0000000000000050 /* Ultra-III and later, D-TLB only */
#define TSB_EXTENSION_N		0x0000000000000058 /* Ultra-III and later */
#define TLB_TAG_ACCESS_EXT	0x0000000000000060 /* Ultra-III+ and later */

/* These registers only exist as one entity, and are accessed
 * via ASI_DMMU only.
 */
#define PRIMARY_CONTEXT		0x0000000000000008
#define SECONDARY_CONTEXT	0x0000000000000010
#define DMMU_SFAR		0x0000000000000020
#define VIRT_WATCHPOINT		0x0000000000000038
#define PHYS_WATCHPOINT		0x0000000000000040

#define SPITFIRE_HIGHEST_LOCKED_TLBENT	(64 - 1)
#define CHEETAH_HIGHEST_LOCKED_TLBENT	(16 - 1)

#define L1DCACHE_SIZE		0x4000

#define SUN4V_CHIP_INVALID	0x00
#define SUN4V_CHIP_NIAGARA1	0x01
#define SUN4V_CHIP_NIAGARA2	0x02
#define SUN4V_CHIP_NIAGARA3	0x03
#define SUN4V_CHIP_NIAGARA4	0x04
#define SUN4V_CHIP_NIAGARA5	0x05
#define SUN4V_CHIP_UNKNOWN	0xff

#ifndef __ASSEMBLY__

enum ultra_tlb_layout {
	spitfire = 0,
	cheetah = 1,
	cheetah_plus = 2,
	hypervisor = 3,
};

extern enum ultra_tlb_layout tlb_type;

extern int sun4v_chip_type;

extern int cheetah_pcache_forced_on;
extern void cheetah_enable_pcache(void);

#define sparc64_highest_locked_tlbent() \
	(tlb_type == spitfire ? \
	 SPITFIRE_HIGHEST_LOCKED_TLBENT : \
	 CHEETAH_HIGHEST_LOCKED_TLBENT)
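
/* Illustrative sketch, not from the original header: the per-MMU register
 * offsets defined above are read with ldxa through ASI_DMMU or ASI_IMMU.
 * example_spitfire_get_dsfsr() is a hypothetical helper showing how the
 * D-MMU Synchronous Fault Status Register could be fetched this way.
 */
static inline unsigned long example_spitfire_get_dsfsr(void)
{
	unsigned long sfsr;

	/* 64-bit load from the D-MMU register space at offset TLB_SFSR. */
	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (sfsr)
			     : "r" (TLB_SFSR), "i" (ASI_DMMU));
	return sfsr;
}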

extern int num_kernel_image_mappings;

/* The data cache is write through, so this just invalidates the
 * specified line.
 */
static inline void spitfire_put_dcache_tag(unsigned long addr, unsigned long tag)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (tag), "r" (addr), "i" (ASI_DCACHE_TAG));
}
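
/* Illustrative sketch, not from the original header: because the D-cache is
 * write through, invalidation alone is enough to "flush" it.  Assuming the
 * usual 32-byte Spitfire D-cache line, a hypothetical helper could walk the
 * whole L1DCACHE_SIZE and zero every tag:
 */
static inline void example_spitfire_flush_dcache_all(void)
{
	unsigned long addr;

	/* Zeroing a tag marks the corresponding line invalid. */
	for (addr = 0; addr < L1DCACHE_SIZE; addr += 32)
		spitfire_put_dcache_tag(addr, 0x0UL);
}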

/* The instruction cache lines are flushed with this, but note that
 * this does not flush the pipeline.  It is possible for a line to
 * be invalidated while stale instructions are still in the pipeline;
 * a flush instruction (to any address) issued after the line is
 * invalidated is sufficient to handle this issue.
 */
static inline void spitfire_put_icache_tag(unsigned long addr, unsigned long tag)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (tag), "r" (addr), "i" (ASI_IC_TAG));
}
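
/* Illustrative sketch, not from the original header: a hypothetical helper
 * pairing the tag invalidation above with a SPARC "flush" instruction so
 * that stale instructions already fetched into the pipeline are discarded
 * too.  The flush target address is arbitrary; %g6 is used here only as a
 * register that holds a valid kernel address.
 */
static inline void example_spitfire_kill_icache_line(unsigned long addr)
{
	spitfire_put_icache_tag(addr, 0x0UL);
	__asm__ __volatile__("flush %%g6" : : : "memory");
}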

static inline unsigned long spitfire_get_dtlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" (entry << 3), "i" (ASI_DTLB_DATA_ACCESS));

	/* Clear TTE diag bits. */
	data &= ~0x0003fe0000000000UL;

	return data;
}

static inline unsigned long spitfire_get_dtlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" (entry << 3), "i" (ASI_DTLB_TAG_READ));
	return tag;
}

static inline void spitfire_put_dtlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data), "r" (entry << 3),
			       "i" (ASI_DTLB_DATA_ACCESS));
}
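
/* Illustrative sketch, not from the original header: the diagnostic
 * accessors above make it straightforward to snapshot the 64-entry
 * Spitfire D-TLB, e.g. for debugging.  example_spitfire_dump_dtlb() is a
 * hypothetical helper; the caller supplies arrays with at least
 * SPITFIRE_HIGHEST_LOCKED_TLBENT + 1 slots.
 */
static inline void example_spitfire_dump_dtlb(unsigned long *tags,
					      unsigned long *datas)
{
	int i;

	for (i = 0; i <= SPITFIRE_HIGHEST_LOCKED_TLBENT; i++) {
		tags[i] = spitfire_get_dtlb_tag(i);
		datas[i] = spitfire_get_dtlb_data(i);
	}
}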

static inline unsigned long spitfire_get_itlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" (entry << 3), "i" (ASI_ITLB_DATA_ACCESS));

	/* Clear TTE diag bits. */
	data &= ~0x0003fe0000000000UL;

	return data;
}

static inline unsigned long spitfire_get_itlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" (entry << 3), "i" (ASI_ITLB_TAG_READ));
	return tag;
}

static inline void spitfire_put_itlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data), "r" (entry << 3),
			       "i" (ASI_ITLB_DATA_ACCESS));
}

static inline void spitfire_flush_dtlb_nucleus_page(unsigned long page)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (page | 0x20), "i" (ASI_DMMU_DEMAP));
}

static inline void spitfire_flush_itlb_nucleus_page(unsigned long page)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (page | 0x20), "i" (ASI_IMMU_DEMAP));
}
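
/* Illustrative sketch, not from the original header: the 0x20 OR-ed into the
 * demap address above selects a demap-page operation against the nucleus
 * context.  A hypothetical helper dropping a kernel (nucleus) mapping from
 * both TLBs simply issues the two demaps back to back:
 */
static inline void example_spitfire_flush_tlb_nucleus_page(unsigned long page)
{
	spitfire_flush_dtlb_nucleus_page(page);
	spitfire_flush_itlb_nucleus_page(page);
}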

/* Cheetah has "all non-locked" tlb flushes. */
static inline void cheetah_flush_dtlb_all(void)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (0x80), "i" (ASI_DMMU_DEMAP));
}

static inline void cheetah_flush_itlb_all(void)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (0x80), "i" (ASI_IMMU_DEMAP));
}
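
/* Illustrative sketch, not from the original header: 0x80 in the demap
 * address above selects Cheetah's "demap all" operation, which drops every
 * non-locked entry.  A hypothetical helper clearing both MMUs at once:
 */
static inline void example_cheetah_flush_tlb_all(void)
{
	cheetah_flush_dtlb_all();
	cheetah_flush_itlb_all();
}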

/* Cheetah has a 4-tlb layout so direct access is a bit different.
 * The first two TLBs are fully associative, hold 16 entries, and are
 * used only for locked and >8K sized translations.  One exists for
 * data accesses and one for instruction accesses.
 *
 * The third TLB is for data accesses to 8K non-locked translations, is
 * 2 way associative, and holds 512 entries.  The fourth TLB is for
 * instruction accesses to 8K non-locked translations, is 2 way
 * associative, and holds 128 entries.
 *
 * Cheetah has some bug where bogus data can be returned from
 * ASI_{D,I}TLB_DATA_ACCESS loads, doing the load twice fixes
 * the problem for me. -DaveM
 *
 * (A hypothetical walker over the large D-TLB, using the accessors
 * defined below, is sketched near the end of this file.)
 */
static inline unsigned long cheetah_get_ldtlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
			     "ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_DTLB_DATA_ACCESS));

	return data;
}

static inline unsigned long cheetah_get_litlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
			     "ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));

	return data;
}

static inline unsigned long cheetah_get_ldtlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_DTLB_TAG_READ));

	return tag;
}

static inline unsigned long cheetah_get_litlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_TAG_READ));

	return tag;
}

static inline void cheetah_put_ldtlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_DTLB_DATA_ACCESS));
}

static inline void cheetah_put_litlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));
}

static inline unsigned long cheetah_get_dtlb_data(int entry, int tlb)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
			     "ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_DATA_ACCESS));

	return data;
}

static inline unsigned long cheetah_get_dtlb_tag(int entry, int tlb)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_TAG_READ));
	return tag;
}

static inline void cheetah_put_dtlb_data(int entry, unsigned long data, int tlb)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((tlb << 16) | (entry << 3)),
			       "i" (ASI_DTLB_DATA_ACCESS));
}

static inline unsigned long cheetah_get_itlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
			     "ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" ((2 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));

	return data;
}

static inline unsigned long cheetah_get_itlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((2 << 16) | (entry << 3)), "i" (ASI_ITLB_TAG_READ));
	return tag;
}

static inline void cheetah_put_itlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data), "r" ((2 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));
}
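
/* Illustrative sketch, not from the original header (referenced from the
 * 4-tlb layout comment above): assuming, per that comment, that TLB index 2
 * addresses the large 512-entry D-TLB, a hypothetical walker could snapshot
 * it with the double-read accessor, which already works around the bogus
 * first read.  The caller supplies arrays with at least 512 slots.
 */
static inline void example_cheetah_dump_big_dtlb(unsigned long *tags,
						 unsigned long *datas)
{
	int i;

	for (i = 0; i < 512; i++) {
		tags[i] = cheetah_get_dtlb_tag(i, 2);
		datas[i] = cheetah_get_dtlb_data(i, 2);
	}
}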

#endif /* !(__ASSEMBLY__) */
#endif /* CONFIG_SPARC64 */
#endif /* !(_SPARC64_SPITFIRE_H) */