/* Imported with QEMU 2.4.0: qemu/roms/openbios/arch/sparc64/spitfire.h */
/* $Id: spitfire.h,v 1.18 2001/11/29 16:42:10 kanoj Exp $
 * spitfire.h: SpitFire/BlackBird/Cheetah inline MMU operations.
 *
 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
 */
6
#ifndef _SPARC64_SPITFIRE_H
#define _SPARC64_SPITFIRE_H

#include <asm/asi.h>

/* The following register addresses are accessible via ASI_DMMU
 * and ASI_IMMU; that is, there is a distinct and unique copy of
 * each of these registers for each TLB.
 */
#define TSB_TAG_TARGET          0x0000000000000000 /* All chips                         */
#define TLB_SFSR                0x0000000000000018 /* All chips                         */
#define TSB_REG                 0x0000000000000028 /* All chips                         */
#define TLB_TAG_ACCESS          0x0000000000000030 /* All chips                         */
#define VIRT_WATCHPOINT         0x0000000000000038 /* All chips                         */
#define PHYS_WATCHPOINT         0x0000000000000040 /* All chips                         */
#define TSB_EXTENSION_P         0x0000000000000048 /* Ultra-III and later               */
#define TSB_EXTENSION_S         0x0000000000000050 /* Ultra-III and later, D-TLB only   */
#define TSB_EXTENSION_N         0x0000000000000058 /* Ultra-III and later               */
#define TLB_TAG_ACCESS_EXT      0x0000000000000060 /* Ultra-III+ and later              */
26
/* These registers only exist as one entity, and are accessed
 * via ASI_DMMU only.
 *
 * NOTE: VIRT_WATCHPOINT (0x38) and PHYS_WATCHPOINT (0x40) were also
 * defined here; the duplicate (identical) definitions were removed —
 * the per-TLB register list above already provides them.
 */
#define PRIMARY_CONTEXT         0x0000000000000008
#define SECONDARY_CONTEXT       0x0000000000000010
#define DMMU_SFAR               0x0000000000000020
/* Highest TLB entry index usable for locked translations on Spitfire
 * (64-entry TLB, zero-based).
 */
#define SPITFIRE_HIGHEST_LOCKED_TLBENT  (64 - 1)

/* translation table entry (TTE) bits */
#define SPITFIRE_TTE_WRITABLE   0x02
#define SPITFIRE_TTE_PRIVILEGED 0x04
#define SPITFIRE_TTE_CV         0x10
#define SPITFIRE_TTE_CP         0x20
#define SPITFIRE_TTE_LOCKED     0x40
#define SPITFIRE_TTE_VALID      0x8000000000000000ULL
45
#ifndef __ASSEMBLY__

/* TLB organisation of the running chip; the variable holding it is
 * defined elsewhere (see the extern below).
 */
enum ultra_tlb_layout {
        spitfire = 0,
        cheetah = 1,
        cheetah_plus = 2,
};

extern enum ultra_tlb_layout tlb_type;

/* Highest locked-entry index on Cheetah (16-entry locked TLB, zero-based). */
#define CHEETAH_HIGHEST_LOCKED_TLBENT   (16 - 1)

#define L1DCACHE_SIZE           0x4000

/* Highest TLB entry index that may hold a locked translation on the
 * current chip type (selected at runtime via tlb_type).
 */
#define sparc64_highest_locked_tlbent() \
        (tlb_type == spitfire ? \
         SPITFIRE_HIGHEST_LOCKED_TLBENT : \
         CHEETAH_HIGHEST_LOCKED_TLBENT)
64
/* Read the I-MMU Synchronous Fault Status Register (TLB_SFSR via ASI_IMMU). */
static __inline__ unsigned long spitfire_get_isfsr(void)
{
        unsigned long ret;

        __asm__ __volatile__("ldxa      [%1] %2, %0"
                             : "=r" (ret)
                             : "r" (TLB_SFSR), "i" (ASI_IMMU));
        return ret;
}

/* Read the D-MMU Synchronous Fault Status Register (TLB_SFSR via ASI_DMMU). */
static __inline__ unsigned long spitfire_get_dsfsr(void)
{
        unsigned long ret;

        __asm__ __volatile__("ldxa      [%1] %2, %0"
                             : "=r" (ret)
                             : "r" (TLB_SFSR), "i" (ASI_DMMU));
        return ret;
}

/* Read the D-MMU Synchronous Fault Address Register (DMMU_SFAR via ASI_DMMU). */
static __inline__ unsigned long spitfire_get_sfar(void)
{
        unsigned long ret;

        __asm__ __volatile__("ldxa      [%1] %2, %0"
                             : "=r" (ret)
                             : "r" (DMMU_SFAR), "i" (ASI_DMMU));
        return ret;
}
94
/* Write the I-MMU Synchronous Fault Status Register; the membar #Sync
 * inside the asm orders the store before subsequent instructions.
 */
static __inline__ void spitfire_put_isfsr(unsigned long sfsr)
{
        __asm__ __volatile__("stxa      %0, [%1] %2\n\t"
                             "membar    #Sync"
                             : /* no outputs */
                             : "r" (sfsr), "r" (TLB_SFSR), "i" (ASI_IMMU));
}

/* Write the D-MMU Synchronous Fault Status Register, followed by
 * membar #Sync (same pattern as the I-MMU variant above).
 */
static __inline__ void spitfire_put_dsfsr(unsigned long sfsr)
{
        __asm__ __volatile__("stxa      %0, [%1] %2\n\t"
                             "membar    #Sync"
                             : /* no outputs */
                             : "r" (sfsr), "r" (TLB_SFSR), "i" (ASI_DMMU));
}
110
/* Read the primary context register (PRIMARY_CONTEXT via ASI_DMMU). */
static __inline__ unsigned long spitfire_get_primary_context(void)
{
        unsigned long ctx;

        __asm__ __volatile__("ldxa      [%1] %2, %0"
                             : "=r" (ctx)
                             : "r" (PRIMARY_CONTEXT), "i" (ASI_DMMU));
        return ctx;
}

/* Set the primary context register.  Only the low 10 bits of ctx are
 * written (ctx & 0x3ff).  A second, standalone membar #Sync with a
 * "memory" clobber follows the store for ordering against the compiler
 * as well as the CPU.
 */
static __inline__ void spitfire_set_primary_context(unsigned long ctx)
{
        __asm__ __volatile__("stxa      %0, [%1] %2\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (ctx & 0x3ff),
                               "r" (PRIMARY_CONTEXT), "i" (ASI_DMMU));
        __asm__ __volatile__ ("membar #Sync" : : : "memory");
}

/* Read the secondary context register (SECONDARY_CONTEXT via ASI_DMMU). */
static __inline__ unsigned long spitfire_get_secondary_context(void)
{
        unsigned long ctx;

        __asm__ __volatile__("ldxa      [%1] %2, %0"
                             : "=r" (ctx)
                             : "r" (SECONDARY_CONTEXT), "i" (ASI_DMMU));
        return ctx;
}

/* Set the secondary context register; same masking and ordering as
 * spitfire_set_primary_context().
 */
static __inline__ void spitfire_set_secondary_context(unsigned long ctx)
{
        __asm__ __volatile__("stxa      %0, [%1] %2\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (ctx & 0x3ff),
                               "r" (SECONDARY_CONTEXT), "i" (ASI_DMMU));
        __asm__ __volatile__ ("membar #Sync" : : : "memory");
}
150
/* The data cache is write through, so this just invalidates the
 * specified line by storing the given tag at the line's address via
 * ASI_DCACHE_TAG.
 */
static __inline__ void spitfire_put_dcache_tag(unsigned long addr, unsigned long tag)
{
        __asm__ __volatile__("stxa      %0, [%1] %2\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (tag), "r" (addr), "i" (ASI_DCACHE_TAG));
        __asm__ __volatile__ ("membar #Sync" : : : "memory");
}

/* The instruction cache lines are flushed with this, but note that
 * this does not flush the pipeline.  It is possible for a line to
 * get flushed but stale instructions to still be in the pipeline;
 * a flush instruction (to any address) is sufficient to handle
 * this issue after the line is invalidated.
 */
static __inline__ void spitfire_put_icache_tag(unsigned long addr, unsigned long tag)
{
        __asm__ __volatile__("stxa      %0, [%1] %2\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (tag), "r" (addr), "i" (ASI_IC_TAG));
}
176
/* Read the data word of D-TLB entry 'entry'.  The index is scaled by 8
 * (entry << 3) to form the diagnostic-access address.  TTE diagnostic
 * bits are masked off before returning.
 */
static __inline__ unsigned long spitfire_get_dtlb_data(int entry)
{
        unsigned long data;

        __asm__ __volatile__("ldxa      [%1] %2, %0"
                             : "=r" (data)
                             : "r" (entry << 3), "i" (ASI_DTLB_DATA_ACCESS));

        /* Clear TTE diag bits. */
        data &= ~0x0003fe0000000000UL;

        return data;
}

/* Read the tag of D-TLB entry 'entry' via ASI_DTLB_TAG_READ. */
static __inline__ unsigned long spitfire_get_dtlb_tag(int entry)
{
        unsigned long tag;

        __asm__ __volatile__("ldxa      [%1] %2, %0"
                             : "=r" (tag)
                             : "r" (entry << 3), "i" (ASI_DTLB_TAG_READ));
        return tag;
}

/* Write the data word of D-TLB entry 'entry', followed by membar #Sync. */
static __inline__ void spitfire_put_dtlb_data(int entry, unsigned long data)
{
        __asm__ __volatile__("stxa      %0, [%1] %2\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (data), "r" (entry << 3),
                               "i" (ASI_DTLB_DATA_ACCESS));
}

/* Read the data word of I-TLB entry 'entry'; diag bits cleared as for
 * the D-TLB variant.
 */
static __inline__ unsigned long spitfire_get_itlb_data(int entry)
{
        unsigned long data;

        __asm__ __volatile__("ldxa      [%1] %2, %0"
                             : "=r" (data)
                             : "r" (entry << 3), "i" (ASI_ITLB_DATA_ACCESS));

        /* Clear TTE diag bits. */
        data &= ~0x0003fe0000000000UL;

        return data;
}

/* Read the tag of I-TLB entry 'entry' via ASI_ITLB_TAG_READ. */
static __inline__ unsigned long spitfire_get_itlb_tag(int entry)
{
        unsigned long tag;

        __asm__ __volatile__("ldxa      [%1] %2, %0"
                             : "=r" (tag)
                             : "r" (entry << 3), "i" (ASI_ITLB_TAG_READ));
        return tag;
}

/* Write the data word of I-TLB entry 'entry', followed by membar #Sync. */
static __inline__ void spitfire_put_itlb_data(int entry, unsigned long data)
{
        __asm__ __volatile__("stxa      %0, [%1] %2\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (data), "r" (entry << 3),
                               "i" (ASI_ITLB_DATA_ACCESS));
}
242
/* Spitfire hardware assisted TLB flushes. */

/* Context level flushes.  The demap operation is encoded in the store
 * address: 0x40 = primary context, 0x50 = secondary context,
 * 0x60 = nucleus context.  Each store is followed by membar #Sync.
 */
static __inline__ void spitfire_flush_dtlb_primary_context(void)
{
        __asm__ __volatile__("stxa      %%g0, [%0] %1\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (0x40), "i" (ASI_DMMU_DEMAP));
}

static __inline__ void spitfire_flush_itlb_primary_context(void)
{
        __asm__ __volatile__("stxa      %%g0, [%0] %1\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (0x40), "i" (ASI_IMMU_DEMAP));
}

static __inline__ void spitfire_flush_dtlb_secondary_context(void)
{
        __asm__ __volatile__("stxa      %%g0, [%0] %1\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (0x50), "i" (ASI_DMMU_DEMAP));
}

static __inline__ void spitfire_flush_itlb_secondary_context(void)
{
        __asm__ __volatile__("stxa      %%g0, [%0] %1\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (0x50), "i" (ASI_IMMU_DEMAP));
}

static __inline__ void spitfire_flush_dtlb_nucleus_context(void)
{
        __asm__ __volatile__("stxa      %%g0, [%0] %1\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (0x60), "i" (ASI_DMMU_DEMAP));
}

static __inline__ void spitfire_flush_itlb_nucleus_context(void)
{
        __asm__ __volatile__("stxa      %%g0, [%0] %1\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (0x60), "i" (ASI_IMMU_DEMAP));
}
293
/* Page level flushes.  The page address is combined with the demap
 * context selector in the low bits: 0x00 = primary, 0x10 = secondary,
 * 0x20 = nucleus.
 */
static __inline__ void spitfire_flush_dtlb_primary_page(unsigned long page)
{
        __asm__ __volatile__("stxa      %%g0, [%0] %1\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (page), "i" (ASI_DMMU_DEMAP));
}

static __inline__ void spitfire_flush_itlb_primary_page(unsigned long page)
{
        __asm__ __volatile__("stxa      %%g0, [%0] %1\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (page), "i" (ASI_IMMU_DEMAP));
}

static __inline__ void spitfire_flush_dtlb_secondary_page(unsigned long page)
{
        __asm__ __volatile__("stxa      %%g0, [%0] %1\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (page | 0x10), "i" (ASI_DMMU_DEMAP));
}

static __inline__ void spitfire_flush_itlb_secondary_page(unsigned long page)
{
        __asm__ __volatile__("stxa      %%g0, [%0] %1\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (page | 0x10), "i" (ASI_IMMU_DEMAP));
}

static __inline__ void spitfire_flush_dtlb_nucleus_page(unsigned long page)
{
        __asm__ __volatile__("stxa      %%g0, [%0] %1\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (page | 0x20), "i" (ASI_DMMU_DEMAP));
}

static __inline__ void spitfire_flush_itlb_nucleus_page(unsigned long page)
{
        __asm__ __volatile__("stxa      %%g0, [%0] %1\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (page | 0x20), "i" (ASI_IMMU_DEMAP));
}
342
/* Cheetah has "all non-locked" tlb flushes, selected by demap
 * address 0x80.
 */
static __inline__ void cheetah_flush_dtlb_all(void)
{
        __asm__ __volatile__("stxa      %%g0, [%0] %1\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (0x80), "i" (ASI_DMMU_DEMAP));
}

static __inline__ void cheetah_flush_itlb_all(void)
{
        __asm__ __volatile__("stxa      %%g0, [%0] %1\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (0x80), "i" (ASI_IMMU_DEMAP));
}
359
/* Cheetah has a 4-tlb layout so direct access is a bit different.
 * The first two TLBs are fully assosciative, hold 16 entries, and are
 * used only for locked and >8K sized translations.  One exists for
 * data accesses and one for instruction accesses.
 *
 * The third TLB is for data accesses to 8K non-locked translations, is
 * 2 way assosciative, and holds 512 entries.  The fourth TLB is for
 * instruction accesses to 8K non-locked translations, is 2 way
 * assosciative, and holds 128 entries.
 *
 * Cheetah has some bug where bogus data can be returned from
 * ASI_{D,I}TLB_DATA_ACCESS loads, doing the load twice fixes
 * the problem for me. -DaveM
 *
 * The diagnostic-access address encodes the TLB selector in bits
 * 16+ ((tlb << 16)) and the entry index * 8 in the low bits
 * ((entry << 3)).  The "locked" accessors below use TLB 0.
 */

/* Read data word of locked D-TLB (TLB 0) entry; double load works
 * around the Cheetah read bug described above.
 */
static __inline__ unsigned long cheetah_get_ldtlb_data(int entry)
{
        unsigned long data;

        __asm__ __volatile__("ldxa      [%1] %2, %%g0\n\t"
                             "ldxa      [%1] %2, %0"
                             : "=r" (data)
                             : "r" ((0 << 16) | (entry << 3)),
                             "i" (ASI_DTLB_DATA_ACCESS));

        return data;
}

/* Read data word of locked I-TLB (TLB 0) entry; double load as above. */
static __inline__ unsigned long cheetah_get_litlb_data(int entry)
{
        unsigned long data;

        __asm__ __volatile__("ldxa      [%1] %2, %%g0\n\t"
                             "ldxa      [%1] %2, %0"
                             : "=r" (data)
                             : "r" ((0 << 16) | (entry << 3)),
                             "i" (ASI_ITLB_DATA_ACCESS));

        return data;
}

/* Read tag of locked D-TLB (TLB 0) entry. */
static __inline__ unsigned long cheetah_get_ldtlb_tag(int entry)
{
        unsigned long tag;

        __asm__ __volatile__("ldxa      [%1] %2, %0"
                             : "=r" (tag)
                             : "r" ((0 << 16) | (entry << 3)),
                             "i" (ASI_DTLB_TAG_READ));

        return tag;
}

/* Read tag of locked I-TLB (TLB 0) entry. */
static __inline__ unsigned long cheetah_get_litlb_tag(int entry)
{
        unsigned long tag;

        __asm__ __volatile__("ldxa      [%1] %2, %0"
                             : "=r" (tag)
                             : "r" ((0 << 16) | (entry << 3)),
                             "i" (ASI_ITLB_TAG_READ));

        return tag;
}

/* Write data word of locked D-TLB (TLB 0) entry, then membar #Sync. */
static __inline__ void cheetah_put_ldtlb_data(int entry, unsigned long data)
{
        __asm__ __volatile__("stxa      %0, [%1] %2\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (data),
                               "r" ((0 << 16) | (entry << 3)),
                               "i" (ASI_DTLB_DATA_ACCESS));
}

/* Write data word of locked I-TLB (TLB 0) entry, then membar #Sync. */
static __inline__ void cheetah_put_litlb_data(int entry, unsigned long data)
{
        __asm__ __volatile__("stxa      %0, [%1] %2\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (data),
                               "r" ((0 << 16) | (entry << 3)),
                               "i" (ASI_ITLB_DATA_ACCESS));
}
443
/* Read data word of D-TLB entry 'entry' in TLB 'tlb' (selector in
 * bits 16+); double load works around the Cheetah read bug noted above.
 */
static __inline__ unsigned long cheetah_get_dtlb_data(int entry, int tlb)
{
        unsigned long data;

        __asm__ __volatile__("ldxa      [%1] %2, %%g0\n\t"
                             "ldxa      [%1] %2, %0"
                             : "=r" (data)
                             : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_DATA_ACCESS));

        return data;
}

/* Read tag of D-TLB entry 'entry' in TLB 'tlb'. */
static __inline__ unsigned long cheetah_get_dtlb_tag(int entry, int tlb)
{
        unsigned long tag;

        __asm__ __volatile__("ldxa      [%1] %2, %0"
                             : "=r" (tag)
                             : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_TAG_READ));
        return tag;
}

/* Write data word of D-TLB entry 'entry' in TLB 'tlb', then membar #Sync. */
static __inline__ void cheetah_put_dtlb_data(int entry, unsigned long data, int tlb)
{
        __asm__ __volatile__("stxa      %0, [%1] %2\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (data),
                               "r" ((tlb << 16) | (entry << 3)),
                               "i" (ASI_DTLB_DATA_ACCESS));
}

/* Read data word of I-TLB entry 'entry'; the instruction TLB uses a
 * fixed selector of 2 ((2 << 16)).  Double load as above.
 */
static __inline__ unsigned long cheetah_get_itlb_data(int entry)
{
        unsigned long data;

        __asm__ __volatile__("ldxa      [%1] %2, %%g0\n\t"
                             "ldxa      [%1] %2, %0"
                             : "=r" (data)
                             : "r" ((2 << 16) | (entry << 3)),
                               "i" (ASI_ITLB_DATA_ACCESS));

        return data;
}

/* Read tag of I-TLB entry 'entry' (selector 2). */
static __inline__ unsigned long cheetah_get_itlb_tag(int entry)
{
        unsigned long tag;

        __asm__ __volatile__("ldxa      [%1] %2, %0"
                             : "=r" (tag)
                             : "r" ((2 << 16) | (entry << 3)), "i" (ASI_ITLB_TAG_READ));
        return tag;
}

/* Write data word of I-TLB entry 'entry' (selector 2), then membar #Sync. */
static __inline__ void cheetah_put_itlb_data(int entry, unsigned long data)
{
        __asm__ __volatile__("stxa      %0, [%1] %2\n\t"
                             "membar    #Sync"
                             : /* No outputs */
                             : "r" (data), "r" ((2 << 16) | (entry << 3)),
                               "i" (ASI_ITLB_DATA_ACCESS));
}

#endif /* !(__ASSEMBLY__) */

#endif /* !(_SPARC64_SPITFIRE_H) */