2614510c28d0da467600c8ceeff86d7218b39a32
[kvmfornfv.git] / kernel / drivers / gpu / drm / nouveau / nvkm / engine / gr / nv04.c
1 /*
2  * Copyright 2007 Stephane Marchesin
3  * All Rights Reserved.
4  *
5  * Permission is hereby granted, free of charge, to any person obtaining a
6  * copy of this software and associated documentation files (the "Software"),
7  * to deal in the Software without restriction, including without limitation
8  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9  * and/or sell copies of the Software, and to permit persons to whom the
10  * Software is furnished to do so, subject to the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the next
13  * paragraph) shall be included in all copies or substantial portions of the
14  * Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
19  * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
20  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22  * DEALINGS IN THE SOFTWARE.
23  */
24 #include <engine/gr.h>
25 #include "regs.h"
26
27 #include <core/client.h>
28 #include <core/device.h>
29 #include <core/handle.h>
30 #include <engine/fifo.h>
31 #include <subdev/instmem.h>
32 #include <subdev/timer.h>
33
/* PGRAPH registers saved/restored across a channel switch on NV04/NV05.
 * The order of this table defines the layout of the per-channel shadow
 * array (nv04_gr_chan->nv04[]): slot i holds the saved value of register
 * nv04_gr_ctx_regs[i].  Do not reorder without migrating saved contexts.
 */
static u32
nv04_gr_ctx_regs[] = {
	/* surf3d clip: h min/max (0x53c/0x544), v min/max (0x540/0x548);
	 * written by nv04_gr_mthd_surf3d_clip_{h,v} below */
	0x0040053c,
	0x00400544,
	0x00400540,
	0x00400548,
	NV04_PGRAPH_CTX_SWITCH1,
	NV04_PGRAPH_CTX_SWITCH2,
	NV04_PGRAPH_CTX_SWITCH3,
	NV04_PGRAPH_CTX_SWITCH4,
	NV04_PGRAPH_CTX_CACHE1,
	NV04_PGRAPH_CTX_CACHE2,
	NV04_PGRAPH_CTX_CACHE3,
	NV04_PGRAPH_CTX_CACHE4,
	0x00400184,
	0x004001a4,
	0x004001c4,
	0x004001e4,
	0x00400188,
	0x004001a8,
	0x004001c8,
	0x004001e8,
	0x0040018c,
	0x004001ac,
	0x004001cc,
	0x004001ec,
	0x00400190,
	0x004001b0,
	0x004001d0,
	0x004001f0,
	0x00400194,
	0x004001b4,
	0x004001d4,
	0x004001f4,
	0x00400198,
	0x004001b8,
	0x004001d8,
	0x004001f8,
	0x0040019c,
	0x004001bc,
	0x004001dc,
	0x004001fc,
	0x00400174,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV04_PGRAPH_DMA_PITCH,
	/* surface buffers 0-5: offset/base/limit triples, then pitches */
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV04_PGRAPH_SURFACE,
	NV04_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV04_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	/* 64-entry pattern colour RAM, one dword per entry */
	NV04_PGRAPH_PATT_COLORRAM+0x00,
	NV04_PGRAPH_PATT_COLORRAM+0x04,
	NV04_PGRAPH_PATT_COLORRAM+0x08,
	NV04_PGRAPH_PATT_COLORRAM+0x0c,
	NV04_PGRAPH_PATT_COLORRAM+0x10,
	NV04_PGRAPH_PATT_COLORRAM+0x14,
	NV04_PGRAPH_PATT_COLORRAM+0x18,
	NV04_PGRAPH_PATT_COLORRAM+0x1c,
	NV04_PGRAPH_PATT_COLORRAM+0x20,
	NV04_PGRAPH_PATT_COLORRAM+0x24,
	NV04_PGRAPH_PATT_COLORRAM+0x28,
	NV04_PGRAPH_PATT_COLORRAM+0x2c,
	NV04_PGRAPH_PATT_COLORRAM+0x30,
	NV04_PGRAPH_PATT_COLORRAM+0x34,
	NV04_PGRAPH_PATT_COLORRAM+0x38,
	NV04_PGRAPH_PATT_COLORRAM+0x3c,
	NV04_PGRAPH_PATT_COLORRAM+0x40,
	NV04_PGRAPH_PATT_COLORRAM+0x44,
	NV04_PGRAPH_PATT_COLORRAM+0x48,
	NV04_PGRAPH_PATT_COLORRAM+0x4c,
	NV04_PGRAPH_PATT_COLORRAM+0x50,
	NV04_PGRAPH_PATT_COLORRAM+0x54,
	NV04_PGRAPH_PATT_COLORRAM+0x58,
	NV04_PGRAPH_PATT_COLORRAM+0x5c,
	NV04_PGRAPH_PATT_COLORRAM+0x60,
	NV04_PGRAPH_PATT_COLORRAM+0x64,
	NV04_PGRAPH_PATT_COLORRAM+0x68,
	NV04_PGRAPH_PATT_COLORRAM+0x6c,
	NV04_PGRAPH_PATT_COLORRAM+0x70,
	NV04_PGRAPH_PATT_COLORRAM+0x74,
	NV04_PGRAPH_PATT_COLORRAM+0x78,
	NV04_PGRAPH_PATT_COLORRAM+0x7c,
	NV04_PGRAPH_PATT_COLORRAM+0x80,
	NV04_PGRAPH_PATT_COLORRAM+0x84,
	NV04_PGRAPH_PATT_COLORRAM+0x88,
	NV04_PGRAPH_PATT_COLORRAM+0x8c,
	NV04_PGRAPH_PATT_COLORRAM+0x90,
	NV04_PGRAPH_PATT_COLORRAM+0x94,
	NV04_PGRAPH_PATT_COLORRAM+0x98,
	NV04_PGRAPH_PATT_COLORRAM+0x9c,
	NV04_PGRAPH_PATT_COLORRAM+0xa0,
	NV04_PGRAPH_PATT_COLORRAM+0xa4,
	NV04_PGRAPH_PATT_COLORRAM+0xa8,
	NV04_PGRAPH_PATT_COLORRAM+0xac,
	NV04_PGRAPH_PATT_COLORRAM+0xb0,
	NV04_PGRAPH_PATT_COLORRAM+0xb4,
	NV04_PGRAPH_PATT_COLORRAM+0xb8,
	NV04_PGRAPH_PATT_COLORRAM+0xbc,
	NV04_PGRAPH_PATT_COLORRAM+0xc0,
	NV04_PGRAPH_PATT_COLORRAM+0xc4,
	NV04_PGRAPH_PATT_COLORRAM+0xc8,
	NV04_PGRAPH_PATT_COLORRAM+0xcc,
	NV04_PGRAPH_PATT_COLORRAM+0xd0,
	NV04_PGRAPH_PATT_COLORRAM+0xd4,
	NV04_PGRAPH_PATT_COLORRAM+0xd8,
	NV04_PGRAPH_PATT_COLORRAM+0xdc,
	NV04_PGRAPH_PATT_COLORRAM+0xe0,
	NV04_PGRAPH_PATT_COLORRAM+0xe4,
	NV04_PGRAPH_PATT_COLORRAM+0xe8,
	NV04_PGRAPH_PATT_COLORRAM+0xec,
	NV04_PGRAPH_PATT_COLORRAM+0xf0,
	NV04_PGRAPH_PATT_COLORRAM+0xf4,
	NV04_PGRAPH_PATT_COLORRAM+0xf8,
	NV04_PGRAPH_PATT_COLORRAM+0xfc,
	NV04_PGRAPH_PATTERN,
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	0x00400600,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	NV04_PGRAPH_CONTROL0,
	NV04_PGRAPH_CONTROL1,
	NV04_PGRAPH_CONTROL2,
	NV04_PGRAPH_BLEND,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	0x00400560,
	0x00400568,
	0x00400564,
	0x0040056c,
	0x00400400,
	0x00400480,
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	0x00400534,
	0x00400538,
	0x00400514,
	0x00400518,
	0x0040051c,
	0x00400520,
	0x00400524,
	0x00400528,
	0x0040052c,
	0x00400530,
	0x00400d00,
	0x00400d40,
	0x00400d80,
	0x00400d04,
	0x00400d44,
	0x00400d84,
	0x00400d08,
	0x00400d48,
	0x00400d88,
	0x00400d0c,
	0x00400d4c,
	0x00400d8c,
	0x00400d10,
	0x00400d50,
	0x00400d90,
	0x00400d14,
	0x00400d54,
	0x00400d94,
	0x00400d18,
	0x00400d58,
	0x00400d98,
	0x00400d1c,
	0x00400d5c,
	0x00400d9c,
	0x00400d20,
	0x00400d60,
	0x00400da0,
	0x00400d24,
	0x00400d64,
	0x00400da4,
	0x00400d28,
	0x00400d68,
	0x00400da8,
	0x00400d2c,
	0x00400d6c,
	0x00400dac,
	0x00400d30,
	0x00400d70,
	0x00400db0,
	0x00400d34,
	0x00400d74,
	0x00400db4,
	0x00400d38,
	0x00400d78,
	0x00400db8,
	0x00400d3c,
	0x00400d7c,
	0x00400dbc,
	0x00400590,
	0x00400594,
	0x00400598,
	0x0040059c,
	0x004005a8,
	0x004005ac,
	0x004005b0,
	0x004005b4,
	0x004005c0,
	0x004005c4,
	0x004005c8,
	0x004005cc,
	0x004005d0,
	0x004005d4,
	0x004005d8,
	0x004005dc,
	0x004005e0,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV04_PGRAPH_DVD_COLORFMT,
	NV04_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	0x00400500,
	0x00400504,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
	NV04_PGRAPH_DEBUG_3
};
348
/* Engine-wide state for the NV04 graphics engine. */
struct nv04_gr_priv {
	struct nvkm_gr base;		/* base graphics engine object */
	struct nv04_gr_chan *chan[16];	/* per-channel contexts, indexed by chid */
	spinlock_t lock;		/* NOTE(review): presumably guards chan[];
					 * lock users are outside this chunk — confirm */
};
354
/* Per-channel graphics context: a software shadow of the PGRAPH state. */
struct nv04_gr_chan {
	struct nvkm_object base;
	int chid;	/* channel id; indexes nv04_gr_priv::chan[] */
	/* saved register values, one slot per nv04_gr_ctx_regs[] entry,
	 * in the same order as that table */
	u32 nv04[ARRAY_SIZE(nv04_gr_ctx_regs)];
};
360
361
362 static inline struct nv04_gr_priv *
363 nv04_gr_priv(struct nv04_gr_chan *chan)
364 {
365         return (void *)nv_object(chan)->engine;
366 }
367
368 /*******************************************************************************
369  * Graphics object classes
370  ******************************************************************************/
371
372 /*
373  * Software methods, why they are needed, and how they all work:
374  *
375  * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
376  * 2d engine settings are kept inside the grobjs themselves. The grobjs are
377  * 3 words long on both. grobj format on NV04 is:
378  *
379  * word 0:
380  *  - bits 0-7: class
381  *  - bit 12: color key active
382  *  - bit 13: clip rect active
383  *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
384  *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
385  *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
386  *            NV03_CONTEXT_SURFACE_DST].
387  *  - bits 15-17: 2d operation [aka patch config]
388  *  - bit 24: patch valid [enables rendering using this object]
389  *  - bit 25: surf3d valid [for tex_tri and multitex_tri only]
390  * word 1:
391  *  - bits 0-1: mono format
392  *  - bits 8-13: color format
393  *  - bits 16-31: DMA_NOTIFY instance
394  * word 2:
395  *  - bits 0-15: DMA_A instance
396  *  - bits 16-31: DMA_B instance
397  *
398  * On NV05 it's:
399  *
400  * word 0:
401  *  - bits 0-7: class
402  *  - bit 12: color key active
403  *  - bit 13: clip rect active
404  *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
405  *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
406  *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
407  *            NV03_CONTEXT_SURFACE_DST].
408  *  - bits 15-17: 2d operation [aka patch config]
409  *  - bits 20-22: dither mode
410  *  - bit 24: patch valid [enables rendering using this object]
411  *  - bit 25: surface_dst/surface_color/surf2d/surf3d valid
412  *  - bit 26: surface_src/surface_zeta valid
413  *  - bit 27: pattern valid
414  *  - bit 28: rop valid
415  *  - bit 29: beta1 valid
416  *  - bit 30: beta4 valid
417  * word 1:
418  *  - bits 0-1: mono format
419  *  - bits 8-13: color format
420  *  - bits 16-31: DMA_NOTIFY instance
421  * word 2:
422  *  - bits 0-15: DMA_A instance
423  *  - bits 16-31: DMA_B instance
424  *
425  * NV05 will set/unset the relevant valid bits when you poke the relevant
426  * object-binding methods with object of the proper type, or with the NULL
427  * type. It'll only allow rendering using the grobj if all needed objects
428  * are bound. The needed set of objects depends on selected operation: for
429  * example rop object is needed by ROP_AND, but not by SRCCOPY_AND.
430  *
431  * NV04 doesn't have these methods implemented at all, and doesn't have the
432  * relevant bits in grobj. Instead, it'll allow rendering whenever bit 24
433  * is set. So we have to emulate them in software, internally keeping the
434  * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04,
435  * but the last word isn't actually used for anything, we abuse it for this
436  * purpose.
437  *
438  * Actually, NV05 can optionally check bit 24 too, but we disable this since
439  * there's no use for it.
440  *
441  * For unknown reasons, NV04 implements surf3d binding in hardware as an
442  * exception. Also for unknown reasons, NV04 doesn't implement the clipping
443  * methods on the surf3d object, so we have to emulate them too.
444  */
445
/* Read-modify-write word 0 of a grobj ((word0 & ~mask) | value), then
 * mirror the new value into the live PGRAPH context: both the current
 * CTX_SWITCH1 register and the CTX_CACHE1 slot of the subchannel that
 * triggered the current method (taken from TRAPPED_ADDR bits 13-15).
 */
static void
nv04_gr_set_ctx1(struct nvkm_object *object, u32 mask, u32 value)
{
	struct nv04_gr_priv *priv = (void *)object->engine;
	int subc = (nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
	u32 tmp;

	tmp  = nv_ro32(object, 0x00);
	tmp &= ~mask;
	tmp |= value;
	nv_wo32(object, 0x00, tmp);

	nv_wr32(priv, NV04_PGRAPH_CTX_SWITCH1, tmp);
	nv_wr32(priv, NV04_PGRAPH_CTX_CACHE1 + (subc<<2), tmp);
}
461
/* Update the software-tracked object-valid bits kept in the otherwise
 * unused grobj word 3 (see the "Software methods" comment above), then
 * recompute whether the full set of objects needed by the current 2d
 * operation is bound, and mirror the result into the hardware "patch
 * valid" bit (word 0, bit 24) via nv04_gr_set_ctx1().
 */
static void
nv04_gr_set_ctx_val(struct nvkm_object *object, u32 mask, u32 value)
{
	int class, op, valid = 1;
	u32 tmp, ctx1;

	/* class and 2d operation both live in grobj word 0 */
	ctx1 = nv_ro32(object, 0x00);
	class = ctx1 & 0xff;
	op = (ctx1 >> 15) & 7;

	tmp = nv_ro32(object, 0x0c);
	tmp &= ~mask;
	tmp |= value;
	nv_wo32(object, 0x0c, tmp);

	/* check for valid surf2d/surf_dst/surf_color */
	if (!(tmp & 0x02000000))
		valid = 0;
	/* check for valid surf_src/surf_zeta; only classes 0x1f and 0x48
	 * use a source/zeta surface */
	if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
		valid = 0;

	switch (op) {
	/* SRCCOPY_AND, SRCCOPY: no extra objects required */
	case 0:
	case 3:
		break;
	/* ROP_AND: requires pattern and rop */
	case 1:
		if (!(tmp & 0x18000000))
			valid = 0;
		break;
	/* BLEND_AND: requires beta1 */
	case 2:
		if (!(tmp & 0x20000000))
			valid = 0;
		break;
	/* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
	case 4:
	case 5:
		if (!(tmp & 0x40000000))
			valid = 0;
		break;
	}

	nv04_gr_set_ctx1(object, 0x01000000, valid << 24);
}
509
510 static int
511 nv04_gr_mthd_set_operation(struct nvkm_object *object, u32 mthd,
512                            void *args, u32 size)
513 {
514         u32 class = nv_ro32(object, 0) & 0xff;
515         u32 data = *(u32 *)args;
516         if (data > 5)
517                 return 1;
518         /* Old versions of the objects only accept first three operations. */
519         if (data > 2 && class < 0x40)
520                 return 1;
521         nv04_gr_set_ctx1(object, 0x00038000, data << 15);
522         /* changing operation changes set of objects needed for validation */
523         nv04_gr_set_ctx_val(object, 0, 0);
524         return 0;
525 }
526
527 static int
528 nv04_gr_mthd_surf3d_clip_h(struct nvkm_object *object, u32 mthd,
529                            void *args, u32 size)
530 {
531         struct nv04_gr_priv *priv = (void *)object->engine;
532         u32 data = *(u32 *)args;
533         u32 min = data & 0xffff, max;
534         u32 w = data >> 16;
535         if (min & 0x8000)
536                 /* too large */
537                 return 1;
538         if (w & 0x8000)
539                 /* yes, it accepts negative for some reason. */
540                 w |= 0xffff0000;
541         max = min + w;
542         max &= 0x3ffff;
543         nv_wr32(priv, 0x40053c, min);
544         nv_wr32(priv, 0x400544, max);
545         return 0;
546 }
547
548 static int
549 nv04_gr_mthd_surf3d_clip_v(struct nvkm_object *object, u32 mthd,
550                            void *args, u32 size)
551 {
552         struct nv04_gr_priv *priv = (void *)object->engine;
553         u32 data = *(u32 *)args;
554         u32 min = data & 0xffff, max;
555         u32 w = data >> 16;
556         if (min & 0x8000)
557                 /* too large */
558                 return 1;
559         if (w & 0x8000)
560                 /* yes, it accepts negative for some reason. */
561                 w |= 0xffff0000;
562         max = min + w;
563         max &= 0x3ffff;
564         nv_wr32(priv, 0x400540, min);
565         nv_wr32(priv, 0x400548, max);
566         return 0;
567 }
568
/* Resolve the class of the object being bound: the method argument is
 * an instance id in 16-byte units; word 0 of the instance holds the
 * class (truncated to 16 bits by the return type).
 */
static u16
nv04_gr_mthd_bind_class(struct nvkm_object *object, u32 *args, u32 size)
{
	struct nvkm_instmem *imem = nvkm_instmem(object);
	u32 inst = *(u32 *)args << 4;
	return nv_ro32(imem, inst);
}
576
577 static int
578 nv04_gr_mthd_bind_surf2d(struct nvkm_object *object, u32 mthd,
579                             void *args, u32 size)
580 {
581         switch (nv04_gr_mthd_bind_class(object, args, size)) {
582         case 0x30:
583                 nv04_gr_set_ctx1(object, 0x00004000, 0);
584                 nv04_gr_set_ctx_val(object, 0x02000000, 0);
585                 return 0;
586         case 0x42:
587                 nv04_gr_set_ctx1(object, 0x00004000, 0);
588                 nv04_gr_set_ctx_val(object, 0x02000000, 0x02000000);
589                 return 0;
590         }
591         return 1;
592 }
593
594 static int
595 nv04_gr_mthd_bind_surf2d_swzsurf(struct nvkm_object *object, u32 mthd,
596                                  void *args, u32 size)
597 {
598         switch (nv04_gr_mthd_bind_class(object, args, size)) {
599         case 0x30:
600                 nv04_gr_set_ctx1(object, 0x00004000, 0);
601                 nv04_gr_set_ctx_val(object, 0x02000000, 0);
602                 return 0;
603         case 0x42:
604                 nv04_gr_set_ctx1(object, 0x00004000, 0);
605                 nv04_gr_set_ctx_val(object, 0x02000000, 0x02000000);
606                 return 0;
607         case 0x52:
608                 nv04_gr_set_ctx1(object, 0x00004000, 0x00004000);
609                 nv04_gr_set_ctx_val(object, 0x02000000, 0x02000000);
610                 return 0;
611         }
612         return 1;
613 }
614
615 static int
616 nv01_gr_mthd_bind_patt(struct nvkm_object *object, u32 mthd,
617                        void *args, u32 size)
618 {
619         switch (nv04_gr_mthd_bind_class(object, args, size)) {
620         case 0x30:
621                 nv04_gr_set_ctx_val(object, 0x08000000, 0);
622                 return 0;
623         case 0x18:
624                 nv04_gr_set_ctx_val(object, 0x08000000, 0x08000000);
625                 return 0;
626         }
627         return 1;
628 }
629
630 static int
631 nv04_gr_mthd_bind_patt(struct nvkm_object *object, u32 mthd,
632                        void *args, u32 size)
633 {
634         switch (nv04_gr_mthd_bind_class(object, args, size)) {
635         case 0x30:
636                 nv04_gr_set_ctx_val(object, 0x08000000, 0);
637                 return 0;
638         case 0x44:
639                 nv04_gr_set_ctx_val(object, 0x08000000, 0x08000000);
640                 return 0;
641         }
642         return 1;
643 }
644
645 static int
646 nv04_gr_mthd_bind_rop(struct nvkm_object *object, u32 mthd,
647                       void *args, u32 size)
648 {
649         switch (nv04_gr_mthd_bind_class(object, args, size)) {
650         case 0x30:
651                 nv04_gr_set_ctx_val(object, 0x10000000, 0);
652                 return 0;
653         case 0x43:
654                 nv04_gr_set_ctx_val(object, 0x10000000, 0x10000000);
655                 return 0;
656         }
657         return 1;
658 }
659
660 static int
661 nv04_gr_mthd_bind_beta1(struct nvkm_object *object, u32 mthd,
662                         void *args, u32 size)
663 {
664         switch (nv04_gr_mthd_bind_class(object, args, size)) {
665         case 0x30:
666                 nv04_gr_set_ctx_val(object, 0x20000000, 0);
667                 return 0;
668         case 0x12:
669                 nv04_gr_set_ctx_val(object, 0x20000000, 0x20000000);
670                 return 0;
671         }
672         return 1;
673 }
674
675 static int
676 nv04_gr_mthd_bind_beta4(struct nvkm_object *object, u32 mthd,
677                         void *args, u32 size)
678 {
679         switch (nv04_gr_mthd_bind_class(object, args, size)) {
680         case 0x30:
681                 nv04_gr_set_ctx_val(object, 0x40000000, 0);
682                 return 0;
683         case 0x72:
684                 nv04_gr_set_ctx_val(object, 0x40000000, 0x40000000);
685                 return 0;
686         }
687         return 1;
688 }
689
690 static int
691 nv04_gr_mthd_bind_surf_dst(struct nvkm_object *object, u32 mthd,
692                            void *args, u32 size)
693 {
694         switch (nv04_gr_mthd_bind_class(object, args, size)) {
695         case 0x30:
696                 nv04_gr_set_ctx_val(object, 0x02000000, 0);
697                 return 0;
698         case 0x58:
699                 nv04_gr_set_ctx_val(object, 0x02000000, 0x02000000);
700                 return 0;
701         }
702         return 1;
703 }
704
705 static int
706 nv04_gr_mthd_bind_surf_src(struct nvkm_object *object, u32 mthd,
707                            void *args, u32 size)
708 {
709         switch (nv04_gr_mthd_bind_class(object, args, size)) {
710         case 0x30:
711                 nv04_gr_set_ctx_val(object, 0x04000000, 0);
712                 return 0;
713         case 0x59:
714                 nv04_gr_set_ctx_val(object, 0x04000000, 0x04000000);
715                 return 0;
716         }
717         return 1;
718 }
719
720 static int
721 nv04_gr_mthd_bind_surf_color(struct nvkm_object *object, u32 mthd,
722                              void *args, u32 size)
723 {
724         switch (nv04_gr_mthd_bind_class(object, args, size)) {
725         case 0x30:
726                 nv04_gr_set_ctx_val(object, 0x02000000, 0);
727                 return 0;
728         case 0x5a:
729                 nv04_gr_set_ctx_val(object, 0x02000000, 0x02000000);
730                 return 0;
731         }
732         return 1;
733 }
734
735 static int
736 nv04_gr_mthd_bind_surf_zeta(struct nvkm_object *object, u32 mthd,
737                             void *args, u32 size)
738 {
739         switch (nv04_gr_mthd_bind_class(object, args, size)) {
740         case 0x30:
741                 nv04_gr_set_ctx_val(object, 0x04000000, 0);
742                 return 0;
743         case 0x5b:
744                 nv04_gr_set_ctx_val(object, 0x04000000, 0x04000000);
745                 return 0;
746         }
747         return 1;
748 }
749
750 static int
751 nv01_gr_mthd_bind_clip(struct nvkm_object *object, u32 mthd,
752                        void *args, u32 size)
753 {
754         switch (nv04_gr_mthd_bind_class(object, args, size)) {
755         case 0x30:
756                 nv04_gr_set_ctx1(object, 0x2000, 0);
757                 return 0;
758         case 0x19:
759                 nv04_gr_set_ctx1(object, 0x2000, 0x2000);
760                 return 0;
761         }
762         return 1;
763 }
764
765 static int
766 nv01_gr_mthd_bind_chroma(struct nvkm_object *object, u32 mthd,
767                          void *args, u32 size)
768 {
769         switch (nv04_gr_mthd_bind_class(object, args, size)) {
770         case 0x30:
771                 nv04_gr_set_ctx1(object, 0x1000, 0);
772                 return 0;
773         /* Yes, for some reason even the old versions of objects
774          * accept 0x57 and not 0x17. Consistency be damned.
775          */
776         case 0x57:
777                 nv04_gr_set_ctx1(object, 0x1000, 0x1000);
778                 return 0;
779         }
780         return 1;
781 }
782
/* Software-method table (method offset -> handler) for the nv03 gdi class. */
static struct nvkm_omthds
nv03_gr_gdi_omthds[] = {
	{ 0x0184, 0x0184, nv01_gr_mthd_bind_patt },
	{ 0x0188, 0x0188, nv04_gr_mthd_bind_rop },
	{ 0x018c, 0x018c, nv04_gr_mthd_bind_beta1 },
	{ 0x0190, 0x0190, nv04_gr_mthd_bind_surf_dst },
	{ 0x02fc, 0x02fc, nv04_gr_mthd_set_operation },
	{}
};
792
/* Software-method table for the nv04 gdi class. */
static struct nvkm_omthds
nv04_gr_gdi_omthds[] = {
	{ 0x0188, 0x0188, nv04_gr_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_gr_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_gr_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_gr_mthd_bind_beta4 },
	{ 0x0198, 0x0198, nv04_gr_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_gr_mthd_set_operation },
	{}
};
803
/* Software-method table for the nv01 blit class. */
static struct nvkm_omthds
nv01_gr_blit_omthds[] = {
	{ 0x0184, 0x0184, nv01_gr_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_gr_mthd_bind_clip },
	{ 0x018c, 0x018c, nv01_gr_mthd_bind_patt },
	{ 0x0190, 0x0190, nv04_gr_mthd_bind_rop },
	{ 0x0194, 0x0194, nv04_gr_mthd_bind_beta1 },
	{ 0x0198, 0x0198, nv04_gr_mthd_bind_surf_dst },
	{ 0x019c, 0x019c, nv04_gr_mthd_bind_surf_src },
	{ 0x02fc, 0x02fc, nv04_gr_mthd_set_operation },
	{}
};
816
/* Software-method table for the nv04 blit class. */
static struct nvkm_omthds
nv04_gr_blit_omthds[] = {
	{ 0x0184, 0x0184, nv01_gr_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_gr_mthd_bind_clip },
	{ 0x018c, 0x018c, nv04_gr_mthd_bind_patt },
	{ 0x0190, 0x0190, nv04_gr_mthd_bind_rop },
	{ 0x0194, 0x0194, nv04_gr_mthd_bind_beta1 },
	{ 0x0198, 0x0198, nv04_gr_mthd_bind_beta4 },
	{ 0x019c, 0x019c, nv04_gr_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_gr_mthd_set_operation },
	{}
};
829
/* Software-method table for the nv04 iifc (indexed image-from-cpu) class;
 * note SET_OPERATION lives at 0x03e4 here, not the usual 0x02fc. */
static struct nvkm_omthds
nv04_gr_iifc_omthds[] = {
	{ 0x0188, 0x0188, nv01_gr_mthd_bind_chroma },
	{ 0x018c, 0x018c, nv01_gr_mthd_bind_clip },
	{ 0x0190, 0x0190, nv04_gr_mthd_bind_patt },
	{ 0x0194, 0x0194, nv04_gr_mthd_bind_rop },
	{ 0x0198, 0x0198, nv04_gr_mthd_bind_beta1 },
	{ 0x019c, 0x019c, nv04_gr_mthd_bind_beta4 },
	{ 0x01a0, 0x01a0, nv04_gr_mthd_bind_surf2d_swzsurf },
	{ 0x03e4, 0x03e4, nv04_gr_mthd_set_operation },
	{}
};
842
/* Software-method table for the nv01 ifc (image-from-cpu) class. */
static struct nvkm_omthds
nv01_gr_ifc_omthds[] = {
	{ 0x0184, 0x0184, nv01_gr_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_gr_mthd_bind_clip },
	{ 0x018c, 0x018c, nv01_gr_mthd_bind_patt },
	{ 0x0190, 0x0190, nv04_gr_mthd_bind_rop },
	{ 0x0194, 0x0194, nv04_gr_mthd_bind_beta1 },
	{ 0x0198, 0x0198, nv04_gr_mthd_bind_surf_dst },
	{ 0x02fc, 0x02fc, nv04_gr_mthd_set_operation },
	{}
};
854
/* Software-method table for the nv04 ifc class. */
static struct nvkm_omthds
nv04_gr_ifc_omthds[] = {
	{ 0x0184, 0x0184, nv01_gr_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_gr_mthd_bind_clip },
	{ 0x018c, 0x018c, nv04_gr_mthd_bind_patt },
	{ 0x0190, 0x0190, nv04_gr_mthd_bind_rop },
	{ 0x0194, 0x0194, nv04_gr_mthd_bind_beta1 },
	{ 0x0198, 0x0198, nv04_gr_mthd_bind_beta4 },
	{ 0x019c, 0x019c, nv04_gr_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_gr_mthd_set_operation },
	{}
};
867
/* Software-method table for the nv03 sifc (stretched image-from-cpu) class. */
static struct nvkm_omthds
nv03_gr_sifc_omthds[] = {
	{ 0x0184, 0x0184, nv01_gr_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_gr_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_gr_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_gr_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_gr_mthd_bind_surf_dst },
	{ 0x02fc, 0x02fc, nv04_gr_mthd_set_operation },
	{}
};
878
/* Software-method table for the nv04 sifc class. */
static struct nvkm_omthds
nv04_gr_sifc_omthds[] = {
	{ 0x0184, 0x0184, nv01_gr_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv04_gr_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_gr_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_gr_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_gr_mthd_bind_beta4 },
	{ 0x0198, 0x0198, nv04_gr_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_gr_mthd_set_operation },
	{}
};
890
/* Software-method table for the nv03 sifm (scaled image-from-memory)
 * class; SET_OPERATION is at 0x0304 for this class. */
static struct nvkm_omthds
nv03_gr_sifm_omthds[] = {
	{ 0x0188, 0x0188, nv01_gr_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_gr_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_gr_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_gr_mthd_bind_surf_dst },
	{ 0x0304, 0x0304, nv04_gr_mthd_set_operation },
	{}
};
900
/* Software-method table for the nv04 sifm class; SET_OPERATION at 0x0304. */
static struct nvkm_omthds
nv04_gr_sifm_omthds[] = {
	{ 0x0188, 0x0188, nv04_gr_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_gr_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_gr_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_gr_mthd_bind_beta4 },
	{ 0x0198, 0x0198, nv04_gr_mthd_bind_surf2d },
	{ 0x0304, 0x0304, nv04_gr_mthd_set_operation },
	{}
};
911
/* Software-method table for the surf3d class: NV04 doesn't implement the
 * clip methods in hardware (see the long comment above), so they are
 * emulated here. */
static struct nvkm_omthds
nv04_gr_surf3d_omthds[] = {
	{ 0x02f8, 0x02f8, nv04_gr_mthd_surf3d_clip_h },
	{ 0x02fc, 0x02fc, nv04_gr_mthd_surf3d_clip_v },
	{}
};
918
/* Software-method table for the nv03 textured-triangle class. */
static struct nvkm_omthds
nv03_gr_ttri_omthds[] = {
	{ 0x0188, 0x0188, nv01_gr_mthd_bind_clip },
	{ 0x018c, 0x018c, nv04_gr_mthd_bind_surf_color },
	{ 0x0190, 0x0190, nv04_gr_mthd_bind_surf_zeta },
	{}
};
926
/* Software method overrides shared by the NV01-style primitive objects
 * (line/tri/rect, classes 0x001c-0x001e).
 */
static struct nvkm_omthds
nv01_gr_prim_omthds[] = {
	{ 0x0184, 0x0184, nv01_gr_mthd_bind_clip },
	{ 0x0188, 0x0188, nv01_gr_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_gr_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_gr_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_gr_mthd_bind_surf_dst },
	{ 0x02fc, 0x02fc, nv04_gr_mthd_set_operation },
	{}
};
937
/* Software method overrides shared by the NV04-style primitive objects
 * (line/tri/rect, classes 0x005c-0x005e): adds beta4/surf2d binds.
 */
static struct nvkm_omthds
nv04_gr_prim_omthds[] = {
	{ 0x0184, 0x0184, nv01_gr_mthd_bind_clip },
	{ 0x0188, 0x0188, nv04_gr_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_gr_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_gr_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_gr_mthd_bind_beta4 },
	{ 0x0198, 0x0198, nv04_gr_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_gr_mthd_set_operation },
	{}
};
949
/* Construct the instance-memory representation of a graphics object:
 * a 16-byte gpuobj whose first word carries the object class, with the
 * remaining words zeroed.
 */
static int
nv04_gr_object_ctor(struct nvkm_object *parent, struct nvkm_object *engine,
		    struct nvkm_oclass *oclass, void *data, u32 size,
		    struct nvkm_object **pobject)
{
	struct nvkm_gpuobj *obj;
	int ret;

	/* 16 bytes, 16-byte aligned */
	ret = nvkm_gpuobj_create(parent, engine, oclass, 0, parent,
				 16, 16, 0, &obj);
	*pobject = nv_object(obj);
	if (ret)
		return ret;

	nv_wo32(obj, 0x00, nv_mclass(obj));	/* object class id */
#ifdef __BIG_ENDIAN
	/* flag big-endian data for the hardware */
	nv_mo32(obj, 0x00, 0x00080000, 0x00080000);
#endif
	nv_wo32(obj, 0x04, 0x00000000);
	nv_wo32(obj, 0x08, 0x00000000);
	nv_wo32(obj, 0x0c, 0x00000000);
	return 0;
}
973
/* Object functions shared by all NV04 graphics object classes; only the
 * constructor is non-generic, everything else uses gpuobj defaults.
 */
struct nvkm_ofuncs
nv04_gr_ofuncs = {
	.ctor = nv04_gr_object_ctor,
	.dtor = _nvkm_gpuobj_dtor,
	.init = _nvkm_gpuobj_init,
	.fini = _nvkm_gpuobj_fini,
	.rd32 = _nvkm_gpuobj_rd32,
	.wr32 = _nvkm_gpuobj_wr32,
};
983
/* Graphics object classes exposed to userspace on NV04; entries with an
 * omthds table intercept some methods in software, the rest are handled
 * entirely by hardware.
 */
static struct nvkm_oclass
nv04_gr_sclass[] = {
	{ 0x0012, &nv04_gr_ofuncs }, /* beta1 */
	{ 0x0017, &nv04_gr_ofuncs }, /* chroma */
	{ 0x0018, &nv04_gr_ofuncs }, /* pattern (nv01) */
	{ 0x0019, &nv04_gr_ofuncs }, /* clip */
	{ 0x001c, &nv04_gr_ofuncs, nv01_gr_prim_omthds }, /* line */
	{ 0x001d, &nv04_gr_ofuncs, nv01_gr_prim_omthds }, /* tri */
	{ 0x001e, &nv04_gr_ofuncs, nv01_gr_prim_omthds }, /* rect */
	{ 0x001f, &nv04_gr_ofuncs, nv01_gr_blit_omthds }, /* blit (nv01) */
	{ 0x0021, &nv04_gr_ofuncs, nv01_gr_ifc_omthds }, /* ifc (nv01) */
	{ 0x0030, &nv04_gr_ofuncs }, /* null */
	{ 0x0036, &nv04_gr_ofuncs, nv03_gr_sifc_omthds }, /* sifc (nv03) */
	{ 0x0037, &nv04_gr_ofuncs, nv03_gr_sifm_omthds }, /* sifm (nv03) */
	{ 0x0038, &nv04_gr_ofuncs }, /* dvd subpicture */
	{ 0x0039, &nv04_gr_ofuncs }, /* m2mf */
	{ 0x0042, &nv04_gr_ofuncs }, /* surf2d */
	{ 0x0043, &nv04_gr_ofuncs }, /* rop */
	{ 0x0044, &nv04_gr_ofuncs }, /* pattern */
	{ 0x0048, &nv04_gr_ofuncs, nv03_gr_ttri_omthds }, /* ttri (nv03) */
	{ 0x004a, &nv04_gr_ofuncs, nv04_gr_gdi_omthds }, /* gdi (nv04) */
	{ 0x004b, &nv04_gr_ofuncs, nv03_gr_gdi_omthds }, /* gdi (nv03) */
	{ 0x0052, &nv04_gr_ofuncs }, /* swzsurf */
	{ 0x0053, &nv04_gr_ofuncs, nv04_gr_surf3d_omthds }, /* surf3d */
	{ 0x0054, &nv04_gr_ofuncs }, /* ttri */
	{ 0x0055, &nv04_gr_ofuncs }, /* mtri */
	{ 0x0057, &nv04_gr_ofuncs }, /* chroma */
	{ 0x0058, &nv04_gr_ofuncs }, /* surf_dst */
	{ 0x0059, &nv04_gr_ofuncs }, /* surf_src */
	{ 0x005a, &nv04_gr_ofuncs }, /* surf_color */
	{ 0x005b, &nv04_gr_ofuncs }, /* surf_zeta */
	{ 0x005c, &nv04_gr_ofuncs, nv04_gr_prim_omthds }, /* line */
	{ 0x005d, &nv04_gr_ofuncs, nv04_gr_prim_omthds }, /* tri */
	{ 0x005e, &nv04_gr_ofuncs, nv04_gr_prim_omthds }, /* rect */
	{ 0x005f, &nv04_gr_ofuncs, nv04_gr_blit_omthds }, /* blit (nv04) */
	{ 0x0060, &nv04_gr_ofuncs, nv04_gr_iifc_omthds }, /* iifc (nv04) */
	{ 0x0061, &nv04_gr_ofuncs, nv04_gr_ifc_omthds }, /* ifc (nv04) */
	{ 0x0064, &nv04_gr_ofuncs }, /* iifc (nv05) */
	{ 0x0065, &nv04_gr_ofuncs }, /* ifc (nv05) */
	{ 0x0066, &nv04_gr_ofuncs }, /* sifc (nv05) */
	{ 0x0072, &nv04_gr_ofuncs }, /* beta4 */
	{ 0x0076, &nv04_gr_ofuncs, nv04_gr_sifc_omthds }, /* sifc (nv04) */
	{ 0x0077, &nv04_gr_ofuncs, nv04_gr_sifm_omthds }, /* sifm (nv04) */
	{},
};
1029
1030 /*******************************************************************************
1031  * PGRAPH context
1032  ******************************************************************************/
1033
1034 static struct nv04_gr_chan *
1035 nv04_gr_channel(struct nv04_gr_priv *priv)
1036 {
1037         struct nv04_gr_chan *chan = NULL;
1038         if (nv_rd32(priv, NV04_PGRAPH_CTX_CONTROL) & 0x00010000) {
1039                 int chid = nv_rd32(priv, NV04_PGRAPH_CTX_USER) >> 24;
1040                 if (chid < ARRAY_SIZE(priv->chan))
1041                         chan = priv->chan[chid];
1042         }
1043         return chan;
1044 }
1045
/* Load a channel's shadowed PGRAPH register state into the hardware and
 * mark @chid as the active channel.
 */
static int
nv04_gr_load_context(struct nv04_gr_chan *chan, int chid)
{
	struct nv04_gr_priv *priv = nv04_gr_priv(chan);
	int i;

	/* restore every context register from the software shadow */
	for (i = 0; i < ARRAY_SIZE(nv04_gr_ctx_regs); i++)
		nv_wr32(priv, nv04_gr_ctx_regs[i], chan->nv04[i]);

	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL, 0x10010100);
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, chid << 24);
	nv_mask(priv, NV04_PGRAPH_FFINTFC_ST2, 0xfff00000, 0x00000000);
	return 0;
}
1060
/* Save the current PGRAPH register state into @chan's software shadow
 * and mark the hardware as owned by no channel (id 0x0f).
 */
static int
nv04_gr_unload_context(struct nv04_gr_chan *chan)
{
	struct nv04_gr_priv *priv = nv04_gr_priv(chan);
	int i;

	/* snapshot every context register into the software shadow */
	for (i = 0; i < ARRAY_SIZE(nv04_gr_ctx_regs); i++)
		chan->nv04[i] = nv_rd32(priv, nv04_gr_ctx_regs[i]);

	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);
	return 0;
}
1074
/* Perform a software context switch: wait for PGRAPH to idle, save the
 * outgoing channel's state, then load the channel that triggered the
 * switch (taken from TRAPPED_ADDR).  Called from the interrupt handler
 * with interrupts handled via priv->lock.
 */
static void
nv04_gr_context_switch(struct nv04_gr_priv *priv)
{
	struct nv04_gr_chan *prev = NULL;
	struct nv04_gr_chan *next = NULL;
	unsigned long flags;
	int chid;

	spin_lock_irqsave(&priv->lock, flags);
	nv04_gr_idle(priv);

	/* If previous context is valid, we need to save it */
	prev = nv04_gr_channel(priv);
	if (prev)
		nv04_gr_unload_context(prev);

	/* load context for next channel */
	chid = (nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR) >> 24) & 0x0f;
	next = priv->chan[chid];
	if (next)
		nv04_gr_load_context(next, chid);

	spin_unlock_irqrestore(&priv->lock, flags);
}
1099
1100 static u32 *ctx_reg(struct nv04_gr_chan *chan, u32 reg)
1101 {
1102         int i;
1103
1104         for (i = 0; i < ARRAY_SIZE(nv04_gr_ctx_regs); i++) {
1105                 if (nv04_gr_ctx_regs[i] == reg)
1106                         return &chan->nv04[i];
1107         }
1108
1109         return NULL;
1110 }
1111
1112 static int
1113 nv04_gr_context_ctor(struct nvkm_object *parent,
1114                      struct nvkm_object *engine,
1115                      struct nvkm_oclass *oclass, void *data, u32 size,
1116                      struct nvkm_object **pobject)
1117 {
1118         struct nvkm_fifo_chan *fifo = (void *)parent;
1119         struct nv04_gr_priv *priv = (void *)engine;
1120         struct nv04_gr_chan *chan;
1121         unsigned long flags;
1122         int ret;
1123
1124         ret = nvkm_object_create(parent, engine, oclass, 0, &chan);
1125         *pobject = nv_object(chan);
1126         if (ret)
1127                 return ret;
1128
1129         spin_lock_irqsave(&priv->lock, flags);
1130         if (priv->chan[fifo->chid]) {
1131                 *pobject = nv_object(priv->chan[fifo->chid]);
1132                 atomic_inc(&(*pobject)->refcount);
1133                 spin_unlock_irqrestore(&priv->lock, flags);
1134                 nvkm_object_destroy(&chan->base);
1135                 return 1;
1136         }
1137
1138         *ctx_reg(chan, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;
1139
1140         priv->chan[fifo->chid] = chan;
1141         chan->chid = fifo->chid;
1142         spin_unlock_irqrestore(&priv->lock, flags);
1143         return 0;
1144 }
1145
/* Destroy a channel's software graphics context, dropping it from the
 * engine's channel table under the context lock first.
 */
static void
nv04_gr_context_dtor(struct nvkm_object *object)
{
	struct nv04_gr_priv *priv = (void *)object->engine;
	struct nv04_gr_chan *chan = (void *)object;
	unsigned long flags;

	spin_lock_irqsave(&priv->lock, flags);
	priv->chan[chan->chid] = NULL;
	spin_unlock_irqrestore(&priv->lock, flags);

	nvkm_object_destroy(&chan->base);
}
1159
/* Suspend/teardown path for a channel context: with the PGRAPH fifo
 * paused, save the context to its shadow if this channel is the one
 * currently loaded in hardware.
 */
static int
nv04_gr_context_fini(struct nvkm_object *object, bool suspend)
{
	struct nv04_gr_priv *priv = (void *)object->engine;
	struct nv04_gr_chan *chan = (void *)object;
	unsigned long flags;

	spin_lock_irqsave(&priv->lock, flags);
	nv_mask(priv, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);	/* pause */
	if (nv04_gr_channel(priv) == chan)
		nv04_gr_unload_context(chan);
	nv_mask(priv, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);	/* resume */
	spin_unlock_irqrestore(&priv->lock, flags);

	return nvkm_object_fini(&chan->base, suspend);
}
1176
/* Engine context class for NV04 PGRAPH. */
static struct nvkm_oclass
nv04_gr_cclass = {
	.handle = NV_ENGCTX(GR, 0x04),
	.ofuncs = &(struct nvkm_ofuncs) {
		.ctor = nv04_gr_context_ctor,
		.dtor = nv04_gr_context_dtor,
		.init = nvkm_object_init,
		.fini = nv04_gr_context_fini,
	},
};
1187
1188 /*******************************************************************************
1189  * PGRAPH engine/subdev functions
1190  ******************************************************************************/
1191
/* Wait for PGRAPH to go idle; returns false (after logging the status
 * register) on timeout.  On NV40 the SYNC_STALL bit is ignored as it
 * may legitimately stay set.
 */
bool
nv04_gr_idle(void *obj)
{
	struct nvkm_gr *gr = nvkm_gr(obj);
	u32 mask = 0xffffffff;

	if (nv_device(obj)->card_type == NV_40)
		mask &= ~NV40_PGRAPH_STATUS_SYNC_STALL;

	if (!nv_wait(gr, NV04_PGRAPH_STATUS, mask, 0)) {
		nv_error(gr, "idle timed out with status 0x%08x\n",
			 nv_rd32(gr, NV04_PGRAPH_STATUS));
		return false;
	}

	return true;
}
1209
/* Decode table for bits of NV03_PGRAPH_INTR in error messages. */
static const struct nvkm_bitfield
nv04_gr_intr_name[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{}
};
1215
/* Decode table for bits of NV03_PGRAPH_NSTATUS in error messages. */
static const struct nvkm_bitfield
nv04_gr_nstatus[] = {
	{ NV04_PGRAPH_NSTATUS_STATE_IN_USE,       "STATE_IN_USE" },
	{ NV04_PGRAPH_NSTATUS_INVALID_STATE,      "INVALID_STATE" },
	{ NV04_PGRAPH_NSTATUS_BAD_ARGUMENT,       "BAD_ARGUMENT" },
	{ NV04_PGRAPH_NSTATUS_PROTECTION_FAULT,   "PROTECTION_FAULT" },
	{}
};
1224
/* Decode table for bits of NV03_PGRAPH_NSOURCE in error messages;
 * shared with other pre-NV50 GR implementations (non-static).
 */
const struct nvkm_bitfield
nv04_gr_nsource[] = {
	{ NV03_PGRAPH_NSOURCE_NOTIFICATION,       "NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DATA_ERROR,         "DATA_ERROR" },
	{ NV03_PGRAPH_NSOURCE_PROTECTION_ERROR,   "PROTECTION_ERROR" },
	{ NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION,    "RANGE_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_COLOR,        "LIMIT_COLOR" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_ZETA,         "LIMIT_ZETA" },
	{ NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD,       "ILLEGAL_MTHD" },
	{ NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION,   "DMA_R_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION,   "DMA_W_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION,   "FORMAT_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION,    "PATCH_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_STATE_INVALID,      "STATE_INVALID" },
	{ NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY,      "DOUBLE_NOTIFY" },
	{ NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE,      "NOTIFY_IN_USE" },
	{ NV03_PGRAPH_NSOURCE_METHOD_CNT,         "METHOD_CNT" },
	{ NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION,   "BFR_NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_A,        "DMA_WIDTH_A" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_B,        "DMA_WIDTH_B" },
	{}
};
1248
/* PGRAPH interrupt handler.
 *
 * Decodes the trapped method (channel/subchannel/method/data) from the
 * TRAPPED_ADDR/TRAPPED_DATA registers, tries to handle ILLEGAL_MTHD
 * notifies in software via the object's method table, services
 * context-switch requests, acks everything, and logs whatever remains
 * unhandled.
 */
static void
nv04_gr_intr(struct nvkm_subdev *subdev)
{
	struct nv04_gr_priv *priv = (void *)subdev;
	struct nv04_gr_chan *chan = NULL;
	struct nvkm_namedb *namedb = NULL;
	struct nvkm_handle *handle = NULL;
	u32 stat = nv_rd32(priv, NV03_PGRAPH_INTR);
	u32 nsource = nv_rd32(priv, NV03_PGRAPH_NSOURCE);
	u32 nstatus = nv_rd32(priv, NV03_PGRAPH_NSTATUS);
	u32 addr = nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR);
	u32 chid = (addr & 0x0f000000) >> 24;
	u32 subc = (addr & 0x0000e000) >> 13;
	u32 mthd = (addr & 0x00001ffc);
	u32 data = nv_rd32(priv, NV04_PGRAPH_TRAPPED_DATA);
	u32 class = nv_rd32(priv, 0x400180 + subc * 4) & 0xff;
	u32 inst = (nv_rd32(priv, 0x40016c) & 0xffff) << 4;
	u32 show = stat;
	unsigned long flags;

	/* look up the trapping channel and its object namespace */
	spin_lock_irqsave(&priv->lock, flags);
	chan = priv->chan[chid];
	if (chan)
		namedb = (void *)nv_pclass(nv_object(chan), NV_NAMEDB_CLASS);
	spin_unlock_irqrestore(&priv->lock, flags);

	if (stat & NV_PGRAPH_INTR_NOTIFY) {
		if (chan && (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD)) {
			/* try the software method handler; on success
			 * suppress the error message for this intr bit */
			handle = nvkm_namedb_get_vinst(namedb, inst);
			if (handle && !nv_call(handle->object, mthd, data))
				show &= ~NV_PGRAPH_INTR_NOTIFY;
		}
	}

	if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
		/* ack before switching so a new request isn't lost */
		nv_wr32(priv, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
		stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		nv04_gr_context_switch(priv);
	}

	/* ack remaining interrupts and re-enable the PGRAPH fifo */
	nv_wr32(priv, NV03_PGRAPH_INTR, stat);
	nv_wr32(priv, NV04_PGRAPH_FIFO, 0x00000001);

	if (show) {
		nv_error(priv, "%s", "");
		nvkm_bitfield_print(nv04_gr_intr_name, show);
		pr_cont(" nsource:");
		nvkm_bitfield_print(nv04_gr_nsource, nsource);
		pr_cont(" nstatus:");
		nvkm_bitfield_print(nv04_gr_nstatus, nstatus);
		pr_cont("\n");
		nv_error(priv,
			 "ch %d [%s] subc %d class 0x%04x mthd 0x%04x data 0x%08x\n",
			 chid, nvkm_client_name(chan), subc, class, mthd,
			 data);
	}

	nvkm_namedb_put(handle);
}
1309
/* Create the NV04 PGRAPH engine object and hook up its interrupt
 * handler, context class and object classes.
 */
static int
nv04_gr_ctor(struct nvkm_object *parent, struct nvkm_object *engine,
	     struct nvkm_oclass *oclass, void *data, u32 size,
	     struct nvkm_object **pobject)
{
	struct nv04_gr_priv *priv;
	int ret;

	ret = nvkm_gr_create(parent, engine, oclass, true, &priv);
	*pobject = nv_object(priv);
	if (ret)
		return ret;

	nv_subdev(priv)->unit = 0x00001000;	/* PMC enable bit for PGRAPH */
	nv_subdev(priv)->intr = nv04_gr_intr;
	nv_engine(priv)->cclass = &nv04_gr_cclass;
	nv_engine(priv)->sclass = nv04_gr_sclass;
	spin_lock_init(&priv->lock);
	return 0;
}
1330
/* Bring PGRAPH out of reset: enable interrupts and program the DEBUG
 * registers with values matching the proprietary driver ("blob"); the
 * inline comments record alternatives seen in the blob and Haiku
 * drivers.
 */
static int
nv04_gr_init(struct nvkm_object *object)
{
	struct nvkm_engine *engine = nv_engine(object);
	struct nv04_gr_priv *priv = (void *)engine;
	int ret;

	ret = nvkm_gr_init(&priv->base);
	if (ret)
		return ret;

	/* Enable PGRAPH interrupts */
	nv_wr32(priv, NV03_PGRAPH_INTR, 0xFFFFFFFF);	/* ack anything pending */
	nv_wr32(priv, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(priv, NV04_PGRAPH_VALID1, 0);
	nv_wr32(priv, NV04_PGRAPH_VALID2, 0);
	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x000001FF);
	nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x1231c000);
	/*1231C000 blob, 001 haiku*/
	/*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_1, 0x72111100);
	/*0x72111100 blob , 01 haiku*/
	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_2, 0x11d5f071);
	/*haiku same*/

	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
	/*haiku and blob 10d4*/

	nv_wr32(priv, NV04_PGRAPH_STATE        , 0xFFFFFFFF);
	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL  , 0x10000100);
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);	/* no channel */

	/* These don't belong here, they're part of a per-channel context */
	nv_wr32(priv, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
	nv_wr32(priv, NV04_PGRAPH_BETA_AND     , 0xFFFFFFFF);
	return 0;
}
1372
/* Engine class for NV04 PGRAPH, registered with the device core. */
struct nvkm_oclass
nv04_gr_oclass = {
	.handle = NV_ENGINE(GR, 0x04),
	.ofuncs = &(struct nvkm_ofuncs) {
		.ctor = nv04_gr_ctor,
		.dtor = _nvkm_gr_dtor,
		.init = nv04_gr_init,
		.fini = _nvkm_gr_fini,
	},
};