drivers/gpu/drm/nouveau/nv04_graph.c
1 /*
2  * Copyright 2007 Stephane Marchesin
3  * All Rights Reserved.
4  *
5  * Permission is hereby granted, free of charge, to any person obtaining a
6  * copy of this software and associated documentation files (the "Software"),
7  * to deal in the Software without restriction, including without limitation
8  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9  * and/or sell copies of the Software, and to permit persons to whom the
10  * Software is furnished to do so, subject to the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the next
13  * paragraph) shall be included in all copies or substantial portions of the
14  * Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
19  * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
20  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22  * DEALINGS IN THE SOFTWARE.
23  */
24
25 #include "drmP.h"
26 #include "drm.h"
27 #include "nouveau_drm.h"
28 #include "nouveau_drv.h"
29 #include "nouveau_hw.h"
30 #include "nouveau_util.h"
31
32 static int  nv04_graph_register(struct drm_device *dev);
33 static void nv04_graph_isr(struct drm_device *dev);
34
35 static uint32_t nv04_graph_ctx_regs[] = {
36         0x0040053c,
37         0x00400544,
38         0x00400540,
39         0x00400548,
40         NV04_PGRAPH_CTX_SWITCH1,
41         NV04_PGRAPH_CTX_SWITCH2,
42         NV04_PGRAPH_CTX_SWITCH3,
43         NV04_PGRAPH_CTX_SWITCH4,
44         NV04_PGRAPH_CTX_CACHE1,
45         NV04_PGRAPH_CTX_CACHE2,
46         NV04_PGRAPH_CTX_CACHE3,
47         NV04_PGRAPH_CTX_CACHE4,
48         0x00400184,
49         0x004001a4,
50         0x004001c4,
51         0x004001e4,
52         0x00400188,
53         0x004001a8,
54         0x004001c8,
55         0x004001e8,
56         0x0040018c,
57         0x004001ac,
58         0x004001cc,
59         0x004001ec,
60         0x00400190,
61         0x004001b0,
62         0x004001d0,
63         0x004001f0,
64         0x00400194,
65         0x004001b4,
66         0x004001d4,
67         0x004001f4,
68         0x00400198,
69         0x004001b8,
70         0x004001d8,
71         0x004001f8,
72         0x0040019c,
73         0x004001bc,
74         0x004001dc,
75         0x004001fc,
76         0x00400174,
77         NV04_PGRAPH_DMA_START_0,
78         NV04_PGRAPH_DMA_START_1,
79         NV04_PGRAPH_DMA_LENGTH,
80         NV04_PGRAPH_DMA_MISC,
81         NV04_PGRAPH_DMA_PITCH,
82         NV04_PGRAPH_BOFFSET0,
83         NV04_PGRAPH_BBASE0,
84         NV04_PGRAPH_BLIMIT0,
85         NV04_PGRAPH_BOFFSET1,
86         NV04_PGRAPH_BBASE1,
87         NV04_PGRAPH_BLIMIT1,
88         NV04_PGRAPH_BOFFSET2,
89         NV04_PGRAPH_BBASE2,
90         NV04_PGRAPH_BLIMIT2,
91         NV04_PGRAPH_BOFFSET3,
92         NV04_PGRAPH_BBASE3,
93         NV04_PGRAPH_BLIMIT3,
94         NV04_PGRAPH_BOFFSET4,
95         NV04_PGRAPH_BBASE4,
96         NV04_PGRAPH_BLIMIT4,
97         NV04_PGRAPH_BOFFSET5,
98         NV04_PGRAPH_BBASE5,
99         NV04_PGRAPH_BLIMIT5,
100         NV04_PGRAPH_BPITCH0,
101         NV04_PGRAPH_BPITCH1,
102         NV04_PGRAPH_BPITCH2,
103         NV04_PGRAPH_BPITCH3,
104         NV04_PGRAPH_BPITCH4,
105         NV04_PGRAPH_SURFACE,
106         NV04_PGRAPH_STATE,
107         NV04_PGRAPH_BSWIZZLE2,
108         NV04_PGRAPH_BSWIZZLE5,
109         NV04_PGRAPH_BPIXEL,
110         NV04_PGRAPH_NOTIFY,
111         NV04_PGRAPH_PATT_COLOR0,
112         NV04_PGRAPH_PATT_COLOR1,
113         NV04_PGRAPH_PATT_COLORRAM+0x00,
114         NV04_PGRAPH_PATT_COLORRAM+0x04,
115         NV04_PGRAPH_PATT_COLORRAM+0x08,
116         NV04_PGRAPH_PATT_COLORRAM+0x0c,
117         NV04_PGRAPH_PATT_COLORRAM+0x10,
118         NV04_PGRAPH_PATT_COLORRAM+0x14,
119         NV04_PGRAPH_PATT_COLORRAM+0x18,
120         NV04_PGRAPH_PATT_COLORRAM+0x1c,
121         NV04_PGRAPH_PATT_COLORRAM+0x20,
122         NV04_PGRAPH_PATT_COLORRAM+0x24,
123         NV04_PGRAPH_PATT_COLORRAM+0x28,
124         NV04_PGRAPH_PATT_COLORRAM+0x2c,
125         NV04_PGRAPH_PATT_COLORRAM+0x30,
126         NV04_PGRAPH_PATT_COLORRAM+0x34,
127         NV04_PGRAPH_PATT_COLORRAM+0x38,
128         NV04_PGRAPH_PATT_COLORRAM+0x3c,
129         NV04_PGRAPH_PATT_COLORRAM+0x40,
130         NV04_PGRAPH_PATT_COLORRAM+0x44,
131         NV04_PGRAPH_PATT_COLORRAM+0x48,
132         NV04_PGRAPH_PATT_COLORRAM+0x4c,
133         NV04_PGRAPH_PATT_COLORRAM+0x50,
134         NV04_PGRAPH_PATT_COLORRAM+0x54,
135         NV04_PGRAPH_PATT_COLORRAM+0x58,
136         NV04_PGRAPH_PATT_COLORRAM+0x5c,
137         NV04_PGRAPH_PATT_COLORRAM+0x60,
138         NV04_PGRAPH_PATT_COLORRAM+0x64,
139         NV04_PGRAPH_PATT_COLORRAM+0x68,
140         NV04_PGRAPH_PATT_COLORRAM+0x6c,
141         NV04_PGRAPH_PATT_COLORRAM+0x70,
142         NV04_PGRAPH_PATT_COLORRAM+0x74,
143         NV04_PGRAPH_PATT_COLORRAM+0x78,
144         NV04_PGRAPH_PATT_COLORRAM+0x7c,
145         NV04_PGRAPH_PATT_COLORRAM+0x80,
146         NV04_PGRAPH_PATT_COLORRAM+0x84,
147         NV04_PGRAPH_PATT_COLORRAM+0x88,
148         NV04_PGRAPH_PATT_COLORRAM+0x8c,
149         NV04_PGRAPH_PATT_COLORRAM+0x90,
150         NV04_PGRAPH_PATT_COLORRAM+0x94,
151         NV04_PGRAPH_PATT_COLORRAM+0x98,
152         NV04_PGRAPH_PATT_COLORRAM+0x9c,
153         NV04_PGRAPH_PATT_COLORRAM+0xa0,
154         NV04_PGRAPH_PATT_COLORRAM+0xa4,
155         NV04_PGRAPH_PATT_COLORRAM+0xa8,
156         NV04_PGRAPH_PATT_COLORRAM+0xac,
157         NV04_PGRAPH_PATT_COLORRAM+0xb0,
158         NV04_PGRAPH_PATT_COLORRAM+0xb4,
159         NV04_PGRAPH_PATT_COLORRAM+0xb8,
160         NV04_PGRAPH_PATT_COLORRAM+0xbc,
161         NV04_PGRAPH_PATT_COLORRAM+0xc0,
162         NV04_PGRAPH_PATT_COLORRAM+0xc4,
163         NV04_PGRAPH_PATT_COLORRAM+0xc8,
164         NV04_PGRAPH_PATT_COLORRAM+0xcc,
165         NV04_PGRAPH_PATT_COLORRAM+0xd0,
166         NV04_PGRAPH_PATT_COLORRAM+0xd4,
167         NV04_PGRAPH_PATT_COLORRAM+0xd8,
168         NV04_PGRAPH_PATT_COLORRAM+0xdc,
169         NV04_PGRAPH_PATT_COLORRAM+0xe0,
170         NV04_PGRAPH_PATT_COLORRAM+0xe4,
171         NV04_PGRAPH_PATT_COLORRAM+0xe8,
172         NV04_PGRAPH_PATT_COLORRAM+0xec,
173         NV04_PGRAPH_PATT_COLORRAM+0xf0,
174         NV04_PGRAPH_PATT_COLORRAM+0xf4,
175         NV04_PGRAPH_PATT_COLORRAM+0xf8,
176         NV04_PGRAPH_PATT_COLORRAM+0xfc,
177         NV04_PGRAPH_PATTERN,
178         0x0040080c,
179         NV04_PGRAPH_PATTERN_SHAPE,
180         0x00400600,
181         NV04_PGRAPH_ROP3,
182         NV04_PGRAPH_CHROMA,
183         NV04_PGRAPH_BETA_AND,
184         NV04_PGRAPH_BETA_PREMULT,
185         NV04_PGRAPH_CONTROL0,
186         NV04_PGRAPH_CONTROL1,
187         NV04_PGRAPH_CONTROL2,
188         NV04_PGRAPH_BLEND,
189         NV04_PGRAPH_STORED_FMT,
190         NV04_PGRAPH_SOURCE_COLOR,
191         0x00400560,
192         0x00400568,
193         0x00400564,
194         0x0040056c,
195         0x00400400,
196         0x00400480,
197         0x00400404,
198         0x00400484,
199         0x00400408,
200         0x00400488,
201         0x0040040c,
202         0x0040048c,
203         0x00400410,
204         0x00400490,
205         0x00400414,
206         0x00400494,
207         0x00400418,
208         0x00400498,
209         0x0040041c,
210         0x0040049c,
211         0x00400420,
212         0x004004a0,
213         0x00400424,
214         0x004004a4,
215         0x00400428,
216         0x004004a8,
217         0x0040042c,
218         0x004004ac,
219         0x00400430,
220         0x004004b0,
221         0x00400434,
222         0x004004b4,
223         0x00400438,
224         0x004004b8,
225         0x0040043c,
226         0x004004bc,
227         0x00400440,
228         0x004004c0,
229         0x00400444,
230         0x004004c4,
231         0x00400448,
232         0x004004c8,
233         0x0040044c,
234         0x004004cc,
235         0x00400450,
236         0x004004d0,
237         0x00400454,
238         0x004004d4,
239         0x00400458,
240         0x004004d8,
241         0x0040045c,
242         0x004004dc,
243         0x00400460,
244         0x004004e0,
245         0x00400464,
246         0x004004e4,
247         0x00400468,
248         0x004004e8,
249         0x0040046c,
250         0x004004ec,
251         0x00400470,
252         0x004004f0,
253         0x00400474,
254         0x004004f4,
255         0x00400478,
256         0x004004f8,
257         0x0040047c,
258         0x004004fc,
259         0x00400534,
260         0x00400538,
261         0x00400514,
262         0x00400518,
263         0x0040051c,
264         0x00400520,
265         0x00400524,
266         0x00400528,
267         0x0040052c,
268         0x00400530,
269         0x00400d00,
270         0x00400d40,
271         0x00400d80,
272         0x00400d04,
273         0x00400d44,
274         0x00400d84,
275         0x00400d08,
276         0x00400d48,
277         0x00400d88,
278         0x00400d0c,
279         0x00400d4c,
280         0x00400d8c,
281         0x00400d10,
282         0x00400d50,
283         0x00400d90,
284         0x00400d14,
285         0x00400d54,
286         0x00400d94,
287         0x00400d18,
288         0x00400d58,
289         0x00400d98,
290         0x00400d1c,
291         0x00400d5c,
292         0x00400d9c,
293         0x00400d20,
294         0x00400d60,
295         0x00400da0,
296         0x00400d24,
297         0x00400d64,
298         0x00400da4,
299         0x00400d28,
300         0x00400d68,
301         0x00400da8,
302         0x00400d2c,
303         0x00400d6c,
304         0x00400dac,
305         0x00400d30,
306         0x00400d70,
307         0x00400db0,
308         0x00400d34,
309         0x00400d74,
310         0x00400db4,
311         0x00400d38,
312         0x00400d78,
313         0x00400db8,
314         0x00400d3c,
315         0x00400d7c,
316         0x00400dbc,
317         0x00400590,
318         0x00400594,
319         0x00400598,
320         0x0040059c,
321         0x004005a8,
322         0x004005ac,
323         0x004005b0,
324         0x004005b4,
325         0x004005c0,
326         0x004005c4,
327         0x004005c8,
328         0x004005cc,
329         0x004005d0,
330         0x004005d4,
331         0x004005d8,
332         0x004005dc,
333         0x004005e0,
334         NV04_PGRAPH_PASSTHRU_0,
335         NV04_PGRAPH_PASSTHRU_1,
336         NV04_PGRAPH_PASSTHRU_2,
337         NV04_PGRAPH_DVD_COLORFMT,
338         NV04_PGRAPH_SCALED_FORMAT,
339         NV04_PGRAPH_MISC24_0,
340         NV04_PGRAPH_MISC24_1,
341         NV04_PGRAPH_MISC24_2,
342         0x00400500,
343         0x00400504,
344         NV04_PGRAPH_VALID1,
345         NV04_PGRAPH_VALID2,
346         NV04_PGRAPH_DEBUG_3
347 };
348
349 struct graph_state {
350         uint32_t nv04[ARRAY_SIZE(nv04_graph_ctx_regs)];
351 };
352
353 struct nouveau_channel *
354 nv04_graph_channel(struct drm_device *dev)
355 {
356         struct drm_nouveau_private *dev_priv = dev->dev_private;
357         int chid = dev_priv->engine.fifo.channels;
358
359         if (nv_rd32(dev, NV04_PGRAPH_CTX_CONTROL) & 0x00010000)
360                 chid = nv_rd32(dev, NV04_PGRAPH_CTX_USER) >> 24;
361
362         if (chid >= dev_priv->engine.fifo.channels)
363                 return NULL;
364
365         return dev_priv->channels.ptr[chid];
366 }
367
368 static void
369 nv04_graph_context_switch(struct drm_device *dev)
370 {
371         struct drm_nouveau_private *dev_priv = dev->dev_private;
372         struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
373         struct nouveau_channel *chan = NULL;
374         int chid;
375
376         nouveau_wait_for_idle(dev);
377
378         /* If previous context is valid, we need to save it */
379         pgraph->unload_context(dev);
380
381         /* Load context for next channel */
382         chid = dev_priv->engine.fifo.channel_id(dev);
383         chan = dev_priv->channels.ptr[chid];
384         if (chan)
385                 nv04_graph_load_context(chan);
386 }
387
388 static uint32_t *ctx_reg(struct graph_state *ctx, uint32_t reg)
389 {
390         int i;
391
392         for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++) {
393                 if (nv04_graph_ctx_regs[i] == reg)
394                         return &ctx->nv04[i];
395         }
396
397         return NULL;
398 }
399
400 int nv04_graph_create_context(struct nouveau_channel *chan)
401 {
402         struct graph_state *pgraph_ctx;
403         NV_DEBUG(chan->dev, "nv04_graph_create_context %d\n", chan->id);
404
405         chan->pgraph_ctx = pgraph_ctx = kzalloc(sizeof(*pgraph_ctx),
406                                                 GFP_KERNEL);
407         if (pgraph_ctx == NULL)
408                 return -ENOMEM;
409
410         *ctx_reg(pgraph_ctx, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;
411
412         return 0;
413 }
414
415 void nv04_graph_destroy_context(struct nouveau_channel *chan)
416 {
417         struct drm_device *dev = chan->dev;
418         struct drm_nouveau_private *dev_priv = dev->dev_private;
419         struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
420         struct graph_state *pgraph_ctx = chan->pgraph_ctx;
421         unsigned long flags;
422
423         spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
424         pgraph->fifo_access(dev, false);
425
426         /* Unload the context if it's the currently active one */
427         if (pgraph->channel(dev) == chan)
428                 pgraph->unload_context(dev);
429
430         /* Free the context resources */
431         kfree(pgraph_ctx);
432         chan->pgraph_ctx = NULL;
433
434         pgraph->fifo_access(dev, true);
435         spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);
436 }
437
438 int nv04_graph_load_context(struct nouveau_channel *chan)
439 {
440         struct drm_device *dev = chan->dev;
441         struct graph_state *pgraph_ctx = chan->pgraph_ctx;
442         uint32_t tmp;
443         int i;
444
445         for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
446                 nv_wr32(dev, nv04_graph_ctx_regs[i], pgraph_ctx->nv04[i]);
447
448         nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10010100);
449
450         tmp  = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
451         nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp | chan->id << 24);
452
453         tmp = nv_rd32(dev, NV04_PGRAPH_FFINTFC_ST2);
454         nv_wr32(dev, NV04_PGRAPH_FFINTFC_ST2, tmp & 0x000fffff);
455
456         return 0;
457 }
458
459 int
460 nv04_graph_unload_context(struct drm_device *dev)
461 {
462         struct drm_nouveau_private *dev_priv = dev->dev_private;
463         struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
464         struct nouveau_channel *chan = NULL;
465         struct graph_state *ctx;
466         uint32_t tmp;
467         int i;
468
469         chan = pgraph->channel(dev);
470         if (!chan)
471                 return 0;
472         ctx = chan->pgraph_ctx;
473
474         for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
475                 ctx->nv04[i] = nv_rd32(dev, nv04_graph_ctx_regs[i]);
476
477         nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
478         tmp  = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
479         tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
480         nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp);
481         return 0;
482 }
483
484 int nv04_graph_init(struct drm_device *dev)
485 {
486         struct drm_nouveau_private *dev_priv = dev->dev_private;
487         uint32_t tmp;
488         int ret;
489
490         nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
491                         ~NV_PMC_ENABLE_PGRAPH);
492         nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
493                          NV_PMC_ENABLE_PGRAPH);
494
495         ret = nv04_graph_register(dev);
496         if (ret)
497                 return ret;
498
499         /* Enable PGRAPH interrupts */
500         nouveau_irq_register(dev, 12, nv04_graph_isr);
501         nv_wr32(dev, NV03_PGRAPH_INTR, 0xFFFFFFFF);
502         nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);
503
504         nv_wr32(dev, NV04_PGRAPH_VALID1, 0);
505         nv_wr32(dev, NV04_PGRAPH_VALID2, 0);
506         /*nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x000001FF);
507         nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
508         nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x1231c000);
509         /*1231C000 blob, 001 haiku*/
510         /*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
511         nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x72111100);
512         /*0x72111100 blob, 01 haiku*/
513         /*nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
514         nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f071);
515         /*haiku same*/
516
517         /*nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
518         nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
519         /*haiku and blob 10d4*/
520
521         nv_wr32(dev, NV04_PGRAPH_STATE        , 0xFFFFFFFF);
522         nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL  , 0x10000100);
523         tmp  = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
524         tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
525         nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp);
526
527         /* These don't belong here, they're part of a per-channel context */
528         nv_wr32(dev, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
529         nv_wr32(dev, NV04_PGRAPH_BETA_AND     , 0xFFFFFFFF);
530
531         return 0;
532 }
533
534 void nv04_graph_takedown(struct drm_device *dev)
535 {
536         nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
537         nouveau_irq_unregister(dev, 12);
538 }
539
540 void
541 nv04_graph_fifo_access(struct drm_device *dev, bool enabled)
542 {
543         if (enabled)
544                 nv_wr32(dev, NV04_PGRAPH_FIFO,
545                                         nv_rd32(dev, NV04_PGRAPH_FIFO) | 1);
546         else
547                 nv_wr32(dev, NV04_PGRAPH_FIFO,
548                                         nv_rd32(dev, NV04_PGRAPH_FIFO) & ~1);
549 }
550
551 static int
552 nv04_graph_mthd_set_ref(struct nouveau_channel *chan,
553                         u32 class, u32 mthd, u32 data)
554 {
555         atomic_set(&chan->fence.last_sequence_irq, data);
556         return 0;
557 }
558
559 int
560 nv04_graph_mthd_page_flip(struct nouveau_channel *chan,
561                           u32 class, u32 mthd, u32 data)
562 {
563         struct drm_device *dev = chan->dev;
564         struct nouveau_page_flip_state s;
565
566         if (!nouveau_finish_page_flip(chan, &s))
567                 nv_set_crtc_base(dev, s.crtc,
568                                  s.offset + s.y * s.pitch + s.x * s.bpp / 8);
569
570         return 0;
571 }
572
573 /*
574  * Software methods, why they are needed, and how they all work:
575  *
576  * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
577  * 2d engine settings are kept inside the grobjs themselves. The grobjs are
578  * 3 words long on both chips. The grobj format on NV04 is:
579  *
580  * word 0:
581  *  - bits 0-7: class
582  *  - bit 12: color key active
583  *  - bit 13: clip rect active
584  *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
585  *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
586  *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
587  *            NV03_CONTEXT_SURFACE_DST].
588  *  - bits 15-17: 2d operation [aka patch config]
589  *  - bit 24: patch valid [enables rendering using this object]
590  *  - bit 25: surf3d valid [for tex_tri and multitex_tri only]
591  * word 1:
592  *  - bits 0-1: mono format
593  *  - bits 8-13: color format
594  *  - bits 16-31: DMA_NOTIFY instance
595  * word 2:
596  *  - bits 0-15: DMA_A instance
597  *  - bits 16-31: DMA_B instance
598  *
599  * On NV05 it's:
600  *
601  * word 0:
602  *  - bits 0-7: class
603  *  - bit 12: color key active
604  *  - bit 13: clip rect active
605  *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
606  *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
607  *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
608  *            NV03_CONTEXT_SURFACE_DST].
609  *  - bits 15-17: 2d operation [aka patch config]
610  *  - bits 20-22: dither mode
611  *  - bit 24: patch valid [enables rendering using this object]
612  *  - bit 25: surface_dst/surface_color/surf2d/surf3d valid
613  *  - bit 26: surface_src/surface_zeta valid
614  *  - bit 27: pattern valid
615  *  - bit 28: rop valid
616  *  - bit 29: beta1 valid
617  *  - bit 30: beta4 valid
618  * word 1:
619  *  - bits 0-1: mono format
620  *  - bits 8-13: color format
621  *  - bits 16-31: DMA_NOTIFY instance
622  * word 2:
623  *  - bits 0-15: DMA_A instance
624  *  - bits 16-31: DMA_B instance
625  *
626  * NV05 will set/unset the relevant valid bits when you poke the relevant
627  * object-binding methods with an object of the proper type, or with the NULL
628  * type. It'll only allow rendering using the grobj if all needed objects
629  * are bound. The needed set of objects depends on the selected operation: for
630  * example, the rop object is needed by ROP_AND, but not by SRCCOPY_AND.
631  *
632  * NV04 doesn't have these methods implemented at all, and doesn't have the
633  * relevant bits in grobj. Instead, it'll allow rendering whenever bit 24
634  * is set. So we have to emulate them in software, internally keeping the
635  * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04
636  * and the last word isn't actually used for anything, we abuse it for this
637  * purpose.
638  *
639  * Actually, NV05 can optionally check bit 24 too, but we disable this since
640  * there's no use for it.
641  *
642  * For unknown reasons, NV04 implements surf3d binding in hardware as an
643  * exception. Also for unknown reasons, NV04 doesn't implement the clipping
644  * methods on the surf3d object, so we have to emulate them too.
645  */
646
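/*
 * Purely illustrative sketch, not part of the original driver: tiny helpers
 * that decode the grobj word-0 fields described in the comment above. The
 * helper names are hypothetical; the bit positions follow the layout
 * documented above and match the masks used in nv04_graph_set_ctx_val().
 */
static inline u32 nv04_grobj_class(u32 word0)
{
	return word0 & 0xff;			/* bits 0-7: object class */
}

static inline u32 nv04_grobj_operation(u32 word0)
{
	return (word0 >> 15) & 0x7;		/* bits 15-17: 2d operation */
}

static inline bool nv04_grobj_patch_valid(u32 word0)
{
	return !!(word0 & 0x01000000);		/* bit 24: patch valid */
}
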
647 static void
648 nv04_graph_set_ctx1(struct nouveau_channel *chan, u32 mask, u32 value)
649 {
650         struct drm_device *dev = chan->dev;
651         u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4;
652         int subc = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
653         u32 tmp;
654
655         tmp  = nv_ri32(dev, instance);
656         tmp &= ~mask;
657         tmp |= value;
658
659         nv_wi32(dev, instance, tmp);
660         nv_wr32(dev, NV04_PGRAPH_CTX_SWITCH1, tmp);
661         nv_wr32(dev, NV04_PGRAPH_CTX_CACHE1 + (subc<<2), tmp);
662 }
663
664 static void
665 nv04_graph_set_ctx_val(struct nouveau_channel *chan, u32 mask, u32 value)
666 {
667         struct drm_device *dev = chan->dev;
668         u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4;
669         u32 tmp, ctx1;
670         int class, op, valid = 1;
671
672         ctx1 = nv_ri32(dev, instance);
673         class = ctx1 & 0xff;
674         op = (ctx1 >> 15) & 7;
675         tmp  = nv_ri32(dev, instance + 0xc);
676         tmp &= ~mask;
677         tmp |= value;
678         nv_wi32(dev, instance + 0xc, tmp);
679
680         /* check for valid surf2d/surf_dst/surf_color */
681         if (!(tmp & 0x02000000))
682                 valid = 0;
683         /* check for valid surf_src/surf_zeta */
684         if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
685                 valid = 0;
686
687         switch (op) {
688         /* SRCCOPY_AND, SRCCOPY: no extra objects required */
689         case 0:
690         case 3:
691                 break;
692         /* ROP_AND: requires pattern and rop */
693         case 1:
694                 if (!(tmp & 0x18000000))
695                         valid = 0;
696                 break;
697         /* BLEND_AND: requires beta1 */
698         case 2:
699                 if (!(tmp & 0x20000000))
700                         valid = 0;
701                 break;
702         /* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
703         case 4:
704         case 5:
705                 if (!(tmp & 0x40000000))
706                         valid = 0;
707                 break;
708         }
709
710         nv04_graph_set_ctx1(chan, 0x01000000, valid << 24);
711 }
712
713 static int
714 nv04_graph_mthd_set_operation(struct nouveau_channel *chan,
715                               u32 class, u32 mthd, u32 data)
716 {
717         if (data > 5)
718                 return 1;
719         /* Old versions of the objects only accept the first three operations. */
720         if (data > 2 && class < 0x40)
721                 return 1;
722         nv04_graph_set_ctx1(chan, 0x00038000, data << 15);
723         /* changing operation changes set of objects needed for validation */
724         nv04_graph_set_ctx_val(chan, 0, 0);
725         return 0;
726 }
727
728 static int
729 nv04_graph_mthd_surf3d_clip_h(struct nouveau_channel *chan,
730                               u32 class, u32 mthd, u32 data)
731 {
732         uint32_t min = data & 0xffff, max;
733         uint32_t w = data >> 16;
734         if (min & 0x8000)
735                 /* too large */
736                 return 1;
737         if (w & 0x8000)
738                 /* yes, it accepts negative for some reason. */
739                 w |= 0xffff0000;
740         max = min + w;
741         max &= 0x3ffff;
742         nv_wr32(chan->dev, 0x40053c, min);
743         nv_wr32(chan->dev, 0x400544, max);
744         return 0;
745 }
746
747 static int
748 nv04_graph_mthd_surf3d_clip_v(struct nouveau_channel *chan,
749                               u32 class, u32 mthd, u32 data)
750 {
751         uint32_t min = data & 0xffff, max;
752         uint32_t w = data >> 16;
753         if (min & 0x8000)
754                 /* too large */
755                 return 1;
756         if (w & 0x8000)
757                 /* yes, it accepts negative for some reason. */
758                 w |= 0xffff0000;
759         max = min + w;
760         max &= 0x3ffff;
761         nv_wr32(chan->dev, 0x400540, min);
762         nv_wr32(chan->dev, 0x400548, max);
763         return 0;
764 }
765
766 static int
767 nv04_graph_mthd_bind_surf2d(struct nouveau_channel *chan,
768                             u32 class, u32 mthd, u32 data)
769 {
770         switch (nv_ri32(chan->dev, data << 4) & 0xff) {
771         case 0x30:
772                 nv04_graph_set_ctx1(chan, 0x00004000, 0);
773                 nv04_graph_set_ctx_val(chan, 0x02000000, 0);
774                 return 0;
775         case 0x42:
776                 nv04_graph_set_ctx1(chan, 0x00004000, 0);
777                 nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
778                 return 0;
779         }
780         return 1;
781 }
782
783 static int
784 nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_channel *chan,
785                                     u32 class, u32 mthd, u32 data)
786 {
787         switch (nv_ri32(chan->dev, data << 4) & 0xff) {
788         case 0x30:
789                 nv04_graph_set_ctx1(chan, 0x00004000, 0);
790                 nv04_graph_set_ctx_val(chan, 0x02000000, 0);
791                 return 0;
792         case 0x42:
793                 nv04_graph_set_ctx1(chan, 0x00004000, 0);
794                 nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
795                 return 0;
796         case 0x52:
797                 nv04_graph_set_ctx1(chan, 0x00004000, 0x00004000);
798                 nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
799                 return 0;
800         }
801         return 1;
802 }
803
804 static int
805 nv04_graph_mthd_bind_nv01_patt(struct nouveau_channel *chan,
806                                u32 class, u32 mthd, u32 data)
807 {
808         switch (nv_ri32(chan->dev, data << 4) & 0xff) {
809         case 0x30:
810                 nv04_graph_set_ctx_val(chan, 0x08000000, 0);
811                 return 0;
812         case 0x18:
813                 nv04_graph_set_ctx_val(chan, 0x08000000, 0x08000000);
814                 return 0;
815         }
816         return 1;
817 }
818
819 static int
820 nv04_graph_mthd_bind_nv04_patt(struct nouveau_channel *chan,
821                                u32 class, u32 mthd, u32 data)
822 {
823         switch (nv_ri32(chan->dev, data << 4) & 0xff) {
824         case 0x30:
825                 nv04_graph_set_ctx_val(chan, 0x08000000, 0);
826                 return 0;
827         case 0x44:
828                 nv04_graph_set_ctx_val(chan, 0x08000000, 0x08000000);
829                 return 0;
830         }
831         return 1;
832 }
833
834 static int
835 nv04_graph_mthd_bind_rop(struct nouveau_channel *chan,
836                          u32 class, u32 mthd, u32 data)
837 {
838         switch (nv_ri32(chan->dev, data << 4) & 0xff) {
839         case 0x30:
840                 nv04_graph_set_ctx_val(chan, 0x10000000, 0);
841                 return 0;
842         case 0x43:
843                 nv04_graph_set_ctx_val(chan, 0x10000000, 0x10000000);
844                 return 0;
845         }
846         return 1;
847 }
848
849 static int
850 nv04_graph_mthd_bind_beta1(struct nouveau_channel *chan,
851                            u32 class, u32 mthd, u32 data)
852 {
853         switch (nv_ri32(chan->dev, data << 4) & 0xff) {
854         case 0x30:
855                 nv04_graph_set_ctx_val(chan, 0x20000000, 0);
856                 return 0;
857         case 0x12:
858                 nv04_graph_set_ctx_val(chan, 0x20000000, 0x20000000);
859                 return 0;
860         }
861         return 1;
862 }
863
864 static int
865 nv04_graph_mthd_bind_beta4(struct nouveau_channel *chan,
866                            u32 class, u32 mthd, u32 data)
867 {
868         switch (nv_ri32(chan->dev, data << 4) & 0xff) {
869         case 0x30:
870                 nv04_graph_set_ctx_val(chan, 0x40000000, 0);
871                 return 0;
872         case 0x72:
873                 nv04_graph_set_ctx_val(chan, 0x40000000, 0x40000000);
874                 return 0;
875         }
876         return 1;
877 }
878
879 static int
880 nv04_graph_mthd_bind_surf_dst(struct nouveau_channel *chan,
881                               u32 class, u32 mthd, u32 data)
882 {
883         switch (nv_ri32(chan->dev, data << 4) & 0xff) {
884         case 0x30:
885                 nv04_graph_set_ctx_val(chan, 0x02000000, 0);
886                 return 0;
887         case 0x58:
888                 nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
889                 return 0;
890         }
891         return 1;
892 }
893
894 static int
895 nv04_graph_mthd_bind_surf_src(struct nouveau_channel *chan,
896                               u32 class, u32 mthd, u32 data)
897 {
898         switch (nv_ri32(chan->dev, data << 4) & 0xff) {
899         case 0x30:
900                 nv04_graph_set_ctx_val(chan, 0x04000000, 0);
901                 return 0;
902         case 0x59:
903                 nv04_graph_set_ctx_val(chan, 0x04000000, 0x04000000);
904                 return 0;
905         }
906         return 1;
907 }
908
909 static int
910 nv04_graph_mthd_bind_surf_color(struct nouveau_channel *chan,
911                                 u32 class, u32 mthd, u32 data)
912 {
913         switch (nv_ri32(chan->dev, data << 4) & 0xff) {
914         case 0x30:
915                 nv04_graph_set_ctx_val(chan, 0x02000000, 0);
916                 return 0;
917         case 0x5a:
918                 nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
919                 return 0;
920         }
921         return 1;
922 }
923
924 static int
925 nv04_graph_mthd_bind_surf_zeta(struct nouveau_channel *chan,
926                                u32 class, u32 mthd, u32 data)
927 {
928         switch (nv_ri32(chan->dev, data << 4) & 0xff) {
929         case 0x30:
930                 nv04_graph_set_ctx_val(chan, 0x04000000, 0);
931                 return 0;
932         case 0x5b:
933                 nv04_graph_set_ctx_val(chan, 0x04000000, 0x04000000);
934                 return 0;
935         }
936         return 1;
937 }
938
939 static int
940 nv04_graph_mthd_bind_clip(struct nouveau_channel *chan,
941                           u32 class, u32 mthd, u32 data)
942 {
943         switch (nv_ri32(chan->dev, data << 4) & 0xff) {
944         case 0x30:
945                 nv04_graph_set_ctx1(chan, 0x2000, 0);
946                 return 0;
947         case 0x19:
948                 nv04_graph_set_ctx1(chan, 0x2000, 0x2000);
949                 return 0;
950         }
951         return 1;
952 }
953
954 static int
955 nv04_graph_mthd_bind_chroma(struct nouveau_channel *chan,
956                             u32 class, u32 mthd, u32 data)
957 {
958         switch (nv_ri32(chan->dev, data << 4) & 0xff) {
959         case 0x30:
960                 nv04_graph_set_ctx1(chan, 0x1000, 0);
961                 return 0;
962         /* Yes, for some reason even the old versions of objects
963          * accept 0x57 and not 0x17. Consistency be damned.
964          */
965         case 0x57:
966                 nv04_graph_set_ctx1(chan, 0x1000, 0x1000);
967                 return 0;
968         }
969         return 1;
970 }
971
972 static int
973 nv04_graph_register(struct drm_device *dev)
974 {
975         struct drm_nouveau_private *dev_priv = dev->dev_private;
976
977         if (dev_priv->engine.graph.registered)
978                 return 0;
979
980         /* dvd subpicture */
981         NVOBJ_CLASS(dev, 0x0038, GR);
982
983         /* m2mf */
984         NVOBJ_CLASS(dev, 0x0039, GR);
985
986         /* nv03 gdirect */
987         NVOBJ_CLASS(dev, 0x004b, GR);
988         NVOBJ_MTHD (dev, 0x004b, 0x0184, nv04_graph_mthd_bind_nv01_patt);
989         NVOBJ_MTHD (dev, 0x004b, 0x0188, nv04_graph_mthd_bind_rop);
990         NVOBJ_MTHD (dev, 0x004b, 0x018c, nv04_graph_mthd_bind_beta1);
991         NVOBJ_MTHD (dev, 0x004b, 0x0190, nv04_graph_mthd_bind_surf_dst);
992         NVOBJ_MTHD (dev, 0x004b, 0x02fc, nv04_graph_mthd_set_operation);
993
994         /* nv04 gdirect */
995         NVOBJ_CLASS(dev, 0x004a, GR);
996         NVOBJ_MTHD (dev, 0x004a, 0x0188, nv04_graph_mthd_bind_nv04_patt);
997         NVOBJ_MTHD (dev, 0x004a, 0x018c, nv04_graph_mthd_bind_rop);
998         NVOBJ_MTHD (dev, 0x004a, 0x0190, nv04_graph_mthd_bind_beta1);
999         NVOBJ_MTHD (dev, 0x004a, 0x0194, nv04_graph_mthd_bind_beta4);
1000         NVOBJ_MTHD (dev, 0x004a, 0x0198, nv04_graph_mthd_bind_surf2d);
1001         NVOBJ_MTHD (dev, 0x004a, 0x02fc, nv04_graph_mthd_set_operation);
1002
1003         /* nv01 imageblit */
1004         NVOBJ_CLASS(dev, 0x001f, GR);
1005         NVOBJ_MTHD (dev, 0x001f, 0x0184, nv04_graph_mthd_bind_chroma);
1006         NVOBJ_MTHD (dev, 0x001f, 0x0188, nv04_graph_mthd_bind_clip);
1007         NVOBJ_MTHD (dev, 0x001f, 0x018c, nv04_graph_mthd_bind_nv01_patt);
1008         NVOBJ_MTHD (dev, 0x001f, 0x0190, nv04_graph_mthd_bind_rop);
1009         NVOBJ_MTHD (dev, 0x001f, 0x0194, nv04_graph_mthd_bind_beta1);
1010         NVOBJ_MTHD (dev, 0x001f, 0x0198, nv04_graph_mthd_bind_surf_dst);
1011         NVOBJ_MTHD (dev, 0x001f, 0x019c, nv04_graph_mthd_bind_surf_src);
1012         NVOBJ_MTHD (dev, 0x001f, 0x02fc, nv04_graph_mthd_set_operation);
1013
1014         /* nv04 imageblit */
1015         NVOBJ_CLASS(dev, 0x005f, GR);
1016         NVOBJ_MTHD (dev, 0x005f, 0x0184, nv04_graph_mthd_bind_chroma);
1017         NVOBJ_MTHD (dev, 0x005f, 0x0188, nv04_graph_mthd_bind_clip);
1018         NVOBJ_MTHD (dev, 0x005f, 0x018c, nv04_graph_mthd_bind_nv04_patt);
1019         NVOBJ_MTHD (dev, 0x005f, 0x0190, nv04_graph_mthd_bind_rop);
1020         NVOBJ_MTHD (dev, 0x005f, 0x0194, nv04_graph_mthd_bind_beta1);
1021         NVOBJ_MTHD (dev, 0x005f, 0x0198, nv04_graph_mthd_bind_beta4);
1022         NVOBJ_MTHD (dev, 0x005f, 0x019c, nv04_graph_mthd_bind_surf2d);
1023         NVOBJ_MTHD (dev, 0x005f, 0x02fc, nv04_graph_mthd_set_operation);
1024
1025         /* nv04 iifc */
1026         NVOBJ_CLASS(dev, 0x0060, GR);
1027         NVOBJ_MTHD (dev, 0x0060, 0x0188, nv04_graph_mthd_bind_chroma);
1028         NVOBJ_MTHD (dev, 0x0060, 0x018c, nv04_graph_mthd_bind_clip);
1029         NVOBJ_MTHD (dev, 0x0060, 0x0190, nv04_graph_mthd_bind_nv04_patt);
1030         NVOBJ_MTHD (dev, 0x0060, 0x0194, nv04_graph_mthd_bind_rop);
1031         NVOBJ_MTHD (dev, 0x0060, 0x0198, nv04_graph_mthd_bind_beta1);
1032         NVOBJ_MTHD (dev, 0x0060, 0x019c, nv04_graph_mthd_bind_beta4);
1033         NVOBJ_MTHD (dev, 0x0060, 0x01a0, nv04_graph_mthd_bind_surf2d_swzsurf);
1034         NVOBJ_MTHD (dev, 0x0060, 0x03e4, nv04_graph_mthd_set_operation);
1035
1036         /* nv05 iifc */
1037         NVOBJ_CLASS(dev, 0x0064, GR);
1038
1039         /* nv01 ifc */
1040         NVOBJ_CLASS(dev, 0x0021, GR);
1041         NVOBJ_MTHD (dev, 0x0021, 0x0184, nv04_graph_mthd_bind_chroma);
1042         NVOBJ_MTHD (dev, 0x0021, 0x0188, nv04_graph_mthd_bind_clip);
1043         NVOBJ_MTHD (dev, 0x0021, 0x018c, nv04_graph_mthd_bind_nv01_patt);
1044         NVOBJ_MTHD (dev, 0x0021, 0x0190, nv04_graph_mthd_bind_rop);
1045         NVOBJ_MTHD (dev, 0x0021, 0x0194, nv04_graph_mthd_bind_beta1);
1046         NVOBJ_MTHD (dev, 0x0021, 0x0198, nv04_graph_mthd_bind_surf_dst);
1047         NVOBJ_MTHD (dev, 0x0021, 0x02fc, nv04_graph_mthd_set_operation);
1048
1049         /* nv04 ifc */
1050         NVOBJ_CLASS(dev, 0x0061, GR);
1051         NVOBJ_MTHD (dev, 0x0061, 0x0184, nv04_graph_mthd_bind_chroma);
1052         NVOBJ_MTHD (dev, 0x0061, 0x0188, nv04_graph_mthd_bind_clip);
1053         NVOBJ_MTHD (dev, 0x0061, 0x018c, nv04_graph_mthd_bind_nv04_patt);
1054         NVOBJ_MTHD (dev, 0x0061, 0x0190, nv04_graph_mthd_bind_rop);
1055         NVOBJ_MTHD (dev, 0x0061, 0x0194, nv04_graph_mthd_bind_beta1);
1056         NVOBJ_MTHD (dev, 0x0061, 0x0198, nv04_graph_mthd_bind_beta4);
1057         NVOBJ_MTHD (dev, 0x0061, 0x019c, nv04_graph_mthd_bind_surf2d);
1058         NVOBJ_MTHD (dev, 0x0061, 0x02fc, nv04_graph_mthd_set_operation);
1059
1060         /* nv05 ifc */
1061         NVOBJ_CLASS(dev, 0x0065, GR);
1062
1063         /* nv03 sifc */
1064         NVOBJ_CLASS(dev, 0x0036, GR);
1065         NVOBJ_MTHD (dev, 0x0036, 0x0184, nv04_graph_mthd_bind_chroma);
1066         NVOBJ_MTHD (dev, 0x0036, 0x0188, nv04_graph_mthd_bind_nv01_patt);
1067         NVOBJ_MTHD (dev, 0x0036, 0x018c, nv04_graph_mthd_bind_rop);
1068         NVOBJ_MTHD (dev, 0x0036, 0x0190, nv04_graph_mthd_bind_beta1);
1069         NVOBJ_MTHD (dev, 0x0036, 0x0194, nv04_graph_mthd_bind_surf_dst);
1070         NVOBJ_MTHD (dev, 0x0036, 0x02fc, nv04_graph_mthd_set_operation);
1071
1072         /* nv04 sifc */
1073         NVOBJ_CLASS(dev, 0x0076, GR);
1074         NVOBJ_MTHD (dev, 0x0076, 0x0184, nv04_graph_mthd_bind_chroma);
1075         NVOBJ_MTHD (dev, 0x0076, 0x0188, nv04_graph_mthd_bind_nv04_patt);
1076         NVOBJ_MTHD (dev, 0x0076, 0x018c, nv04_graph_mthd_bind_rop);
1077         NVOBJ_MTHD (dev, 0x0076, 0x0190, nv04_graph_mthd_bind_beta1);
1078         NVOBJ_MTHD (dev, 0x0076, 0x0194, nv04_graph_mthd_bind_beta4);
1079         NVOBJ_MTHD (dev, 0x0076, 0x0198, nv04_graph_mthd_bind_surf2d);
1080         NVOBJ_MTHD (dev, 0x0076, 0x02fc, nv04_graph_mthd_set_operation);
1081
1082         /* nv05 sifc */
1083         NVOBJ_CLASS(dev, 0x0066, GR);
1084
1085         /* nv03 sifm */
1086         NVOBJ_CLASS(dev, 0x0037, GR);
1087         NVOBJ_MTHD (dev, 0x0037, 0x0188, nv04_graph_mthd_bind_nv01_patt);
1088         NVOBJ_MTHD (dev, 0x0037, 0x018c, nv04_graph_mthd_bind_rop);
1089         NVOBJ_MTHD (dev, 0x0037, 0x0190, nv04_graph_mthd_bind_beta1);
1090         NVOBJ_MTHD (dev, 0x0037, 0x0194, nv04_graph_mthd_bind_surf_dst);
1091         NVOBJ_MTHD (dev, 0x0037, 0x0304, nv04_graph_mthd_set_operation);
1092
1093         /* nv04 sifm */
1094         NVOBJ_CLASS(dev, 0x0077, GR);
1095         NVOBJ_MTHD (dev, 0x0077, 0x0188, nv04_graph_mthd_bind_nv04_patt);
1096         NVOBJ_MTHD (dev, 0x0077, 0x018c, nv04_graph_mthd_bind_rop);
1097         NVOBJ_MTHD (dev, 0x0077, 0x0190, nv04_graph_mthd_bind_beta1);
1098         NVOBJ_MTHD (dev, 0x0077, 0x0194, nv04_graph_mthd_bind_beta4);
1099         NVOBJ_MTHD (dev, 0x0077, 0x0198, nv04_graph_mthd_bind_surf2d_swzsurf);
1100         NVOBJ_MTHD (dev, 0x0077, 0x0304, nv04_graph_mthd_set_operation);
1101
1102         /* null */
1103         NVOBJ_CLASS(dev, 0x0030, GR);
1104
1105         /* surf2d */
1106         NVOBJ_CLASS(dev, 0x0042, GR);
1107
1108         /* rop */
1109         NVOBJ_CLASS(dev, 0x0043, GR);
1110
1111         /* beta1 */
1112         NVOBJ_CLASS(dev, 0x0012, GR);
1113
1114         /* beta4 */
1115         NVOBJ_CLASS(dev, 0x0072, GR);
1116
1117         /* cliprect */
1118         NVOBJ_CLASS(dev, 0x0019, GR);
1119
1120         /* nv01 pattern */
1121         NVOBJ_CLASS(dev, 0x0018, GR);
1122
1123         /* nv04 pattern */
1124         NVOBJ_CLASS(dev, 0x0044, GR);
1125
1126         /* swzsurf */
1127         NVOBJ_CLASS(dev, 0x0052, GR);
1128
1129         /* surf3d */
1130         NVOBJ_CLASS(dev, 0x0053, GR);
1131         NVOBJ_MTHD (dev, 0x0053, 0x02f8, nv04_graph_mthd_surf3d_clip_h);
1132         NVOBJ_MTHD (dev, 0x0053, 0x02fc, nv04_graph_mthd_surf3d_clip_v);
1133
1134         /* nv03 tex_tri */
1135         NVOBJ_CLASS(dev, 0x0048, GR);
1136         NVOBJ_MTHD (dev, 0x0048, 0x0188, nv04_graph_mthd_bind_clip);
1137         NVOBJ_MTHD (dev, 0x0048, 0x018c, nv04_graph_mthd_bind_surf_color);
1138         NVOBJ_MTHD (dev, 0x0048, 0x0190, nv04_graph_mthd_bind_surf_zeta);
1139
1140         /* tex_tri */
1141         NVOBJ_CLASS(dev, 0x0054, GR);
1142
1143         /* multitex_tri */
1144         NVOBJ_CLASS(dev, 0x0055, GR);
1145
1146         /* nv01 chroma */
1147         NVOBJ_CLASS(dev, 0x0017, GR);
1148
1149         /* nv04 chroma */
1150         NVOBJ_CLASS(dev, 0x0057, GR);
1151
1152         /* surf_dst */
1153         NVOBJ_CLASS(dev, 0x0058, GR);
1154
1155         /* surf_src */
1156         NVOBJ_CLASS(dev, 0x0059, GR);
1157
1158         /* surf_color */
1159         NVOBJ_CLASS(dev, 0x005a, GR);
1160
1161         /* surf_zeta */
1162         NVOBJ_CLASS(dev, 0x005b, GR);
1163
1164         /* nv01 line */
1165         NVOBJ_CLASS(dev, 0x001c, GR);
1166         NVOBJ_MTHD (dev, 0x001c, 0x0184, nv04_graph_mthd_bind_clip);
1167         NVOBJ_MTHD (dev, 0x001c, 0x0188, nv04_graph_mthd_bind_nv01_patt);
1168         NVOBJ_MTHD (dev, 0x001c, 0x018c, nv04_graph_mthd_bind_rop);
1169         NVOBJ_MTHD (dev, 0x001c, 0x0190, nv04_graph_mthd_bind_beta1);
1170         NVOBJ_MTHD (dev, 0x001c, 0x0194, nv04_graph_mthd_bind_surf_dst);
1171         NVOBJ_MTHD (dev, 0x001c, 0x02fc, nv04_graph_mthd_set_operation);
1172
1173         /* nv04 line */
1174         NVOBJ_CLASS(dev, 0x005c, GR);
1175         NVOBJ_MTHD (dev, 0x005c, 0x0184, nv04_graph_mthd_bind_clip);
1176         NVOBJ_MTHD (dev, 0x005c, 0x0188, nv04_graph_mthd_bind_nv04_patt);
1177         NVOBJ_MTHD (dev, 0x005c, 0x018c, nv04_graph_mthd_bind_rop);
1178         NVOBJ_MTHD (dev, 0x005c, 0x0190, nv04_graph_mthd_bind_beta1);
1179         NVOBJ_MTHD (dev, 0x005c, 0x0194, nv04_graph_mthd_bind_beta4);
1180         NVOBJ_MTHD (dev, 0x005c, 0x0198, nv04_graph_mthd_bind_surf2d);
1181         NVOBJ_MTHD (dev, 0x005c, 0x02fc, nv04_graph_mthd_set_operation);
1182
1183         /* nv01 tri */
1184         NVOBJ_CLASS(dev, 0x001d, GR);
1185         NVOBJ_MTHD (dev, 0x001d, 0x0184, nv04_graph_mthd_bind_clip);
1186         NVOBJ_MTHD (dev, 0x001d, 0x0188, nv04_graph_mthd_bind_nv01_patt);
1187         NVOBJ_MTHD (dev, 0x001d, 0x018c, nv04_graph_mthd_bind_rop);
1188         NVOBJ_MTHD (dev, 0x001d, 0x0190, nv04_graph_mthd_bind_beta1);
1189         NVOBJ_MTHD (dev, 0x001d, 0x0194, nv04_graph_mthd_bind_surf_dst);
1190         NVOBJ_MTHD (dev, 0x001d, 0x02fc, nv04_graph_mthd_set_operation);
1191
1192         /* nv04 tri */
1193         NVOBJ_CLASS(dev, 0x005d, GR);
1194         NVOBJ_MTHD (dev, 0x005d, 0x0184, nv04_graph_mthd_bind_clip);
1195         NVOBJ_MTHD (dev, 0x005d, 0x0188, nv04_graph_mthd_bind_nv04_patt);
1196         NVOBJ_MTHD (dev, 0x005d, 0x018c, nv04_graph_mthd_bind_rop);
1197         NVOBJ_MTHD (dev, 0x005d, 0x0190, nv04_graph_mthd_bind_beta1);
1198         NVOBJ_MTHD (dev, 0x005d, 0x0194, nv04_graph_mthd_bind_beta4);
1199         NVOBJ_MTHD (dev, 0x005d, 0x0198, nv04_graph_mthd_bind_surf2d);
1200         NVOBJ_MTHD (dev, 0x005d, 0x02fc, nv04_graph_mthd_set_operation);
1201
1202         /* nv01 rect */
1203         NVOBJ_CLASS(dev, 0x001e, GR);
1204         NVOBJ_MTHD (dev, 0x001e, 0x0184, nv04_graph_mthd_bind_clip);
1205         NVOBJ_MTHD (dev, 0x001e, 0x0188, nv04_graph_mthd_bind_nv01_patt);
1206         NVOBJ_MTHD (dev, 0x001e, 0x018c, nv04_graph_mthd_bind_rop);
1207         NVOBJ_MTHD (dev, 0x001e, 0x0190, nv04_graph_mthd_bind_beta1);
1208         NVOBJ_MTHD (dev, 0x001e, 0x0194, nv04_graph_mthd_bind_surf_dst);
1209         NVOBJ_MTHD (dev, 0x001e, 0x02fc, nv04_graph_mthd_set_operation);
1210
1211         /* nv04 rect */
1212         NVOBJ_CLASS(dev, 0x005e, GR);
1213         NVOBJ_MTHD (dev, 0x005e, 0x0184, nv04_graph_mthd_bind_clip);
1214         NVOBJ_MTHD (dev, 0x005e, 0x0188, nv04_graph_mthd_bind_nv04_patt);
1215         NVOBJ_MTHD (dev, 0x005e, 0x018c, nv04_graph_mthd_bind_rop);
1216         NVOBJ_MTHD (dev, 0x005e, 0x0190, nv04_graph_mthd_bind_beta1);
1217         NVOBJ_MTHD (dev, 0x005e, 0x0194, nv04_graph_mthd_bind_beta4);
1218         NVOBJ_MTHD (dev, 0x005e, 0x0198, nv04_graph_mthd_bind_surf2d);
1219         NVOBJ_MTHD (dev, 0x005e, 0x02fc, nv04_graph_mthd_set_operation);
1220
1221         /* nvsw */
1222         NVOBJ_CLASS(dev, 0x506e, SW);
1223         NVOBJ_MTHD (dev, 0x506e, 0x0150, nv04_graph_mthd_set_ref);
1224         NVOBJ_MTHD (dev, 0x506e, 0x0500, nv04_graph_mthd_page_flip);
1225
1226         dev_priv->engine.graph.registered = true;
1227         return 0;
1228 }
1229
1230 static struct nouveau_bitfield nv04_graph_intr[] = {
1231         { NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
1232         {}
1233 };
1234
1235 static struct nouveau_bitfield nv04_graph_nstatus[] =
1236 {
1237         { NV04_PGRAPH_NSTATUS_STATE_IN_USE,       "STATE_IN_USE" },
1238         { NV04_PGRAPH_NSTATUS_INVALID_STATE,      "INVALID_STATE" },
1239         { NV04_PGRAPH_NSTATUS_BAD_ARGUMENT,       "BAD_ARGUMENT" },
1240         { NV04_PGRAPH_NSTATUS_PROTECTION_FAULT,   "PROTECTION_FAULT" },
1241         {}
1242 };
1243
1244 struct nouveau_bitfield nv04_graph_nsource[] =
1245 {
1246         { NV03_PGRAPH_NSOURCE_NOTIFICATION,       "NOTIFICATION" },
1247         { NV03_PGRAPH_NSOURCE_DATA_ERROR,         "DATA_ERROR" },
1248         { NV03_PGRAPH_NSOURCE_PROTECTION_ERROR,   "PROTECTION_ERROR" },
1249         { NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION,    "RANGE_EXCEPTION" },
1250         { NV03_PGRAPH_NSOURCE_LIMIT_COLOR,        "LIMIT_COLOR" },
1251         { NV03_PGRAPH_NSOURCE_LIMIT_ZETA,         "LIMIT_ZETA" },
1252         { NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD,       "ILLEGAL_MTHD" },
1253         { NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION,   "DMA_R_PROTECTION" },
1254         { NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION,   "DMA_W_PROTECTION" },
1255         { NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION,   "FORMAT_EXCEPTION" },
1256         { NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION,    "PATCH_EXCEPTION" },
1257         { NV03_PGRAPH_NSOURCE_STATE_INVALID,      "STATE_INVALID" },
1258         { NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY,      "DOUBLE_NOTIFY" },
1259         { NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE,      "NOTIFY_IN_USE" },
1260         { NV03_PGRAPH_NSOURCE_METHOD_CNT,         "METHOD_CNT" },
1261         { NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION,   "BFR_NOTIFICATION" },
1262         { NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
1263         { NV03_PGRAPH_NSOURCE_DMA_WIDTH_A,        "DMA_WIDTH_A" },
1264         { NV03_PGRAPH_NSOURCE_DMA_WIDTH_B,        "DMA_WIDTH_B" },
1265         {}
1266 };
1267
1268 static void
1269 nv04_graph_isr(struct drm_device *dev)
1270 {
1271         u32 stat;
1272
1273         while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
1274                 u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
1275                 u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
1276                 u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
1277                 u32 chid = (addr & 0x0f000000) >> 24;
1278                 u32 subc = (addr & 0x0000e000) >> 13;
1279                 u32 mthd = (addr & 0x00001ffc);
1280                 u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
1281                 u32 class = nv_rd32(dev, 0x400180 + subc * 4) & 0xff;
1282                 u32 show = stat;
1283
1284                 if (stat & NV_PGRAPH_INTR_NOTIFY) {
1285                         if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
1286                                 if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
1287                                         show &= ~NV_PGRAPH_INTR_NOTIFY;
1288                         }
1289                 }
1290
1291                 if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
1292                         nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
1293                         stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
1294                         show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
1295                         nv04_graph_context_switch(dev);
1296                 }
1297
1298                 nv_wr32(dev, NV03_PGRAPH_INTR, stat);
1299                 nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);
1300
1301                 if (show && nouveau_ratelimit()) {
1302                         NV_INFO(dev, "PGRAPH -");
1303                         nouveau_bitfield_print(nv04_graph_intr, show);
1304                         printk(" nsource:");
1305                         nouveau_bitfield_print(nv04_graph_nsource, nsource);
1306                         printk(" nstatus:");
1307                         nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
1308                         printk("\n");
1309                         NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
1310                                      "mthd 0x%04x data 0x%08x\n",
1311                                 chid, subc, class, mthd, data);
1312                 }
1313         }
1314 }