2 * Copyright 2007 Stephane Marchesin
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice (including the next
13 * paragraph) shall be included in all copies or substantial portions of the Software.
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
27 #include "nouveau_drm.h"
28 #include "nouveau_drv.h"
29 #include "nouveau_hw.h"
30 #include "nouveau_util.h"
31 #include "nouveau_ramht.h"
33 struct nv04_graph_engine
{
34 struct nouveau_exec_engine base
;
37 static uint32_t nv04_graph_ctx_regs
[] = {
42 NV04_PGRAPH_CTX_SWITCH1
,
43 NV04_PGRAPH_CTX_SWITCH2
,
44 NV04_PGRAPH_CTX_SWITCH3
,
45 NV04_PGRAPH_CTX_SWITCH4
,
46 NV04_PGRAPH_CTX_CACHE1
,
47 NV04_PGRAPH_CTX_CACHE2
,
48 NV04_PGRAPH_CTX_CACHE3
,
49 NV04_PGRAPH_CTX_CACHE4
,
79 NV04_PGRAPH_DMA_START_0
,
80 NV04_PGRAPH_DMA_START_1
,
81 NV04_PGRAPH_DMA_LENGTH
,
83 NV04_PGRAPH_DMA_PITCH
,
109 NV04_PGRAPH_BSWIZZLE2
,
110 NV04_PGRAPH_BSWIZZLE5
,
113 NV04_PGRAPH_PATT_COLOR0
,
114 NV04_PGRAPH_PATT_COLOR1
,
115 NV04_PGRAPH_PATT_COLORRAM
+0x00,
116 NV04_PGRAPH_PATT_COLORRAM
+0x04,
117 NV04_PGRAPH_PATT_COLORRAM
+0x08,
118 NV04_PGRAPH_PATT_COLORRAM
+0x0c,
119 NV04_PGRAPH_PATT_COLORRAM
+0x10,
120 NV04_PGRAPH_PATT_COLORRAM
+0x14,
121 NV04_PGRAPH_PATT_COLORRAM
+0x18,
122 NV04_PGRAPH_PATT_COLORRAM
+0x1c,
123 NV04_PGRAPH_PATT_COLORRAM
+0x20,
124 NV04_PGRAPH_PATT_COLORRAM
+0x24,
125 NV04_PGRAPH_PATT_COLORRAM
+0x28,
126 NV04_PGRAPH_PATT_COLORRAM
+0x2c,
127 NV04_PGRAPH_PATT_COLORRAM
+0x30,
128 NV04_PGRAPH_PATT_COLORRAM
+0x34,
129 NV04_PGRAPH_PATT_COLORRAM
+0x38,
130 NV04_PGRAPH_PATT_COLORRAM
+0x3c,
131 NV04_PGRAPH_PATT_COLORRAM
+0x40,
132 NV04_PGRAPH_PATT_COLORRAM
+0x44,
133 NV04_PGRAPH_PATT_COLORRAM
+0x48,
134 NV04_PGRAPH_PATT_COLORRAM
+0x4c,
135 NV04_PGRAPH_PATT_COLORRAM
+0x50,
136 NV04_PGRAPH_PATT_COLORRAM
+0x54,
137 NV04_PGRAPH_PATT_COLORRAM
+0x58,
138 NV04_PGRAPH_PATT_COLORRAM
+0x5c,
139 NV04_PGRAPH_PATT_COLORRAM
+0x60,
140 NV04_PGRAPH_PATT_COLORRAM
+0x64,
141 NV04_PGRAPH_PATT_COLORRAM
+0x68,
142 NV04_PGRAPH_PATT_COLORRAM
+0x6c,
143 NV04_PGRAPH_PATT_COLORRAM
+0x70,
144 NV04_PGRAPH_PATT_COLORRAM
+0x74,
145 NV04_PGRAPH_PATT_COLORRAM
+0x78,
146 NV04_PGRAPH_PATT_COLORRAM
+0x7c,
147 NV04_PGRAPH_PATT_COLORRAM
+0x80,
148 NV04_PGRAPH_PATT_COLORRAM
+0x84,
149 NV04_PGRAPH_PATT_COLORRAM
+0x88,
150 NV04_PGRAPH_PATT_COLORRAM
+0x8c,
151 NV04_PGRAPH_PATT_COLORRAM
+0x90,
152 NV04_PGRAPH_PATT_COLORRAM
+0x94,
153 NV04_PGRAPH_PATT_COLORRAM
+0x98,
154 NV04_PGRAPH_PATT_COLORRAM
+0x9c,
155 NV04_PGRAPH_PATT_COLORRAM
+0xa0,
156 NV04_PGRAPH_PATT_COLORRAM
+0xa4,
157 NV04_PGRAPH_PATT_COLORRAM
+0xa8,
158 NV04_PGRAPH_PATT_COLORRAM
+0xac,
159 NV04_PGRAPH_PATT_COLORRAM
+0xb0,
160 NV04_PGRAPH_PATT_COLORRAM
+0xb4,
161 NV04_PGRAPH_PATT_COLORRAM
+0xb8,
162 NV04_PGRAPH_PATT_COLORRAM
+0xbc,
163 NV04_PGRAPH_PATT_COLORRAM
+0xc0,
164 NV04_PGRAPH_PATT_COLORRAM
+0xc4,
165 NV04_PGRAPH_PATT_COLORRAM
+0xc8,
166 NV04_PGRAPH_PATT_COLORRAM
+0xcc,
167 NV04_PGRAPH_PATT_COLORRAM
+0xd0,
168 NV04_PGRAPH_PATT_COLORRAM
+0xd4,
169 NV04_PGRAPH_PATT_COLORRAM
+0xd8,
170 NV04_PGRAPH_PATT_COLORRAM
+0xdc,
171 NV04_PGRAPH_PATT_COLORRAM
+0xe0,
172 NV04_PGRAPH_PATT_COLORRAM
+0xe4,
173 NV04_PGRAPH_PATT_COLORRAM
+0xe8,
174 NV04_PGRAPH_PATT_COLORRAM
+0xec,
175 NV04_PGRAPH_PATT_COLORRAM
+0xf0,
176 NV04_PGRAPH_PATT_COLORRAM
+0xf4,
177 NV04_PGRAPH_PATT_COLORRAM
+0xf8,
178 NV04_PGRAPH_PATT_COLORRAM
+0xfc,
181 NV04_PGRAPH_PATTERN_SHAPE
,
185 NV04_PGRAPH_BETA_AND
,
186 NV04_PGRAPH_BETA_PREMULT
,
187 NV04_PGRAPH_CONTROL0
,
188 NV04_PGRAPH_CONTROL1
,
189 NV04_PGRAPH_CONTROL2
,
191 NV04_PGRAPH_STORED_FMT
,
192 NV04_PGRAPH_SOURCE_COLOR
,
336 NV04_PGRAPH_PASSTHRU_0
,
337 NV04_PGRAPH_PASSTHRU_1
,
338 NV04_PGRAPH_PASSTHRU_2
,
339 NV04_PGRAPH_DVD_COLORFMT
,
340 NV04_PGRAPH_SCALED_FORMAT
,
341 NV04_PGRAPH_MISC24_0
,
342 NV04_PGRAPH_MISC24_1
,
343 NV04_PGRAPH_MISC24_2
,
352 uint32_t nv04
[ARRAY_SIZE(nv04_graph_ctx_regs
)];
355 static struct nouveau_channel
*
356 nv04_graph_channel(struct drm_device
*dev
)
358 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
359 int chid
= dev_priv
->engine
.fifo
.channels
;
361 if (nv_rd32(dev
, NV04_PGRAPH_CTX_CONTROL
) & 0x00010000)
362 chid
= nv_rd32(dev
, NV04_PGRAPH_CTX_USER
) >> 24;
364 if (chid
>= dev_priv
->engine
.fifo
.channels
)
367 return dev_priv
->channels
.ptr
[chid
];
370 static uint32_t *ctx_reg(struct graph_state
*ctx
, uint32_t reg
)
374 for (i
= 0; i
< ARRAY_SIZE(nv04_graph_ctx_regs
); i
++) {
375 if (nv04_graph_ctx_regs
[i
] == reg
)
376 return &ctx
->nv04
[i
];
383 nv04_graph_load_context(struct nouveau_channel
*chan
)
385 struct graph_state
*pgraph_ctx
= chan
->engctx
[NVOBJ_ENGINE_GR
];
386 struct drm_device
*dev
= chan
->dev
;
390 for (i
= 0; i
< ARRAY_SIZE(nv04_graph_ctx_regs
); i
++)
391 nv_wr32(dev
, nv04_graph_ctx_regs
[i
], pgraph_ctx
->nv04
[i
]);
393 nv_wr32(dev
, NV04_PGRAPH_CTX_CONTROL
, 0x10010100);
395 tmp
= nv_rd32(dev
, NV04_PGRAPH_CTX_USER
) & 0x00ffffff;
396 nv_wr32(dev
, NV04_PGRAPH_CTX_USER
, tmp
| chan
->id
<< 24);
398 tmp
= nv_rd32(dev
, NV04_PGRAPH_FFINTFC_ST2
);
399 nv_wr32(dev
, NV04_PGRAPH_FFINTFC_ST2
, tmp
& 0x000fffff);
405 nv04_graph_unload_context(struct drm_device
*dev
)
407 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
408 struct nouveau_channel
*chan
= NULL
;
409 struct graph_state
*ctx
;
413 chan
= nv04_graph_channel(dev
);
416 ctx
= chan
->engctx
[NVOBJ_ENGINE_GR
];
418 for (i
= 0; i
< ARRAY_SIZE(nv04_graph_ctx_regs
); i
++)
419 ctx
->nv04
[i
] = nv_rd32(dev
, nv04_graph_ctx_regs
[i
]);
421 nv_wr32(dev
, NV04_PGRAPH_CTX_CONTROL
, 0x10000000);
422 tmp
= nv_rd32(dev
, NV04_PGRAPH_CTX_USER
) & 0x00ffffff;
423 tmp
|= (dev_priv
->engine
.fifo
.channels
- 1) << 24;
424 nv_wr32(dev
, NV04_PGRAPH_CTX_USER
, tmp
);
429 nv04_graph_context_new(struct nouveau_channel
*chan
, int engine
)
431 struct graph_state
*pgraph_ctx
;
432 NV_DEBUG(chan
->dev
, "nv04_graph_context_create %d\n", chan
->id
);
434 pgraph_ctx
= kzalloc(sizeof(*pgraph_ctx
), GFP_KERNEL
);
435 if (pgraph_ctx
== NULL
)
438 *ctx_reg(pgraph_ctx
, NV04_PGRAPH_DEBUG_3
) = 0xfad4ff31;
440 chan
->engctx
[engine
] = pgraph_ctx
;
445 nv04_graph_context_del(struct nouveau_channel
*chan
, int engine
)
447 struct drm_device
*dev
= chan
->dev
;
448 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
449 struct graph_state
*pgraph_ctx
= chan
->engctx
[engine
];
452 spin_lock_irqsave(&dev_priv
->context_switch_lock
, flags
);
453 nv_mask(dev
, NV04_PGRAPH_FIFO
, 0x00000001, 0x00000000);
455 /* Unload the context if it's the currently active one */
456 if (nv04_graph_channel(dev
) == chan
)
457 nv04_graph_unload_context(dev
);
459 nv_mask(dev
, NV04_PGRAPH_FIFO
, 0x00000001, 0x00000001);
460 spin_unlock_irqrestore(&dev_priv
->context_switch_lock
, flags
);
462 /* Free the context resources */
464 chan
->engctx
[engine
] = NULL
;
468 nv04_graph_object_new(struct nouveau_channel
*chan
, int engine
,
469 u32 handle
, u16
class)
471 struct drm_device
*dev
= chan
->dev
;
472 struct nouveau_gpuobj
*obj
= NULL
;
475 ret
= nouveau_gpuobj_new(dev
, chan
, 16, 16, NVOBJ_FLAG_ZERO_FREE
, &obj
);
482 nv_wo32(obj
, 0x00, 0x00080000 | class);
484 nv_wo32(obj
, 0x00, class);
486 nv_wo32(obj
, 0x04, 0x00000000);
487 nv_wo32(obj
, 0x08, 0x00000000);
488 nv_wo32(obj
, 0x0c, 0x00000000);
490 ret
= nouveau_ramht_insert(chan
, handle
, obj
);
491 nouveau_gpuobj_ref(NULL
, &obj
);
496 nv04_graph_init(struct drm_device
*dev
, int engine
)
498 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
501 nv_wr32(dev
, NV03_PMC_ENABLE
, nv_rd32(dev
, NV03_PMC_ENABLE
) &
502 ~NV_PMC_ENABLE_PGRAPH
);
503 nv_wr32(dev
, NV03_PMC_ENABLE
, nv_rd32(dev
, NV03_PMC_ENABLE
) |
504 NV_PMC_ENABLE_PGRAPH
);
506 /* Enable PGRAPH interrupts */
507 nv_wr32(dev
, NV03_PGRAPH_INTR
, 0xFFFFFFFF);
508 nv_wr32(dev
, NV03_PGRAPH_INTR_EN
, 0xFFFFFFFF);
510 nv_wr32(dev
, NV04_PGRAPH_VALID1
, 0);
511 nv_wr32(dev
, NV04_PGRAPH_VALID2
, 0);
512 /*nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x000001FF);
513 nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
514 nv_wr32(dev
, NV04_PGRAPH_DEBUG_0
, 0x1231c000);
515 /*1231C000 blob, 001 haiku*/
516 /*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
517 nv_wr32(dev
, NV04_PGRAPH_DEBUG_1
, 0x72111100);
518 /*0x72111100 blob , 01 haiku*/
519 /*nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
520 nv_wr32(dev
, NV04_PGRAPH_DEBUG_2
, 0x11d5f071);
523 /*nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
524 nv_wr32(dev
, NV04_PGRAPH_DEBUG_3
, 0xf0d4ff31);
525 /*haiku and blob 10d4*/
527 nv_wr32(dev
, NV04_PGRAPH_STATE
, 0xFFFFFFFF);
528 nv_wr32(dev
, NV04_PGRAPH_CTX_CONTROL
, 0x10000100);
529 tmp
= nv_rd32(dev
, NV04_PGRAPH_CTX_USER
) & 0x00ffffff;
530 tmp
|= (dev_priv
->engine
.fifo
.channels
- 1) << 24;
531 nv_wr32(dev
, NV04_PGRAPH_CTX_USER
, tmp
);
533 /* These don't belong here, they're part of a per-channel context */
534 nv_wr32(dev
, NV04_PGRAPH_PATTERN_SHAPE
, 0x00000000);
535 nv_wr32(dev
, NV04_PGRAPH_BETA_AND
, 0xFFFFFFFF);
541 nv04_graph_fini(struct drm_device
*dev
, int engine
, bool suspend
)
543 nv_mask(dev
, NV04_PGRAPH_FIFO
, 0x00000001, 0x00000000);
544 if (!nv_wait(dev
, NV04_PGRAPH_STATUS
, ~0, 0) && suspend
) {
545 nv_mask(dev
, NV04_PGRAPH_FIFO
, 0x00000001, 0x00000001);
548 nv04_graph_unload_context(dev
);
549 nv_wr32(dev
, NV03_PGRAPH_INTR_EN
, 0x00000000);
554 nv04_graph_mthd_set_ref(struct nouveau_channel
*chan
,
555 u32
class, u32 mthd
, u32 data
)
557 atomic_set(&chan
->fence
.last_sequence_irq
, data
);
562 nv04_graph_mthd_page_flip(struct nouveau_channel
*chan
,
563 u32
class, u32 mthd
, u32 data
)
565 struct drm_device
*dev
= chan
->dev
;
566 struct nouveau_page_flip_state s
;
568 if (!nouveau_finish_page_flip(chan
, &s
))
569 nv_set_crtc_base(dev
, s
.crtc
,
570 s
.offset
+ s
.y
* s
.pitch
+ s
.x
* s
.bpp
/ 8);
576 * Software methods, why they are needed, and how they all work:
578 * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
579 * 2d engine settings are kept inside the grobjs themselves. The grobjs are
580 * 3 words long on both. grobj format on NV04 is:
584 * - bit 12: color key active
585 * - bit 13: clip rect active
586 * - bit 14: if set, destination surface is swizzled and taken from buffer 5
587 * [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
588 * from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
589 * NV03_CONTEXT_SURFACE_DST].
590 * - bits 15-17: 2d operation [aka patch config]
591 * - bit 24: patch valid [enables rendering using this object]
592 * - bit 25: surf3d valid [for tex_tri and multitex_tri only]
594 * - bits 0-1: mono format
595 * - bits 8-13: color format
596 * - bits 16-31: DMA_NOTIFY instance
598 * - bits 0-15: DMA_A instance
599 * - bits 16-31: DMA_B instance
605 * - bit 12: color key active
606 * - bit 13: clip rect active
607 * - bit 14: if set, destination surface is swizzled and taken from buffer 5
608 * [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
609 * from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
610 * NV03_CONTEXT_SURFACE_DST].
611 * - bits 15-17: 2d operation [aka patch config]
612 * - bits 20-22: dither mode
613 * - bit 24: patch valid [enables rendering using this object]
614 * - bit 25: surface_dst/surface_color/surf2d/surf3d valid
615 * - bit 26: surface_src/surface_zeta valid
616 * - bit 27: pattern valid
617 * - bit 28: rop valid
618 * - bit 29: beta1 valid
619 * - bit 30: beta4 valid
621 * - bits 0-1: mono format
622 * - bits 8-13: color format
623 * - bits 16-31: DMA_NOTIFY instance
625 * - bits 0-15: DMA_A instance
626 * - bits 16-31: DMA_B instance
628 * NV05 will set/unset the relevant valid bits when you poke the relevant
629 * object-binding methods with object of the proper type, or with the NULL
630 * type. It'll only allow rendering using the grobj if all needed objects
631 * are bound. The needed set of objects depends on selected operation: for
632 * example rop object is needed by ROP_AND, but not by SRCCOPY_AND.
634 * NV04 doesn't have these methods implemented at all, and doesn't have the
635 * relevant bits in grobj. Instead, it'll allow rendering whenever bit 24
636 * is set. So we have to emulate them in software, internally keeping the
637 * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04,
638 * but the last word isn't actually used for anything, we abuse it for this
641 * Actually, NV05 can optionally check bit 24 too, but we disable this since
642 * there's no use for it.
644 * For unknown reasons, NV04 implements surf3d binding in hardware as an
645 * exception. Also for unknown reasons, NV04 doesn't implement the clipping
646 * methods on the surf3d object, so we have to emulate them too.
650 nv04_graph_set_ctx1(struct nouveau_channel
*chan
, u32 mask
, u32 value
)
652 struct drm_device
*dev
= chan
->dev
;
653 u32 instance
= (nv_rd32(dev
, NV04_PGRAPH_CTX_SWITCH4
) & 0xffff) << 4;
654 int subc
= (nv_rd32(dev
, NV04_PGRAPH_TRAPPED_ADDR
) >> 13) & 0x7;
657 tmp
= nv_ri32(dev
, instance
);
661 nv_wi32(dev
, instance
, tmp
);
662 nv_wr32(dev
, NV04_PGRAPH_CTX_SWITCH1
, tmp
);
663 nv_wr32(dev
, NV04_PGRAPH_CTX_CACHE1
+ (subc
<<2), tmp
);
667 nv04_graph_set_ctx_val(struct nouveau_channel
*chan
, u32 mask
, u32 value
)
669 struct drm_device
*dev
= chan
->dev
;
670 u32 instance
= (nv_rd32(dev
, NV04_PGRAPH_CTX_SWITCH4
) & 0xffff) << 4;
672 int class, op
, valid
= 1;
674 ctx1
= nv_ri32(dev
, instance
);
676 op
= (ctx1
>> 15) & 7;
677 tmp
= nv_ri32(dev
, instance
+ 0xc);
680 nv_wi32(dev
, instance
+ 0xc, tmp
);
682 /* check for valid surf2d/surf_dst/surf_color */
683 if (!(tmp
& 0x02000000))
685 /* check for valid surf_src/surf_zeta */
686 if ((class == 0x1f || class == 0x48) && !(tmp
& 0x04000000))
690 /* SRCCOPY_AND, SRCCOPY: no extra objects required */
694 /* ROP_AND: requires pattern and rop */
696 if (!(tmp
& 0x18000000))
699 /* BLEND_AND: requires beta1 */
701 if (!(tmp
& 0x20000000))
704 /* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
707 if (!(tmp
& 0x40000000))
712 nv04_graph_set_ctx1(chan
, 0x01000000, valid
<< 24);
716 nv04_graph_mthd_set_operation(struct nouveau_channel
*chan
,
717 u32
class, u32 mthd
, u32 data
)
721 /* Old versions of the objects only accept first three operations. */
722 if (data
> 2 && class < 0x40)
724 nv04_graph_set_ctx1(chan
, 0x00038000, data
<< 15);
725 /* changing operation changes set of objects needed for validation */
726 nv04_graph_set_ctx_val(chan
, 0, 0);
731 nv04_graph_mthd_surf3d_clip_h(struct nouveau_channel
*chan
,
732 u32
class, u32 mthd
, u32 data
)
734 uint32_t min
= data
& 0xffff, max
;
735 uint32_t w
= data
>> 16;
740 /* yes, it accepts negative for some reason. */
744 nv_wr32(chan
->dev
, 0x40053c, min
);
745 nv_wr32(chan
->dev
, 0x400544, max
);
750 nv04_graph_mthd_surf3d_clip_v(struct nouveau_channel
*chan
,
751 u32
class, u32 mthd
, u32 data
)
753 uint32_t min
= data
& 0xffff, max
;
754 uint32_t w
= data
>> 16;
759 /* yes, it accepts negative for some reason. */
763 nv_wr32(chan
->dev
, 0x400540, min
);
764 nv_wr32(chan
->dev
, 0x400548, max
);
769 nv04_graph_mthd_bind_surf2d(struct nouveau_channel
*chan
,
770 u32
class, u32 mthd
, u32 data
)
772 switch (nv_ri32(chan
->dev
, data
<< 4) & 0xff) {
774 nv04_graph_set_ctx1(chan
, 0x00004000, 0);
775 nv04_graph_set_ctx_val(chan
, 0x02000000, 0);
778 nv04_graph_set_ctx1(chan
, 0x00004000, 0);
779 nv04_graph_set_ctx_val(chan
, 0x02000000, 0x02000000);
786 nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_channel
*chan
,
787 u32
class, u32 mthd
, u32 data
)
789 switch (nv_ri32(chan
->dev
, data
<< 4) & 0xff) {
791 nv04_graph_set_ctx1(chan
, 0x00004000, 0);
792 nv04_graph_set_ctx_val(chan
, 0x02000000, 0);
795 nv04_graph_set_ctx1(chan
, 0x00004000, 0);
796 nv04_graph_set_ctx_val(chan
, 0x02000000, 0x02000000);
799 nv04_graph_set_ctx1(chan
, 0x00004000, 0x00004000);
800 nv04_graph_set_ctx_val(chan
, 0x02000000, 0x02000000);
807 nv04_graph_mthd_bind_nv01_patt(struct nouveau_channel
*chan
,
808 u32
class, u32 mthd
, u32 data
)
810 switch (nv_ri32(chan
->dev
, data
<< 4) & 0xff) {
812 nv04_graph_set_ctx_val(chan
, 0x08000000, 0);
815 nv04_graph_set_ctx_val(chan
, 0x08000000, 0x08000000);
822 nv04_graph_mthd_bind_nv04_patt(struct nouveau_channel
*chan
,
823 u32
class, u32 mthd
, u32 data
)
825 switch (nv_ri32(chan
->dev
, data
<< 4) & 0xff) {
827 nv04_graph_set_ctx_val(chan
, 0x08000000, 0);
830 nv04_graph_set_ctx_val(chan
, 0x08000000, 0x08000000);
837 nv04_graph_mthd_bind_rop(struct nouveau_channel
*chan
,
838 u32
class, u32 mthd
, u32 data
)
840 switch (nv_ri32(chan
->dev
, data
<< 4) & 0xff) {
842 nv04_graph_set_ctx_val(chan
, 0x10000000, 0);
845 nv04_graph_set_ctx_val(chan
, 0x10000000, 0x10000000);
852 nv04_graph_mthd_bind_beta1(struct nouveau_channel
*chan
,
853 u32
class, u32 mthd
, u32 data
)
855 switch (nv_ri32(chan
->dev
, data
<< 4) & 0xff) {
857 nv04_graph_set_ctx_val(chan
, 0x20000000, 0);
860 nv04_graph_set_ctx_val(chan
, 0x20000000, 0x20000000);
867 nv04_graph_mthd_bind_beta4(struct nouveau_channel
*chan
,
868 u32
class, u32 mthd
, u32 data
)
870 switch (nv_ri32(chan
->dev
, data
<< 4) & 0xff) {
872 nv04_graph_set_ctx_val(chan
, 0x40000000, 0);
875 nv04_graph_set_ctx_val(chan
, 0x40000000, 0x40000000);
882 nv04_graph_mthd_bind_surf_dst(struct nouveau_channel
*chan
,
883 u32
class, u32 mthd
, u32 data
)
885 switch (nv_ri32(chan
->dev
, data
<< 4) & 0xff) {
887 nv04_graph_set_ctx_val(chan
, 0x02000000, 0);
890 nv04_graph_set_ctx_val(chan
, 0x02000000, 0x02000000);
897 nv04_graph_mthd_bind_surf_src(struct nouveau_channel
*chan
,
898 u32
class, u32 mthd
, u32 data
)
900 switch (nv_ri32(chan
->dev
, data
<< 4) & 0xff) {
902 nv04_graph_set_ctx_val(chan
, 0x04000000, 0);
905 nv04_graph_set_ctx_val(chan
, 0x04000000, 0x04000000);
912 nv04_graph_mthd_bind_surf_color(struct nouveau_channel
*chan
,
913 u32
class, u32 mthd
, u32 data
)
915 switch (nv_ri32(chan
->dev
, data
<< 4) & 0xff) {
917 nv04_graph_set_ctx_val(chan
, 0x02000000, 0);
920 nv04_graph_set_ctx_val(chan
, 0x02000000, 0x02000000);
927 nv04_graph_mthd_bind_surf_zeta(struct nouveau_channel
*chan
,
928 u32
class, u32 mthd
, u32 data
)
930 switch (nv_ri32(chan
->dev
, data
<< 4) & 0xff) {
932 nv04_graph_set_ctx_val(chan
, 0x04000000, 0);
935 nv04_graph_set_ctx_val(chan
, 0x04000000, 0x04000000);
942 nv04_graph_mthd_bind_clip(struct nouveau_channel
*chan
,
943 u32
class, u32 mthd
, u32 data
)
945 switch (nv_ri32(chan
->dev
, data
<< 4) & 0xff) {
947 nv04_graph_set_ctx1(chan
, 0x2000, 0);
950 nv04_graph_set_ctx1(chan
, 0x2000, 0x2000);
957 nv04_graph_mthd_bind_chroma(struct nouveau_channel
*chan
,
958 u32
class, u32 mthd
, u32 data
)
960 switch (nv_ri32(chan
->dev
, data
<< 4) & 0xff) {
962 nv04_graph_set_ctx1(chan
, 0x1000, 0);
964 /* Yes, for some reason even the old versions of objects
965 * accept 0x57 and not 0x17. Consistency be damned.
968 nv04_graph_set_ctx1(chan
, 0x1000, 0x1000);
974 static struct nouveau_bitfield nv04_graph_intr
[] = {
975 { NV_PGRAPH_INTR_NOTIFY
, "NOTIFY" },
979 static struct nouveau_bitfield nv04_graph_nstatus
[] = {
980 { NV04_PGRAPH_NSTATUS_STATE_IN_USE
, "STATE_IN_USE" },
981 { NV04_PGRAPH_NSTATUS_INVALID_STATE
, "INVALID_STATE" },
982 { NV04_PGRAPH_NSTATUS_BAD_ARGUMENT
, "BAD_ARGUMENT" },
983 { NV04_PGRAPH_NSTATUS_PROTECTION_FAULT
, "PROTECTION_FAULT" },
987 struct nouveau_bitfield nv04_graph_nsource
[] = {
988 { NV03_PGRAPH_NSOURCE_NOTIFICATION
, "NOTIFICATION" },
989 { NV03_PGRAPH_NSOURCE_DATA_ERROR
, "DATA_ERROR" },
990 { NV03_PGRAPH_NSOURCE_PROTECTION_ERROR
, "PROTECTION_ERROR" },
991 { NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION
, "RANGE_EXCEPTION" },
992 { NV03_PGRAPH_NSOURCE_LIMIT_COLOR
, "LIMIT_COLOR" },
993 { NV03_PGRAPH_NSOURCE_LIMIT_ZETA
, "LIMIT_ZETA" },
994 { NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD
, "ILLEGAL_MTHD" },
995 { NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION
, "DMA_R_PROTECTION" },
996 { NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION
, "DMA_W_PROTECTION" },
997 { NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION
, "FORMAT_EXCEPTION" },
998 { NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION
, "PATCH_EXCEPTION" },
999 { NV03_PGRAPH_NSOURCE_STATE_INVALID
, "STATE_INVALID" },
1000 { NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY
, "DOUBLE_NOTIFY" },
1001 { NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE
, "NOTIFY_IN_USE" },
1002 { NV03_PGRAPH_NSOURCE_METHOD_CNT
, "METHOD_CNT" },
1003 { NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION
, "BFR_NOTIFICATION" },
1004 { NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION
, "DMA_VTX_PROTECTION" },
1005 { NV03_PGRAPH_NSOURCE_DMA_WIDTH_A
, "DMA_WIDTH_A" },
1006 { NV03_PGRAPH_NSOURCE_DMA_WIDTH_B
, "DMA_WIDTH_B" },
1011 nv04_graph_context_switch(struct drm_device
*dev
)
1013 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
1014 struct nouveau_channel
*chan
= NULL
;
1017 nouveau_wait_for_idle(dev
);
1019 /* If previous context is valid, we need to save it */
1020 nv04_graph_unload_context(dev
);
1022 /* Load context for next channel */
1023 chid
= dev_priv
->engine
.fifo
.channel_id(dev
);
1024 chan
= dev_priv
->channels
.ptr
[chid
];
1026 nv04_graph_load_context(chan
);
1030 nv04_graph_isr(struct drm_device
*dev
)
1034 while ((stat
= nv_rd32(dev
, NV03_PGRAPH_INTR
))) {
1035 u32 nsource
= nv_rd32(dev
, NV03_PGRAPH_NSOURCE
);
1036 u32 nstatus
= nv_rd32(dev
, NV03_PGRAPH_NSTATUS
);
1037 u32 addr
= nv_rd32(dev
, NV04_PGRAPH_TRAPPED_ADDR
);
1038 u32 chid
= (addr
& 0x0f000000) >> 24;
1039 u32 subc
= (addr
& 0x0000e000) >> 13;
1040 u32 mthd
= (addr
& 0x00001ffc);
1041 u32 data
= nv_rd32(dev
, NV04_PGRAPH_TRAPPED_DATA
);
1042 u32
class = nv_rd32(dev
, 0x400180 + subc
* 4) & 0xff;
1045 if (stat
& NV_PGRAPH_INTR_NOTIFY
) {
1046 if (nsource
& NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD
) {
1047 if (!nouveau_gpuobj_mthd_call2(dev
, chid
, class, mthd
, data
))
1048 show
&= ~NV_PGRAPH_INTR_NOTIFY
;
1052 if (stat
& NV_PGRAPH_INTR_CONTEXT_SWITCH
) {
1053 nv_wr32(dev
, NV03_PGRAPH_INTR
, NV_PGRAPH_INTR_CONTEXT_SWITCH
);
1054 stat
&= ~NV_PGRAPH_INTR_CONTEXT_SWITCH
;
1055 show
&= ~NV_PGRAPH_INTR_CONTEXT_SWITCH
;
1056 nv04_graph_context_switch(dev
);
1059 nv_wr32(dev
, NV03_PGRAPH_INTR
, stat
);
1060 nv_wr32(dev
, NV04_PGRAPH_FIFO
, 0x00000001);
1062 if (show
&& nouveau_ratelimit()) {
1063 NV_INFO(dev
, "PGRAPH -");
1064 nouveau_bitfield_print(nv04_graph_intr
, show
);
1065 printk(" nsource:");
1066 nouveau_bitfield_print(nv04_graph_nsource
, nsource
);
1067 printk(" nstatus:");
1068 nouveau_bitfield_print(nv04_graph_nstatus
, nstatus
);
1070 NV_INFO(dev
, "PGRAPH - ch %d/%d class 0x%04x "
1071 "mthd 0x%04x data 0x%08x\n",
1072 chid
, subc
, class, mthd
, data
);
1078 nv04_graph_destroy(struct drm_device
*dev
, int engine
)
1080 struct nv04_graph_engine
*pgraph
= nv_engine(dev
, engine
);
1082 nouveau_irq_unregister(dev
, 12);
1084 NVOBJ_ENGINE_DEL(dev
, GR
);
1089 nv04_graph_create(struct drm_device
*dev
)
1091 struct nv04_graph_engine
*pgraph
;
1093 pgraph
= kzalloc(sizeof(*pgraph
), GFP_KERNEL
);
1097 pgraph
->base
.destroy
= nv04_graph_destroy
;
1098 pgraph
->base
.init
= nv04_graph_init
;
1099 pgraph
->base
.fini
= nv04_graph_fini
;
1100 pgraph
->base
.context_new
= nv04_graph_context_new
;
1101 pgraph
->base
.context_del
= nv04_graph_context_del
;
1102 pgraph
->base
.object_new
= nv04_graph_object_new
;
1104 NVOBJ_ENGINE_ADD(dev
, GR
, &pgraph
->base
);
1105 nouveau_irq_register(dev
, 12, nv04_graph_isr
);
1107 /* dvd subpicture */
1108 NVOBJ_CLASS(dev
, 0x0038, GR
);
1111 NVOBJ_CLASS(dev
, 0x0039, GR
);
1114 NVOBJ_CLASS(dev
, 0x004b, GR
);
1115 NVOBJ_MTHD (dev
, 0x004b, 0x0184, nv04_graph_mthd_bind_nv01_patt
);
1116 NVOBJ_MTHD (dev
, 0x004b, 0x0188, nv04_graph_mthd_bind_rop
);
1117 NVOBJ_MTHD (dev
, 0x004b, 0x018c, nv04_graph_mthd_bind_beta1
);
1118 NVOBJ_MTHD (dev
, 0x004b, 0x0190, nv04_graph_mthd_bind_surf_dst
);
1119 NVOBJ_MTHD (dev
, 0x004b, 0x02fc, nv04_graph_mthd_set_operation
);
1122 NVOBJ_CLASS(dev
, 0x004a, GR
);
1123 NVOBJ_MTHD (dev
, 0x004a, 0x0188, nv04_graph_mthd_bind_nv04_patt
);
1124 NVOBJ_MTHD (dev
, 0x004a, 0x018c, nv04_graph_mthd_bind_rop
);
1125 NVOBJ_MTHD (dev
, 0x004a, 0x0190, nv04_graph_mthd_bind_beta1
);
1126 NVOBJ_MTHD (dev
, 0x004a, 0x0194, nv04_graph_mthd_bind_beta4
);
1127 NVOBJ_MTHD (dev
, 0x004a, 0x0198, nv04_graph_mthd_bind_surf2d
);
1128 NVOBJ_MTHD (dev
, 0x004a, 0x02fc, nv04_graph_mthd_set_operation
);
1130 /* nv01 imageblit */
1131 NVOBJ_CLASS(dev
, 0x001f, GR
);
1132 NVOBJ_MTHD (dev
, 0x001f, 0x0184, nv04_graph_mthd_bind_chroma
);
1133 NVOBJ_MTHD (dev
, 0x001f, 0x0188, nv04_graph_mthd_bind_clip
);
1134 NVOBJ_MTHD (dev
, 0x001f, 0x018c, nv04_graph_mthd_bind_nv01_patt
);
1135 NVOBJ_MTHD (dev
, 0x001f, 0x0190, nv04_graph_mthd_bind_rop
);
1136 NVOBJ_MTHD (dev
, 0x001f, 0x0194, nv04_graph_mthd_bind_beta1
);
1137 NVOBJ_MTHD (dev
, 0x001f, 0x0198, nv04_graph_mthd_bind_surf_dst
);
1138 NVOBJ_MTHD (dev
, 0x001f, 0x019c, nv04_graph_mthd_bind_surf_src
);
1139 NVOBJ_MTHD (dev
, 0x001f, 0x02fc, nv04_graph_mthd_set_operation
);
1141 /* nv04 imageblit */
1142 NVOBJ_CLASS(dev
, 0x005f, GR
);
1143 NVOBJ_MTHD (dev
, 0x005f, 0x0184, nv04_graph_mthd_bind_chroma
);
1144 NVOBJ_MTHD (dev
, 0x005f, 0x0188, nv04_graph_mthd_bind_clip
);
1145 NVOBJ_MTHD (dev
, 0x005f, 0x018c, nv04_graph_mthd_bind_nv04_patt
);
1146 NVOBJ_MTHD (dev
, 0x005f, 0x0190, nv04_graph_mthd_bind_rop
);
1147 NVOBJ_MTHD (dev
, 0x005f, 0x0194, nv04_graph_mthd_bind_beta1
);
1148 NVOBJ_MTHD (dev
, 0x005f, 0x0198, nv04_graph_mthd_bind_beta4
);
1149 NVOBJ_MTHD (dev
, 0x005f, 0x019c, nv04_graph_mthd_bind_surf2d
);
1150 NVOBJ_MTHD (dev
, 0x005f, 0x02fc, nv04_graph_mthd_set_operation
);
1153 NVOBJ_CLASS(dev
, 0x0060, GR
);
1154 NVOBJ_MTHD (dev
, 0x0060, 0x0188, nv04_graph_mthd_bind_chroma
);
1155 NVOBJ_MTHD (dev
, 0x0060, 0x018c, nv04_graph_mthd_bind_clip
);
1156 NVOBJ_MTHD (dev
, 0x0060, 0x0190, nv04_graph_mthd_bind_nv04_patt
);
1157 NVOBJ_MTHD (dev
, 0x0060, 0x0194, nv04_graph_mthd_bind_rop
);
1158 NVOBJ_MTHD (dev
, 0x0060, 0x0198, nv04_graph_mthd_bind_beta1
);
1159 NVOBJ_MTHD (dev
, 0x0060, 0x019c, nv04_graph_mthd_bind_beta4
);
1160 NVOBJ_MTHD (dev
, 0x0060, 0x01a0, nv04_graph_mthd_bind_surf2d_swzsurf
);
1161 NVOBJ_MTHD (dev
, 0x0060, 0x03e4, nv04_graph_mthd_set_operation
);
1164 NVOBJ_CLASS(dev
, 0x0064, GR
);
1167 NVOBJ_CLASS(dev
, 0x0021, GR
);
1168 NVOBJ_MTHD (dev
, 0x0021, 0x0184, nv04_graph_mthd_bind_chroma
);
1169 NVOBJ_MTHD (dev
, 0x0021, 0x0188, nv04_graph_mthd_bind_clip
);
1170 NVOBJ_MTHD (dev
, 0x0021, 0x018c, nv04_graph_mthd_bind_nv01_patt
);
1171 NVOBJ_MTHD (dev
, 0x0021, 0x0190, nv04_graph_mthd_bind_rop
);
1172 NVOBJ_MTHD (dev
, 0x0021, 0x0194, nv04_graph_mthd_bind_beta1
);
1173 NVOBJ_MTHD (dev
, 0x0021, 0x0198, nv04_graph_mthd_bind_surf_dst
);
1174 NVOBJ_MTHD (dev
, 0x0021, 0x02fc, nv04_graph_mthd_set_operation
);
1177 NVOBJ_CLASS(dev
, 0x0061, GR
);
1178 NVOBJ_MTHD (dev
, 0x0061, 0x0184, nv04_graph_mthd_bind_chroma
);
1179 NVOBJ_MTHD (dev
, 0x0061, 0x0188, nv04_graph_mthd_bind_clip
);
1180 NVOBJ_MTHD (dev
, 0x0061, 0x018c, nv04_graph_mthd_bind_nv04_patt
);
1181 NVOBJ_MTHD (dev
, 0x0061, 0x0190, nv04_graph_mthd_bind_rop
);
1182 NVOBJ_MTHD (dev
, 0x0061, 0x0194, nv04_graph_mthd_bind_beta1
);
1183 NVOBJ_MTHD (dev
, 0x0061, 0x0198, nv04_graph_mthd_bind_beta4
);
1184 NVOBJ_MTHD (dev
, 0x0061, 0x019c, nv04_graph_mthd_bind_surf2d
);
1185 NVOBJ_MTHD (dev
, 0x0061, 0x02fc, nv04_graph_mthd_set_operation
);
1188 NVOBJ_CLASS(dev
, 0x0065, GR
);
1191 NVOBJ_CLASS(dev
, 0x0036, GR
);
1192 NVOBJ_MTHD (dev
, 0x0036, 0x0184, nv04_graph_mthd_bind_chroma
);
1193 NVOBJ_MTHD (dev
, 0x0036, 0x0188, nv04_graph_mthd_bind_nv01_patt
);
1194 NVOBJ_MTHD (dev
, 0x0036, 0x018c, nv04_graph_mthd_bind_rop
);
1195 NVOBJ_MTHD (dev
, 0x0036, 0x0190, nv04_graph_mthd_bind_beta1
);
1196 NVOBJ_MTHD (dev
, 0x0036, 0x0194, nv04_graph_mthd_bind_surf_dst
);
1197 NVOBJ_MTHD (dev
, 0x0036, 0x02fc, nv04_graph_mthd_set_operation
);
1200 NVOBJ_CLASS(dev
, 0x0076, GR
);
1201 NVOBJ_MTHD (dev
, 0x0076, 0x0184, nv04_graph_mthd_bind_chroma
);
1202 NVOBJ_MTHD (dev
, 0x0076, 0x0188, nv04_graph_mthd_bind_nv04_patt
);
1203 NVOBJ_MTHD (dev
, 0x0076, 0x018c, nv04_graph_mthd_bind_rop
);
1204 NVOBJ_MTHD (dev
, 0x0076, 0x0190, nv04_graph_mthd_bind_beta1
);
1205 NVOBJ_MTHD (dev
, 0x0076, 0x0194, nv04_graph_mthd_bind_beta4
);
1206 NVOBJ_MTHD (dev
, 0x0076, 0x0198, nv04_graph_mthd_bind_surf2d
);
1207 NVOBJ_MTHD (dev
, 0x0076, 0x02fc, nv04_graph_mthd_set_operation
);
1210 NVOBJ_CLASS(dev
, 0x0066, GR
);
1213 NVOBJ_CLASS(dev
, 0x0037, GR
);
1214 NVOBJ_MTHD (dev
, 0x0037, 0x0188, nv04_graph_mthd_bind_nv01_patt
);
1215 NVOBJ_MTHD (dev
, 0x0037, 0x018c, nv04_graph_mthd_bind_rop
);
1216 NVOBJ_MTHD (dev
, 0x0037, 0x0190, nv04_graph_mthd_bind_beta1
);
1217 NVOBJ_MTHD (dev
, 0x0037, 0x0194, nv04_graph_mthd_bind_surf_dst
);
1218 NVOBJ_MTHD (dev
, 0x0037, 0x0304, nv04_graph_mthd_set_operation
);
1221 NVOBJ_CLASS(dev
, 0x0077, GR
);
1222 NVOBJ_MTHD (dev
, 0x0077, 0x0188, nv04_graph_mthd_bind_nv04_patt
);
1223 NVOBJ_MTHD (dev
, 0x0077, 0x018c, nv04_graph_mthd_bind_rop
);
1224 NVOBJ_MTHD (dev
, 0x0077, 0x0190, nv04_graph_mthd_bind_beta1
);
1225 NVOBJ_MTHD (dev
, 0x0077, 0x0194, nv04_graph_mthd_bind_beta4
);
1226 NVOBJ_MTHD (dev
, 0x0077, 0x0198, nv04_graph_mthd_bind_surf2d_swzsurf
);
1227 NVOBJ_MTHD (dev
, 0x0077, 0x0304, nv04_graph_mthd_set_operation
);
1230 NVOBJ_CLASS(dev
, 0x0030, GR
);
1233 NVOBJ_CLASS(dev
, 0x0042, GR
);
1236 NVOBJ_CLASS(dev
, 0x0043, GR
);
1239 NVOBJ_CLASS(dev
, 0x0012, GR
);
1242 NVOBJ_CLASS(dev
, 0x0072, GR
);
1245 NVOBJ_CLASS(dev
, 0x0019, GR
);
1248 NVOBJ_CLASS(dev
, 0x0018, GR
);
1251 NVOBJ_CLASS(dev
, 0x0044, GR
);
1254 NVOBJ_CLASS(dev
, 0x0052, GR
);
1257 NVOBJ_CLASS(dev
, 0x0053, GR
);
1258 NVOBJ_MTHD (dev
, 0x0053, 0x02f8, nv04_graph_mthd_surf3d_clip_h
);
1259 NVOBJ_MTHD (dev
, 0x0053, 0x02fc, nv04_graph_mthd_surf3d_clip_v
);
1262 NVOBJ_CLASS(dev
, 0x0048, GR
);
1263 NVOBJ_MTHD (dev
, 0x0048, 0x0188, nv04_graph_mthd_bind_clip
);
1264 NVOBJ_MTHD (dev
, 0x0048, 0x018c, nv04_graph_mthd_bind_surf_color
);
1265 NVOBJ_MTHD (dev
, 0x0048, 0x0190, nv04_graph_mthd_bind_surf_zeta
);
1268 NVOBJ_CLASS(dev
, 0x0054, GR
);
1271 NVOBJ_CLASS(dev
, 0x0055, GR
);
1274 NVOBJ_CLASS(dev
, 0x0017, GR
);
1277 NVOBJ_CLASS(dev
, 0x0057, GR
);
1280 NVOBJ_CLASS(dev
, 0x0058, GR
);
1283 NVOBJ_CLASS(dev
, 0x0059, GR
);
1286 NVOBJ_CLASS(dev
, 0x005a, GR
);
1289 NVOBJ_CLASS(dev
, 0x005b, GR
);
1292 NVOBJ_CLASS(dev
, 0x001c, GR
);
1293 NVOBJ_MTHD (dev
, 0x001c, 0x0184, nv04_graph_mthd_bind_clip
);
1294 NVOBJ_MTHD (dev
, 0x001c, 0x0188, nv04_graph_mthd_bind_nv01_patt
);
1295 NVOBJ_MTHD (dev
, 0x001c, 0x018c, nv04_graph_mthd_bind_rop
);
1296 NVOBJ_MTHD (dev
, 0x001c, 0x0190, nv04_graph_mthd_bind_beta1
);
1297 NVOBJ_MTHD (dev
, 0x001c, 0x0194, nv04_graph_mthd_bind_surf_dst
);
1298 NVOBJ_MTHD (dev
, 0x001c, 0x02fc, nv04_graph_mthd_set_operation
);
1301 NVOBJ_CLASS(dev
, 0x005c, GR
);
1302 NVOBJ_MTHD (dev
, 0x005c, 0x0184, nv04_graph_mthd_bind_clip
);
1303 NVOBJ_MTHD (dev
, 0x005c, 0x0188, nv04_graph_mthd_bind_nv04_patt
);
1304 NVOBJ_MTHD (dev
, 0x005c, 0x018c, nv04_graph_mthd_bind_rop
);
1305 NVOBJ_MTHD (dev
, 0x005c, 0x0190, nv04_graph_mthd_bind_beta1
);
1306 NVOBJ_MTHD (dev
, 0x005c, 0x0194, nv04_graph_mthd_bind_beta4
);
1307 NVOBJ_MTHD (dev
, 0x005c, 0x0198, nv04_graph_mthd_bind_surf2d
);
1308 NVOBJ_MTHD (dev
, 0x005c, 0x02fc, nv04_graph_mthd_set_operation
);
1311 NVOBJ_CLASS(dev
, 0x001d, GR
);
1312 NVOBJ_MTHD (dev
, 0x001d, 0x0184, nv04_graph_mthd_bind_clip
);
1313 NVOBJ_MTHD (dev
, 0x001d, 0x0188, nv04_graph_mthd_bind_nv01_patt
);
1314 NVOBJ_MTHD (dev
, 0x001d, 0x018c, nv04_graph_mthd_bind_rop
);
1315 NVOBJ_MTHD (dev
, 0x001d, 0x0190, nv04_graph_mthd_bind_beta1
);
1316 NVOBJ_MTHD (dev
, 0x001d, 0x0194, nv04_graph_mthd_bind_surf_dst
);
1317 NVOBJ_MTHD (dev
, 0x001d, 0x02fc, nv04_graph_mthd_set_operation
);
1320 NVOBJ_CLASS(dev
, 0x005d, GR
);
1321 NVOBJ_MTHD (dev
, 0x005d, 0x0184, nv04_graph_mthd_bind_clip
);
1322 NVOBJ_MTHD (dev
, 0x005d, 0x0188, nv04_graph_mthd_bind_nv04_patt
);
1323 NVOBJ_MTHD (dev
, 0x005d, 0x018c, nv04_graph_mthd_bind_rop
);
1324 NVOBJ_MTHD (dev
, 0x005d, 0x0190, nv04_graph_mthd_bind_beta1
);
1325 NVOBJ_MTHD (dev
, 0x005d, 0x0194, nv04_graph_mthd_bind_beta4
);
1326 NVOBJ_MTHD (dev
, 0x005d, 0x0198, nv04_graph_mthd_bind_surf2d
);
1327 NVOBJ_MTHD (dev
, 0x005d, 0x02fc, nv04_graph_mthd_set_operation
);
1330 NVOBJ_CLASS(dev
, 0x001e, GR
);
1331 NVOBJ_MTHD (dev
, 0x001e, 0x0184, nv04_graph_mthd_bind_clip
);
1332 NVOBJ_MTHD (dev
, 0x001e, 0x0188, nv04_graph_mthd_bind_nv01_patt
);
1333 NVOBJ_MTHD (dev
, 0x001e, 0x018c, nv04_graph_mthd_bind_rop
);
1334 NVOBJ_MTHD (dev
, 0x001e, 0x0190, nv04_graph_mthd_bind_beta1
);
1335 NVOBJ_MTHD (dev
, 0x001e, 0x0194, nv04_graph_mthd_bind_surf_dst
);
1336 NVOBJ_MTHD (dev
, 0x001e, 0x02fc, nv04_graph_mthd_set_operation
);
1339 NVOBJ_CLASS(dev
, 0x005e, GR
);
1340 NVOBJ_MTHD (dev
, 0x005e, 0x0184, nv04_graph_mthd_bind_clip
);
1341 NVOBJ_MTHD (dev
, 0x005e, 0x0188, nv04_graph_mthd_bind_nv04_patt
);
1342 NVOBJ_MTHD (dev
, 0x005e, 0x018c, nv04_graph_mthd_bind_rop
);
1343 NVOBJ_MTHD (dev
, 0x005e, 0x0190, nv04_graph_mthd_bind_beta1
);
1344 NVOBJ_MTHD (dev
, 0x005e, 0x0194, nv04_graph_mthd_bind_beta4
);
1345 NVOBJ_MTHD (dev
, 0x005e, 0x0198, nv04_graph_mthd_bind_surf2d
);
1346 NVOBJ_MTHD (dev
, 0x005e, 0x02fc, nv04_graph_mthd_set_operation
);
1349 NVOBJ_CLASS(dev
, 0x506e, SW
);
1350 NVOBJ_MTHD (dev
, 0x506e, 0x0150, nv04_graph_mthd_set_ref
);
1351 NVOBJ_MTHD (dev
, 0x506e, 0x0500, nv04_graph_mthd_page_flip
);