/*
 * Copyright 2007 Stephane Marchesin
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include "nouveau_drm.h"
#include "nouveau_drv.h"
#include "nouveau_hw.h"
#include "nouveau_util.h"
static int nv04_graph_register(struct drm_device *dev);
static void nv04_graph_isr(struct drm_device *dev);
static uint32_t nv04_graph_ctx_regs[] = {
        NV04_PGRAPH_CTX_SWITCH1,
        NV04_PGRAPH_CTX_SWITCH2,
        NV04_PGRAPH_CTX_SWITCH3,
        NV04_PGRAPH_CTX_SWITCH4,
        NV04_PGRAPH_CTX_CACHE1,
        NV04_PGRAPH_CTX_CACHE2,
        NV04_PGRAPH_CTX_CACHE3,
        NV04_PGRAPH_CTX_CACHE4,
        NV04_PGRAPH_DMA_START_0,
        NV04_PGRAPH_DMA_START_1,
        NV04_PGRAPH_DMA_LENGTH,
        NV04_PGRAPH_DMA_PITCH,
        NV04_PGRAPH_BSWIZZLE2,
        NV04_PGRAPH_BSWIZZLE5,
        NV04_PGRAPH_PATT_COLOR0,
        NV04_PGRAPH_PATT_COLOR1,
        NV04_PGRAPH_PATT_COLORRAM+0x00,
        NV04_PGRAPH_PATT_COLORRAM+0x04,
        NV04_PGRAPH_PATT_COLORRAM+0x08,
        NV04_PGRAPH_PATT_COLORRAM+0x0c,
        NV04_PGRAPH_PATT_COLORRAM+0x10,
        NV04_PGRAPH_PATT_COLORRAM+0x14,
        NV04_PGRAPH_PATT_COLORRAM+0x18,
        NV04_PGRAPH_PATT_COLORRAM+0x1c,
        NV04_PGRAPH_PATT_COLORRAM+0x20,
        NV04_PGRAPH_PATT_COLORRAM+0x24,
        NV04_PGRAPH_PATT_COLORRAM+0x28,
        NV04_PGRAPH_PATT_COLORRAM+0x2c,
        NV04_PGRAPH_PATT_COLORRAM+0x30,
        NV04_PGRAPH_PATT_COLORRAM+0x34,
        NV04_PGRAPH_PATT_COLORRAM+0x38,
        NV04_PGRAPH_PATT_COLORRAM+0x3c,
        NV04_PGRAPH_PATT_COLORRAM+0x40,
        NV04_PGRAPH_PATT_COLORRAM+0x44,
        NV04_PGRAPH_PATT_COLORRAM+0x48,
        NV04_PGRAPH_PATT_COLORRAM+0x4c,
        NV04_PGRAPH_PATT_COLORRAM+0x50,
        NV04_PGRAPH_PATT_COLORRAM+0x54,
        NV04_PGRAPH_PATT_COLORRAM+0x58,
        NV04_PGRAPH_PATT_COLORRAM+0x5c,
        NV04_PGRAPH_PATT_COLORRAM+0x60,
        NV04_PGRAPH_PATT_COLORRAM+0x64,
        NV04_PGRAPH_PATT_COLORRAM+0x68,
        NV04_PGRAPH_PATT_COLORRAM+0x6c,
        NV04_PGRAPH_PATT_COLORRAM+0x70,
        NV04_PGRAPH_PATT_COLORRAM+0x74,
        NV04_PGRAPH_PATT_COLORRAM+0x78,
        NV04_PGRAPH_PATT_COLORRAM+0x7c,
        NV04_PGRAPH_PATT_COLORRAM+0x80,
        NV04_PGRAPH_PATT_COLORRAM+0x84,
        NV04_PGRAPH_PATT_COLORRAM+0x88,
        NV04_PGRAPH_PATT_COLORRAM+0x8c,
        NV04_PGRAPH_PATT_COLORRAM+0x90,
        NV04_PGRAPH_PATT_COLORRAM+0x94,
        NV04_PGRAPH_PATT_COLORRAM+0x98,
        NV04_PGRAPH_PATT_COLORRAM+0x9c,
        NV04_PGRAPH_PATT_COLORRAM+0xa0,
        NV04_PGRAPH_PATT_COLORRAM+0xa4,
        NV04_PGRAPH_PATT_COLORRAM+0xa8,
        NV04_PGRAPH_PATT_COLORRAM+0xac,
        NV04_PGRAPH_PATT_COLORRAM+0xb0,
        NV04_PGRAPH_PATT_COLORRAM+0xb4,
        NV04_PGRAPH_PATT_COLORRAM+0xb8,
        NV04_PGRAPH_PATT_COLORRAM+0xbc,
        NV04_PGRAPH_PATT_COLORRAM+0xc0,
        NV04_PGRAPH_PATT_COLORRAM+0xc4,
        NV04_PGRAPH_PATT_COLORRAM+0xc8,
        NV04_PGRAPH_PATT_COLORRAM+0xcc,
        NV04_PGRAPH_PATT_COLORRAM+0xd0,
        NV04_PGRAPH_PATT_COLORRAM+0xd4,
        NV04_PGRAPH_PATT_COLORRAM+0xd8,
        NV04_PGRAPH_PATT_COLORRAM+0xdc,
        NV04_PGRAPH_PATT_COLORRAM+0xe0,
        NV04_PGRAPH_PATT_COLORRAM+0xe4,
        NV04_PGRAPH_PATT_COLORRAM+0xe8,
        NV04_PGRAPH_PATT_COLORRAM+0xec,
        NV04_PGRAPH_PATT_COLORRAM+0xf0,
        NV04_PGRAPH_PATT_COLORRAM+0xf4,
        NV04_PGRAPH_PATT_COLORRAM+0xf8,
        NV04_PGRAPH_PATT_COLORRAM+0xfc,
        NV04_PGRAPH_PATTERN_SHAPE,
        NV04_PGRAPH_BETA_AND,
        NV04_PGRAPH_BETA_PREMULT,
        NV04_PGRAPH_CONTROL0,
        NV04_PGRAPH_CONTROL1,
        NV04_PGRAPH_CONTROL2,
        NV04_PGRAPH_STORED_FMT,
        NV04_PGRAPH_SOURCE_COLOR,
        NV04_PGRAPH_PASSTHRU_0,
        NV04_PGRAPH_PASSTHRU_1,
        NV04_PGRAPH_PASSTHRU_2,
        NV04_PGRAPH_DVD_COLORFMT,
        NV04_PGRAPH_SCALED_FORMAT,
        NV04_PGRAPH_MISC24_0,
        NV04_PGRAPH_MISC24_1,
        NV04_PGRAPH_MISC24_2,
};

struct graph_state {
        uint32_t nv04[ARRAY_SIZE(nv04_graph_ctx_regs)];
};
struct nouveau_channel *
nv04_graph_channel(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        int chid = dev_priv->engine.fifo.channels;

        if (nv_rd32(dev, NV04_PGRAPH_CTX_CONTROL) & 0x00010000)
                chid = nv_rd32(dev, NV04_PGRAPH_CTX_USER) >> 24;

        if (chid >= dev_priv->engine.fifo.channels)
                return NULL;

        return dev_priv->channels.ptr[chid];
}
static void
nv04_graph_context_switch(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
        struct nouveau_channel *chan = NULL;
        int chid;

        nouveau_wait_for_idle(dev);

        /* If previous context is valid, we need to save it */
        pgraph->unload_context(dev);

        /* Load context for next channel */
        chid = dev_priv->engine.fifo.channel_id(dev);
        chan = dev_priv->channels.ptr[chid];
        nv04_graph_load_context(chan);
}
static uint32_t *ctx_reg(struct graph_state *ctx, uint32_t reg)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++) {
                if (nv04_graph_ctx_regs[i] == reg)
                        return &ctx->nv04[i];
        }

        return NULL;
}
int nv04_graph_create_context(struct nouveau_channel *chan)
{
        struct graph_state *pgraph_ctx;

        NV_DEBUG(chan->dev, "nv04_graph_context_create %d\n", chan->id);

        chan->pgraph_ctx = pgraph_ctx = kzalloc(sizeof(*pgraph_ctx),
                                                GFP_KERNEL);
        if (pgraph_ctx == NULL)
                return -ENOMEM;

        *ctx_reg(pgraph_ctx, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;

        return 0;
}
void nv04_graph_destroy_context(struct nouveau_channel *chan)
{
        struct drm_device *dev = chan->dev;
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
        struct graph_state *pgraph_ctx = chan->pgraph_ctx;
        unsigned long flags;

        spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
        pgraph->fifo_access(dev, false);

        /* Unload the context if it's the currently active one */
        if (pgraph->channel(dev) == chan)
                pgraph->unload_context(dev);

        /* Free the context resources */
        kfree(pgraph_ctx);
        chan->pgraph_ctx = NULL;

        pgraph->fifo_access(dev, true);
        spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);
}
int nv04_graph_load_context(struct nouveau_channel *chan)
{
        struct drm_device *dev = chan->dev;
        struct graph_state *pgraph_ctx = chan->pgraph_ctx;
        uint32_t tmp;
        int i;

        for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
                nv_wr32(dev, nv04_graph_ctx_regs[i], pgraph_ctx->nv04[i]);

        nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10010100);

        tmp  = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
        nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp | chan->id << 24);

        tmp = nv_rd32(dev, NV04_PGRAPH_FFINTFC_ST2);
        nv_wr32(dev, NV04_PGRAPH_FFINTFC_ST2, tmp & 0x000fffff);

        return 0;
}
int
nv04_graph_unload_context(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
        struct nouveau_channel *chan = NULL;
        struct graph_state *ctx;
        uint32_t tmp;
        int i;

        chan = pgraph->channel(dev);
        if (!chan)
                return 0;
        ctx = chan->pgraph_ctx;

        for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
                ctx->nv04[i] = nv_rd32(dev, nv04_graph_ctx_regs[i]);

        nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
        tmp  = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
        tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
        nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp);
        return 0;
}
int nv04_graph_init(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        uint32_t tmp;
        int ret;

        nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
                        ~NV_PMC_ENABLE_PGRAPH);
        nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
                         NV_PMC_ENABLE_PGRAPH);

        ret = nv04_graph_register(dev);
        if (ret)
                return ret;

        /* Enable PGRAPH interrupts */
        nouveau_irq_register(dev, 12, nv04_graph_isr);
        nv_wr32(dev, NV03_PGRAPH_INTR, 0xFFFFFFFF);
        nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

        nv_wr32(dev, NV04_PGRAPH_VALID1, 0);
        nv_wr32(dev, NV04_PGRAPH_VALID2, 0);
        /*nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x000001FF);
          nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
        nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x1231c000);
        /*1231C000 blob, 001 haiku*/
        /*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
        nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x72111100);
        /*0x72111100 blob, 01 haiku*/
        /*nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
        nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f071);

        /*nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
        nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
        /*haiku and blob 10d4*/

        nv_wr32(dev, NV04_PGRAPH_STATE, 0xFFFFFFFF);
        nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10000100);
        tmp  = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
        tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
        nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp);

        /* These don't belong here, they're part of a per-channel context */
        nv_wr32(dev, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
        nv_wr32(dev, NV04_PGRAPH_BETA_AND, 0xFFFFFFFF);

        return 0;
}
void nv04_graph_takedown(struct drm_device *dev)
{
        nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
        nouveau_irq_unregister(dev, 12);
}
void
nv04_graph_fifo_access(struct drm_device *dev, bool enabled)
{
        if (enabled)
                nv_wr32(dev, NV04_PGRAPH_FIFO,
                        nv_rd32(dev, NV04_PGRAPH_FIFO) | 1);
        else
                nv_wr32(dev, NV04_PGRAPH_FIFO,
                        nv_rd32(dev, NV04_PGRAPH_FIFO) & ~1);
}
static int
nv04_graph_mthd_set_ref(struct nouveau_channel *chan,
                        u32 class, u32 mthd, u32 data)
{
        atomic_set(&chan->fence.last_sequence_irq, data);
        return 0;
}
static int
nv04_graph_mthd_page_flip(struct nouveau_channel *chan,
                          u32 class, u32 mthd, u32 data)
{
        struct drm_device *dev = chan->dev;
        struct nouveau_page_flip_state s;

        if (!nouveau_finish_page_flip(chan, &s))
                nv_set_crtc_base(dev, s.crtc,
                                 s.offset + s.y * s.pitch + s.x * s.bpp / 8);

        return 0;
}
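/*
 * Note on the arithmetic above: the CRTC base is a byte offset into the
 * framebuffer, so the x/y position from nouveau_finish_page_flip() is
 * converted with offset + y * pitch + x * bpp / 8 (for example, x = 8 at
 * 32 bpp advances the base by 32 bytes). This reading of the fields
 * (pitch in bytes, bpp in bits) is inferred from how they are used here,
 * not from a separate specification.
 */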
/*
 * Software methods, why they are needed, and how they all work:
 *
 * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
 * 2d engine settings are kept inside the grobjs themselves. The grobjs are
 * 3 words long on both. grobj format on NV04 is:
 *
 * word 0:
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surf3d valid [for tex_tri and multitex_tri only]
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * On NV05, it's:
 *
 * word 0:
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bits 20-22: dither mode
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surface_dst/surface_color/surf2d/surf3d valid
 *  - bit 26: surface_src/surface_zeta valid
 *  - bit 27: pattern valid
 *  - bit 28: rop valid
 *  - bit 29: beta1 valid
 *  - bit 30: beta4 valid
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * NV05 will set/unset the relevant valid bits when you poke the relevant
 * object-binding methods with object of the proper type, or with the NULL
 * type. It'll only allow rendering using the grobj if all needed objects
 * are bound. The needed set of objects depends on selected operation: for
 * example rop object is needed by ROP_AND, but not by SRCCOPY_AND.
 *
 * NV04 doesn't have these methods implemented at all, and doesn't have the
 * relevant bits in grobj. Instead, it'll allow rendering whenever bit 24
 * is set. So we have to emulate them in software, internally keeping the
 * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04,
 * but the last word isn't actually used for anything, we abuse it for this
 * purpose.
 *
 * Actually, NV05 can optionally check bit 24 too, but we disable this since
 * there's no use for it.
 *
 * For unknown reasons, NV04 implements surf3d binding in hardware as an
 * exception. Also for unknown reasons, NV04 doesn't implement the clipping
 * methods on the surf3d object, so we have to emulate them too.
 */
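
/*
 * For quick reference, the masks used by the helpers below line up with the
 * bits described above. This mapping is inferred from the comment and from
 * how the handlers in this file use the masks, not from an independent
 * hardware document:
 *
 *   grobj word 0 (set_ctx1):    0x00001000 color key,    0x00002000 clip rect,
 *                               0x00004000 swizzled dst, 0x00038000 2d op,
 *                               0x01000000 patch valid
 *   grobj word 3 (set_ctx_val): 0x02000000 surf2d/dst/color, 0x04000000 src/zeta,
 *                               0x08000000 pattern, 0x10000000 rop,
 *                               0x20000000 beta1,   0x40000000 beta4
 */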
static void
nv04_graph_set_ctx1(struct nouveau_channel *chan, u32 mask, u32 value)
{
        struct drm_device *dev = chan->dev;
        u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4;
        int subc = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
        u32 tmp;

        tmp  = nv_ri32(dev, instance);
        tmp &= ~mask;
        tmp |= value;

        nv_wi32(dev, instance, tmp);
        nv_wr32(dev, NV04_PGRAPH_CTX_SWITCH1, tmp);
        nv_wr32(dev, NV04_PGRAPH_CTX_CACHE1 + (subc<<2), tmp);
}
static void
nv04_graph_set_ctx_val(struct nouveau_channel *chan, u32 mask, u32 value)
{
        struct drm_device *dev = chan->dev;
        u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4;
        u32 tmp, ctx1;
        int class, op, valid = 1;

        ctx1 = nv_ri32(dev, instance);
        class = ctx1 & 0xff;
        op = (ctx1 >> 15) & 7;

        tmp = nv_ri32(dev, instance + 0xc);
        tmp &= ~mask;
        tmp |= value;
        nv_wi32(dev, instance + 0xc, tmp);

        /* check for valid surf2d/surf_dst/surf_color */
        if (!(tmp & 0x02000000))
                valid = 0;
        /* check for valid surf_src/surf_zeta */
        if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
                valid = 0;
        switch (op) {
        /* SRCCOPY_AND, SRCCOPY: no extra objects required */
        case 0:
        case 3:
                break;
        /* ROP_AND: requires pattern and rop */
        case 1:
                if (!(tmp & 0x18000000))
                        valid = 0;
                break;
        /* BLEND_AND: requires beta1 */
        case 2:
                if (!(tmp & 0x20000000))
                        valid = 0;
                break;
        /* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
        case 4:
        case 5:
                if (!(tmp & 0x40000000))
                        valid = 0;
                break;
        }

        nv04_graph_set_ctx1(chan, 0x01000000, valid << 24);
}
static int
nv04_graph_mthd_set_operation(struct nouveau_channel *chan,
                              u32 class, u32 mthd, u32 data)
{
        /* Old versions of the objects only accept first three operations. */
        if (data > 2 && class < 0x40)
                return 1;
        nv04_graph_set_ctx1(chan, 0x00038000, data << 15);
        /* changing operation changes set of objects needed for validation */
        nv04_graph_set_ctx_val(chan, 0, 0);
        return 0;
}
static int
nv04_graph_mthd_surf3d_clip_h(struct nouveau_channel *chan,
                              u32 class, u32 mthd, u32 data)
{
        uint32_t min = data & 0xffff, max;
        uint32_t w = data >> 16;

        if (w & 0x8000)
                /* yes, it accepts negative for some reason. */
                w |= 0xffff0000;
        max = min + w;

        nv_wr32(chan->dev, 0x40053c, min);
        nv_wr32(chan->dev, 0x400544, max);
        return 0;
}
static int
nv04_graph_mthd_surf3d_clip_v(struct nouveau_channel *chan,
                              u32 class, u32 mthd, u32 data)
{
        uint32_t min = data & 0xffff, max;
        uint32_t w = data >> 16;

        if (w & 0x8000)
                /* yes, it accepts negative for some reason. */
                w |= 0xffff0000;
        max = min + w;

        nv_wr32(chan->dev, 0x400540, min);
        nv_wr32(chan->dev, 0x400548, max);
        return 0;
}
static int
nv04_graph_mthd_bind_surf2d(struct nouveau_channel *chan,
                            u32 class, u32 mthd, u32 data)
{
        switch (nv_ri32(chan->dev, data << 4) & 0xff) {
        case 0x30:
                nv04_graph_set_ctx1(chan, 0x00004000, 0);
                nv04_graph_set_ctx_val(chan, 0x02000000, 0);
                return 0;
        case 0x42:
                nv04_graph_set_ctx1(chan, 0x00004000, 0);
                nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
                return 0;
        }
        return 1;
}

static int
nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_channel *chan,
                                    u32 class, u32 mthd, u32 data)
{
        switch (nv_ri32(chan->dev, data << 4) & 0xff) {
        case 0x30:
                nv04_graph_set_ctx1(chan, 0x00004000, 0);
                nv04_graph_set_ctx_val(chan, 0x02000000, 0);
                return 0;
        case 0x42:
                nv04_graph_set_ctx1(chan, 0x00004000, 0);
                nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
                return 0;
        case 0x52:
                nv04_graph_set_ctx1(chan, 0x00004000, 0x00004000);
                nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
                return 0;
        }
        return 1;
}

static int
nv04_graph_mthd_bind_nv01_patt(struct nouveau_channel *chan,
                               u32 class, u32 mthd, u32 data)
{
        switch (nv_ri32(chan->dev, data << 4) & 0xff) {
        case 0x30:
                nv04_graph_set_ctx_val(chan, 0x08000000, 0);
                return 0;
        case 0x18:
                nv04_graph_set_ctx_val(chan, 0x08000000, 0x08000000);
                return 0;
        }
        return 1;
}

static int
nv04_graph_mthd_bind_nv04_patt(struct nouveau_channel *chan,
                               u32 class, u32 mthd, u32 data)
{
        switch (nv_ri32(chan->dev, data << 4) & 0xff) {
        case 0x30:
                nv04_graph_set_ctx_val(chan, 0x08000000, 0);
                return 0;
        case 0x44:
                nv04_graph_set_ctx_val(chan, 0x08000000, 0x08000000);
                return 0;
        }
        return 1;
}

static int
nv04_graph_mthd_bind_rop(struct nouveau_channel *chan,
                         u32 class, u32 mthd, u32 data)
{
        switch (nv_ri32(chan->dev, data << 4) & 0xff) {
        case 0x30:
                nv04_graph_set_ctx_val(chan, 0x10000000, 0);
                return 0;
        case 0x43:
                nv04_graph_set_ctx_val(chan, 0x10000000, 0x10000000);
                return 0;
        }
        return 1;
}

static int
nv04_graph_mthd_bind_beta1(struct nouveau_channel *chan,
                           u32 class, u32 mthd, u32 data)
{
        switch (nv_ri32(chan->dev, data << 4) & 0xff) {
        case 0x30:
                nv04_graph_set_ctx_val(chan, 0x20000000, 0);
                return 0;
        case 0x12:
                nv04_graph_set_ctx_val(chan, 0x20000000, 0x20000000);
                return 0;
        }
        return 1;
}

static int
nv04_graph_mthd_bind_beta4(struct nouveau_channel *chan,
                           u32 class, u32 mthd, u32 data)
{
        switch (nv_ri32(chan->dev, data << 4) & 0xff) {
        case 0x30:
                nv04_graph_set_ctx_val(chan, 0x40000000, 0);
                return 0;
        case 0x72:
                nv04_graph_set_ctx_val(chan, 0x40000000, 0x40000000);
                return 0;
        }
        return 1;
}

static int
nv04_graph_mthd_bind_surf_dst(struct nouveau_channel *chan,
                              u32 class, u32 mthd, u32 data)
{
        switch (nv_ri32(chan->dev, data << 4) & 0xff) {
        case 0x30:
                nv04_graph_set_ctx_val(chan, 0x02000000, 0);
                return 0;
        case 0x58:
                nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
                return 0;
        }
        return 1;
}

static int
nv04_graph_mthd_bind_surf_src(struct nouveau_channel *chan,
                              u32 class, u32 mthd, u32 data)
{
        switch (nv_ri32(chan->dev, data << 4) & 0xff) {
        case 0x30:
                nv04_graph_set_ctx_val(chan, 0x04000000, 0);
                return 0;
        case 0x59:
                nv04_graph_set_ctx_val(chan, 0x04000000, 0x04000000);
                return 0;
        }
        return 1;
}

static int
nv04_graph_mthd_bind_surf_color(struct nouveau_channel *chan,
                                u32 class, u32 mthd, u32 data)
{
        switch (nv_ri32(chan->dev, data << 4) & 0xff) {
        case 0x30:
                nv04_graph_set_ctx_val(chan, 0x02000000, 0);
                return 0;
        case 0x5a:
                nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
                return 0;
        }
        return 1;
}

static int
nv04_graph_mthd_bind_surf_zeta(struct nouveau_channel *chan,
                               u32 class, u32 mthd, u32 data)
{
        switch (nv_ri32(chan->dev, data << 4) & 0xff) {
        case 0x30:
                nv04_graph_set_ctx_val(chan, 0x04000000, 0);
                return 0;
        case 0x5b:
                nv04_graph_set_ctx_val(chan, 0x04000000, 0x04000000);
                return 0;
        }
        return 1;
}

static int
nv04_graph_mthd_bind_clip(struct nouveau_channel *chan,
                          u32 class, u32 mthd, u32 data)
{
        switch (nv_ri32(chan->dev, data << 4) & 0xff) {
        case 0x30:
                nv04_graph_set_ctx1(chan, 0x2000, 0);
                return 0;
        case 0x19:
                nv04_graph_set_ctx1(chan, 0x2000, 0x2000);
                return 0;
        }
        return 1;
}

static int
nv04_graph_mthd_bind_chroma(struct nouveau_channel *chan,
                            u32 class, u32 mthd, u32 data)
{
        switch (nv_ri32(chan->dev, data << 4) & 0xff) {
        case 0x30:
                nv04_graph_set_ctx1(chan, 0x1000, 0);
                return 0;
        /* Yes, for some reason even the old versions of objects
         * accept 0x57 and not 0x17. Consistency be damned.
         */
        case 0x57:
                nv04_graph_set_ctx1(chan, 0x1000, 0x1000);
                return 0;
        }
        return 1;
}
static int
nv04_graph_register(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;

        if (dev_priv->engine.graph.registered)
                return 0;

        NVOBJ_CLASS(dev, 0x0038, GR);

        NVOBJ_CLASS(dev, 0x0039, GR);

        NVOBJ_CLASS(dev, 0x004b, GR);
        NVOBJ_MTHD (dev, 0x004b, 0x0184, nv04_graph_mthd_bind_nv01_patt);
        NVOBJ_MTHD (dev, 0x004b, 0x0188, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x004b, 0x018c, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x004b, 0x0190, nv04_graph_mthd_bind_surf_dst);
        NVOBJ_MTHD (dev, 0x004b, 0x02fc, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x004a, GR);
        NVOBJ_MTHD (dev, 0x004a, 0x0188, nv04_graph_mthd_bind_nv04_patt);
        NVOBJ_MTHD (dev, 0x004a, 0x018c, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x004a, 0x0190, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x004a, 0x0194, nv04_graph_mthd_bind_beta4);
        NVOBJ_MTHD (dev, 0x004a, 0x0198, nv04_graph_mthd_bind_surf2d);
        NVOBJ_MTHD (dev, 0x004a, 0x02fc, nv04_graph_mthd_set_operation);

        /* nv01 imageblit */
        NVOBJ_CLASS(dev, 0x001f, GR);
        NVOBJ_MTHD (dev, 0x001f, 0x0184, nv04_graph_mthd_bind_chroma);
        NVOBJ_MTHD (dev, 0x001f, 0x0188, nv04_graph_mthd_bind_clip);
        NVOBJ_MTHD (dev, 0x001f, 0x018c, nv04_graph_mthd_bind_nv01_patt);
        NVOBJ_MTHD (dev, 0x001f, 0x0190, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x001f, 0x0194, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x001f, 0x0198, nv04_graph_mthd_bind_surf_dst);
        NVOBJ_MTHD (dev, 0x001f, 0x019c, nv04_graph_mthd_bind_surf_src);
        NVOBJ_MTHD (dev, 0x001f, 0x02fc, nv04_graph_mthd_set_operation);

        /* nv04 imageblit */
        NVOBJ_CLASS(dev, 0x005f, GR);
        NVOBJ_MTHD (dev, 0x005f, 0x0184, nv04_graph_mthd_bind_chroma);
        NVOBJ_MTHD (dev, 0x005f, 0x0188, nv04_graph_mthd_bind_clip);
        NVOBJ_MTHD (dev, 0x005f, 0x018c, nv04_graph_mthd_bind_nv04_patt);
        NVOBJ_MTHD (dev, 0x005f, 0x0190, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x005f, 0x0194, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x005f, 0x0198, nv04_graph_mthd_bind_beta4);
        NVOBJ_MTHD (dev, 0x005f, 0x019c, nv04_graph_mthd_bind_surf2d);
        NVOBJ_MTHD (dev, 0x005f, 0x02fc, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x0060, GR);
        NVOBJ_MTHD (dev, 0x0060, 0x0188, nv04_graph_mthd_bind_chroma);
        NVOBJ_MTHD (dev, 0x0060, 0x018c, nv04_graph_mthd_bind_clip);
        NVOBJ_MTHD (dev, 0x0060, 0x0190, nv04_graph_mthd_bind_nv04_patt);
        NVOBJ_MTHD (dev, 0x0060, 0x0194, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x0060, 0x0198, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x0060, 0x019c, nv04_graph_mthd_bind_beta4);
        NVOBJ_MTHD (dev, 0x0060, 0x01a0, nv04_graph_mthd_bind_surf2d_swzsurf);
        NVOBJ_MTHD (dev, 0x0060, 0x03e4, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x0064, GR);

        NVOBJ_CLASS(dev, 0x0021, GR);
        NVOBJ_MTHD (dev, 0x0021, 0x0184, nv04_graph_mthd_bind_chroma);
        NVOBJ_MTHD (dev, 0x0021, 0x0188, nv04_graph_mthd_bind_clip);
        NVOBJ_MTHD (dev, 0x0021, 0x018c, nv04_graph_mthd_bind_nv01_patt);
        NVOBJ_MTHD (dev, 0x0021, 0x0190, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x0021, 0x0194, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x0021, 0x0198, nv04_graph_mthd_bind_surf_dst);
        NVOBJ_MTHD (dev, 0x0021, 0x02fc, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x0061, GR);
        NVOBJ_MTHD (dev, 0x0061, 0x0184, nv04_graph_mthd_bind_chroma);
        NVOBJ_MTHD (dev, 0x0061, 0x0188, nv04_graph_mthd_bind_clip);
        NVOBJ_MTHD (dev, 0x0061, 0x018c, nv04_graph_mthd_bind_nv04_patt);
        NVOBJ_MTHD (dev, 0x0061, 0x0190, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x0061, 0x0194, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x0061, 0x0198, nv04_graph_mthd_bind_beta4);
        NVOBJ_MTHD (dev, 0x0061, 0x019c, nv04_graph_mthd_bind_surf2d);
        NVOBJ_MTHD (dev, 0x0061, 0x02fc, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x0065, GR);

        NVOBJ_CLASS(dev, 0x0036, GR);
        NVOBJ_MTHD (dev, 0x0036, 0x0184, nv04_graph_mthd_bind_chroma);
        NVOBJ_MTHD (dev, 0x0036, 0x0188, nv04_graph_mthd_bind_nv01_patt);
        NVOBJ_MTHD (dev, 0x0036, 0x018c, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x0036, 0x0190, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x0036, 0x0194, nv04_graph_mthd_bind_surf_dst);
        NVOBJ_MTHD (dev, 0x0036, 0x02fc, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x0076, GR);
        NVOBJ_MTHD (dev, 0x0076, 0x0184, nv04_graph_mthd_bind_chroma);
        NVOBJ_MTHD (dev, 0x0076, 0x0188, nv04_graph_mthd_bind_nv04_patt);
        NVOBJ_MTHD (dev, 0x0076, 0x018c, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x0076, 0x0190, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x0076, 0x0194, nv04_graph_mthd_bind_beta4);
        NVOBJ_MTHD (dev, 0x0076, 0x0198, nv04_graph_mthd_bind_surf2d);
        NVOBJ_MTHD (dev, 0x0076, 0x02fc, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x0066, GR);

        NVOBJ_CLASS(dev, 0x0037, GR);
        NVOBJ_MTHD (dev, 0x0037, 0x0188, nv04_graph_mthd_bind_nv01_patt);
        NVOBJ_MTHD (dev, 0x0037, 0x018c, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x0037, 0x0190, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x0037, 0x0194, nv04_graph_mthd_bind_surf_dst);
        NVOBJ_MTHD (dev, 0x0037, 0x0304, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x0077, GR);
        NVOBJ_MTHD (dev, 0x0077, 0x0188, nv04_graph_mthd_bind_nv04_patt);
        NVOBJ_MTHD (dev, 0x0077, 0x018c, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x0077, 0x0190, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x0077, 0x0194, nv04_graph_mthd_bind_beta4);
        NVOBJ_MTHD (dev, 0x0077, 0x0198, nv04_graph_mthd_bind_surf2d_swzsurf);
        NVOBJ_MTHD (dev, 0x0077, 0x0304, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x0030, GR);

        NVOBJ_CLASS(dev, 0x0042, GR);

        NVOBJ_CLASS(dev, 0x0043, GR);

        NVOBJ_CLASS(dev, 0x0012, GR);

        NVOBJ_CLASS(dev, 0x0072, GR);

        NVOBJ_CLASS(dev, 0x0019, GR);

        NVOBJ_CLASS(dev, 0x0018, GR);

        NVOBJ_CLASS(dev, 0x0044, GR);

        NVOBJ_CLASS(dev, 0x0052, GR);

        NVOBJ_CLASS(dev, 0x0053, GR);
        NVOBJ_MTHD (dev, 0x0053, 0x02f8, nv04_graph_mthd_surf3d_clip_h);
        NVOBJ_MTHD (dev, 0x0053, 0x02fc, nv04_graph_mthd_surf3d_clip_v);

        NVOBJ_CLASS(dev, 0x0048, GR);
        NVOBJ_MTHD (dev, 0x0048, 0x0188, nv04_graph_mthd_bind_clip);
        NVOBJ_MTHD (dev, 0x0048, 0x018c, nv04_graph_mthd_bind_surf_color);
        NVOBJ_MTHD (dev, 0x0048, 0x0190, nv04_graph_mthd_bind_surf_zeta);

        NVOBJ_CLASS(dev, 0x0054, GR);

        NVOBJ_CLASS(dev, 0x0055, GR);

        NVOBJ_CLASS(dev, 0x0017, GR);

        NVOBJ_CLASS(dev, 0x0057, GR);

        NVOBJ_CLASS(dev, 0x0058, GR);

        NVOBJ_CLASS(dev, 0x0059, GR);

        NVOBJ_CLASS(dev, 0x005a, GR);

        NVOBJ_CLASS(dev, 0x005b, GR);

        NVOBJ_CLASS(dev, 0x001c, GR);
        NVOBJ_MTHD (dev, 0x001c, 0x0184, nv04_graph_mthd_bind_clip);
        NVOBJ_MTHD (dev, 0x001c, 0x0188, nv04_graph_mthd_bind_nv01_patt);
        NVOBJ_MTHD (dev, 0x001c, 0x018c, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x001c, 0x0190, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x001c, 0x0194, nv04_graph_mthd_bind_surf_dst);
        NVOBJ_MTHD (dev, 0x001c, 0x02fc, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x005c, GR);
        NVOBJ_MTHD (dev, 0x005c, 0x0184, nv04_graph_mthd_bind_clip);
        NVOBJ_MTHD (dev, 0x005c, 0x0188, nv04_graph_mthd_bind_nv04_patt);
        NVOBJ_MTHD (dev, 0x005c, 0x018c, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x005c, 0x0190, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x005c, 0x0194, nv04_graph_mthd_bind_beta4);
        NVOBJ_MTHD (dev, 0x005c, 0x0198, nv04_graph_mthd_bind_surf2d);
        NVOBJ_MTHD (dev, 0x005c, 0x02fc, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x001d, GR);
        NVOBJ_MTHD (dev, 0x001d, 0x0184, nv04_graph_mthd_bind_clip);
        NVOBJ_MTHD (dev, 0x001d, 0x0188, nv04_graph_mthd_bind_nv01_patt);
        NVOBJ_MTHD (dev, 0x001d, 0x018c, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x001d, 0x0190, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x001d, 0x0194, nv04_graph_mthd_bind_surf_dst);
        NVOBJ_MTHD (dev, 0x001d, 0x02fc, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x005d, GR);
        NVOBJ_MTHD (dev, 0x005d, 0x0184, nv04_graph_mthd_bind_clip);
        NVOBJ_MTHD (dev, 0x005d, 0x0188, nv04_graph_mthd_bind_nv04_patt);
        NVOBJ_MTHD (dev, 0x005d, 0x018c, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x005d, 0x0190, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x005d, 0x0194, nv04_graph_mthd_bind_beta4);
        NVOBJ_MTHD (dev, 0x005d, 0x0198, nv04_graph_mthd_bind_surf2d);
        NVOBJ_MTHD (dev, 0x005d, 0x02fc, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x001e, GR);
        NVOBJ_MTHD (dev, 0x001e, 0x0184, nv04_graph_mthd_bind_clip);
        NVOBJ_MTHD (dev, 0x001e, 0x0188, nv04_graph_mthd_bind_nv01_patt);
        NVOBJ_MTHD (dev, 0x001e, 0x018c, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x001e, 0x0190, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x001e, 0x0194, nv04_graph_mthd_bind_surf_dst);
        NVOBJ_MTHD (dev, 0x001e, 0x02fc, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x005e, GR);
        NVOBJ_MTHD (dev, 0x005e, 0x0184, nv04_graph_mthd_bind_clip);
        NVOBJ_MTHD (dev, 0x005e, 0x0188, nv04_graph_mthd_bind_nv04_patt);
        NVOBJ_MTHD (dev, 0x005e, 0x018c, nv04_graph_mthd_bind_rop);
        NVOBJ_MTHD (dev, 0x005e, 0x0190, nv04_graph_mthd_bind_beta1);
        NVOBJ_MTHD (dev, 0x005e, 0x0194, nv04_graph_mthd_bind_beta4);
        NVOBJ_MTHD (dev, 0x005e, 0x0198, nv04_graph_mthd_bind_surf2d);
        NVOBJ_MTHD (dev, 0x005e, 0x02fc, nv04_graph_mthd_set_operation);

        NVOBJ_CLASS(dev, 0x506e, SW);
        NVOBJ_MTHD (dev, 0x506e, 0x0150, nv04_graph_mthd_set_ref);
        NVOBJ_MTHD (dev, 0x506e, 0x0500, nv04_graph_mthd_page_flip);

        dev_priv->engine.graph.registered = true;
        return 0;
}
static struct nouveau_bitfield nv04_graph_intr[] = {
        { NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
        {}
};
static struct nouveau_bitfield nv04_graph_nstatus[] =
{
        { NV04_PGRAPH_NSTATUS_STATE_IN_USE,       "STATE_IN_USE" },
        { NV04_PGRAPH_NSTATUS_INVALID_STATE,      "INVALID_STATE" },
        { NV04_PGRAPH_NSTATUS_BAD_ARGUMENT,       "BAD_ARGUMENT" },
        { NV04_PGRAPH_NSTATUS_PROTECTION_FAULT,   "PROTECTION_FAULT" },
        {}
};
struct nouveau_bitfield nv04_graph_nsource[] =
{
        { NV03_PGRAPH_NSOURCE_NOTIFICATION,       "NOTIFICATION" },
        { NV03_PGRAPH_NSOURCE_DATA_ERROR,         "DATA_ERROR" },
        { NV03_PGRAPH_NSOURCE_PROTECTION_ERROR,   "PROTECTION_ERROR" },
        { NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION,    "RANGE_EXCEPTION" },
        { NV03_PGRAPH_NSOURCE_LIMIT_COLOR,        "LIMIT_COLOR" },
        { NV03_PGRAPH_NSOURCE_LIMIT_ZETA,         "LIMIT_ZETA" },
        { NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD,       "ILLEGAL_MTHD" },
        { NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION,   "DMA_R_PROTECTION" },
        { NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION,   "DMA_W_PROTECTION" },
        { NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION,   "FORMAT_EXCEPTION" },
        { NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION,    "PATCH_EXCEPTION" },
        { NV03_PGRAPH_NSOURCE_STATE_INVALID,      "STATE_INVALID" },
        { NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY,      "DOUBLE_NOTIFY" },
        { NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE,      "NOTIFY_IN_USE" },
        { NV03_PGRAPH_NSOURCE_METHOD_CNT,         "METHOD_CNT" },
        { NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION,   "BFR_NOTIFICATION" },
        { NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
        { NV03_PGRAPH_NSOURCE_DMA_WIDTH_A,        "DMA_WIDTH_A" },
        { NV03_PGRAPH_NSOURCE_DMA_WIDTH_B,        "DMA_WIDTH_B" },
        {}
};
static void
nv04_graph_isr(struct drm_device *dev)
{
        u32 stat;

        while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
                u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
                u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
                u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
                u32 chid = (addr & 0x0f000000) >> 24;
                u32 subc = (addr & 0x0000e000) >> 13;
                u32 mthd = (addr & 0x00001ffc);
                u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
                u32 class = nv_rd32(dev, 0x400180 + subc * 4) & 0xff;
                u32 show = stat;

                if (stat & NV_PGRAPH_INTR_NOTIFY) {
                        if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
                                if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
                                        show &= ~NV_PGRAPH_INTR_NOTIFY;
                        }
                }

                if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
                        nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
                        stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
                        show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
                        nv04_graph_context_switch(dev);
                }

                nv_wr32(dev, NV03_PGRAPH_INTR, stat);
                nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);

                if (show && nouveau_ratelimit()) {
                        NV_INFO(dev, "PGRAPH -");
                        nouveau_bitfield_print(nv04_graph_intr, show);
                        printk(" nsource:");
                        nouveau_bitfield_print(nv04_graph_nsource, nsource);
                        printk(" nstatus:");
                        nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
                        printk("\n");
                        NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
                                     "mthd 0x%04x data 0x%08x\n",
                                chid, subc, class, mthd, data);
                }
        }
}