2 * Copyright 2007 Matthieu CASTET <castet.matthieu@free.fr>
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice (including the next
13 * paragraph) shall be included in all copies or substantial portions of the
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
27 #include "nouveau_drm.h"
28 #include "nouveau_drv.h"
29 #include "nouveau_util.h"
/* Forward declarations: defined later in this file but used earlier. */
static int  nv10_graph_register(struct drm_device *);
static void nv10_graph_isr(struct drm_device *);

/* Number of PFIFO channels on NV10-class hardware. */
#define NV10_FIFO_NUMBER 32
/*
 * Software copy of the PGRAPH 3D pipe state.  Each field holds the
 * 32-bit words read back through NV10_PGRAPH_PIPE_DATA for one
 * PIPE_ADDRESS region; the field name encodes the region's base
 * offset, the array length its size in bytes / 4.
 */
struct pipe_state {
	uint32_t pipe_0x0000[0x040/4];
	uint32_t pipe_0x0040[0x010/4];
	uint32_t pipe_0x0200[0x0c0/4];
	uint32_t pipe_0x4400[0x080/4];
	uint32_t pipe_0x6400[0x3b0/4];
	uint32_t pipe_0x6800[0x2f0/4];
	uint32_t pipe_0x6c00[0x030/4];
	uint32_t pipe_0x7000[0x130/4];
	uint32_t pipe_0x7400[0x0c0/4];
	uint32_t pipe_0x7800[0x0c0/4];
};
/*
 * nv10_graph_ctx_regs: list of PGRAPH MMIO registers that are saved to /
 * restored from the per-channel software context on a context switch
 * (looked up by nv10_graph_ctx_regs_find_offset()).
 *
 * NOTE(review): this extraction elided several runs of raw register
 * offsets (the "N values from ..." annotations below mark where such
 * runs belong) as well as the closing brace of the initializer —
 * restore the missing entries from the pristine file before building.
 */
49 static int nv10_graph_ctx_regs
[] = {
50 NV10_PGRAPH_CTX_SWITCH(0),
51 NV10_PGRAPH_CTX_SWITCH(1),
52 NV10_PGRAPH_CTX_SWITCH(2),
53 NV10_PGRAPH_CTX_SWITCH(3),
54 NV10_PGRAPH_CTX_SWITCH(4),
55 NV10_PGRAPH_CTX_CACHE(0, 0),
56 NV10_PGRAPH_CTX_CACHE(0, 1),
57 NV10_PGRAPH_CTX_CACHE(0, 2),
58 NV10_PGRAPH_CTX_CACHE(0, 3),
59 NV10_PGRAPH_CTX_CACHE(0, 4),
60 NV10_PGRAPH_CTX_CACHE(1, 0),
61 NV10_PGRAPH_CTX_CACHE(1, 1),
62 NV10_PGRAPH_CTX_CACHE(1, 2),
63 NV10_PGRAPH_CTX_CACHE(1, 3),
64 NV10_PGRAPH_CTX_CACHE(1, 4),
65 NV10_PGRAPH_CTX_CACHE(2, 0),
66 NV10_PGRAPH_CTX_CACHE(2, 1),
67 NV10_PGRAPH_CTX_CACHE(2, 2),
68 NV10_PGRAPH_CTX_CACHE(2, 3),
69 NV10_PGRAPH_CTX_CACHE(2, 4),
70 NV10_PGRAPH_CTX_CACHE(3, 0),
71 NV10_PGRAPH_CTX_CACHE(3, 1),
72 NV10_PGRAPH_CTX_CACHE(3, 2),
73 NV10_PGRAPH_CTX_CACHE(3, 3),
74 NV10_PGRAPH_CTX_CACHE(3, 4),
75 NV10_PGRAPH_CTX_CACHE(4, 0),
76 NV10_PGRAPH_CTX_CACHE(4, 1),
77 NV10_PGRAPH_CTX_CACHE(4, 2),
78 NV10_PGRAPH_CTX_CACHE(4, 3),
79 NV10_PGRAPH_CTX_CACHE(4, 4),
80 NV10_PGRAPH_CTX_CACHE(5, 0),
81 NV10_PGRAPH_CTX_CACHE(5, 1),
82 NV10_PGRAPH_CTX_CACHE(5, 2),
83 NV10_PGRAPH_CTX_CACHE(5, 3),
84 NV10_PGRAPH_CTX_CACHE(5, 4),
85 NV10_PGRAPH_CTX_CACHE(6, 0),
86 NV10_PGRAPH_CTX_CACHE(6, 1),
87 NV10_PGRAPH_CTX_CACHE(6, 2),
88 NV10_PGRAPH_CTX_CACHE(6, 3),
89 NV10_PGRAPH_CTX_CACHE(6, 4),
90 NV10_PGRAPH_CTX_CACHE(7, 0),
91 NV10_PGRAPH_CTX_CACHE(7, 1),
92 NV10_PGRAPH_CTX_CACHE(7, 2),
93 NV10_PGRAPH_CTX_CACHE(7, 3),
94 NV10_PGRAPH_CTX_CACHE(7, 4),
96 NV04_PGRAPH_DMA_START_0
,
97 NV04_PGRAPH_DMA_START_1
,
98 NV04_PGRAPH_DMA_LENGTH
,
100 NV10_PGRAPH_DMA_PITCH
,
101 NV04_PGRAPH_BOFFSET0
,
104 NV04_PGRAPH_BOFFSET1
,
107 NV04_PGRAPH_BOFFSET2
,
110 NV04_PGRAPH_BOFFSET3
,
113 NV04_PGRAPH_BOFFSET4
,
116 NV04_PGRAPH_BOFFSET5
,
126 NV04_PGRAPH_BSWIZZLE2
,
127 NV04_PGRAPH_BSWIZZLE5
,
130 NV04_PGRAPH_PATT_COLOR0
,
131 NV04_PGRAPH_PATT_COLOR1
,
132 NV04_PGRAPH_PATT_COLORRAM
, /* 64 values from 0x400900 to 0x4009fc */
196 NV04_PGRAPH_PATTERN
, /* 2 values from 0x400808 to 0x40080c */
198 NV04_PGRAPH_PATTERN_SHAPE
,
199 NV03_PGRAPH_MONO_COLOR0
,
202 NV04_PGRAPH_BETA_AND
,
203 NV04_PGRAPH_BETA_PREMULT
,
219 NV10_PGRAPH_WINDOWCLIP_HORIZONTAL
, /* 8 values from 0x400f00-0x400f1c */
220 NV10_PGRAPH_WINDOWCLIP_VERTICAL
, /* 8 values from 0x400f20-0x400f3c */
237 NV10_PGRAPH_GLOBALSTATE0
,
238 NV10_PGRAPH_GLOBALSTATE1
,
239 NV04_PGRAPH_STORED_FMT
,
240 NV04_PGRAPH_SOURCE_COLOR
,
241 NV03_PGRAPH_ABS_X_RAM
, /* 32 values from 0x400400 to 0x40047c */
242 NV03_PGRAPH_ABS_Y_RAM
, /* 32 values from 0x400480 to 0x4004fc */
305 NV03_PGRAPH_ABS_UCLIP_XMIN
,
306 NV03_PGRAPH_ABS_UCLIP_XMAX
,
307 NV03_PGRAPH_ABS_UCLIP_YMIN
,
308 NV03_PGRAPH_ABS_UCLIP_YMAX
,
313 NV03_PGRAPH_ABS_UCLIPA_XMIN
,
314 NV03_PGRAPH_ABS_UCLIPA_XMAX
,
315 NV03_PGRAPH_ABS_UCLIPA_YMIN
,
316 NV03_PGRAPH_ABS_UCLIPA_YMAX
,
317 NV03_PGRAPH_ABS_ICLIP_XMAX
,
318 NV03_PGRAPH_ABS_ICLIP_YMAX
,
319 NV03_PGRAPH_XY_LOGIC_MISC0
,
320 NV03_PGRAPH_XY_LOGIC_MISC1
,
321 NV03_PGRAPH_XY_LOGIC_MISC2
,
322 NV03_PGRAPH_XY_LOGIC_MISC3
,
327 NV10_PGRAPH_COMBINER0_IN_ALPHA
,
328 NV10_PGRAPH_COMBINER1_IN_ALPHA
,
329 NV10_PGRAPH_COMBINER0_IN_RGB
,
330 NV10_PGRAPH_COMBINER1_IN_RGB
,
331 NV10_PGRAPH_COMBINER_COLOR0
,
332 NV10_PGRAPH_COMBINER_COLOR1
,
333 NV10_PGRAPH_COMBINER0_OUT_ALPHA
,
334 NV10_PGRAPH_COMBINER1_OUT_ALPHA
,
335 NV10_PGRAPH_COMBINER0_OUT_RGB
,
336 NV10_PGRAPH_COMBINER1_OUT_RGB
,
337 NV10_PGRAPH_COMBINER_FINAL0
,
338 NV10_PGRAPH_COMBINER_FINAL1
,
355 NV04_PGRAPH_PASSTHRU_0
,
356 NV04_PGRAPH_PASSTHRU_1
,
357 NV04_PGRAPH_PASSTHRU_2
,
358 NV10_PGRAPH_DIMX_TEXTURE
,
359 NV10_PGRAPH_WDIMX_TEXTURE
,
360 NV10_PGRAPH_DVD_COLORFMT
,
361 NV10_PGRAPH_SCALED_FORMAT
,
362 NV04_PGRAPH_MISC24_0
,
363 NV04_PGRAPH_MISC24_1
,
364 NV04_PGRAPH_MISC24_2
,
/*
 * nv17_graph_ctx_regs: additional context registers saved/restored on
 * chipsets >= 0x17 (see nv10_graph_load_context()/unload_context()).
 * NOTE(review): every entry of this initializer, and its closing brace,
 * was lost in this extraction — restore from the pristine file.
 */
371 static int nv17_graph_ctx_regs
[] = {
393 int nv10
[ARRAY_SIZE(nv10_graph_ctx_regs
)];
394 int nv17
[ARRAY_SIZE(nv17_graph_ctx_regs
)];
395 struct pipe_state pipe_state
;
396 uint32_t lma_window
[4];
/*
 * Read (PIPE_SAVE) or write (PIPE_RESTORE) ARRAY_SIZE(state) words of
 * 3D pipe state at the given PIPE_ADDRESS offset.  Macros (not
 * functions) so ARRAY_SIZE() is evaluated on the caller's real array.
 */
#define PIPE_SAVE(dev, state, addr)					\
	do {								\
		int __i;						\
		nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, addr);		\
		for (__i = 0; __i < ARRAY_SIZE(state); __i++)		\
			state[__i] = nv_rd32(dev, NV10_PGRAPH_PIPE_DATA); \
	} while (0)

#define PIPE_RESTORE(dev, state, addr)					\
	do {								\
		int __i;						\
		nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, addr);		\
		for (__i = 0; __i < ARRAY_SIZE(state); __i++)		\
			nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, state[__i]); \
	} while (0)
415 static void nv10_graph_save_pipe(struct nouveau_channel
*chan
)
417 struct drm_device
*dev
= chan
->dev
;
418 struct graph_state
*pgraph_ctx
= chan
->pgraph_ctx
;
419 struct pipe_state
*pipe
= &pgraph_ctx
->pipe_state
;
421 PIPE_SAVE(dev
, pipe
->pipe_0x4400
, 0x4400);
422 PIPE_SAVE(dev
, pipe
->pipe_0x0200
, 0x0200);
423 PIPE_SAVE(dev
, pipe
->pipe_0x6400
, 0x6400);
424 PIPE_SAVE(dev
, pipe
->pipe_0x6800
, 0x6800);
425 PIPE_SAVE(dev
, pipe
->pipe_0x6c00
, 0x6c00);
426 PIPE_SAVE(dev
, pipe
->pipe_0x7000
, 0x7000);
427 PIPE_SAVE(dev
, pipe
->pipe_0x7400
, 0x7400);
428 PIPE_SAVE(dev
, pipe
->pipe_0x7800
, 0x7800);
429 PIPE_SAVE(dev
, pipe
->pipe_0x0040
, 0x0040);
430 PIPE_SAVE(dev
, pipe
->pipe_0x0000
, 0x0000);
433 static void nv10_graph_load_pipe(struct nouveau_channel
*chan
)
435 struct drm_device
*dev
= chan
->dev
;
436 struct graph_state
*pgraph_ctx
= chan
->pgraph_ctx
;
437 struct pipe_state
*pipe
= &pgraph_ctx
->pipe_state
;
438 uint32_t xfmode0
, xfmode1
;
441 nouveau_wait_for_idle(dev
);
442 /* XXX check haiku comments */
443 xfmode0
= nv_rd32(dev
, NV10_PGRAPH_XFMODE0
);
444 xfmode1
= nv_rd32(dev
, NV10_PGRAPH_XFMODE1
);
445 nv_wr32(dev
, NV10_PGRAPH_XFMODE0
, 0x10000000);
446 nv_wr32(dev
, NV10_PGRAPH_XFMODE1
, 0x00000000);
447 nv_wr32(dev
, NV10_PGRAPH_PIPE_ADDRESS
, 0x000064c0);
448 for (i
= 0; i
< 4; i
++)
449 nv_wr32(dev
, NV10_PGRAPH_PIPE_DATA
, 0x3f800000);
450 for (i
= 0; i
< 4; i
++)
451 nv_wr32(dev
, NV10_PGRAPH_PIPE_DATA
, 0x00000000);
453 nv_wr32(dev
, NV10_PGRAPH_PIPE_ADDRESS
, 0x00006ab0);
454 for (i
= 0; i
< 3; i
++)
455 nv_wr32(dev
, NV10_PGRAPH_PIPE_DATA
, 0x3f800000);
457 nv_wr32(dev
, NV10_PGRAPH_PIPE_ADDRESS
, 0x00006a80);
458 for (i
= 0; i
< 3; i
++)
459 nv_wr32(dev
, NV10_PGRAPH_PIPE_DATA
, 0x00000000);
461 nv_wr32(dev
, NV10_PGRAPH_PIPE_ADDRESS
, 0x00000040);
462 nv_wr32(dev
, NV10_PGRAPH_PIPE_DATA
, 0x00000008);
465 PIPE_RESTORE(dev
, pipe
->pipe_0x0200
, 0x0200);
466 nouveau_wait_for_idle(dev
);
469 nv_wr32(dev
, NV10_PGRAPH_XFMODE0
, xfmode0
);
470 nv_wr32(dev
, NV10_PGRAPH_XFMODE1
, xfmode1
);
471 PIPE_RESTORE(dev
, pipe
->pipe_0x6400
, 0x6400);
472 PIPE_RESTORE(dev
, pipe
->pipe_0x6800
, 0x6800);
473 PIPE_RESTORE(dev
, pipe
->pipe_0x6c00
, 0x6c00);
474 PIPE_RESTORE(dev
, pipe
->pipe_0x7000
, 0x7000);
475 PIPE_RESTORE(dev
, pipe
->pipe_0x7400
, 0x7400);
476 PIPE_RESTORE(dev
, pipe
->pipe_0x7800
, 0x7800);
477 PIPE_RESTORE(dev
, pipe
->pipe_0x4400
, 0x4400);
478 PIPE_RESTORE(dev
, pipe
->pipe_0x0000
, 0x0000);
479 PIPE_RESTORE(dev
, pipe
->pipe_0x0040
, 0x0040);
480 nouveau_wait_for_idle(dev
);
483 static void nv10_graph_create_pipe(struct nouveau_channel
*chan
)
485 struct drm_device
*dev
= chan
->dev
;
486 struct graph_state
*pgraph_ctx
= chan
->pgraph_ctx
;
487 struct pipe_state
*fifo_pipe_state
= &pgraph_ctx
->pipe_state
;
488 uint32_t *fifo_pipe_state_addr
;
490 #define PIPE_INIT(addr) \
492 fifo_pipe_state_addr = fifo_pipe_state->pipe_##addr; \
494 #define PIPE_INIT_END(addr) \
496 uint32_t *__end_addr = fifo_pipe_state->pipe_##addr + \
497 ARRAY_SIZE(fifo_pipe_state->pipe_##addr); \
498 if (fifo_pipe_state_addr != __end_addr) \
499 NV_ERROR(dev, "incomplete pipe init for 0x%x : %p/%p\n", \
500 addr, fifo_pipe_state_addr, __end_addr); \
502 #define NV_WRITE_PIPE_INIT(value) *(fifo_pipe_state_addr++) = value
505 for (i
= 0; i
< 48; i
++)
506 NV_WRITE_PIPE_INIT(0x00000000);
507 PIPE_INIT_END(0x0200);
510 for (i
= 0; i
< 211; i
++)
511 NV_WRITE_PIPE_INIT(0x00000000);
512 NV_WRITE_PIPE_INIT(0x3f800000);
513 NV_WRITE_PIPE_INIT(0x40000000);
514 NV_WRITE_PIPE_INIT(0x40000000);
515 NV_WRITE_PIPE_INIT(0x40000000);
516 NV_WRITE_PIPE_INIT(0x40000000);
517 NV_WRITE_PIPE_INIT(0x00000000);
518 NV_WRITE_PIPE_INIT(0x00000000);
519 NV_WRITE_PIPE_INIT(0x3f800000);
520 NV_WRITE_PIPE_INIT(0x00000000);
521 NV_WRITE_PIPE_INIT(0x3f000000);
522 NV_WRITE_PIPE_INIT(0x3f000000);
523 NV_WRITE_PIPE_INIT(0x00000000);
524 NV_WRITE_PIPE_INIT(0x00000000);
525 NV_WRITE_PIPE_INIT(0x00000000);
526 NV_WRITE_PIPE_INIT(0x00000000);
527 NV_WRITE_PIPE_INIT(0x3f800000);
528 NV_WRITE_PIPE_INIT(0x00000000);
529 NV_WRITE_PIPE_INIT(0x00000000);
530 NV_WRITE_PIPE_INIT(0x00000000);
531 NV_WRITE_PIPE_INIT(0x00000000);
532 NV_WRITE_PIPE_INIT(0x00000000);
533 NV_WRITE_PIPE_INIT(0x3f800000);
534 NV_WRITE_PIPE_INIT(0x3f800000);
535 NV_WRITE_PIPE_INIT(0x3f800000);
536 NV_WRITE_PIPE_INIT(0x3f800000);
537 PIPE_INIT_END(0x6400);
540 for (i
= 0; i
< 162; i
++)
541 NV_WRITE_PIPE_INIT(0x00000000);
542 NV_WRITE_PIPE_INIT(0x3f800000);
543 for (i
= 0; i
< 25; i
++)
544 NV_WRITE_PIPE_INIT(0x00000000);
545 PIPE_INIT_END(0x6800);
548 NV_WRITE_PIPE_INIT(0x00000000);
549 NV_WRITE_PIPE_INIT(0x00000000);
550 NV_WRITE_PIPE_INIT(0x00000000);
551 NV_WRITE_PIPE_INIT(0x00000000);
552 NV_WRITE_PIPE_INIT(0xbf800000);
553 NV_WRITE_PIPE_INIT(0x00000000);
554 NV_WRITE_PIPE_INIT(0x00000000);
555 NV_WRITE_PIPE_INIT(0x00000000);
556 NV_WRITE_PIPE_INIT(0x00000000);
557 NV_WRITE_PIPE_INIT(0x00000000);
558 NV_WRITE_PIPE_INIT(0x00000000);
559 NV_WRITE_PIPE_INIT(0x00000000);
560 PIPE_INIT_END(0x6c00);
563 NV_WRITE_PIPE_INIT(0x00000000);
564 NV_WRITE_PIPE_INIT(0x00000000);
565 NV_WRITE_PIPE_INIT(0x00000000);
566 NV_WRITE_PIPE_INIT(0x00000000);
567 NV_WRITE_PIPE_INIT(0x00000000);
568 NV_WRITE_PIPE_INIT(0x00000000);
569 NV_WRITE_PIPE_INIT(0x00000000);
570 NV_WRITE_PIPE_INIT(0x00000000);
571 NV_WRITE_PIPE_INIT(0x00000000);
572 NV_WRITE_PIPE_INIT(0x00000000);
573 NV_WRITE_PIPE_INIT(0x00000000);
574 NV_WRITE_PIPE_INIT(0x00000000);
575 NV_WRITE_PIPE_INIT(0x7149f2ca);
576 NV_WRITE_PIPE_INIT(0x00000000);
577 NV_WRITE_PIPE_INIT(0x00000000);
578 NV_WRITE_PIPE_INIT(0x00000000);
579 NV_WRITE_PIPE_INIT(0x7149f2ca);
580 NV_WRITE_PIPE_INIT(0x00000000);
581 NV_WRITE_PIPE_INIT(0x00000000);
582 NV_WRITE_PIPE_INIT(0x00000000);
583 NV_WRITE_PIPE_INIT(0x7149f2ca);
584 NV_WRITE_PIPE_INIT(0x00000000);
585 NV_WRITE_PIPE_INIT(0x00000000);
586 NV_WRITE_PIPE_INIT(0x00000000);
587 NV_WRITE_PIPE_INIT(0x7149f2ca);
588 NV_WRITE_PIPE_INIT(0x00000000);
589 NV_WRITE_PIPE_INIT(0x00000000);
590 NV_WRITE_PIPE_INIT(0x00000000);
591 NV_WRITE_PIPE_INIT(0x7149f2ca);
592 NV_WRITE_PIPE_INIT(0x00000000);
593 NV_WRITE_PIPE_INIT(0x00000000);
594 NV_WRITE_PIPE_INIT(0x00000000);
595 NV_WRITE_PIPE_INIT(0x7149f2ca);
596 NV_WRITE_PIPE_INIT(0x00000000);
597 NV_WRITE_PIPE_INIT(0x00000000);
598 NV_WRITE_PIPE_INIT(0x00000000);
599 NV_WRITE_PIPE_INIT(0x7149f2ca);
600 NV_WRITE_PIPE_INIT(0x00000000);
601 NV_WRITE_PIPE_INIT(0x00000000);
602 NV_WRITE_PIPE_INIT(0x00000000);
603 NV_WRITE_PIPE_INIT(0x7149f2ca);
604 for (i
= 0; i
< 35; i
++)
605 NV_WRITE_PIPE_INIT(0x00000000);
606 PIPE_INIT_END(0x7000);
609 for (i
= 0; i
< 48; i
++)
610 NV_WRITE_PIPE_INIT(0x00000000);
611 PIPE_INIT_END(0x7400);
614 for (i
= 0; i
< 48; i
++)
615 NV_WRITE_PIPE_INIT(0x00000000);
616 PIPE_INIT_END(0x7800);
619 for (i
= 0; i
< 32; i
++)
620 NV_WRITE_PIPE_INIT(0x00000000);
621 PIPE_INIT_END(0x4400);
624 for (i
= 0; i
< 16; i
++)
625 NV_WRITE_PIPE_INIT(0x00000000);
626 PIPE_INIT_END(0x0000);
629 for (i
= 0; i
< 4; i
++)
630 NV_WRITE_PIPE_INIT(0x00000000);
631 PIPE_INIT_END(0x0040);
635 #undef NV_WRITE_PIPE_INIT
638 static int nv10_graph_ctx_regs_find_offset(struct drm_device
*dev
, int reg
)
641 for (i
= 0; i
< ARRAY_SIZE(nv10_graph_ctx_regs
); i
++) {
642 if (nv10_graph_ctx_regs
[i
] == reg
)
645 NV_ERROR(dev
, "unknow offset nv10_ctx_regs %d\n", reg
);
649 static int nv17_graph_ctx_regs_find_offset(struct drm_device
*dev
, int reg
)
652 for (i
= 0; i
< ARRAY_SIZE(nv17_graph_ctx_regs
); i
++) {
653 if (nv17_graph_ctx_regs
[i
] == reg
)
656 NV_ERROR(dev
, "unknow offset nv17_ctx_regs %d\n", reg
);
660 static void nv10_graph_load_dma_vtxbuf(struct nouveau_channel
*chan
,
663 struct drm_device
*dev
= chan
->dev
;
664 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
665 struct nouveau_pgraph_engine
*pgraph
= &dev_priv
->engine
.graph
;
666 uint32_t st2
, st2_dl
, st2_dh
, fifo_ptr
, fifo
[0x60/4];
667 uint32_t ctx_user
, ctx_switch
[5];
670 /* NV10TCL_DMA_VTXBUF (method 0x18c) modifies hidden state
671 * that cannot be restored via MMIO. Do it through the FIFO
675 /* Look for a celsius object */
676 for (i
= 0; i
< 8; i
++) {
677 int class = nv_rd32(dev
, NV10_PGRAPH_CTX_CACHE(i
, 0)) & 0xfff;
679 if (class == 0x56 || class == 0x96 || class == 0x99) {
685 if (subchan
< 0 || !inst
)
688 /* Save the current ctx object */
689 ctx_user
= nv_rd32(dev
, NV10_PGRAPH_CTX_USER
);
690 for (i
= 0; i
< 5; i
++)
691 ctx_switch
[i
] = nv_rd32(dev
, NV10_PGRAPH_CTX_SWITCH(i
));
693 /* Save the FIFO state */
694 st2
= nv_rd32(dev
, NV10_PGRAPH_FFINTFC_ST2
);
695 st2_dl
= nv_rd32(dev
, NV10_PGRAPH_FFINTFC_ST2_DL
);
696 st2_dh
= nv_rd32(dev
, NV10_PGRAPH_FFINTFC_ST2_DH
);
697 fifo_ptr
= nv_rd32(dev
, NV10_PGRAPH_FFINTFC_FIFO_PTR
);
699 for (i
= 0; i
< ARRAY_SIZE(fifo
); i
++)
700 fifo
[i
] = nv_rd32(dev
, 0x4007a0 + 4 * i
);
702 /* Switch to the celsius subchannel */
703 for (i
= 0; i
< 5; i
++)
704 nv_wr32(dev
, NV10_PGRAPH_CTX_SWITCH(i
),
705 nv_rd32(dev
, NV10_PGRAPH_CTX_CACHE(subchan
, i
)));
706 nv_mask(dev
, NV10_PGRAPH_CTX_USER
, 0xe000, subchan
<< 13);
708 /* Inject NV10TCL_DMA_VTXBUF */
709 nv_wr32(dev
, NV10_PGRAPH_FFINTFC_FIFO_PTR
, 0);
710 nv_wr32(dev
, NV10_PGRAPH_FFINTFC_ST2
,
711 0x2c000000 | chan
->id
<< 20 | subchan
<< 16 | 0x18c);
712 nv_wr32(dev
, NV10_PGRAPH_FFINTFC_ST2_DL
, inst
);
713 nv_mask(dev
, NV10_PGRAPH_CTX_CONTROL
, 0, 0x10000);
714 pgraph
->fifo_access(dev
, true);
715 pgraph
->fifo_access(dev
, false);
717 /* Restore the FIFO state */
718 for (i
= 0; i
< ARRAY_SIZE(fifo
); i
++)
719 nv_wr32(dev
, 0x4007a0 + 4 * i
, fifo
[i
]);
721 nv_wr32(dev
, NV10_PGRAPH_FFINTFC_FIFO_PTR
, fifo_ptr
);
722 nv_wr32(dev
, NV10_PGRAPH_FFINTFC_ST2
, st2
);
723 nv_wr32(dev
, NV10_PGRAPH_FFINTFC_ST2_DL
, st2_dl
);
724 nv_wr32(dev
, NV10_PGRAPH_FFINTFC_ST2_DH
, st2_dh
);
726 /* Restore the current ctx object */
727 for (i
= 0; i
< 5; i
++)
728 nv_wr32(dev
, NV10_PGRAPH_CTX_SWITCH(i
), ctx_switch
[i
]);
729 nv_wr32(dev
, NV10_PGRAPH_CTX_USER
, ctx_user
);
732 int nv10_graph_load_context(struct nouveau_channel
*chan
)
734 struct drm_device
*dev
= chan
->dev
;
735 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
736 struct graph_state
*pgraph_ctx
= chan
->pgraph_ctx
;
740 for (i
= 0; i
< ARRAY_SIZE(nv10_graph_ctx_regs
); i
++)
741 nv_wr32(dev
, nv10_graph_ctx_regs
[i
], pgraph_ctx
->nv10
[i
]);
742 if (dev_priv
->chipset
>= 0x17) {
743 for (i
= 0; i
< ARRAY_SIZE(nv17_graph_ctx_regs
); i
++)
744 nv_wr32(dev
, nv17_graph_ctx_regs
[i
],
745 pgraph_ctx
->nv17
[i
]);
748 nv10_graph_load_pipe(chan
);
749 nv10_graph_load_dma_vtxbuf(chan
, (nv_rd32(dev
, NV10_PGRAPH_GLOBALSTATE1
)
752 nv_wr32(dev
, NV10_PGRAPH_CTX_CONTROL
, 0x10010100);
753 tmp
= nv_rd32(dev
, NV10_PGRAPH_CTX_USER
);
754 nv_wr32(dev
, NV10_PGRAPH_CTX_USER
, (tmp
& 0xffffff) | chan
->id
<< 24);
755 tmp
= nv_rd32(dev
, NV10_PGRAPH_FFINTFC_ST2
);
756 nv_wr32(dev
, NV10_PGRAPH_FFINTFC_ST2
, tmp
& 0xcfffffff);
761 nv10_graph_unload_context(struct drm_device
*dev
)
763 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
764 struct nouveau_pgraph_engine
*pgraph
= &dev_priv
->engine
.graph
;
765 struct nouveau_fifo_engine
*pfifo
= &dev_priv
->engine
.fifo
;
766 struct nouveau_channel
*chan
;
767 struct graph_state
*ctx
;
771 chan
= pgraph
->channel(dev
);
774 ctx
= chan
->pgraph_ctx
;
776 for (i
= 0; i
< ARRAY_SIZE(nv10_graph_ctx_regs
); i
++)
777 ctx
->nv10
[i
] = nv_rd32(dev
, nv10_graph_ctx_regs
[i
]);
779 if (dev_priv
->chipset
>= 0x17) {
780 for (i
= 0; i
< ARRAY_SIZE(nv17_graph_ctx_regs
); i
++)
781 ctx
->nv17
[i
] = nv_rd32(dev
, nv17_graph_ctx_regs
[i
]);
784 nv10_graph_save_pipe(chan
);
786 nv_wr32(dev
, NV10_PGRAPH_CTX_CONTROL
, 0x10000000);
787 tmp
= nv_rd32(dev
, NV10_PGRAPH_CTX_USER
) & 0x00ffffff;
788 tmp
|= (pfifo
->channels
- 1) << 24;
789 nv_wr32(dev
, NV10_PGRAPH_CTX_USER
, tmp
);
794 nv10_graph_context_switch(struct drm_device
*dev
)
796 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
797 struct nouveau_channel
*chan
= NULL
;
800 nouveau_wait_for_idle(dev
);
802 /* If previous context is valid, we need to save it */
803 nv10_graph_unload_context(dev
);
805 /* Load context for next channel */
806 chid
= (nv_rd32(dev
, NV04_PGRAPH_TRAPPED_ADDR
) >> 20) & 0x1f;
807 chan
= dev_priv
->channels
.ptr
[chid
];
808 if (chan
&& chan
->pgraph_ctx
)
809 nv10_graph_load_context(chan
);
/*
 * Store @val into the software context slot for register @reg.
 * NOTE(review): the "> 0" guard (reconstructed — confirm against the
 * pristine file) makes index 0 unwritable and silently drops unknown
 * registers (find_offset returns -1).
 */
#define NV_WRITE_CTX(reg, val) do { \
	int offset = nv10_graph_ctx_regs_find_offset(dev, reg); \
	if (offset > 0) \
		pgraph_ctx->nv10[offset] = val; \
	} while (0)

#define NV17_WRITE_CTX(reg, val) do { \
	int offset = nv17_graph_ctx_regs_find_offset(dev, reg); \
	if (offset > 0) \
		pgraph_ctx->nv17[offset] = val; \
	} while (0)
824 struct nouveau_channel
*
825 nv10_graph_channel(struct drm_device
*dev
)
827 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
828 int chid
= dev_priv
->engine
.fifo
.channels
;
830 if (nv_rd32(dev
, NV10_PGRAPH_CTX_CONTROL
) & 0x00010000)
831 chid
= nv_rd32(dev
, NV10_PGRAPH_CTX_USER
) >> 24;
833 if (chid
>= dev_priv
->engine
.fifo
.channels
)
836 return dev_priv
->channels
.ptr
[chid
];
839 int nv10_graph_create_context(struct nouveau_channel
*chan
)
841 struct drm_device
*dev
= chan
->dev
;
842 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
843 struct graph_state
*pgraph_ctx
;
845 NV_DEBUG(dev
, "nv10_graph_context_create %d\n", chan
->id
);
847 chan
->pgraph_ctx
= pgraph_ctx
= kzalloc(sizeof(*pgraph_ctx
),
849 if (pgraph_ctx
== NULL
)
853 NV_WRITE_CTX(0x00400e88, 0x08000000);
854 NV_WRITE_CTX(0x00400e9c, 0x4b7fffff);
855 NV_WRITE_CTX(NV03_PGRAPH_XY_LOGIC_MISC0
, 0x0001ffff);
856 NV_WRITE_CTX(0x00400e10, 0x00001000);
857 NV_WRITE_CTX(0x00400e14, 0x00001000);
858 NV_WRITE_CTX(0x00400e30, 0x00080008);
859 NV_WRITE_CTX(0x00400e34, 0x00080008);
860 if (dev_priv
->chipset
>= 0x17) {
861 /* is it really needed ??? */
862 NV17_WRITE_CTX(NV10_PGRAPH_DEBUG_4
,
863 nv_rd32(dev
, NV10_PGRAPH_DEBUG_4
));
864 NV17_WRITE_CTX(0x004006b0, nv_rd32(dev
, 0x004006b0));
865 NV17_WRITE_CTX(0x00400eac, 0x0fff0000);
866 NV17_WRITE_CTX(0x00400eb0, 0x0fff0000);
867 NV17_WRITE_CTX(0x00400ec0, 0x00000080);
868 NV17_WRITE_CTX(0x00400ed0, 0x00000080);
870 NV_WRITE_CTX(NV10_PGRAPH_CTX_USER
, chan
->id
<< 24);
872 nv10_graph_create_pipe(chan
);
876 void nv10_graph_destroy_context(struct nouveau_channel
*chan
)
878 struct drm_device
*dev
= chan
->dev
;
879 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
880 struct nouveau_pgraph_engine
*pgraph
= &dev_priv
->engine
.graph
;
881 struct graph_state
*pgraph_ctx
= chan
->pgraph_ctx
;
884 spin_lock_irqsave(&dev_priv
->context_switch_lock
, flags
);
885 pgraph
->fifo_access(dev
, false);
887 /* Unload the context if it's the currently active one */
888 if (pgraph
->channel(dev
) == chan
)
889 pgraph
->unload_context(dev
);
891 /* Free the context resources */
893 chan
->pgraph_ctx
= NULL
;
895 pgraph
->fifo_access(dev
, true);
896 spin_unlock_irqrestore(&dev_priv
->context_switch_lock
, flags
);
900 nv10_graph_set_tile_region(struct drm_device
*dev
, int i
)
902 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
903 struct nouveau_tile_reg
*tile
= &dev_priv
->tile
.reg
[i
];
905 nv_wr32(dev
, NV10_PGRAPH_TLIMIT(i
), tile
->limit
);
906 nv_wr32(dev
, NV10_PGRAPH_TSIZE(i
), tile
->pitch
);
907 nv_wr32(dev
, NV10_PGRAPH_TILE(i
), tile
->addr
);
910 int nv10_graph_init(struct drm_device
*dev
)
912 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
916 nv_wr32(dev
, NV03_PMC_ENABLE
, nv_rd32(dev
, NV03_PMC_ENABLE
) &
917 ~NV_PMC_ENABLE_PGRAPH
);
918 nv_wr32(dev
, NV03_PMC_ENABLE
, nv_rd32(dev
, NV03_PMC_ENABLE
) |
919 NV_PMC_ENABLE_PGRAPH
);
921 ret
= nv10_graph_register(dev
);
925 nouveau_irq_register(dev
, 12, nv10_graph_isr
);
926 nv_wr32(dev
, NV03_PGRAPH_INTR
, 0xFFFFFFFF);
927 nv_wr32(dev
, NV03_PGRAPH_INTR_EN
, 0xFFFFFFFF);
929 nv_wr32(dev
, NV04_PGRAPH_DEBUG_0
, 0xFFFFFFFF);
930 nv_wr32(dev
, NV04_PGRAPH_DEBUG_0
, 0x00000000);
931 nv_wr32(dev
, NV04_PGRAPH_DEBUG_1
, 0x00118700);
932 /* nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x24E00810); */ /* 0x25f92ad9 */
933 nv_wr32(dev
, NV04_PGRAPH_DEBUG_2
, 0x25f92ad9);
934 nv_wr32(dev
, NV04_PGRAPH_DEBUG_3
, 0x55DE0830 |
937 if (dev_priv
->chipset
>= 0x17) {
938 nv_wr32(dev
, NV10_PGRAPH_DEBUG_4
, 0x1f000000);
939 nv_wr32(dev
, 0x400a10, 0x3ff3fb6);
940 nv_wr32(dev
, 0x400838, 0x2f8684);
941 nv_wr32(dev
, 0x40083c, 0x115f3f);
942 nv_wr32(dev
, 0x004006b0, 0x40000020);
944 nv_wr32(dev
, NV10_PGRAPH_DEBUG_4
, 0x00000000);
946 /* Turn all the tiling regions off. */
947 for (i
= 0; i
< NV10_PFB_TILE__SIZE
; i
++)
948 nv10_graph_set_tile_region(dev
, i
);
950 nv_wr32(dev
, NV10_PGRAPH_CTX_SWITCH(0), 0x00000000);
951 nv_wr32(dev
, NV10_PGRAPH_CTX_SWITCH(1), 0x00000000);
952 nv_wr32(dev
, NV10_PGRAPH_CTX_SWITCH(2), 0x00000000);
953 nv_wr32(dev
, NV10_PGRAPH_CTX_SWITCH(3), 0x00000000);
954 nv_wr32(dev
, NV10_PGRAPH_CTX_SWITCH(4), 0x00000000);
955 nv_wr32(dev
, NV10_PGRAPH_STATE
, 0xFFFFFFFF);
957 tmp
= nv_rd32(dev
, NV10_PGRAPH_CTX_USER
) & 0x00ffffff;
958 tmp
|= (dev_priv
->engine
.fifo
.channels
- 1) << 24;
959 nv_wr32(dev
, NV10_PGRAPH_CTX_USER
, tmp
);
960 nv_wr32(dev
, NV10_PGRAPH_CTX_CONTROL
, 0x10000100);
961 nv_wr32(dev
, NV10_PGRAPH_FFINTFC_ST2
, 0x08000000);
966 void nv10_graph_takedown(struct drm_device
*dev
)
968 nv_wr32(dev
, NV03_PGRAPH_INTR_EN
, 0x00000000);
969 nouveau_irq_unregister(dev
, 12);
973 nv17_graph_mthd_lma_window(struct nouveau_channel
*chan
,
974 u32
class, u32 mthd
, u32 data
)
976 struct drm_device
*dev
= chan
->dev
;
977 struct graph_state
*ctx
= chan
->pgraph_ctx
;
978 struct pipe_state
*pipe
= &ctx
->pipe_state
;
979 uint32_t pipe_0x0040
[1], pipe_0x64c0
[8], pipe_0x6a80
[3], pipe_0x6ab0
[3];
980 uint32_t xfmode0
, xfmode1
;
983 ctx
->lma_window
[(mthd
- 0x1638) / 4] = data
;
988 nouveau_wait_for_idle(dev
);
990 PIPE_SAVE(dev
, pipe_0x0040
, 0x0040);
991 PIPE_SAVE(dev
, pipe
->pipe_0x0200
, 0x0200);
993 PIPE_RESTORE(dev
, ctx
->lma_window
, 0x6790);
995 nouveau_wait_for_idle(dev
);
997 xfmode0
= nv_rd32(dev
, NV10_PGRAPH_XFMODE0
);
998 xfmode1
= nv_rd32(dev
, NV10_PGRAPH_XFMODE1
);
1000 PIPE_SAVE(dev
, pipe
->pipe_0x4400
, 0x4400);
1001 PIPE_SAVE(dev
, pipe_0x64c0
, 0x64c0);
1002 PIPE_SAVE(dev
, pipe_0x6ab0
, 0x6ab0);
1003 PIPE_SAVE(dev
, pipe_0x6a80
, 0x6a80);
1005 nouveau_wait_for_idle(dev
);
1007 nv_wr32(dev
, NV10_PGRAPH_XFMODE0
, 0x10000000);
1008 nv_wr32(dev
, NV10_PGRAPH_XFMODE1
, 0x00000000);
1009 nv_wr32(dev
, NV10_PGRAPH_PIPE_ADDRESS
, 0x000064c0);
1010 for (i
= 0; i
< 4; i
++)
1011 nv_wr32(dev
, NV10_PGRAPH_PIPE_DATA
, 0x3f800000);
1012 for (i
= 0; i
< 4; i
++)
1013 nv_wr32(dev
, NV10_PGRAPH_PIPE_DATA
, 0x00000000);
1015 nv_wr32(dev
, NV10_PGRAPH_PIPE_ADDRESS
, 0x00006ab0);
1016 for (i
= 0; i
< 3; i
++)
1017 nv_wr32(dev
, NV10_PGRAPH_PIPE_DATA
, 0x3f800000);
1019 nv_wr32(dev
, NV10_PGRAPH_PIPE_ADDRESS
, 0x00006a80);
1020 for (i
= 0; i
< 3; i
++)
1021 nv_wr32(dev
, NV10_PGRAPH_PIPE_DATA
, 0x00000000);
1023 nv_wr32(dev
, NV10_PGRAPH_PIPE_ADDRESS
, 0x00000040);
1024 nv_wr32(dev
, NV10_PGRAPH_PIPE_DATA
, 0x00000008);
1026 PIPE_RESTORE(dev
, pipe
->pipe_0x0200
, 0x0200);
1028 nouveau_wait_for_idle(dev
);
1030 PIPE_RESTORE(dev
, pipe_0x0040
, 0x0040);
1032 nv_wr32(dev
, NV10_PGRAPH_XFMODE0
, xfmode0
);
1033 nv_wr32(dev
, NV10_PGRAPH_XFMODE1
, xfmode1
);
1035 PIPE_RESTORE(dev
, pipe_0x64c0
, 0x64c0);
1036 PIPE_RESTORE(dev
, pipe_0x6ab0
, 0x6ab0);
1037 PIPE_RESTORE(dev
, pipe_0x6a80
, 0x6a80);
1038 PIPE_RESTORE(dev
, pipe
->pipe_0x4400
, 0x4400);
1040 nv_wr32(dev
, NV10_PGRAPH_PIPE_ADDRESS
, 0x000000c0);
1041 nv_wr32(dev
, NV10_PGRAPH_PIPE_DATA
, 0x00000000);
1043 nouveau_wait_for_idle(dev
);
1049 nv17_graph_mthd_lma_enable(struct nouveau_channel
*chan
,
1050 u32
class, u32 mthd
, u32 data
)
1052 struct drm_device
*dev
= chan
->dev
;
1054 nouveau_wait_for_idle(dev
);
1056 nv_wr32(dev
, NV10_PGRAPH_DEBUG_4
,
1057 nv_rd32(dev
, NV10_PGRAPH_DEBUG_4
) | 0x1 << 8);
1058 nv_wr32(dev
, 0x004006b0,
1059 nv_rd32(dev
, 0x004006b0) | 0x8 << 24);
1065 nv10_graph_register(struct drm_device
*dev
)
1067 struct drm_nouveau_private
*dev_priv
= dev
->dev_private
;
1069 if (dev_priv
->engine
.graph
.registered
)
1072 NVOBJ_CLASS(dev
, 0x506e, SW
); /* nvsw */
1073 NVOBJ_CLASS(dev
, 0x0030, GR
); /* null */
1074 NVOBJ_CLASS(dev
, 0x0039, GR
); /* m2mf */
1075 NVOBJ_CLASS(dev
, 0x004a, GR
); /* gdirect */
1076 NVOBJ_CLASS(dev
, 0x005f, GR
); /* imageblit */
1077 NVOBJ_CLASS(dev
, 0x009f, GR
); /* imageblit (nv12) */
1078 NVOBJ_CLASS(dev
, 0x008a, GR
); /* ifc */
1079 NVOBJ_CLASS(dev
, 0x0089, GR
); /* sifm */
1080 NVOBJ_CLASS(dev
, 0x0062, GR
); /* surf2d */
1081 NVOBJ_CLASS(dev
, 0x0043, GR
); /* rop */
1082 NVOBJ_CLASS(dev
, 0x0012, GR
); /* beta1 */
1083 NVOBJ_CLASS(dev
, 0x0072, GR
); /* beta4 */
1084 NVOBJ_CLASS(dev
, 0x0019, GR
); /* cliprect */
1085 NVOBJ_CLASS(dev
, 0x0044, GR
); /* pattern */
1086 NVOBJ_CLASS(dev
, 0x0052, GR
); /* swzsurf */
1087 NVOBJ_CLASS(dev
, 0x0093, GR
); /* surf3d */
1088 NVOBJ_CLASS(dev
, 0x0094, GR
); /* tex_tri */
1089 NVOBJ_CLASS(dev
, 0x0095, GR
); /* multitex_tri */
1092 if (dev_priv
->chipset
<= 0x10) {
1093 NVOBJ_CLASS(dev
, 0x0056, GR
);
1095 if (dev_priv
->chipset
< 0x17 || dev_priv
->chipset
== 0x1a) {
1096 NVOBJ_CLASS(dev
, 0x0096, GR
);
1098 NVOBJ_CLASS(dev
, 0x0099, GR
);
1099 NVOBJ_MTHD (dev
, 0x0099, 0x1638, nv17_graph_mthd_lma_window
);
1100 NVOBJ_MTHD (dev
, 0x0099, 0x163c, nv17_graph_mthd_lma_window
);
1101 NVOBJ_MTHD (dev
, 0x0099, 0x1640, nv17_graph_mthd_lma_window
);
1102 NVOBJ_MTHD (dev
, 0x0099, 0x1644, nv17_graph_mthd_lma_window
);
1103 NVOBJ_MTHD (dev
, 0x0099, 0x1658, nv17_graph_mthd_lma_enable
);
1107 NVOBJ_CLASS(dev
, 0x506e, SW
);
1108 NVOBJ_MTHD (dev
, 0x506e, 0x0500, nv04_graph_mthd_page_flip
);
1110 dev_priv
->engine
.graph
.registered
= true;
1114 struct nouveau_bitfield nv10_graph_intr
[] = {
1115 { NV_PGRAPH_INTR_NOTIFY
, "NOTIFY" },
1116 { NV_PGRAPH_INTR_ERROR
, "ERROR" },
1120 struct nouveau_bitfield nv10_graph_nstatus
[] =
1122 { NV10_PGRAPH_NSTATUS_STATE_IN_USE
, "STATE_IN_USE" },
1123 { NV10_PGRAPH_NSTATUS_INVALID_STATE
, "INVALID_STATE" },
1124 { NV10_PGRAPH_NSTATUS_BAD_ARGUMENT
, "BAD_ARGUMENT" },
1125 { NV10_PGRAPH_NSTATUS_PROTECTION_FAULT
, "PROTECTION_FAULT" },
1130 nv10_graph_isr(struct drm_device
*dev
)
1134 while ((stat
= nv_rd32(dev
, NV03_PGRAPH_INTR
))) {
1135 u32 nsource
= nv_rd32(dev
, NV03_PGRAPH_NSOURCE
);
1136 u32 nstatus
= nv_rd32(dev
, NV03_PGRAPH_NSTATUS
);
1137 u32 addr
= nv_rd32(dev
, NV04_PGRAPH_TRAPPED_ADDR
);
1138 u32 chid
= (addr
& 0x01f00000) >> 20;
1139 u32 subc
= (addr
& 0x00070000) >> 16;
1140 u32 mthd
= (addr
& 0x00001ffc);
1141 u32 data
= nv_rd32(dev
, NV04_PGRAPH_TRAPPED_DATA
);
1142 u32
class = nv_rd32(dev
, 0x400160 + subc
* 4) & 0xfff;
1145 if (stat
& NV_PGRAPH_INTR_ERROR
) {
1146 if (nsource
& NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD
) {
1147 if (!nouveau_gpuobj_mthd_call2(dev
, chid
, class, mthd
, data
))
1148 show
&= ~NV_PGRAPH_INTR_ERROR
;
1152 if (stat
& NV_PGRAPH_INTR_CONTEXT_SWITCH
) {
1153 nv_wr32(dev
, NV03_PGRAPH_INTR
, NV_PGRAPH_INTR_CONTEXT_SWITCH
);
1154 stat
&= ~NV_PGRAPH_INTR_CONTEXT_SWITCH
;
1155 show
&= ~NV_PGRAPH_INTR_CONTEXT_SWITCH
;
1156 nv10_graph_context_switch(dev
);
1159 nv_wr32(dev
, NV03_PGRAPH_INTR
, stat
);
1160 nv_wr32(dev
, NV04_PGRAPH_FIFO
, 0x00000001);
1162 if (show
&& nouveau_ratelimit()) {
1163 NV_INFO(dev
, "PGRAPH -");
1164 nouveau_bitfield_print(nv10_graph_intr
, show
);
1165 printk(" nsource:");
1166 nouveau_bitfield_print(nv04_graph_nsource
, nsource
);
1167 printk(" nstatus:");
1168 nouveau_bitfield_print(nv10_graph_nstatus
, nstatus
);
1170 NV_INFO(dev
, "PGRAPH - ch %d/%d class 0x%04x "
1171 "mthd 0x%04x data 0x%08x\n",
1172 chid
, subc
, class, mthd
, data
);