drivers/gpu/drm/nouveau/nv10_graph.c
/*
 * Copyright 2007 Matthieu CASTET <castet.matthieu@free.fr>
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include "drmP.h"
#include "drm.h"
#include "nouveau_drm.h"
#include "nouveau_drv.h"
#include "nouveau_util.h"

struct nv10_graph_engine {
	struct nouveau_exec_engine base;
};

struct pipe_state {
	uint32_t pipe_0x0000[0x040/4];
	uint32_t pipe_0x0040[0x010/4];
	uint32_t pipe_0x0200[0x0c0/4];
	uint32_t pipe_0x4400[0x080/4];
	uint32_t pipe_0x6400[0x3b0/4];
	uint32_t pipe_0x6800[0x2f0/4];
	uint32_t pipe_0x6c00[0x030/4];
	uint32_t pipe_0x7000[0x130/4];
	uint32_t pipe_0x7400[0x0c0/4];
	uint32_t pipe_0x7800[0x0c0/4];
};

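/* PGRAPH MMIO registers that make up a channel's graph context.  These are
 * read out by nv10_graph_unload_context() and written back by
 * nv10_graph_load_context() when channels are switched.
 */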
static int nv10_graph_ctx_regs[] = {
	NV10_PGRAPH_CTX_SWITCH(0),
	NV10_PGRAPH_CTX_SWITCH(1),
	NV10_PGRAPH_CTX_SWITCH(2),
	NV10_PGRAPH_CTX_SWITCH(3),
	NV10_PGRAPH_CTX_SWITCH(4),
	NV10_PGRAPH_CTX_CACHE(0, 0),
	NV10_PGRAPH_CTX_CACHE(0, 1),
	NV10_PGRAPH_CTX_CACHE(0, 2),
	NV10_PGRAPH_CTX_CACHE(0, 3),
	NV10_PGRAPH_CTX_CACHE(0, 4),
	NV10_PGRAPH_CTX_CACHE(1, 0),
	NV10_PGRAPH_CTX_CACHE(1, 1),
	NV10_PGRAPH_CTX_CACHE(1, 2),
	NV10_PGRAPH_CTX_CACHE(1, 3),
	NV10_PGRAPH_CTX_CACHE(1, 4),
	NV10_PGRAPH_CTX_CACHE(2, 0),
	NV10_PGRAPH_CTX_CACHE(2, 1),
	NV10_PGRAPH_CTX_CACHE(2, 2),
	NV10_PGRAPH_CTX_CACHE(2, 3),
	NV10_PGRAPH_CTX_CACHE(2, 4),
	NV10_PGRAPH_CTX_CACHE(3, 0),
	NV10_PGRAPH_CTX_CACHE(3, 1),
	NV10_PGRAPH_CTX_CACHE(3, 2),
	NV10_PGRAPH_CTX_CACHE(3, 3),
	NV10_PGRAPH_CTX_CACHE(3, 4),
	NV10_PGRAPH_CTX_CACHE(4, 0),
	NV10_PGRAPH_CTX_CACHE(4, 1),
	NV10_PGRAPH_CTX_CACHE(4, 2),
	NV10_PGRAPH_CTX_CACHE(4, 3),
	NV10_PGRAPH_CTX_CACHE(4, 4),
	NV10_PGRAPH_CTX_CACHE(5, 0),
	NV10_PGRAPH_CTX_CACHE(5, 1),
	NV10_PGRAPH_CTX_CACHE(5, 2),
	NV10_PGRAPH_CTX_CACHE(5, 3),
	NV10_PGRAPH_CTX_CACHE(5, 4),
	NV10_PGRAPH_CTX_CACHE(6, 0),
	NV10_PGRAPH_CTX_CACHE(6, 1),
	NV10_PGRAPH_CTX_CACHE(6, 2),
	NV10_PGRAPH_CTX_CACHE(6, 3),
	NV10_PGRAPH_CTX_CACHE(6, 4),
	NV10_PGRAPH_CTX_CACHE(7, 0),
	NV10_PGRAPH_CTX_CACHE(7, 1),
	NV10_PGRAPH_CTX_CACHE(7, 2),
	NV10_PGRAPH_CTX_CACHE(7, 3),
	NV10_PGRAPH_CTX_CACHE(7, 4),
	NV10_PGRAPH_CTX_USER,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV10_PGRAPH_DMA_PITCH,
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV10_PGRAPH_SURFACE,
	NV10_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV10_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	NV04_PGRAPH_PATT_COLORRAM, /* 64 values from 0x400900 to 0x4009fc */
	0x00400904,
	0x00400908,
	0x0040090c,
	0x00400910,
	0x00400914,
	0x00400918,
	0x0040091c,
	0x00400920,
	0x00400924,
	0x00400928,
	0x0040092c,
	0x00400930,
	0x00400934,
	0x00400938,
	0x0040093c,
	0x00400940,
	0x00400944,
	0x00400948,
	0x0040094c,
	0x00400950,
	0x00400954,
	0x00400958,
	0x0040095c,
	0x00400960,
	0x00400964,
	0x00400968,
	0x0040096c,
	0x00400970,
	0x00400974,
	0x00400978,
	0x0040097c,
	0x00400980,
	0x00400984,
	0x00400988,
	0x0040098c,
	0x00400990,
	0x00400994,
	0x00400998,
	0x0040099c,
	0x004009a0,
	0x004009a4,
	0x004009a8,
	0x004009ac,
	0x004009b0,
	0x004009b4,
	0x004009b8,
	0x004009bc,
	0x004009c0,
	0x004009c4,
	0x004009c8,
	0x004009cc,
	0x004009d0,
	0x004009d4,
	0x004009d8,
	0x004009dc,
	0x004009e0,
	0x004009e4,
	0x004009e8,
	0x004009ec,
	0x004009f0,
	0x004009f4,
	0x004009f8,
	0x004009fc,
	NV04_PGRAPH_PATTERN, /* 2 values from 0x400808 to 0x40080c */
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	NV03_PGRAPH_MONO_COLOR0,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	0x00400e70,
	0x00400e74,
	0x00400e78,
	0x00400e7c,
	0x00400e80,
	0x00400e84,
	0x00400e88,
	0x00400e8c,
	0x00400ea0,
	0x00400ea4,
	0x00400ea8,
	0x00400e90,
	0x00400e94,
	0x00400e98,
	0x00400e9c,
	NV10_PGRAPH_WINDOWCLIP_HORIZONTAL, /* 8 values from 0x400f00-0x400f1c */
	NV10_PGRAPH_WINDOWCLIP_VERTICAL,   /* 8 values from 0x400f20-0x400f3c */
	0x00400f04,
	0x00400f24,
	0x00400f08,
	0x00400f28,
	0x00400f0c,
	0x00400f2c,
	0x00400f10,
	0x00400f30,
	0x00400f14,
	0x00400f34,
	0x00400f18,
	0x00400f38,
	0x00400f1c,
	0x00400f3c,
	NV10_PGRAPH_XFMODE0,
	NV10_PGRAPH_XFMODE1,
	NV10_PGRAPH_GLOBALSTATE0,
	NV10_PGRAPH_GLOBALSTATE1,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	NV03_PGRAPH_ABS_X_RAM, /* 32 values from 0x400400 to 0x40047c */
	NV03_PGRAPH_ABS_Y_RAM, /* 32 values from 0x400480 to 0x4004fc */
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	NV03_PGRAPH_ABS_UCLIP_XMIN,
	NV03_PGRAPH_ABS_UCLIP_XMAX,
	NV03_PGRAPH_ABS_UCLIP_YMIN,
	NV03_PGRAPH_ABS_UCLIP_YMAX,
	0x00400550,
	0x00400558,
	0x00400554,
	0x0040055c,
	NV03_PGRAPH_ABS_UCLIPA_XMIN,
	NV03_PGRAPH_ABS_UCLIPA_XMAX,
	NV03_PGRAPH_ABS_UCLIPA_YMIN,
	NV03_PGRAPH_ABS_UCLIPA_YMAX,
	NV03_PGRAPH_ABS_ICLIP_XMAX,
	NV03_PGRAPH_ABS_ICLIP_YMAX,
	NV03_PGRAPH_XY_LOGIC_MISC0,
	NV03_PGRAPH_XY_LOGIC_MISC1,
	NV03_PGRAPH_XY_LOGIC_MISC2,
	NV03_PGRAPH_XY_LOGIC_MISC3,
	NV03_PGRAPH_CLIPX_0,
	NV03_PGRAPH_CLIPX_1,
	NV03_PGRAPH_CLIPY_0,
	NV03_PGRAPH_CLIPY_1,
	NV10_PGRAPH_COMBINER0_IN_ALPHA,
	NV10_PGRAPH_COMBINER1_IN_ALPHA,
	NV10_PGRAPH_COMBINER0_IN_RGB,
	NV10_PGRAPH_COMBINER1_IN_RGB,
	NV10_PGRAPH_COMBINER_COLOR0,
	NV10_PGRAPH_COMBINER_COLOR1,
	NV10_PGRAPH_COMBINER0_OUT_ALPHA,
	NV10_PGRAPH_COMBINER1_OUT_ALPHA,
	NV10_PGRAPH_COMBINER0_OUT_RGB,
	NV10_PGRAPH_COMBINER1_OUT_RGB,
	NV10_PGRAPH_COMBINER_FINAL0,
	NV10_PGRAPH_COMBINER_FINAL1,
	0x00400e00,
	0x00400e04,
	0x00400e08,
	0x00400e0c,
	0x00400e10,
	0x00400e14,
	0x00400e18,
	0x00400e1c,
	0x00400e20,
	0x00400e24,
	0x00400e28,
	0x00400e2c,
	0x00400e30,
	0x00400e34,
	0x00400e38,
	0x00400e3c,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV10_PGRAPH_DIMX_TEXTURE,
	NV10_PGRAPH_WDIMX_TEXTURE,
	NV10_PGRAPH_DVD_COLORFMT,
	NV10_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	NV03_PGRAPH_X_MISC,
	NV03_PGRAPH_Y_MISC,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
};

static int nv17_graph_ctx_regs[] = {
	NV10_PGRAPH_DEBUG_4,
	0x004006b0,
	0x00400eac,
	0x00400eb0,
	0x00400eb4,
	0x00400eb8,
	0x00400ebc,
	0x00400ec0,
	0x00400ec4,
	0x00400ec8,
	0x00400ecc,
	0x00400ed0,
	0x00400ed4,
	0x00400ed8,
	0x00400edc,
	0x00400ee0,
	0x00400a00,
	0x00400a04,
};

struct graph_state {
	int nv10[ARRAY_SIZE(nv10_graph_ctx_regs)];
	int nv17[ARRAY_SIZE(nv17_graph_ctx_regs)];
	struct pipe_state pipe_state;
	uint32_t lma_window[4];
};

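/* Transfer a block of pipe state through the PIPE_ADDRESS/PIPE_DATA
 * indirect register pair: ARRAY_SIZE(state) consecutive words are read
 * (PIPE_SAVE) or written (PIPE_RESTORE) starting at the given pipe address.
 */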
#define PIPE_SAVE(dev, state, addr) \
	do { \
		int __i; \
		nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, addr); \
		for (__i = 0; __i < ARRAY_SIZE(state); __i++) \
			state[__i] = nv_rd32(dev, NV10_PGRAPH_PIPE_DATA); \
	} while (0)

#define PIPE_RESTORE(dev, state, addr) \
	do { \
		int __i; \
		nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, addr); \
		for (__i = 0; __i < ARRAY_SIZE(state); __i++) \
			nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, state[__i]); \
	} while (0)

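/* Copy the hardware pipe blocks into the channel's software pipe_state. */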
static void nv10_graph_save_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *pipe = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;

	PIPE_SAVE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_SAVE(dev, pipe->pipe_0x0200, 0x0200);
	PIPE_SAVE(dev, pipe->pipe_0x6400, 0x6400);
	PIPE_SAVE(dev, pipe->pipe_0x6800, 0x6800);
	PIPE_SAVE(dev, pipe->pipe_0x6c00, 0x6c00);
	PIPE_SAVE(dev, pipe->pipe_0x7000, 0x7000);
	PIPE_SAVE(dev, pipe->pipe_0x7400, 0x7400);
	PIPE_SAVE(dev, pipe->pipe_0x7800, 0x7800);
	PIPE_SAVE(dev, pipe->pipe_0x0040, 0x0040);
	PIPE_SAVE(dev, pipe->pipe_0x0000, 0x0000);
}

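/* Write a channel's saved pipe_state back into the hardware.  XFMODE is
 * forced to a known value and a few pipe locations are primed before the
 * 0x0200 block is restored, then XFMODE and the remaining blocks are loaded.
 */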
static void nv10_graph_load_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *pipe = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;
	uint32_t xfmode0, xfmode1;
	int i;

	nouveau_wait_for_idle(dev);
	/* XXX check haiku comments */
	xfmode0 = nv_rd32(dev, NV10_PGRAPH_XFMODE0);
	xfmode1 = nv_rd32(dev, NV10_PGRAPH_XFMODE1);
	nv_wr32(dev, NV10_PGRAPH_XFMODE0, 0x10000000);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000008);

	PIPE_RESTORE(dev, pipe->pipe_0x0200, 0x0200);
	nouveau_wait_for_idle(dev);

	/* restore XFMODE */
	nv_wr32(dev, NV10_PGRAPH_XFMODE0, xfmode0);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, xfmode1);
	PIPE_RESTORE(dev, pipe->pipe_0x6400, 0x6400);
	PIPE_RESTORE(dev, pipe->pipe_0x6800, 0x6800);
	PIPE_RESTORE(dev, pipe->pipe_0x6c00, 0x6c00);
	PIPE_RESTORE(dev, pipe->pipe_0x7000, 0x7000);
	PIPE_RESTORE(dev, pipe->pipe_0x7400, 0x7400);
	PIPE_RESTORE(dev, pipe->pipe_0x7800, 0x7800);
	PIPE_RESTORE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_RESTORE(dev, pipe->pipe_0x0000, 0x0000);
	PIPE_RESTORE(dev, pipe->pipe_0x0040, 0x0040);
	nouveau_wait_for_idle(dev);
}

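/* Fill a new channel's software pipe_state with default values so that the
 * first context load starts from a consistent state.
 */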
static void nv10_graph_create_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *fifo_pipe_state = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;
	uint32_t *fifo_pipe_state_addr;
	int i;
#define PIPE_INIT(addr) \
	do { \
		fifo_pipe_state_addr = fifo_pipe_state->pipe_##addr; \
	} while (0)
#define PIPE_INIT_END(addr) \
	do { \
		uint32_t *__end_addr = fifo_pipe_state->pipe_##addr + \
				ARRAY_SIZE(fifo_pipe_state->pipe_##addr); \
		if (fifo_pipe_state_addr != __end_addr) \
			NV_ERROR(dev, "incomplete pipe init for 0x%x :  %p/%p\n", \
				addr, fifo_pipe_state_addr, __end_addr); \
	} while (0)
#define NV_WRITE_PIPE_INIT(value) *(fifo_pipe_state_addr++) = value

	PIPE_INIT(0x0200);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0200);

	PIPE_INIT(0x6400);
	for (i = 0; i < 211; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f000000);
	NV_WRITE_PIPE_INIT(0x3f000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	PIPE_INIT_END(0x6400);

	PIPE_INIT(0x6800);
	for (i = 0; i < 162; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	for (i = 0; i < 25; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x6800);

	PIPE_INIT(0x6c00);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0xbf800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x6c00);

	PIPE_INIT(0x7000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	for (i = 0; i < 35; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7000);

	PIPE_INIT(0x7400);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7400);

	PIPE_INIT(0x7800);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7800);

	PIPE_INIT(0x4400);
	for (i = 0; i < 32; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x4400);

	PIPE_INIT(0x0000);
	for (i = 0; i < 16; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0000);

	PIPE_INIT(0x0040);
	for (i = 0; i < 4; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0040);

#undef PIPE_INIT
#undef PIPE_INIT_END
#undef NV_WRITE_PIPE_INIT
}

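/* Map a PGRAPH register address to its index in the ctx_regs tables above;
 * returns -1 if the register is not part of the saved context.
 */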
static int nv10_graph_ctx_regs_find_offset(struct drm_device *dev, int reg)
{
	int i;
	for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++) {
		if (nv10_graph_ctx_regs[i] == reg)
			return i;
	}
	NV_ERROR(dev, "unknown offset nv10_ctx_regs %d\n", reg);
	return -1;
}

static int nv17_graph_ctx_regs_find_offset(struct drm_device *dev, int reg)
{
	int i;
	for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++) {
		if (nv17_graph_ctx_regs[i] == reg)
			return i;
	}
	NV_ERROR(dev, "unknown offset nv17_ctx_regs %d\n", reg);
	return -1;
}

static void nv10_graph_load_dma_vtxbuf(struct nouveau_channel *chan,
				       uint32_t inst)
{
	struct drm_device *dev = chan->dev;
	uint32_t st2, st2_dl, st2_dh, fifo_ptr, fifo[0x60/4];
	uint32_t ctx_user, ctx_switch[5];
	int i, subchan = -1;

	/* NV10TCL_DMA_VTXBUF (method 0x18c) modifies hidden state
	 * that cannot be restored via MMIO. Do it through the FIFO
	 * instead.
	 */

	/* Look for a celsius object */
	for (i = 0; i < 8; i++) {
		int class = nv_rd32(dev, NV10_PGRAPH_CTX_CACHE(i, 0)) & 0xfff;

		if (class == 0x56 || class == 0x96 || class == 0x99) {
			subchan = i;
			break;
		}
	}

	if (subchan < 0 || !inst)
		return;

	/* Save the current ctx object */
	ctx_user = nv_rd32(dev, NV10_PGRAPH_CTX_USER);
	for (i = 0; i < 5; i++)
		ctx_switch[i] = nv_rd32(dev, NV10_PGRAPH_CTX_SWITCH(i));

	/* Save the FIFO state */
	st2 = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2);
	st2_dl = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2_DL);
	st2_dh = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2_DH);
	fifo_ptr = nv_rd32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR);

	for (i = 0; i < ARRAY_SIZE(fifo); i++)
		fifo[i] = nv_rd32(dev, 0x4007a0 + 4 * i);

	/* Switch to the celsius subchannel */
	for (i = 0; i < 5; i++)
		nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(i),
			nv_rd32(dev, NV10_PGRAPH_CTX_CACHE(subchan, i)));
	nv_mask(dev, NV10_PGRAPH_CTX_USER, 0xe000, subchan << 13);

	/* Inject NV10TCL_DMA_VTXBUF */
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR, 0);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2,
		0x2c000000 | chan->id << 20 | subchan << 16 | 0x18c);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DL, inst);
	nv_mask(dev, NV10_PGRAPH_CTX_CONTROL, 0, 0x10000);
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);

	/* Restore the FIFO state */
	for (i = 0; i < ARRAY_SIZE(fifo); i++)
		nv_wr32(dev, 0x4007a0 + 4 * i, fifo[i]);

	nv_wr32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR, fifo_ptr);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, st2);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DL, st2_dl);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DH, st2_dh);

	/* Restore the current ctx object */
	for (i = 0; i < 5; i++)
		nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(i), ctx_switch[i]);
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, ctx_user);
}

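/* Load a channel's software context (ctx regs, pipe state and the DMA
 * vtxbuf object) into PGRAPH and mark the channel as the current owner.
 */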
static int
nv10_graph_load_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	uint32_t tmp;
	int i;

	for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++)
		nv_wr32(dev, nv10_graph_ctx_regs[i], pgraph_ctx->nv10[i]);
	if (dev_priv->chipset >= 0x17) {
		for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++)
			nv_wr32(dev, nv17_graph_ctx_regs[i],
				pgraph_ctx->nv17[i]);
	}

	nv10_graph_load_pipe(chan);
	nv10_graph_load_dma_vtxbuf(chan, (nv_rd32(dev, NV10_PGRAPH_GLOBALSTATE1)
					  & 0xffff));

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10010100);
	tmp = nv_rd32(dev, NV10_PGRAPH_CTX_USER);
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, (tmp & 0xffffff) | chan->id << 24);
	tmp = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, tmp & 0xcfffffff);
	return 0;
}

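/* Save the current PGRAPH state into the owning channel's software context
 * and point CTX_USER at an invalid channel so PGRAPH is left unowned.
 */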
static int
nv10_graph_unload_context(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_fifo_engine *pfifo = &dev_priv->engine.fifo;
	struct nouveau_channel *chan;
	struct graph_state *ctx;
	uint32_t tmp;
	int i;

	chan = nv10_graph_channel(dev);
	if (!chan)
		return 0;
	ctx = chan->engctx[NVOBJ_ENGINE_GR];

	for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++)
		ctx->nv10[i] = nv_rd32(dev, nv10_graph_ctx_regs[i]);

	if (dev_priv->chipset >= 0x17) {
		for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++)
			ctx->nv17[i] = nv_rd32(dev, nv17_graph_ctx_regs[i]);
	}

	nv10_graph_save_pipe(chan);

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000000);
	tmp = nv_rd32(dev, NV10_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= (pfifo->channels - 1) << 24;
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, tmp);
	return 0;
}

static void
nv10_graph_context_switch(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *chan = NULL;
	int chid;

	nouveau_wait_for_idle(dev);

	/* If previous context is valid, we need to save it */
	nv10_graph_unload_context(dev);

	/* Load context for next channel */
	chid = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 20) & 0x1f;
	chan = dev_priv->channels.ptr[chid];
	if (chan && chan->engctx[NVOBJ_ENGINE_GR])
		nv10_graph_load_context(chan);
}

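/* Write an initial value into the software context image (not the hardware);
 * used while constructing a new channel context below.
 */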
#define NV_WRITE_CTX(reg, val) do { \
	int offset = nv10_graph_ctx_regs_find_offset(dev, reg); \
	if (offset > 0) \
		pgraph_ctx->nv10[offset] = val; \
	} while (0)

#define NV17_WRITE_CTX(reg, val) do { \
	int offset = nv17_graph_ctx_regs_find_offset(dev, reg); \
	if (offset > 0) \
		pgraph_ctx->nv17[offset] = val; \
	} while (0)

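/* Return the channel that currently owns PGRAPH, or NULL if none does. */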
struct nouveau_channel *
nv10_graph_channel(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int chid = dev_priv->engine.fifo.channels;

	if (nv_rd32(dev, NV10_PGRAPH_CTX_CONTROL) & 0x00010000)
		chid = nv_rd32(dev, NV10_PGRAPH_CTX_USER) >> 24;

	if (chid >= dev_priv->engine.fifo.channels)
		return NULL;

	return dev_priv->channels.ptr[chid];
}

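/* Allocate and initialise the software graph context for a new channel. */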
static int
nv10_graph_context_new(struct nouveau_channel *chan, int engine)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx;

	NV_DEBUG(dev, "nv10_graph_context_create %d\n", chan->id);

	pgraph_ctx = kzalloc(sizeof(*pgraph_ctx), GFP_KERNEL);
	if (pgraph_ctx == NULL)
		return -ENOMEM;
	chan->engctx[engine] = pgraph_ctx;

	NV_WRITE_CTX(0x00400e88, 0x08000000);
	NV_WRITE_CTX(0x00400e9c, 0x4b7fffff);
	NV_WRITE_CTX(NV03_PGRAPH_XY_LOGIC_MISC0, 0x0001ffff);
	NV_WRITE_CTX(0x00400e10, 0x00001000);
	NV_WRITE_CTX(0x00400e14, 0x00001000);
	NV_WRITE_CTX(0x00400e30, 0x00080008);
	NV_WRITE_CTX(0x00400e34, 0x00080008);
	if (dev_priv->chipset >= 0x17) {
		/* is this really needed? */
		NV17_WRITE_CTX(NV10_PGRAPH_DEBUG_4,
			       nv_rd32(dev, NV10_PGRAPH_DEBUG_4));
		NV17_WRITE_CTX(0x004006b0, nv_rd32(dev, 0x004006b0));
		NV17_WRITE_CTX(0x00400eac, 0x0fff0000);
		NV17_WRITE_CTX(0x00400eb0, 0x0fff0000);
		NV17_WRITE_CTX(0x00400ec0, 0x00000080);
		NV17_WRITE_CTX(0x00400ed0, 0x00000080);
	}
	NV_WRITE_CTX(NV10_PGRAPH_CTX_USER, chan->id << 24);

	nv10_graph_create_pipe(chan);
	return 0;
}

static void
nv10_graph_context_del(struct nouveau_channel *chan, int engine)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx = chan->engctx[engine];
	unsigned long flags;

	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);

	/* Unload the context if it's the currently active one */
	if (nv10_graph_channel(dev) == chan)
		nv10_graph_unload_context(dev);

	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);

	/* Free the context resources */
	chan->engctx[engine] = NULL;
	kfree(pgraph_ctx);
}

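/* Mirror one framebuffer tiling region into the PGRAPH tile registers. */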
static void
nv10_graph_set_tile_region(struct drm_device *dev, int i)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_tile_reg *tile = &dev_priv->tile.reg[i];

	nv_wr32(dev, NV10_PGRAPH_TLIMIT(i), tile->limit);
	nv_wr32(dev, NV10_PGRAPH_TSIZE(i), tile->pitch);
	nv_wr32(dev, NV10_PGRAPH_TILE(i), tile->addr);
}

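/* Bring up PGRAPH: reset the unit through PMC_ENABLE, set debug defaults,
 * program the tiling regions and leave no channel as the current owner.
 */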
static int
nv10_graph_init(struct drm_device *dev, int engine)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	u32 tmp;
	int i;

	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
			~NV_PMC_ENABLE_PGRAPH);
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
			 NV_PMC_ENABLE_PGRAPH);

	nv_wr32(dev, NV03_PGRAPH_INTR   , 0xFFFFFFFF);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0xFFFFFFFF);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x00000000);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x00118700);
	/* nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x24E00810); */ /* 0x25f92ad9 */
	nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x25f92ad9);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0x55DE0830 |
		      (1<<29) |
		      (1<<31));
	if (dev_priv->chipset >= 0x17) {
		nv_wr32(dev, NV10_PGRAPH_DEBUG_4, 0x1f000000);
		nv_wr32(dev, 0x400a10, 0x3ff3fb6);
		nv_wr32(dev, 0x400838, 0x2f8684);
		nv_wr32(dev, 0x40083c, 0x115f3f);
		nv_wr32(dev, 0x004006b0, 0x40000020);
	} else
		nv_wr32(dev, NV10_PGRAPH_DEBUG_4, 0x00000000);

	/* Turn all the tiling regions off. */
	for (i = 0; i < NV10_PFB_TILE__SIZE; i++)
		nv10_graph_set_tile_region(dev, i);

	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(0), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(1), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(2), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(3), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(4), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_STATE, 0xFFFFFFFF);

	tmp  = nv_rd32(dev, NV10_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, tmp);
	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000100);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, 0x08000000);

	return 0;
}

static int
nv10_graph_fini(struct drm_device *dev, int engine, bool suspend)
{
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
	if (!nv_wait(dev, NV04_PGRAPH_STATUS, ~0, 0) && suspend) {
		nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
		return -EBUSY;
	}
	nv10_graph_unload_context(dev);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
	return 0;
}

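/* Software handling of the celsius LMA window methods (0x1638-0x1644): the
 * four parameters are collected in lma_window[] and pushed into the pipe at
 * 0x6790 once the last one arrives.
 */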
static int
nv17_graph_mthd_lma_window(struct nouveau_channel *chan,
			   u32 class, u32 mthd, u32 data)
{
	struct graph_state *ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct drm_device *dev = chan->dev;
	struct pipe_state *pipe = &ctx->pipe_state;
	uint32_t pipe_0x0040[1], pipe_0x64c0[8], pipe_0x6a80[3], pipe_0x6ab0[3];
	uint32_t xfmode0, xfmode1;
	int i;

	ctx->lma_window[(mthd - 0x1638) / 4] = data;

	if (mthd != 0x1644)
		return 0;

	nouveau_wait_for_idle(dev);

	PIPE_SAVE(dev, pipe_0x0040, 0x0040);
	PIPE_SAVE(dev, pipe->pipe_0x0200, 0x0200);

	PIPE_RESTORE(dev, ctx->lma_window, 0x6790);

	nouveau_wait_for_idle(dev);

	xfmode0 = nv_rd32(dev, NV10_PGRAPH_XFMODE0);
	xfmode1 = nv_rd32(dev, NV10_PGRAPH_XFMODE1);

	PIPE_SAVE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_SAVE(dev, pipe_0x64c0, 0x64c0);
	PIPE_SAVE(dev, pipe_0x6ab0, 0x6ab0);
	PIPE_SAVE(dev, pipe_0x6a80, 0x6a80);

	nouveau_wait_for_idle(dev);

	nv_wr32(dev, NV10_PGRAPH_XFMODE0, 0x10000000);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000008);

	PIPE_RESTORE(dev, pipe->pipe_0x0200, 0x0200);

	nouveau_wait_for_idle(dev);

	PIPE_RESTORE(dev, pipe_0x0040, 0x0040);

	nv_wr32(dev, NV10_PGRAPH_XFMODE0, xfmode0);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, xfmode1);

	PIPE_RESTORE(dev, pipe_0x64c0, 0x64c0);
	PIPE_RESTORE(dev, pipe_0x6ab0, 0x6ab0);
	PIPE_RESTORE(dev, pipe_0x6a80, 0x6a80);
	PIPE_RESTORE(dev, pipe->pipe_0x4400, 0x4400);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000000c0);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nouveau_wait_for_idle(dev);

	return 0;
}

static int
nv17_graph_mthd_lma_enable(struct nouveau_channel *chan,
			   u32 class, u32 mthd, u32 data)
{
	struct drm_device *dev = chan->dev;

	nouveau_wait_for_idle(dev);

	nv_wr32(dev, NV10_PGRAPH_DEBUG_4,
		nv_rd32(dev, NV10_PGRAPH_DEBUG_4) | 0x1 << 8);
	nv_wr32(dev, 0x004006b0,
		nv_rd32(dev, 0x004006b0) | 0x8 << 24);

	return 0;
}

struct nouveau_bitfield nv10_graph_intr[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{ NV_PGRAPH_INTR_ERROR,  "ERROR"  },
	{}
};

struct nouveau_bitfield nv10_graph_nstatus[] = {
	{ NV10_PGRAPH_NSTATUS_STATE_IN_USE,       "STATE_IN_USE" },
	{ NV10_PGRAPH_NSTATUS_INVALID_STATE,      "INVALID_STATE" },
	{ NV10_PGRAPH_NSTATUS_BAD_ARGUMENT,       "BAD_ARGUMENT" },
	{ NV10_PGRAPH_NSTATUS_PROTECTION_FAULT,   "PROTECTION_FAULT" },
	{}
};

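/* PGRAPH interrupt handler: services ILLEGAL_MTHD errors through the
 * software method hooks, performs context switches, and logs anything
 * left over (rate-limited).
 */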
static void
nv10_graph_isr(struct drm_device *dev)
{
	u32 stat;

	while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
		u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
		u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
		u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
		u32 chid = (addr & 0x01f00000) >> 20;
		u32 subc = (addr & 0x00070000) >> 16;
		u32 mthd = (addr & 0x00001ffc);
		u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
		u32 class = nv_rd32(dev, 0x400160 + subc * 4) & 0xfff;
		u32 show = stat;

		if (stat & NV_PGRAPH_INTR_ERROR) {
			if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
				if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
					show &= ~NV_PGRAPH_INTR_ERROR;
			}
		}

		if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
			nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
			stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			nv10_graph_context_switch(dev);
		}

		nv_wr32(dev, NV03_PGRAPH_INTR, stat);
		nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);

		if (show && nouveau_ratelimit()) {
			NV_INFO(dev, "PGRAPH -");
			nouveau_bitfield_print(nv10_graph_intr, show);
			printk(" nsource:");
			nouveau_bitfield_print(nv04_graph_nsource, nsource);
			printk(" nstatus:");
			nouveau_bitfield_print(nv10_graph_nstatus, nstatus);
			printk("\n");
			NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
				     "mthd 0x%04x data 0x%08x\n",
				chid, subc, class, mthd, data);
		}
	}
}

static void
nv10_graph_destroy(struct drm_device *dev, int engine)
{
	struct nv10_graph_engine *pgraph = nv_engine(dev, engine);

	nouveau_irq_unregister(dev, 12);
	kfree(pgraph);
}

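/* Create the nv10/nv17 PGRAPH engine: hook up the exec_engine methods,
 * register the IRQ handler and the object classes this engine accepts.
 */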
int
nv10_graph_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nv10_graph_engine *pgraph;

	pgraph = kzalloc(sizeof(*pgraph), GFP_KERNEL);
	if (!pgraph)
		return -ENOMEM;

	pgraph->base.destroy = nv10_graph_destroy;
	pgraph->base.init = nv10_graph_init;
	pgraph->base.fini = nv10_graph_fini;
	pgraph->base.context_new = nv10_graph_context_new;
	pgraph->base.context_del = nv10_graph_context_del;
	pgraph->base.object_new = nv04_graph_object_new;
	pgraph->base.set_tile_region = nv10_graph_set_tile_region;

	NVOBJ_ENGINE_ADD(dev, GR, &pgraph->base);
	nouveau_irq_register(dev, 12, nv10_graph_isr);

	/* nvsw */
	NVOBJ_CLASS(dev, 0x506e, SW);
	NVOBJ_MTHD (dev, 0x506e, 0x0500, nv04_graph_mthd_page_flip);

	NVOBJ_CLASS(dev, 0x0030, GR); /* null */
	NVOBJ_CLASS(dev, 0x0039, GR); /* m2mf */
	NVOBJ_CLASS(dev, 0x004a, GR); /* gdirect */
	NVOBJ_CLASS(dev, 0x005f, GR); /* imageblit */
	NVOBJ_CLASS(dev, 0x009f, GR); /* imageblit (nv12) */
	NVOBJ_CLASS(dev, 0x008a, GR); /* ifc */
	NVOBJ_CLASS(dev, 0x0089, GR); /* sifm */
	NVOBJ_CLASS(dev, 0x0062, GR); /* surf2d */
	NVOBJ_CLASS(dev, 0x0043, GR); /* rop */
	NVOBJ_CLASS(dev, 0x0012, GR); /* beta1 */
	NVOBJ_CLASS(dev, 0x0072, GR); /* beta4 */
	NVOBJ_CLASS(dev, 0x0019, GR); /* cliprect */
	NVOBJ_CLASS(dev, 0x0044, GR); /* pattern */
	NVOBJ_CLASS(dev, 0x0052, GR); /* swzsurf */
	NVOBJ_CLASS(dev, 0x0093, GR); /* surf3d */
	NVOBJ_CLASS(dev, 0x0094, GR); /* tex_tri */
	NVOBJ_CLASS(dev, 0x0095, GR); /* multitex_tri */

	/* celsius */
	if (dev_priv->chipset <= 0x10) {
		NVOBJ_CLASS(dev, 0x0056, GR);
	} else
	if (dev_priv->chipset < 0x17 || dev_priv->chipset == 0x1a) {
		NVOBJ_CLASS(dev, 0x0096, GR);
	} else {
		NVOBJ_CLASS(dev, 0x0099, GR);
		NVOBJ_MTHD (dev, 0x0099, 0x1638, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x163c, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1640, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1644, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1658, nv17_graph_mthd_lma_enable);
	}

	return 0;
}