/*
 * Copyright (C) 2007 Ben Skeggs.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include "drmP.h"
#include "drm.h"
#include "nouveau_drv.h"

MODULE_FIRMWARE("nouveau/nv50.ctxprog");
MODULE_FIRMWARE("nouveau/nv50.ctxvals");
MODULE_FIRMWARE("nouveau/nv84.ctxprog");
MODULE_FIRMWARE("nouveau/nv84.ctxvals");
MODULE_FIRMWARE("nouveau/nv86.ctxprog");
MODULE_FIRMWARE("nouveau/nv86.ctxvals");
MODULE_FIRMWARE("nouveau/nv92.ctxprog");
MODULE_FIRMWARE("nouveau/nv92.ctxvals");
MODULE_FIRMWARE("nouveau/nv94.ctxprog");
MODULE_FIRMWARE("nouveau/nv94.ctxvals");
MODULE_FIRMWARE("nouveau/nv96.ctxprog");
MODULE_FIRMWARE("nouveau/nv96.ctxvals");
MODULE_FIRMWARE("nouveau/nv98.ctxprog");
MODULE_FIRMWARE("nouveau/nv98.ctxvals");
MODULE_FIRMWARE("nouveau/nva0.ctxprog");
MODULE_FIRMWARE("nouveau/nva0.ctxvals");
MODULE_FIRMWARE("nouveau/nva5.ctxprog");
MODULE_FIRMWARE("nouveau/nva5.ctxvals");
MODULE_FIRMWARE("nouveau/nva8.ctxprog");
MODULE_FIRMWARE("nouveau/nva8.ctxvals");
MODULE_FIRMWARE("nouveau/nvaa.ctxprog");
MODULE_FIRMWARE("nouveau/nvaa.ctxvals");
MODULE_FIRMWARE("nouveau/nvac.ctxprog");
MODULE_FIRMWARE("nouveau/nvac.ctxvals");

#define IS_G80 ((dev_priv->chipset & 0xf0) == 0x50)

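/* Reset PGRAPH by clearing and then re-setting its enable bit (together with
 * bit 21, which the driver toggles alongside it) in the PMC master enable
 * register. */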
static void
nv50_graph_init_reset(struct drm_device *dev)
{
        uint32_t pmc_e = NV_PMC_ENABLE_PGRAPH | (1 << 21);

        nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) & ~pmc_e);
        nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |  pmc_e);
}

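/* Acknowledge any pending PGRAPH interrupt/trap status and unmask all PGRAPH
 * interrupt sources. */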
static void
nv50_graph_init_intr(struct drm_device *dev)
{
        nv_wr32(dev, NV03_PGRAPH_INTR, 0xffffffff);
        nv_wr32(dev, 0x400138, 0xffffffff);
        nv_wr32(dev, NV40_PGRAPH_INTR_EN, 0xffffffff);
}

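/* Static register setup for the PGRAPH sub-units; the 0xc0000000 writes
 * appear to enable error reporting in the individual units, and 0x400108
 * clears any pending trap status. */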
static void
nv50_graph_init_regs__nv(struct drm_device *dev)
{
        nv_wr32(dev, 0x400804, 0xc0000000);
        nv_wr32(dev, 0x406800, 0xc0000000);
        nv_wr32(dev, 0x400c04, 0xc0000000);
        nv_wr32(dev, 0x401800, 0xc0000000);
        nv_wr32(dev, 0x405018, 0xc0000000);
        nv_wr32(dev, 0x402000, 0xc0000000);

        nv_wr32(dev, 0x400108, 0xffffffff);

        nv_wr32(dev, 0x400824, 0x00004000);
        nv_wr32(dev, 0x400500, 0x00010001);
}

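/* Enable hardware context switching (NV04_PGRAPH_DEBUG_3 bit 2). */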
static void
nv50_graph_init_regs(struct drm_device *dev)
{
        nv_wr32(dev, NV04_PGRAPH_DEBUG_3,
                     (1 << 2) /* HW_CONTEXT_SWITCH_ENABLED */);
        nv_wr32(dev, 0x402ca8, 0x800);
}

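/* Bring up the context control unit: load the external ctxprog/ctxvals
 * firmware and reset the current-context pointers.  If no ctxprog could be
 * loaded, acceleration is flagged as blocked. */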
static int
nv50_graph_init_ctxctl(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;

        nouveau_grctx_prog_load(dev);
        if (!dev_priv->engine.graph.ctxprog)
                dev_priv->engine.graph.accel_blocked = true;

        nv_wr32(dev, 0x400320, 4);
        nv_wr32(dev, NV40_PGRAPH_CTXCTL_CUR, 0);
        nv_wr32(dev, NV20_PGRAPH_CHANNEL_CTX_POINTER, 0);
        return 0;
}

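/* Top-level PGRAPH initialisation: reset the engine, program its registers,
 * enable interrupts, then set up context switching. */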
int
nv50_graph_init(struct drm_device *dev)
{
        int ret;

        nv50_graph_init_reset(dev);
        nv50_graph_init_regs__nv(dev);
        nv50_graph_init_regs(dev);
        nv50_graph_init_intr(dev);

        ret = nv50_graph_init_ctxctl(dev);
        if (ret)
                return ret;

        return 0;
}

void
nv50_graph_takedown(struct drm_device *dev)
{
        nouveau_grctx_fini(dev);
}

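/* Enable or disable PGRAPH's access to the FIFO via register 0x400500. */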
void
nv50_graph_fifo_access(struct drm_device *dev, bool enabled)
{
        const uint32_t mask = 0x00010001;

        if (enabled)
                nv_wr32(dev, 0x400500, nv_rd32(dev, 0x400500) | mask);
        else
                nv_wr32(dev, 0x400500, nv_rd32(dev, 0x400500) & ~mask);
}

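/* Return the channel whose context is currently resident on PGRAPH, found by
 * matching the CTXCTL_CUR instance against each channel's RAMIN instance;
 * NULL if no context is loaded. */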
struct nouveau_channel *
nv50_graph_channel(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        uint32_t inst;
        int i;

        inst = nv_rd32(dev, NV50_PGRAPH_CTXCTL_CUR);
        if (!(inst & NV50_PGRAPH_CTXCTL_CUR_LOADED))
                return NULL;
        inst = (inst & NV50_PGRAPH_CTXCTL_CUR_INSTANCE) << 12;

        for (i = 0; i < dev_priv->engine.fifo.channels; i++) {
                struct nouveau_channel *chan = dev_priv->fifos[i];

                if (chan && chan->ramin && chan->ramin->instance == inst)
                        return chan;
        }

        return NULL;
}

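/* Allocate the per-channel graphics context: a grctx buffer plus a small
 * header in the channel's RAMIN (at 0x200 on G80, 0x20 on later chips)
 * describing it, then pre-fill the buffer from the ctxvals firmware image. */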
int
nv50_graph_create_context(struct nouveau_channel *chan)
{
        struct drm_device *dev = chan->dev;
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_gpuobj *ramin = chan->ramin->gpuobj;
        struct nouveau_gpuobj *ctx;
        uint32_t grctx_size = 0x70000;
        int hdr, ret;

        NV_DEBUG(dev, "ch%d\n", chan->id);

        ret = nouveau_gpuobj_new_ref(dev, chan, NULL, 0, grctx_size, 0x1000,
                                     NVOBJ_FLAG_ZERO_ALLOC |
                                     NVOBJ_FLAG_ZERO_FREE, &chan->ramin_grctx);
        if (ret)
                return ret;
        ctx = chan->ramin_grctx->gpuobj;

        hdr = IS_G80 ? 0x200 : 0x20;
        dev_priv->engine.instmem.prepare_access(dev, true);
        nv_wo32(dev, ramin, (hdr + 0x00)/4, 0x00190002);
        nv_wo32(dev, ramin, (hdr + 0x04)/4, chan->ramin_grctx->instance +
                                            grctx_size - 1);
        nv_wo32(dev, ramin, (hdr + 0x08)/4, chan->ramin_grctx->instance);
        nv_wo32(dev, ramin, (hdr + 0x0c)/4, 0);
        nv_wo32(dev, ramin, (hdr + 0x10)/4, 0);
        nv_wo32(dev, ramin, (hdr + 0x14)/4, 0x00010000);
        dev_priv->engine.instmem.finish_access(dev);

        dev_priv->engine.instmem.prepare_access(dev, true);
        nouveau_grctx_vals_load(dev, ctx);
        nv_wo32(dev, ctx, 0x00000/4, chan->ramin->instance >> 12);
        if ((dev_priv->chipset & 0xf0) == 0xa0)
                nv_wo32(dev, ctx, 0x00004/4, 0x00000000);
        else
                nv_wo32(dev, ctx, 0x0011c/4, 0x00000000);
        dev_priv->engine.instmem.finish_access(dev);

        return 0;
}

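/* Tear down a channel's graphics context: zero the RAMIN header entries and
 * release the grctx object. */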
void
nv50_graph_destroy_context(struct nouveau_channel *chan)
{
        struct drm_device *dev = chan->dev;
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        int i, hdr = IS_G80 ? 0x200 : 0x20;

        NV_DEBUG(dev, "ch%d\n", chan->id);

        if (!chan->ramin || !chan->ramin->gpuobj)
                return;

        dev_priv->engine.instmem.prepare_access(dev, true);
        for (i = hdr; i < hdr + 24; i += 4)
                nv_wo32(dev, chan->ramin->gpuobj, i/4, 0);
        dev_priv->engine.instmem.finish_access(dev);

        nouveau_gpuobj_ref_del(dev, &chan->ramin_grctx);
}

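/* Low-level helper: point PGRAPH's context-switch logic at the instance
 * address 'inst' and trigger a context load, with FIFO access disabled while
 * the registers are being poked. */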
static int
nv50_graph_do_load_context(struct drm_device *dev, uint32_t inst)
{
        uint32_t fifo = nv_rd32(dev, 0x400500);

        nv_wr32(dev, 0x400500, fifo & ~1);
        nv_wr32(dev, 0x400784, inst);
        nv_wr32(dev, 0x400824, nv_rd32(dev, 0x400824) | 0x40);
        nv_wr32(dev, 0x400320, nv_rd32(dev, 0x400320) | 0x11);
        nv_wr32(dev, 0x400040, 0xffffffff);
        (void)nv_rd32(dev, 0x400040);
        nv_wr32(dev, 0x400040, 0x00000000);
        nv_wr32(dev, 0x400304, nv_rd32(dev, 0x400304) | 1);

        if (nouveau_wait_for_idle(dev))
                nv_wr32(dev, 0x40032c, inst | (1 << 31));
        nv_wr32(dev, 0x400500, fifo);

        return 0;
}

int
nv50_graph_load_context(struct nouveau_channel *chan)
{
        uint32_t inst = chan->ramin->instance >> 12;

        NV_DEBUG(chan->dev, "ch%d\n", chan->id);
        return nv50_graph_do_load_context(chan->dev, inst);
}

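/* Save the currently loaded context back to memory, if any, and rewrite the
 * current-context register without its "loaded" bit. */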
int
nv50_graph_unload_context(struct drm_device *dev)
{
        uint32_t inst, fifo = nv_rd32(dev, 0x400500);

        inst  = nv_rd32(dev, NV50_PGRAPH_CTXCTL_CUR);
        if (!(inst & NV50_PGRAPH_CTXCTL_CUR_LOADED))
                return 0;
        inst &= NV50_PGRAPH_CTXCTL_CUR_INSTANCE;

        nouveau_wait_for_idle(dev);
        nv_wr32(dev, 0x400500, fifo & ~1);
        nv_wr32(dev, 0x400784, inst);
        nv_wr32(dev, 0x400824, nv_rd32(dev, 0x400824) | 0x20);
        nv_wr32(dev, 0x400304, nv_rd32(dev, 0x400304) | 0x01);
        nouveau_wait_for_idle(dev);
        nv_wr32(dev, 0x400500, fifo);

        nv_wr32(dev, NV50_PGRAPH_CTXCTL_CUR, inst);
        return 0;
}

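/* Interrupt-time context switch: unload the current context, load the one
 * selected in CTXCTL_NEXT, then re-enable the context-switch interrupt. */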
void
nv50_graph_context_switch(struct drm_device *dev)
{
        uint32_t inst;

        nv50_graph_unload_context(dev);

        inst  = nv_rd32(dev, NV50_PGRAPH_CTXCTL_NEXT);
        inst &= NV50_PGRAPH_CTXCTL_NEXT_INSTANCE;
        nv50_graph_do_load_context(dev, inst);

        nv_wr32(dev, NV40_PGRAPH_INTR_EN, nv_rd32(dev,
                NV40_PGRAPH_INTR_EN) | NV_PGRAPH_INTR_CONTEXT_SWITCH);
}

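/* Methods of the software object (class 0x506e) used to implement vblank
 * semaphore waits: the channel binds a DMA object, an offset and a release
 * value, and the release method queues the channel on the vblank waiting
 * list. */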
static int
nv50_graph_nvsw_dma_vblsem(struct nouveau_channel *chan, int grclass,
                           int mthd, uint32_t data)
{
        struct nouveau_gpuobj_ref *ref = NULL;

        if (nouveau_gpuobj_ref_find(chan, data, &ref))
                return -ENOENT;

        if (nouveau_notifier_offset(ref->gpuobj, NULL))
                return -EINVAL;

        chan->nvsw.vblsem = ref->gpuobj;
        chan->nvsw.vblsem_offset = ~0;
        return 0;
}

static int
nv50_graph_nvsw_vblsem_offset(struct nouveau_channel *chan, int grclass,
                              int mthd, uint32_t data)
{
        if (nouveau_notifier_offset(chan->nvsw.vblsem, &data))
                return -ERANGE;

        chan->nvsw.vblsem_offset = data >> 2;
        return 0;
}

static int
nv50_graph_nvsw_vblsem_release_val(struct nouveau_channel *chan, int grclass,
                                   int mthd, uint32_t data)
{
        chan->nvsw.vblsem_rval = data;
        return 0;
}

static int
nv50_graph_nvsw_vblsem_release(struct nouveau_channel *chan, int grclass,
                               int mthd, uint32_t data)
{
        struct drm_device *dev = chan->dev;
        struct drm_nouveau_private *dev_priv = dev->dev_private;

        if (!chan->nvsw.vblsem || chan->nvsw.vblsem_offset == ~0 || data > 1)
                return -EINVAL;

        if (!(nv_rd32(dev, NV50_PDISPLAY_INTR_EN) &
                      NV50_PDISPLAY_INTR_EN_VBLANK_CRTC_(data))) {
                nv_wr32(dev, NV50_PDISPLAY_INTR_1,
                        NV50_PDISPLAY_INTR_1_VBLANK_CRTC_(data));
                nv_wr32(dev, NV50_PDISPLAY_INTR_EN, nv_rd32(dev,
                        NV50_PDISPLAY_INTR_EN) |
                        NV50_PDISPLAY_INTR_EN_VBLANK_CRTC_(data));
        }

        list_add(&chan->nvsw.vbl_wait, &dev_priv->vbl_waiting);
        return 0;
}

static struct nouveau_pgraph_object_method nv50_graph_nvsw_methods[] = {
        { 0x018c, nv50_graph_nvsw_dma_vblsem },
        { 0x0400, nv50_graph_nvsw_vblsem_offset },
        { 0x0404, nv50_graph_nvsw_vblsem_release_val },
        { 0x0408, nv50_graph_nvsw_vblsem_release },
        {}
};

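/* Graphics object classes exposed by PGRAPH on NV50-family chips. */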
struct nouveau_pgraph_object_class nv50_graph_grclass[] = {
        { 0x506e, true, nv50_graph_nvsw_methods }, /* nvsw */
        { 0x0030, false, NULL }, /* null */
        { 0x5039, false, NULL }, /* m2mf */
        { 0x502d, false, NULL }, /* 2d */
        { 0x50c0, false, NULL }, /* compute */
        { 0x5097, false, NULL }, /* tesla (nv50) */
        { 0x8297, false, NULL }, /* tesla (nv80/nv90) */
        { 0x8397, false, NULL }, /* tesla (nva0) */
        { 0x8597, false, NULL }, /* tesla (nva8) */
        {}
};