/* drivers/gpu/drm/nouveau/nvd0_display.c */
/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include "drmP.h"
#include "drm_crtc_helper.h"

#include "nouveau_drv.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_fb.h"
#include "nv50_display.h"

#define EVO_DMA_NR 9

#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY    EVO_SYNC(0, 0x00)
#define EVO_FLIP_SEM0(c) EVO_SYNC((c), 0x00)
#define EVO_FLIP_SEM1(c) EVO_SYNC((c), 0x10)

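/* Note on channel numbering (based on how the indices are used below):
 * channels 0x00-0x08 are the DMA-mode channels (master/core, then one flip
 * and one overlay channel per head) backed by the disp->evo[] array, while
 * the overlay-immediate and cursor channels are PIO-only and have no push
 * buffer.  Each channel index also appears to own a 0x100-byte slot in the
 * shared sync buffer object, per the EVO_SYNC() macro above.
 */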
struct evo {
	int idx;
	dma_addr_t handle;
	u32 *ptr;
	struct {
		u32 offset;
		u16 value;
	} sem;
};

struct nvd0_display {
	struct nouveau_gpuobj *mem;
	struct nouveau_bo *sync;
	struct evo evo[9];

	struct tasklet_struct tasklet;
	u32 modeset;
};

static struct nvd0_display *
nvd0_display(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	return dev_priv->engine.display.priv;
}

static struct drm_crtc *
nvd0_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/
static inline int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}

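/* evo_wait()/evo_kick() implement a simple PUT-pointer push buffer on top
 * of each channel's page-sized DMA area: evo_wait() returns a pointer at
 * the current PUT offset (emitting what appears to be a wrap command,
 * 0x20000000, and restarting from offset 0 when there is not enough room
 * left in the page), evo_mthd()/evo_data() append method/data words, and
 * evo_kick() writes the new PUT offset so the display engine fetches the
 * queued commands.
 */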
static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;

	if (put + nr >= (PAGE_SIZE / 4)) {
		disp->evo[id].ptr[put] = 0x20000000;

		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}

		put = 0;
	}

	if (nouveau_reg_debug & NOUVEAU_REG_DEBUG_EVO)
		NV_INFO(dev, "Evo%d: %p START\n", id, disp->evo[id].ptr + put);

	return disp->evo[id].ptr + put;
}

static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);

	if (nouveau_reg_debug & NOUVEAU_REG_DEBUG_EVO) {
		u32 curp = nv_rd32(dev, 0x640000 + (id * 0x1000)) >> 2;
		u32 *cur = disp->evo[id].ptr + curp;

		while (cur < push)
			NV_INFO(dev, "Evo%d: 0x%08x\n", id, *cur++);
		NV_INFO(dev, "Evo%d: %p KICK!\n", id, push);
	}

	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}

#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)

static int
evo_init_dma(struct drm_device *dev, int ch)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 flags;

	flags = 0x00000000;
	if (ch == EVO_MASTER)
		flags |= 0x01000000;

	nv_wr32(dev, 0x610494 + (ch * 0x0010), (disp->evo[ch].handle >> 8) | 3);
	nv_wr32(dev, 0x610498 + (ch * 0x0010), 0x00010000);
	nv_wr32(dev, 0x61049c + (ch * 0x0010), 0x00000001);
	nv_mask(dev, 0x610490 + (ch * 0x0010), 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000 + (ch * 0x1000), 0x00000000);
	nv_wr32(dev, 0x610490 + (ch * 0x0010), 0x00000013 | flags);
	if (!nv_wait(dev, 0x610490 + (ch * 0x0010), 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: ch%d 0x%08x\n", ch,
			 nv_rd32(dev, 0x610490 + (ch * 0x0010)));
		return -EBUSY;
	}

	nv_mask(dev, 0x610090, (1 << ch), (1 << ch));
	nv_mask(dev, 0x6100a0, (1 << ch), (1 << ch));
	return 0;
}

static void
evo_fini_dma(struct drm_device *dev, int ch)
{
	if (!(nv_rd32(dev, 0x610490 + (ch * 0x0010)) & 0x00000010))
		return;

	nv_mask(dev, 0x610490 + (ch * 0x0010), 0x00000010, 0x00000000);
	nv_mask(dev, 0x610490 + (ch * 0x0010), 0x00000003, 0x00000000);
	nv_wait(dev, 0x610490 + (ch * 0x0010), 0x80000000, 0x00000000);
	nv_mask(dev, 0x610090, (1 << ch), 0x00000000);
	nv_mask(dev, 0x6100a0, (1 << ch), 0x00000000);
}

static inline void
evo_piow(struct drm_device *dev, int ch, u16 mthd, u32 data)
{
	nv_wr32(dev, 0x640000 + (ch * 0x1000) + mthd, data);
}

static int
evo_init_pio(struct drm_device *dev, int ch)
{
	nv_wr32(dev, 0x610490 + (ch * 0x0010), 0x00000001);
	if (!nv_wait(dev, 0x610490 + (ch * 0x0010), 0x00010000, 0x00010000)) {
		NV_ERROR(dev, "PDISP: ch%d 0x%08x\n", ch,
			 nv_rd32(dev, 0x610490 + (ch * 0x0010)));
		return -EBUSY;
	}

	nv_mask(dev, 0x610090, (1 << ch), (1 << ch));
	nv_mask(dev, 0x6100a0, (1 << ch), (1 << ch));
	return 0;
}

static void
evo_fini_pio(struct drm_device *dev, int ch)
{
	if (!(nv_rd32(dev, 0x610490 + (ch * 0x0010)) & 0x00000001))
		return;

	nv_mask(dev, 0x610490 + (ch * 0x0010), 0x00000010, 0x00000010);
	nv_mask(dev, 0x610490 + (ch * 0x0010), 0x00000001, 0x00000000);
	nv_wait(dev, 0x610490 + (ch * 0x0010), 0x00010000, 0x00000000);
	nv_mask(dev, 0x610090, (1 << ch), 0x00000000);
	nv_mask(dev, 0x6100a0, (1 << ch), 0x00000000);
}

static bool
evo_sync_wait(void *data)
{
	return nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000;
}

static int
evo_sync(struct drm_device *dev, int ch)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push = evo_wait(dev, ch, 8);
	if (push) {
		nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000 | EVO_MAST_NTFY);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_kick(push, dev, ch);
		if (nv_wait_cb(dev, evo_sync_wait, disp->sync))
			return 0;
	}

	return -EBUSY;
}

/******************************************************************************
 * Page flipping channel
 *****************************************************************************/
struct nouveau_bo *
nvd0_display_crtc_sema(struct drm_device *dev, int crtc)
{
	return nvd0_display(dev)->sync;
}

void
nvd0_display_flip_stop(struct drm_crtc *crtc)
{
	struct nvd0_display *disp = nvd0_display(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct evo *evo = &disp->evo[EVO_FLIP(nv_crtc->index)];
	u32 *push;

	push = evo_wait(crtc->dev, evo->idx, 8);
	if (push) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0080, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, crtc->dev, evo->idx);
	}
}

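/* Queue a page flip on the per-head flip (base) channel.  When a rendering
 * channel is supplied, a semaphore in the shared sync buffer (written via
 * the channel's semaphore methods) orders the flip against rendering; the
 * flip channel is told (methods 0x0088+) which offset and values to use
 * for the handshake.  sem.offset is toggled by 0x10 and sem.value is
 * incremented after each flip so successive flips use alternating slots.
 */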
int
nvd0_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct nouveau_channel *chan, u32 swap_interval)
{
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nvd0_display *disp = nvd0_display(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct evo *evo = &disp->evo[EVO_FLIP(nv_crtc->index)];
	u64 offset;
	u32 *push;
	int ret;

	swap_interval <<= 4;
	if (swap_interval == 0)
		swap_interval |= 0x100;

	push = evo_wait(crtc->dev, evo->idx, 128);
	if (unlikely(push == NULL))
		return -EBUSY;

	/* synchronise with the rendering channel, if necessary */
	if (likely(chan)) {
		ret = RING_SPACE(chan, 10);
		if (ret)
			return ret;

		offset  = chan->dispc_vma[nv_crtc->index].offset;
		offset += evo->sem.offset;

		BEGIN_NVC0(chan, 2, NvSubM2MF, 0x0010, 4);
		OUT_RING  (chan, upper_32_bits(offset));
		OUT_RING  (chan, lower_32_bits(offset));
		OUT_RING  (chan, 0xf00d0000 | evo->sem.value);
		OUT_RING  (chan, 0x1002);
		BEGIN_NVC0(chan, 2, NvSubM2MF, 0x0010, 4);
		OUT_RING  (chan, upper_32_bits(offset));
		OUT_RING  (chan, lower_32_bits(offset ^ 0x10));
		OUT_RING  (chan, 0x74b1e000);
		OUT_RING  (chan, 0x1001);
		FIRE_RING (chan);
	} else {
		nouveau_bo_wr32(disp->sync, evo->sem.offset / 4,
				0xf00d0000 | evo->sem.value);
		evo_sync(crtc->dev, EVO_MASTER);
	}

	/* queue the flip */
	evo_mthd(push, 0x0100, 1);
	evo_data(push, 0xfffe0000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, swap_interval);
	if (!(swap_interval & 0x00000100)) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, 0x40000000);
	}
	evo_mthd(push, 0x0088, 4);
	evo_data(push, evo->sem.offset);
	evo_data(push, 0xf00d0000 | evo->sem.value);
	evo_data(push, 0x74b1e000);
	evo_data(push, NvEvoSync);
	evo_mthd(push, 0x00a0, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x00c0, 1);
	evo_data(push, nv_fb->r_dma);
	evo_mthd(push, 0x0110, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0400, 5);
	evo_data(push, nv_fb->nvbo->bo.offset >> 8);
	evo_data(push, 0);
	evo_data(push, (fb->height << 16) | fb->width);
	evo_data(push, nv_fb->r_pitch);
	evo_data(push, nv_fb->r_format);
	evo_mthd(push, 0x0080, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, crtc->dev, evo->idx);

	evo->sem.offset ^= 0x10;
	evo->sem.value++;
	return 0;
}

/******************************************************************************
 * CRTC
 *****************************************************************************/
static int
nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	u32 *push, mode = 0x00;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	push = evo_wait(dev, EVO_MASTER, 4);
	if (push) {
		evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
		evo_data(push, mode);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, EVO_MASTER);
	}

	return 0;
}

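/* Program the viewport/scaler for a head.  The aspect-ratio math below
 * uses 19-bit fixed point: "aspect" is a fraction shifted left by 19, so
 * ((x * aspect) + (aspect / 2)) >> 19 is a rounded multiply by that
 * fraction when converting between output width and height.
 */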
static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct nouveau_connector *nv_connector;
	int mode = DRM_MODE_SCALE_NONE;
	u32 oX, oY, *push;

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode)
		mode = nv_connector->scaling_mode;

	if (mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	push = evo_wait(dev, EVO_MASTER, 8);
	if (push) {
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (oY << 16) | oX);
		evo_data(push, (oY << 16) | oX);
		evo_data(push, (oY << 16) | oX);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (umode->vdisplay << 16) | umode->hdisplay);
		evo_kick(push, dev, EVO_MASTER);
		if (update) {
			nvd0_display_flip_stop(crtc);
			nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
		}
	}

	return 0;
}

static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(fb->dev, EVO_MASTER, 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (y << 16) | x);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, fb->dev, EVO_MASTER);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}

static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(dev, EVO_MASTER, 16);
	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, dev, EVO_MASTER);
	}
}

static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	nvd0_display_flip_stop(crtc);

	push = evo_wait(crtc->dev, EVO_MASTER, 2);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x03000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_kick(push, crtc->dev, EVO_MASTER);
	}

	nvd0_crtc_cursor_show(nv_crtc, false, false);
}

static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, EVO_MASTER, 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, NvEvoVRAM);
		evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0xffffff00);
		evo_kick(push, crtc->dev, EVO_MASTER);
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, false);
	nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
}

static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

static int
nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
	int ret;

	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
	if (ret)
		return ret;

	if (old_fb) {
		nvfb = nouveau_framebuffer(old_fb);
		nouveau_bo_unpin(nvfb->nvbo);
	}

	return 0;
}

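/* Translate a DRM display mode into the EVO head timing methods.  The
 * blanking start/end positions are recalculated relative to the start of
 * the sync pulse rather than the front porch, hence the hblanke/hblanks
 * (and vblanke/vblanks) arithmetic below; interlaced modes additionally
 * program a second vblank window (vblan2s/vblan2e).
 */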
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1;
	u32 magic = 0x31ec6000;
	u32 syncs, *push;
	int ret;

	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
		magic  |= 0x00000001;
	}

	syncs = 0x00000001;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		syncs |= 0x00000008;
	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		syncs |= 0x00000010;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(crtc->dev, EVO_MASTER, 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
		evo_data(push, 0x00000000);
		evo_data(push, (vactive << 16) | hactive);
		evo_data(push, ( vsynce << 16) | hsynce);
		evo_data(push, (vblanke << 16) | hblanke);
		evo_data(push, (vblanks << 16) | hblanks);
		evo_data(push, (vblan2e << 16) | vblan2s);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000);
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
		evo_data(push, syncs);
		evo_data(push, magic);
		evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
		evo_data(push, 0x00000311);
		evo_data(push, 0x00000100);
		evo_kick(push, crtc->dev, EVO_MASTER);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, false);
	nvd0_crtc_set_scale(nv_crtc, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}

static int
nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	if (!crtc->fb) {
		NV_DEBUG_KMS(crtc->dev, "No FB bound\n");
		return 0;
	}

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nvd0_display_flip_stop(crtc);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
	nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
	return 0;
}

static int
nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nvd0_display_flip_stop(crtc);
	nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}

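/* Load the 256-entry gamma LUT into the per-head LUT buffer object.  Each
 * entry appears to be 0x20 bytes with 16-bit red/green/blue fields at
 * byte offsets 0/2/4; the values written are the upper 14 bits of the DRM
 * 16-bit gamma components plus a 0x6000 bias.
 */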
static void
nvd0_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		writew(0x6000 + (nv_crtc->lut.r[i] >> 2), lut + (i * 0x20) + 0);
		writew(0x6000 + (nv_crtc->lut.g[i] >> 2), lut + (i * 0x20) + 2);
		writew(0x6000 + (nv_crtc->lut.b[i] >> 2), lut + (i * 0x20) + 4);
	}
}

static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}

static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ch = EVO_CURS(nv_crtc->index);

	evo_piow(crtc->dev, ch, 0x0084, (y << 16) | x);
	evo_piow(crtc->dev, ch, 0x0080, 0x00000000);
	return 0;
}

static void
nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 end = max(start + size, (u32)256);
	u32 i;

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nvd0_crtc_lut_load(crtc);
}

static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};

static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
	.page_flip = nouveau_crtc_page_flip,
};

static void
nvd0_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
{
}

static void
nvd0_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
{
}

static int
nvd0_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc;
	struct drm_crtc *crtc;
	int ret, i;

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->index = index;
	nv_crtc->set_dither = nvd0_crtc_set_dither;
	nv_crtc->set_scale = nvd0_crtc_set_scale;
	nv_crtc->cursor.set_offset = nvd0_cursor_set_offset;
	nv_crtc->cursor.set_pos = nvd0_cursor_set_pos;
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}

	crtc = &nv_crtc->base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	if (ret)
		goto out;

	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}

/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	dpms_ctrl = 0x80000000;
	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000001;
	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000004;

	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
}

static bool
nvd0_dac_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_dac_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, EVO_MASTER, 4);
	if (push) {
		evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 2);
		evo_data(push, 1 << nv_crtc->index);
		evo_data(push, 0x00ff);
		evo_kick(push, encoder->dev, EVO_MASTER);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, EVO_MASTER, 4);
		if (push) {
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, EVO_MASTER);
		}

		nv_encoder->crtc = NULL;
	}
}

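/* DAC load detection: kick the sense circuit through 0x61a00c, wait for
 * the measurement to settle, and report "connected" only if what are
 * presumably the three R/G/B load bits (0x38000000) all come back set.
 */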
static enum drm_connector_status
nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	enum drm_connector_status status = connector_status_disconnected;
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 load;

	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x00100000);
	udelay(9500);
	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x80000000);

	load = nv_rd32(dev, 0x61a00c + (or * 0x800));
	if ((load & 0x38000000) == 0x38000000)
		status = connector_status_connected;

	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x00000000);
	return status;
}

static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_prepare,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};

static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};

static int
nvd0_dac_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
	drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Audio
 *****************************************************************************/
static void
nvd0_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;
	struct drm_device *dev = encoder->dev;
	int i, or = nv_encoder->or * 0x30;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	nv_mask(dev, 0x10ec10 + or, 0x80000003, 0x80000001);

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	if (nv_connector->base.eld[0]) {
		u8 *eld = nv_connector->base.eld;

		for (i = 0; i < eld[2] * 4; i++)
			nv_wr32(dev, 0x10ec00 + or, (i << 8) | eld[i]);
		for (i = eld[2] * 4; i < 0x60; i++)
			nv_wr32(dev, 0x10ec00 + or, (i << 8) | 0x00);

		nv_mask(dev, 0x10ec10 + or, 0x80000002, 0x80000002);
	}
}

static void
nvd0_audio_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or * 0x30;

	nv_mask(dev, 0x10ec10 + or, 0x80000003, 0x80000000);
}

/******************************************************************************
 * HDMI
 *****************************************************************************/
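/* max_ac_packet below is derived from the horizontal blanking period:
 * take htotal - hdisplay, subtract the rekey value and an 18-pixel
 * constant (both taken from the binary driver / tegra code), and divide
 * by 32 before programming the result into the HDMI_CTRL register.
 */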
static void
nvd0_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct drm_device *dev = encoder->dev;
	int head = nv_crtc->index * 0x800;
	u32 rekey = 56; /* binary driver, and tegra constant */
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= rekey;
	max_ac_packet -= 18; /* constant from tegra */
	max_ac_packet /= 32;

	/* AVI InfoFrame */
	nv_mask(dev, 0x616714 + head, 0x00000001, 0x00000000);
	nv_wr32(dev, 0x61671c + head, 0x000d0282);
	nv_wr32(dev, 0x616720 + head, 0x0000006f);
	nv_wr32(dev, 0x616724 + head, 0x00000000);
	nv_wr32(dev, 0x616728 + head, 0x00000000);
	nv_wr32(dev, 0x61672c + head, 0x00000000);
	nv_mask(dev, 0x616714 + head, 0x00000001, 0x00000001);

	/* ??? InfoFrame? */
	nv_mask(dev, 0x6167a4 + head, 0x00000001, 0x00000000);
	nv_wr32(dev, 0x6167ac + head, 0x00000010);
	nv_mask(dev, 0x6167a4 + head, 0x00000001, 0x00000001);

	/* HDMI_CTRL */
	nv_mask(dev, 0x616798 + head, 0x401f007f, 0x40000000 | rekey |
						  max_ac_packet << 16);

	/* NFI, audio doesn't work without it though.. */
	nv_mask(dev, 0x616548 + head, 0x00000070, 0x00000000);

	nvd0_audio_mode_set(encoder, mode);
}

static void
nvd0_hdmi_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
	struct drm_device *dev = encoder->dev;
	int head = nv_crtc->index * 0x800;

	nvd0_audio_disconnect(encoder);

	nv_mask(dev, 0x616798 + head, 0x40000000, 0x00000000);
	nv_mask(dev, 0x6167a4 + head, 0x00000001, 0x00000000);
	nv_mask(dev, 0x616714 + head, 0x00000001, 0x00000000);
}

/******************************************************************************
 * SOR
 *****************************************************************************/
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;

	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->dcb->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	dpms_ctrl  = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}

static bool
nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct drm_device *dev = encoder->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &dev_priv->vbios;
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 *push, or_config;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_encoder->dcb->type) {
	case OUTPUT_TMDS:
		if (nv_encoder->dcb->sorconf.link & 1) {
			if (mode->clock < 165000)
				mode_ctrl |= 0x00000100;
			else
				mode_ctrl |= 0x00000500;
		} else {
			mode_ctrl |= 0x00000200;
		}

		or_config = (mode_ctrl & 0x00000f00) >> 8;
		if (mode->clock >= 165000)
			or_config |= 0x0100;

		nvd0_hdmi_mode_set(encoder, mode);
		break;
	case OUTPUT_LVDS:
		or_config = (mode_ctrl & 0x00000f00) >> 8;
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				or_config |= 0x0100;
			if (bios->fp.if_is_24bit)
				or_config |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					or_config |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				or_config |= 0x0100;
			}

			if (or_config & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					or_config |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					or_config |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				or_config |= 0x0200;
		}
		break;
	default:
		BUG_ON(1);
		break;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(dev, EVO_MASTER, 4);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 2);
		evo_data(push, mode_ctrl);
		evo_data(push, or_config);
		evo_kick(push, dev, EVO_MASTER);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, EVO_MASTER, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, EVO_MASTER);
		}

		nvd0_hdmi_disconnect(encoder);

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}

static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};

static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * IRQ
 *****************************************************************************/
static struct dcb_entry *
lookup_dcb(struct drm_device *dev, int id, u32 mc)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int type, or, i;

	if (id < 4) {
		type = OUTPUT_ANALOG;
		or   = id;
	} else {
		switch (mc & 0x00000f00) {
		case 0x00000000: type = OUTPUT_LVDS; break;
		case 0x00000100: type = OUTPUT_TMDS; break;
		case 0x00000200: type = OUTPUT_TMDS; break;
		case 0x00000500: type = OUTPUT_TMDS; break;
		default:
			NV_ERROR(dev, "PDISP: unknown SOR mc 0x%08x\n", mc);
			return NULL;
		}

		or = id - 4;
	}

	for (i = 0; i < dev_priv->vbios.dcb.entries; i++) {
		struct dcb_entry *dcb = &dev_priv->vbios.dcb.entry[i];
		if (dcb->type == type && (dcb->or & (1 << or)))
			return dcb;
	}

	NV_ERROR(dev, "PDISP: DCB for %d/0x%08x not found\n", id, mc);
	return NULL;
}

static void
nvd0_display_unk1_handler(struct drm_device *dev, u32 crtc, u32 mask)
{
	struct dcb_entry *dcb;
	int i;

	for (i = 0; mask && i < 8; i++) {
		u32 mcc = nv_rd32(dev, 0x640180 + (i * 0x20));
		if (!(mcc & (1 << crtc)))
			continue;

		dcb = lookup_dcb(dev, i, mcc);
		if (!dcb)
			continue;

		nouveau_bios_run_display_table(dev, 0x0000, -1, dcb, crtc);
	}

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk2_handler(struct drm_device *dev, u32 crtc, u32 mask)
{
	struct dcb_entry *dcb;
	u32 or, tmp, pclk;
	int i;

	for (i = 0; mask && i < 8; i++) {
		u32 mcc = nv_rd32(dev, 0x640180 + (i * 0x20));
		if (!(mcc & (1 << crtc)))
			continue;

		dcb = lookup_dcb(dev, i, mcc);
		if (!dcb)
			continue;

		nouveau_bios_run_display_table(dev, 0x0000, -2, dcb, crtc);
	}

	pclk = nv_rd32(dev, 0x660450 + (crtc * 0x300)) / 1000;
	if (mask & 0x00010000) {
		nv50_crtc_set_clock(dev, crtc, pclk);
	}

	for (i = 0; mask && i < 8; i++) {
		u32 mcp = nv_rd32(dev, 0x660180 + (i * 0x20));
		u32 cfg = nv_rd32(dev, 0x660184 + (i * 0x20));
		if (!(mcp & (1 << crtc)))
			continue;

		dcb = lookup_dcb(dev, i, mcp);
		if (!dcb)
			continue;
		or = ffs(dcb->or) - 1;

		nouveau_bios_run_display_table(dev, cfg, pclk, dcb, crtc);

		nv_wr32(dev, 0x612200 + (crtc * 0x800), 0x00000000);
		switch (dcb->type) {
		case OUTPUT_ANALOG:
			nv_wr32(dev, 0x612280 + (or * 0x800), 0x00000000);
			break;
		case OUTPUT_TMDS:
		case OUTPUT_LVDS:
			if (cfg & 0x00000100)
				tmp = 0x00000101;
			else
				tmp = 0x00000000;

			nv_mask(dev, 0x612300 + (or * 0x800), 0x00000707, tmp);
			break;
		default:
			break;
		}

		break;
	}

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk4_handler(struct drm_device *dev, u32 crtc, u32 mask)
{
	struct dcb_entry *dcb;
	int pclk, i;

	pclk = nv_rd32(dev, 0x660450 + (crtc * 0x300)) / 1000;

	for (i = 0; mask && i < 8; i++) {
		u32 mcp = nv_rd32(dev, 0x660180 + (i * 0x20));
		u32 cfg = nv_rd32(dev, 0x660184 + (i * 0x20));
		if (!(mcp & (1 << crtc)))
			continue;

		dcb = lookup_dcb(dev, i, mcp);
		if (!dcb)
			continue;

		nouveau_bios_run_display_table(dev, cfg, -pclk, dcb, crtc);
	}

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

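/* Bottom half for the modeset interrupt.  The interrupt handler caches
 * what look like three modeset stages (bits 0/1/2 of 0x6100ac) in
 * disp->modeset; the handlers above run the relevant VBIOS display
 * scripts (and, for stage 2, set the pixel clock) for whichever head and
 * encoders the 0x6101d4/0x6109d4 status registers indicate.
 */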
static void
nvd0_display_bh(unsigned long data)
{
	struct drm_device *dev = (struct drm_device *)data;
	struct nvd0_display *disp = nvd0_display(dev);
	u32 mask, crtc;
	int i;

	if (drm_debug & (DRM_UT_DRIVER | DRM_UT_KMS)) {
		NV_INFO(dev, "PDISP: modeset req %d\n", disp->modeset);
		NV_INFO(dev, " STAT: 0x%08x 0x%08x 0x%08x\n",
			 nv_rd32(dev, 0x6101d0),
			 nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));
		for (i = 0; i < 8; i++) {
			NV_INFO(dev, " %s%d: 0x%08x 0x%08x\n",
				i < 4 ? "DAC" : "SOR", i,
				nv_rd32(dev, 0x640180 + (i * 0x20)),
				nv_rd32(dev, 0x660180 + (i * 0x20)));
		}
	}

	mask = nv_rd32(dev, 0x6101d4);
	crtc = 0;
	if (!mask) {
		mask = nv_rd32(dev, 0x6109d4);
		crtc = 1;
	}

	if (disp->modeset & 0x00000001)
		nvd0_display_unk1_handler(dev, crtc, mask);
	if (disp->modeset & 0x00000002)
		nvd0_display_unk2_handler(dev, crtc, mask);
	if (disp->modeset & 0x00000004)
		nvd0_display_unk4_handler(dev, crtc, mask);
}

static void
nvd0_display_intr(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 intr = nv_rd32(dev, 0x610088);

	if (intr & 0x00000001) {
		u32 stat = nv_rd32(dev, 0x61008c);
		nv_wr32(dev, 0x61008c, stat);
		intr &= ~0x00000001;
	}

	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);

		if (stat & 0x00000007) {
			disp->modeset = stat;
			tasklet_schedule(&disp->tasklet);

			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));
			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}

/******************************************************************************
 * Init
 *****************************************************************************/
void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors + overlays + flips */
	for (i = 1; i >= 0; i--) {
		evo_fini_pio(dev, EVO_CURS(i));
		evo_fini_pio(dev, EVO_OIMM(i));
		evo_fini_dma(dev, EVO_OVLY(i));
		evo_fini_dma(dev, EVO_FLIP(i));
	}

	/* fini master */
	evo_fini_dma(dev, EVO_MASTER);
}

int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	int ret, i;
	u32 *push;

	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
	 * work at all unless you do the SOR part below.
	 */
	for (i = 0; i < 3; i++) {
		u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
		nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
	}

	for (i = 0; i < 4; i++) {
		u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
		nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
	}

	for (i = 0; i < dev->mode_config.num_crtc; i++) {
		u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
		u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
		u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
		nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
		nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
		nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
	}

	/* point at our hash table / objects, enable interrupts */
	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master */
	ret = evo_init_dma(dev, EVO_MASTER);
	if (ret)
		goto error;

	/* init flips + overlays + cursors */
	for (i = 0; i < dev->mode_config.num_crtc; i++) {
		if ((ret = evo_init_dma(dev, EVO_FLIP(i))) ||
		    (ret = evo_init_dma(dev, EVO_OVLY(i))) ||
		    (ret = evo_init_pio(dev, EVO_OIMM(i))) ||
		    (ret = evo_init_pio(dev, EVO_CURS(i))))
			goto error;
	}

	push = evo_wait(dev, EVO_MASTER, 32);
	if (!push) {
		ret = -EBUSY;
		goto error;
	}
	evo_mthd(push, 0x0088, 1);
	evo_data(push, NvEvoSync);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, EVO_MASTER);

error:
	if (ret)
		nvd0_display_fini(dev);
	return ret;
}

void
nvd0_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nvd0_display *disp = nvd0_display(dev);
	struct pci_dev *pdev = dev->pdev;
	int i;

	for (i = 0; i < EVO_DMA_NR; i++) {
		struct evo *evo = &disp->evo[i];
		pci_free_consistent(pdev, PAGE_SIZE, evo->ptr, evo->handle);
	}

	nouveau_gpuobj_ref(NULL, &disp->mem);
	nouveau_bo_unmap(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);
	nouveau_irq_unregister(dev, 26);

	dev_priv->engine.display.priv = NULL;
	kfree(disp);
}

int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	dev_priv->engine.display.priv = disp;

	/* create crtc objects to represent the hw heads */
	for (i = 0; i < 2; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
		case OUTPUT_LVDS:
			nvd0_sor_create(connector, dcbe);
			break;
		case OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	tasklet_init(&disp->tasklet, nvd0_display_bh, (unsigned long)dev);
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(disp->sync);
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;

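	/* Each EVO channel gets its own page of push-buffer memory in system
	 * RAM, plus a set of DMA objects written into disp->mem: NvEvoSync
	 * (the shared sync buffer) and what appear to be three VRAM views
	 * with different attributes (NvEvoVRAM, NvEvoVRAM_LP, NvEvoFB32),
	 * each referenced through a small per-channel hash table at the
	 * start of the object.
	 */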
	/* create evo dma channels */
	for (i = 0; i < EVO_DMA_NR; i++) {
		struct evo *evo = &disp->evo[i];
		u64 offset = disp->sync->bo.offset;
		u32 dmao = 0x1000 + (i * 0x100);
		u32 hash = 0x0000 + (i * 0x040);

		evo->idx = i;
		evo->sem.offset = EVO_SYNC(evo->idx, 0x00);
		evo->ptr = pci_alloc_consistent(pdev, PAGE_SIZE, &evo->handle);
		if (!evo->ptr) {
			ret = -ENOMEM;
			goto out;
		}

		nv_wo32(disp->mem, dmao + 0x00, 0x00000049);
		nv_wo32(disp->mem, dmao + 0x04, (offset + 0x0000) >> 8);
		nv_wo32(disp->mem, dmao + 0x08, (offset + 0x0fff) >> 8);
		nv_wo32(disp->mem, dmao + 0x0c, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x10, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x14, 0x00000000);
		nv_wo32(disp->mem, hash + 0x00, NvEvoSync);
		nv_wo32(disp->mem, hash + 0x04, 0x00000001 | (i << 27) |
						((dmao + 0x00) << 9));

		nv_wo32(disp->mem, dmao + 0x20, 0x00000049);
		nv_wo32(disp->mem, dmao + 0x24, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x28, (dev_priv->vram_size - 1) >> 8);
		nv_wo32(disp->mem, dmao + 0x2c, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x30, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x34, 0x00000000);
		nv_wo32(disp->mem, hash + 0x08, NvEvoVRAM);
		nv_wo32(disp->mem, hash + 0x0c, 0x00000001 | (i << 27) |
						((dmao + 0x20) << 9));

		nv_wo32(disp->mem, dmao + 0x40, 0x00000009);
		nv_wo32(disp->mem, dmao + 0x44, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x48, (dev_priv->vram_size - 1) >> 8);
		nv_wo32(disp->mem, dmao + 0x4c, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x50, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x54, 0x00000000);
		nv_wo32(disp->mem, hash + 0x10, NvEvoVRAM_LP);
		nv_wo32(disp->mem, hash + 0x14, 0x00000001 | (i << 27) |
						((dmao + 0x40) << 9));

		nv_wo32(disp->mem, dmao + 0x60, 0x0fe00009);
		nv_wo32(disp->mem, dmao + 0x64, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x68, (dev_priv->vram_size - 1) >> 8);
		nv_wo32(disp->mem, dmao + 0x6c, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x70, 0x00000000);
		nv_wo32(disp->mem, dmao + 0x74, 0x00000000);
		nv_wo32(disp->mem, hash + 0x18, NvEvoFB32);
		nv_wo32(disp->mem, hash + 0x1c, 0x00000001 | (i << 27) |
						((dmao + 0x60) << 9));
	}

	pinstmem->flush(dev);

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}