drivers/gpu/drm/nouveau/nvd0_display.c
/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */
#include <linux/dma-mapping.h>

#include "drmP.h"
#include "drm_crtc_helper.h"

#include "nouveau_drv.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_fb.h"
#include "nv50_display.h"
struct nvd0_display {
	struct nouveau_gpuobj *mem;
	struct {
		dma_addr_t handle;
		u32 *ptr;
	} evo[1];

	struct tasklet_struct tasklet;
	u32 modeset;
};
static struct nvd0_display *
nvd0_display(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	return dev_priv->engine.display.priv;
}
static inline int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}
static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;

	if (put + nr >= (PAGE_SIZE / 4)) {
		disp->evo[id].ptr[put] = 0x20000000;

		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}

		put = 0;
	}

	return disp->evo[id].ptr + put;
}
static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);
	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}

#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)
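
/* Descriptive note on the helpers above: they implement a small software
 * push buffer for the EVO display channel.  Callers reserve space with
 * evo_wait(), append a method header plus its data words with the
 * evo_mthd()/evo_data() macros, and submit by writing the new PUT pointer
 * through evo_kick().  Throughout this file the 0x0080/0x00000000 pair is
 * queued as a final "update" request whenever a change should take effect
 * immediately.
 */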
static struct drm_crtc *
nvd0_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}
/******************************************************************************
 * CRTC
 *****************************************************************************/
static int
nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push, mode;

	mode = 0x00000000;
	if (on) {
		/* 0x11: 6bpc dynamic 2x2
		 * 0x13: 8bpc dynamic 2x2
		 * 0x19: 6bpc static 2x2
		 * 0x1b: 8bpc static 2x2
		 * 0x21: 6bpc temporal
		 * 0x23: 8bpc temporal
		 */
		mode = 0x00000011;
	}

	push = evo_wait(dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
		evo_data(push, mode);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}
static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, int type, bool update)
{
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	struct nouveau_connector *nv_connector;
	u32 *push, outX, outY;

	outX = mode->hdisplay;
	outY = mode->vdisplay;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode) {
		struct drm_display_mode *native = nv_connector->native_mode;
		u32 xratio = (native->hdisplay << 19) / mode->hdisplay;
		u32 yratio = (native->vdisplay << 19) / mode->vdisplay;

		switch (type) {
		case DRM_MODE_SCALE_ASPECT:
			if (xratio > yratio) {
				outX = (mode->hdisplay * yratio) >> 19;
				outY = (mode->vdisplay * yratio) >> 19;
			} else {
				outX = (mode->hdisplay * xratio) >> 19;
				outY = (mode->vdisplay * xratio) >> 19;
			}
			break;
		case DRM_MODE_SCALE_FULLSCREEN:
			outX = native->hdisplay;
			outY = native->vdisplay;
			break;
		default:
			break;
		}
	}

	push = evo_wait(dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (outY << 16) | outX);
		evo_data(push, (outY << 16) | outX);
		evo_data(push, (outY << 16) | outX);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}
static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(fb->dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (y << 16) | x);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, fb->dev, 0);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}
static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(dev, 0, 16);
	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, dev, 0);
	}
}
static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x03000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, false, false);
}

static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, NvEvoVRAM);
		evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0xffffff00);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
}
static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

static int
nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
	int ret;

	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
	if (ret)
		return ret;

	if (old_fb) {
		nvfb = nouveau_framebuffer(old_fb);
		nouveau_bo_unpin(nvfb->nvbo);
	}

	return 0;
}
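
/* Descriptive note: the timing values computed at the top of
 * nvd0_crtc_mode_set() below are all derived from the standard DRM mode
 * fields -- hsyncw/vsyncw are the sync widths minus one, hfrntp/vfrntp and
 * hbackp/vbackp the front/back porches, and hss2be/hss2de (and the vertical
 * pair) appear to measure from sync start to blanking end and to display
 * end, which is presumably the packed layout the method at 0x0410 expects.
 */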
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 htotal = mode->htotal;
	u32 vtotal = mode->vtotal;
	u32 hsyncw = mode->hsync_end - mode->hsync_start - 1;
	u32 vsyncw = mode->vsync_end - mode->vsync_start - 1;
	u32 hfrntp = mode->hsync_start - mode->hdisplay;
	u32 vfrntp = mode->vsync_start - mode->vdisplay;
	u32 hbackp = mode->htotal - mode->hsync_end;
	u32 vbackp = mode->vtotal - mode->vsync_end;
	u32 hss2be = hsyncw + hbackp;
	u32 vss2be = vsyncw + vbackp;
	u32 hss2de = htotal - hfrntp;
	u32 vss2de = vtotal - vfrntp;
	u32 syncs, *push;
	int ret;

	syncs = 0x00000001;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		syncs |= 0x00000008;
	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		syncs |= 0x00000010;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(crtc->dev, 0, 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 5);
		evo_data(push, 0x00000000);
		evo_data(push, (vtotal << 16) | htotal);
		evo_data(push, (vsyncw << 16) | hsyncw);
		evo_data(push, (vss2be << 16) | hss2be);
		evo_data(push, (vss2de << 16) | hss2de);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000);
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 1);
		evo_data(push, syncs);
		evo_kick(push, crtc->dev, 0);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, nv_connector->use_dithering, false);
	nvd0_crtc_set_scale(nv_crtc, nv_connector->scaling_mode, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}
static int
nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	if (!crtc->fb) {
		NV_DEBUG_KMS(crtc->dev, "No FB bound\n");
		return 0;
	}

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
	return 0;
}

static int
nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}

static void
nvd0_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		writew(0x6000 + (nv_crtc->lut.r[i] >> 2), lut + (i * 0x20) + 0);
		writew(0x6000 + (nv_crtc->lut.g[i] >> 2), lut + (i * 0x20) + 2);
		writew(0x6000 + (nv_crtc->lut.b[i] >> 2), lut + (i * 0x20) + 4);
	}
}
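
/* Descriptive note: each of the 256 LUT entries written above occupies
 * 0x20 bytes, with red, green and blue stored as 16-bit words at offsets
 * 0, 2 and 4; the 16-bit DRM gamma values are shifted down by two bits and
 * biased by 0x6000 before being written into the buffer object.
 */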
static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}
static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	const u32 data = (y << 16) | x;

	nv_wr32(crtc->dev, 0x64d084 + (nv_crtc->index * 0x1000), data);
	nv_wr32(crtc->dev, 0x64d080 + (nv_crtc->index * 0x1000), 0x00000000);
	return 0;
}

static void
nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 end = max(start + size, (u32)256);
	u32 i;

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nvd0_crtc_lut_load(crtc);
}

static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};

static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
};

static void
nvd0_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
{
}

static void
nvd0_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
{
}
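
/* Descriptive note: nvd0_crtc_create() below wires one hardware head into
 * DRM.  Besides registering the CRTC callbacks, it allocates and pins two
 * small VRAM buffer objects per head: a 64x64x32bpp cursor image and an
 * 8KiB object backing the gamma LUT loaded by nvd0_crtc_lut_load().
 */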
static int
nvd0_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc;
	struct drm_crtc *crtc;
	int ret, i;

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->index = index;
	nv_crtc->set_dither = nvd0_crtc_set_dither;
	nv_crtc->set_scale = nvd0_crtc_set_scale;
	nv_crtc->cursor.set_offset = nvd0_cursor_set_offset;
	nv_crtc->cursor.set_pos = nvd0_cursor_set_pos;
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}

	crtc = &nv_crtc->base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	if (ret)
		goto out;

	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}
/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	dpms_ctrl = 0x80000000;
	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000001;
	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000004;

	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
}
static bool
nvd0_dac_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_dac_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 2);
		evo_data(push, 1 << nv_crtc->index);
		evo_data(push, 0x00ff);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}
static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
	}
}
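
/* Descriptive note: nvd0_dac_detect() below performs an analog load-detect.
 * It pokes the sense register at 0x61a00c, waits roughly 9.5ms, and reports
 * the connector as connected when what look like the three colour-channel
 * sense bits (0x38000000) all read back set.
 */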
static enum drm_connector_status
nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	enum drm_connector_status status = connector_status_disconnected;
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 load;

	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x00100000);
	udelay(9500);
	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x80000000);

	load = nv_rd32(dev, 0x61a00c + (or * 0x800));
	if ((load & 0x38000000) == 0x38000000)
		status = connector_status_connected;

	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x00000000);
	return status;
}

static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_prepare,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};

static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};

static int
nvd0_dac_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
	drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
/******************************************************************************
 * SOR
 *****************************************************************************/
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;

	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}

static bool
nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}
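
/* Descriptive note: nvd0_sor_mode_set() below builds the two words pushed
 * to the SOR method at 0x0200.  mode_ctrl selects the head and, for TMDS,
 * the link configuration (single vs. dual link around a 165MHz pixel-clock
 * threshold); or_config carries dual-link and 24-bit-panel flags derived
 * from the VBIOS LVDS tables or, for SPWG panels, from an EDID byte.
 */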
static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct drm_nouveau_private *dev_priv = encoder->dev->dev_private;
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &dev_priv->vbios;
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 *push, or_config;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_encoder->dcb->type) {
	case OUTPUT_TMDS:
		if (nv_encoder->dcb->sorconf.link & 1) {
			if (mode->clock < 165000)
				mode_ctrl |= 0x00000100;
			else
				mode_ctrl |= 0x00000500;
		} else {
			mode_ctrl |= 0x00000200;
		}

		or_config = (mode_ctrl & 0x00000f00) >> 8;
		if (mode->clock >= 165000)
			or_config |= 0x0100;
		break;
	case OUTPUT_LVDS:
		or_config = (mode_ctrl & 0x00000f00) >> 8;
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				or_config |= 0x0100;
			if (bios->fp.if_is_24bit)
				or_config |= 0x0200;
		} else {
			if (nv_connector->dcb->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					or_config |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				or_config |= 0x0100;
			}

			if (or_config & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					or_config |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					or_config |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				or_config |= 0x0200;
		}
		break;
	default:
		BUG_ON(1);
		break;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 2);
		evo_data(push, mode_ctrl);
		evo_data(push, or_config);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}
static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}

static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};

static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
/******************************************************************************
 * IRQ
 *****************************************************************************/
static struct dcb_entry *
lookup_dcb(struct drm_device *dev, int id, u32 mc)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int type, or, i;

	if (id < 4) {
		type = OUTPUT_ANALOG;
		or = id;
	} else {
		switch (mc & 0x00000f00) {
		case 0x00000000: type = OUTPUT_LVDS; break;
		case 0x00000100: type = OUTPUT_TMDS; break;
		case 0x00000200: type = OUTPUT_TMDS; break;
		case 0x00000500: type = OUTPUT_TMDS; break;
		default:
			NV_ERROR(dev, "PDISP: unknown SOR mc 0x%08x\n", mc);
			return NULL;
		}

		or = id - 4;
	}

	for (i = 0; i < dev_priv->vbios.dcb.entries; i++) {
		struct dcb_entry *dcb = &dev_priv->vbios.dcb.entry[i];
		if (dcb->type == type && (dcb->or & (1 << or)))
			return dcb;
	}

	NV_ERROR(dev, "PDISP: DCB for %d/0x%08x not found\n", id, mc);
	return NULL;
}
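
/* Descriptive note: the three handlers below appear to be the stages of the
 * supervisor-style modeset interrupt.  Bits 1, 2 and 4 of the 0x6100ac
 * status are latched into disp->modeset by nvd0_display_intr() and
 * dispatched from the tasklet in nvd0_display_bh(); each stage runs the
 * relevant VBIOS display scripts (stage two also reprograms the CRTC clock)
 * before writing 0x80000000 to 0x6101d0 to let the hardware continue.
 */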
static void
nvd0_display_unk1_handler(struct drm_device *dev, u32 crtc, u32 mask)
{
	struct dcb_entry *dcb;
	int i;

	for (i = 0; mask && i < 8; i++) {
		u32 mcc = nv_rd32(dev, 0x640180 + (i * 0x20));
		if (!(mcc & (1 << crtc)))
			continue;

		dcb = lookup_dcb(dev, i, mcc);
		if (!dcb)
			continue;

		nouveau_bios_run_display_table(dev, 0x0000, -1, dcb, crtc);
	}

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}
static void
nvd0_display_unk2_handler(struct drm_device *dev, u32 crtc, u32 mask)
{
	struct dcb_entry *dcb;
	u32 or, tmp, pclk;
	int i;

	for (i = 0; mask && i < 8; i++) {
		u32 mcc = nv_rd32(dev, 0x640180 + (i * 0x20));
		if (!(mcc & (1 << crtc)))
			continue;

		dcb = lookup_dcb(dev, i, mcc);
		if (!dcb)
			continue;

		nouveau_bios_run_display_table(dev, 0x0000, -2, dcb, crtc);
	}

	pclk = nv_rd32(dev, 0x660450 + (crtc * 0x300)) / 1000;
	if (mask & 0x00010000) {
		nv50_crtc_set_clock(dev, crtc, pclk);
	}

	for (i = 0; mask && i < 8; i++) {
		u32 mcp = nv_rd32(dev, 0x660180 + (i * 0x20));
		u32 cfg = nv_rd32(dev, 0x660184 + (i * 0x20));
		if (!(mcp & (1 << crtc)))
			continue;

		dcb = lookup_dcb(dev, i, mcp);
		if (!dcb)
			continue;
		or = ffs(dcb->or) - 1;

		nouveau_bios_run_display_table(dev, cfg, pclk, dcb, crtc);

		nv_wr32(dev, 0x612200 + (crtc * 0x800), 0x00000000);
		switch (dcb->type) {
		case OUTPUT_ANALOG:
			nv_wr32(dev, 0x612280 + (or * 0x800), 0x00000000);
			break;
		case OUTPUT_TMDS:
		case OUTPUT_LVDS:
			if (cfg & 0x00000100)
				tmp = 0x00000101;
			else
				tmp = 0x00000000;

			nv_mask(dev, 0x612300 + (or * 0x800), 0x00000707, tmp);
			break;
		default:
			break;
		}

		break;
	}

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}
static void
nvd0_display_unk4_handler(struct drm_device *dev, u32 crtc, u32 mask)
{
	struct dcb_entry *dcb;
	int pclk, i;

	pclk = nv_rd32(dev, 0x660450 + (crtc * 0x300)) / 1000;

	for (i = 0; mask && i < 8; i++) {
		u32 mcp = nv_rd32(dev, 0x660180 + (i * 0x20));
		u32 cfg = nv_rd32(dev, 0x660184 + (i * 0x20));
		if (!(mcp & (1 << crtc)))
			continue;

		dcb = lookup_dcb(dev, i, mcp);
		if (!dcb)
			continue;

		nouveau_bios_run_display_table(dev, cfg, -pclk, dcb, crtc);
	}

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}
static void
nvd0_display_bh(unsigned long data)
{
	struct drm_device *dev = (struct drm_device *)data;
	struct nvd0_display *disp = nvd0_display(dev);
	u32 mask, crtc;
	int i;

	if (drm_debug & (DRM_UT_DRIVER | DRM_UT_KMS)) {
		NV_INFO(dev, "PDISP: modeset req %d\n", disp->modeset);
		NV_INFO(dev, " STAT: 0x%08x 0x%08x 0x%08x\n",
			 nv_rd32(dev, 0x6101d0),
			 nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));
		for (i = 0; i < 8; i++) {
			NV_INFO(dev, " %s%d: 0x%08x 0x%08x\n",
				i < 4 ? "DAC" : "SOR", i,
				nv_rd32(dev, 0x640180 + (i * 0x20)),
				nv_rd32(dev, 0x660180 + (i * 0x20)));
		}
	}

	mask = nv_rd32(dev, 0x6101d4);
	crtc = 0;
	if (!mask) {
		mask = nv_rd32(dev, 0x6109d4);
		crtc = 1;
	}

	if (disp->modeset & 0x00000001)
		nvd0_display_unk1_handler(dev, crtc, mask);
	if (disp->modeset & 0x00000002)
		nvd0_display_unk2_handler(dev, crtc, mask);
	if (disp->modeset & 0x00000004)
		nvd0_display_unk4_handler(dev, crtc, mask);
}
static void
nvd0_display_intr(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 intr = nv_rd32(dev, 0x610088);

	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);

		if (stat & 0x00000007) {
			disp->modeset = stat;
			tasklet_schedule(&disp->tasklet);

			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));
			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}
/******************************************************************************
 * Init
 *****************************************************************************/
static void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}
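
/* Descriptive note: initialisation below proceeds in the order this driver
 * relies on -- copy the DAC/SOR/head state into the 0x61xxxx shadow
 * registers, point the display engine at the hash table and DMA objects
 * built in nvd0_display_create(), bring up the core (master) channel and
 * the two cursor channels (13 and 14), and finally push a short method
 * sequence binding NvEvoSync to the core channel.
 */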
int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
	 * work at all unless you do the SOR part below.
	 */
	for (i = 0; i < 3; i++) {
		u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
		nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
	}

	for (i = 0; i < 4; i++) {
		u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
		nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
	}

	for (i = 0; i < 2; i++) {
		u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
		u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
		u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
		nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
		nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
		nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
	}

	/* point at our hash table / objects, enable interrupts */
	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000);
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}

		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, NvEvoSync);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);

	return 0;
}
void
nvd0_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nvd0_display *disp = nvd0_display(dev);
	struct pci_dev *pdev = dev->pdev;

	nvd0_display_fini(dev);

	pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
	nouveau_gpuobj_ref(NULL, &disp->mem);
	nouveau_irq_unregister(dev, 26);

	dev_priv->engine.display.priv = NULL;
	kfree(disp);
}
int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	dev_priv->engine.display.priv = disp;

	/* create crtc objects to represent the hw heads */
	for (i = 0; i < 2; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
		case OUTPUT_LVDS:
			nvd0_sor_create(connector, dcbe);
			break;
		case OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	tasklet_init(&disp->tasklet, nvd0_display_bh, (unsigned long)dev);
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;
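
	/* Descriptive note: the writes below fill in the display hash table
	 * and its DMA objects.  Each block starting at 0x1000, 0x1020,
	 * 0x1040 and 0x1060 appears to describe one DMA object (the EVO
	 * sync area within this gpuobj, then what look like two linear VRAM
	 * views and an FB32 view), while the pairs at offsets 0x0000-0x001c
	 * hash each handle (NvEvoSync, NvEvoVRAM, NvEvoVRAM_LP, NvEvoFB32)
	 * to its object's offset.
	 */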
	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, NvEvoSync);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1020, 0x00000049);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, NvEvoVRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1040, 0x00000009);
	nv_wo32(disp->mem, 0x1044, 0x00000000);
	nv_wo32(disp->mem, 0x1048, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x104c, 0x00000000);
	nv_wo32(disp->mem, 0x1050, 0x00000000);
	nv_wo32(disp->mem, 0x1054, 0x00000000);
	nv_wo32(disp->mem, 0x0010, NvEvoVRAM_LP);
	nv_wo32(disp->mem, 0x0014, (0x1040 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1060, 0x0fe00009);
	nv_wo32(disp->mem, 0x1064, 0x00000000);
	nv_wo32(disp->mem, 0x1068, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x106c, 0x00000000);
	nv_wo32(disp->mem, 0x1070, 0x00000000);
	nv_wo32(disp->mem, 0x1074, 0x00000000);
	nv_wo32(disp->mem, 0x0018, NvEvoFB32);
	nv_wo32(disp->mem, 0x001c, (0x1060 << 9) | 0x00000001);

	pinstmem->flush(dev);

	/* push buffers for evo channels */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}