/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>

#include "nouveau_fbcon.h"
#include "dispnv04/hw.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

#include "nouveau_fence.h"

#include <engine/disp.h>

#include <core/class.h>

static int
nouveau_display_vblank_handler(void *data, int head)
{
	struct nouveau_drm *drm = data;
	drm_handle_vblank(drm->dev, head);
	return NVKM_EVENT_KEEP;
}
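
/*
 * The enable/disable hooks below tie DRM's per-head vblank interrupt
 * requests to nouveau's event interface: nouveau_event_get() and
 * nouveau_event_put() reference-count interest in the hardware vblank
 * event, and the handler above forwards it to drm_handle_vblank().
 */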

int
nouveau_display_vblank_enable(struct drm_device *dev, int head)
{
	struct nouveau_display *disp = nouveau_display(dev);
	if (disp) {
		nouveau_event_get(disp->vblank[head]);
		return 0;
	}
	return -EIO;
}

void
nouveau_display_vblank_disable(struct drm_device *dev, int head)
{
	struct nouveau_display *disp = nouveau_display(dev);
	if (disp)
		nouveau_event_put(disp->vblank[head]);
}
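
/*
 * Convert a raw scanline counter value into a position relative to the
 * start of the active display area: lines inside the blanking interval
 * come out negative, lines in the active area come out >= 0.
 */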

static inline int
calc(int blanks, int blanke, int total, int line)
{
	if (blanke >= blanks) {
		if (line >= blanks)
			line -= total;
	} else {
		if (line >= blanks)
			line -= total;
		line -= blanke + 1;
	}
	return line;
}
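
/*
 * Sample the hardware scanout position for a head via the
 * NV04_DISP_SCANOUTPOS method.  A vline of zero is treated as an
 * unreliable sample: wait one scanline and sample once more; a non-zero
 * vline is reported as DRM_SCANOUTPOS_VALID | DRM_SCANOUTPOS_ACCURATE.
 */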

int
nouveau_display_scanoutpos_head(struct drm_crtc *crtc, int *vpos, int *hpos,
				ktime_t *stime, ktime_t *etime)
{
	const u32 mthd = NV04_DISP_SCANOUTPOS + nouveau_crtc(crtc)->index;
	struct nouveau_display *disp = nouveau_display(crtc->dev);
	struct nv04_display_scanoutpos args;
	int ret, retry = 1;

	do {
		ret = nv_exec(disp->core, mthd, &args, sizeof(args));
		if (ret != 0)
			return 0;

		if (args.vline) {
			ret |= DRM_SCANOUTPOS_ACCURATE;
			ret |= DRM_SCANOUTPOS_VALID;
			break;
		}

		if (retry) ndelay(crtc->linedur_ns);
	} while (retry--);

	*hpos = calc(args.hblanks, args.hblanke, args.htotal, args.hline);
	*vpos = calc(args.vblanks, args.vblanke, args.vtotal, args.vline);
	if (stime) *stime = ns_to_ktime(args.time[0]);
	if (etime) *etime = ns_to_ktime(args.time[1]);

	if (*vpos < 0)
		ret |= DRM_SCANOUTPOS_INVBL;
	return ret;
}

int
nouveau_display_scanoutpos(struct drm_device *dev, int head, unsigned int flags,
			   int *vpos, int *hpos, ktime_t *stime, ktime_t *etime)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (nouveau_crtc(crtc)->index == head) {
			return nouveau_display_scanoutpos_head(crtc, vpos, hpos,
							       stime, etime);
		}
	}

	return 0;
}

int
nouveau_display_vblstamp(struct drm_device *dev, int head, int *max_error,
			 struct timeval *time, unsigned flags)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (nouveau_crtc(crtc)->index == head) {
			return drm_calc_vbltimestamp_from_scanoutpos(dev,
					head, max_error, time, flags, crtc,
					&crtc->hwmode);
		}
	}

	return -EINVAL;
}

static void
nouveau_display_vblank_fini(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	int i;

	drm_vblank_cleanup(dev);

	if (disp->vblank) {
		for (i = 0; i < dev->mode_config.num_crtc; i++)
			nouveau_event_ref(NULL, &disp->vblank[i]);
		kfree(disp->vblank);
		disp->vblank = NULL;
	}
}

static int
nouveau_display_vblank_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_disp *pdisp = nouveau_disp(drm->device);
	int ret, i;

	disp->vblank = kzalloc(dev->mode_config.num_crtc *
			       sizeof(*disp->vblank), GFP_KERNEL);
	if (!disp->vblank)
		return -ENOMEM;

	for (i = 0; i < dev->mode_config.num_crtc; i++) {
		ret = nouveau_event_new(pdisp->vblank, i,
					nouveau_display_vblank_handler,
					drm, &disp->vblank[i]);
		if (ret) {
			nouveau_display_vblank_fini(dev);
			return ret;
		}
	}

	ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
	if (ret) {
		nouveau_display_vblank_fini(dev);
		return ret;
	}

	return 0;
}

static void
nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	if (fb->nvbo)
		drm_gem_object_unreference_unlocked(&fb->nvbo->gem);

	drm_framebuffer_cleanup(drm_fb);
	kfree(fb);
}

static int
nouveau_user_framebuffer_create_handle(struct drm_framebuffer *drm_fb,
				       struct drm_file *file_priv,
				       unsigned int *handle)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	return drm_gem_handle_create(file_priv, &fb->nvbo->gem, handle);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
	.destroy = nouveau_user_framebuffer_destroy,
	.create_handle = nouveau_user_framebuffer_create_handle,
};
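
/*
 * For NV50+ boards the r_dma/r_format/r_pitch values computed below are
 * the raw encodings later programmed into the EVO display engine; the
 * bare hex constants are the hardware's format/tiling codes rather than
 * anything defined symbolically in this file.
 */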

int
nouveau_framebuffer_init(struct drm_device *dev,
			 struct nouveau_framebuffer *nv_fb,
			 struct drm_mode_fb_cmd2 *mode_cmd,
			 struct nouveau_bo *nvbo)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_framebuffer *fb = &nv_fb->base;
	int ret;

	drm_helper_mode_fill_fb_struct(fb, mode_cmd);
	nv_fb->nvbo = nvbo;

	if (nv_device(drm->device)->card_type >= NV_50) {
		u32 tile_flags = nouveau_bo_tile_layout(nvbo);
		if (tile_flags == 0x7a00 ||
		    tile_flags == 0xfe00)
			nv_fb->r_dma = NvEvoFB32;
		else
		if (tile_flags == 0x7000)
			nv_fb->r_dma = NvEvoFB16;
		else
			nv_fb->r_dma = NvEvoVRAM_LP;

		switch (fb->depth) {
		case  8: nv_fb->r_format = 0x1e00; break;
		case 15: nv_fb->r_format = 0xe900; break;
		case 16: nv_fb->r_format = 0xe800; break;
		case 24:
		case 32: nv_fb->r_format = 0xcf00; break;
		case 30: nv_fb->r_format = 0xd100; break;
		default:
			 NV_ERROR(drm, "unknown depth %d\n", fb->depth);
			 return -EINVAL;
		}

		if (nvbo->tile_flags & NOUVEAU_GEM_TILE_NONCONTIG) {
			NV_ERROR(drm, "framebuffer requires contiguous bo\n");
			return -EINVAL;
		}

		if (nv_device(drm->device)->chipset == 0x50)
			nv_fb->r_format |= (tile_flags << 8);

		if (!tile_flags) {
			if (nv_device(drm->device)->card_type < NV_D0)
				nv_fb->r_pitch = 0x00100000 | fb->pitches[0];
			else
				nv_fb->r_pitch = 0x01000000 | fb->pitches[0];
		} else {
			u32 mode = nvbo->tile_mode;
			if (nv_device(drm->device)->card_type >= NV_C0)
				mode >>= 4;
			nv_fb->r_pitch = ((fb->pitches[0] / 4) << 4) | mode;
		}
	}

	ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs);
	if (ret) {
		return ret;
	}

	return 0;
}

static struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
				struct drm_file *file_priv,
				struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct nouveau_framebuffer *nouveau_fb;
	struct drm_gem_object *gem;
	int ret = -ENOMEM;

	gem = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
	if (!gem)
		return ERR_PTR(-ENOENT);

	nouveau_fb = kzalloc(sizeof(struct nouveau_framebuffer), GFP_KERNEL);
	if (!nouveau_fb)
		goto err_unref;

	ret = nouveau_framebuffer_init(dev, nouveau_fb, mode_cmd, nouveau_gem_object(gem));
	if (ret)
		goto err;

	return &nouveau_fb->base;

err:
	kfree(nouveau_fb);
err_unref:
	drm_gem_object_unreference(gem);
	return ERR_PTR(ret);
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
};
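
/*
 * Each property entry carries a gen_mask: bit 0 = pre-NV50, bit 1 = the
 * NV50 family, bit 2 = NVD0 and newer, matching the "gen" value computed
 * in nouveau_display_create().  PROP_ENUM only exposes entries whose
 * mask includes the current generation.
 */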

struct nouveau_drm_prop_enum_list {
	u8 gen_mask;
	int type;
	char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
	{ 6, UNDERSCAN_AUTO, "auto" },
	{ 6, UNDERSCAN_OFF, "off" },
	{ 6, UNDERSCAN_ON, "on" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
	{ 7, DITHERING_MODE_AUTO, "auto" },
	{ 7, DITHERING_MODE_OFF, "off" },
	{ 1, DITHERING_MODE_ON, "on" },
	{ 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
	{ 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
	{ 4, DITHERING_MODE_TEMPORAL, "temporal" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
	{ 6, DITHERING_DEPTH_AUTO, "auto" },
	{ 6, DITHERING_DEPTH_6BPC, "6 bpc" },
	{ 6, DITHERING_DEPTH_8BPC, "8 bpc" },
	{}
};

#define PROP_ENUM(p,gen,n,list) do { \
	struct nouveau_drm_prop_enum_list *l = (list); \
	int c = 0; \
	while (l->gen_mask) { \
		if (l->gen_mask & (1 << (gen))) \
			c++; \
		l++; \
	} \
	if (c) { \
		p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c); \
		l = (list); \
		c = 0; \
		while (p && l->gen_mask) { \
			if (l->gen_mask & (1 << (gen))) { \
				drm_property_add_enum(p, c, l->type, l->name); \
				c++; \
			} \
			l++; \
		} \
	} \
} while(0)

int
nouveau_display_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_connector *connector;
	int ret;

	ret = disp->init(dev);
	if (ret)
		return ret;

	/* enable polling for external displays */
	drm_kms_helper_poll_enable(dev);

	/* enable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		if (conn->hpd_func) nouveau_event_get(conn->hpd_func);
	}

	return ret;
}

void
nouveau_display_fini(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_connector *connector;

	/* disable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		if (conn->hpd_func) nouveau_event_put(conn->hpd_func);
	}

	drm_kms_helper_poll_disable(dev);
	disp->fini(dev);
}
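
/*
 * Create the display engine: set up mode_config and connector
 * properties, then probe for a supported display class (the oclass[]
 * table below is ordered newest to oldest) before handing off to the
 * generation-specific nv04/nv50 display code.
 */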

int
nouveau_display_create(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_display *disp;
	int ret, gen;

	disp = drm->display = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	drm_mode_config_init(dev);
	drm_mode_create_scaling_mode_property(dev);
	drm_mode_create_dvi_i_properties(dev);

	if (nv_device(drm->device)->card_type < NV_50)
		gen = 0;
	else
	if (nv_device(drm->device)->card_type < NV_D0)
		gen = 1;
	else
		gen = 2;

	PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
	PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
	PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

	disp->underscan_hborder_property =
		drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

	disp->underscan_vborder_property =
		drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

	if (gen >= 1) {
		/* -90..+90 */
		disp->vibrant_hue_property =
			drm_property_create_range(dev, 0, "vibrant hue", 0, 180);

		/* -100..+100 */
		disp->color_vibrance_property =
			drm_property_create_range(dev, 0, "color vibrance", 0, 200);
	}

	dev->mode_config.funcs = &nouveau_mode_config_funcs;
	dev->mode_config.fb_base = pci_resource_start(dev->pdev, 1);

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;
	if (nv_device(drm->device)->card_type < NV_10) {
		dev->mode_config.max_width = 2048;
		dev->mode_config.max_height = 2048;
	} else
	if (nv_device(drm->device)->card_type < NV_50) {
		dev->mode_config.max_width = 4096;
		dev->mode_config.max_height = 4096;
	} else {
		dev->mode_config.max_width = 8192;
		dev->mode_config.max_height = 8192;
	}

	dev->mode_config.preferred_depth = 24;
	dev->mode_config.prefer_shadow = 1;

	if (nv_device(drm->device)->chipset < 0x11)
		dev->mode_config.async_page_flip = false;
	else
		dev->mode_config.async_page_flip = true;

	drm_kms_helper_poll_init(dev);
	drm_kms_helper_poll_disable(dev);

	if (drm->vbios.dcb.entries) {
		static const u16 oclass[] = {
			NVF0_DISP_CLASS,
			NVE0_DISP_CLASS,
			NVD0_DISP_CLASS,
			NVA3_DISP_CLASS,
			NV94_DISP_CLASS,
			NVA0_DISP_CLASS,
			NV84_DISP_CLASS,
			NV50_DISP_CLASS,
			NV04_DISP_CLASS,
		};
		int i;

		for (i = 0, ret = -ENODEV; ret && i < ARRAY_SIZE(oclass); i++) {
			ret = nouveau_object_new(nv_object(drm), NVDRM_DEVICE,
						 NVDRM_DISPLAY, oclass[i],
						 NULL, 0, &disp->core);
		}

		if (ret == 0) {
			if (nv_mclass(disp->core) < NV50_DISP_CLASS)
				ret = nv04_display_create(dev);
			else
				ret = nv50_display_create(dev);
		}
	} else {
		ret = 0;
	}

	if (ret)
		goto disp_create_err;

	if (dev->mode_config.num_crtc) {
		ret = nouveau_display_vblank_init(dev);
		if (ret)
			goto vblank_err;
	}

	nouveau_backlight_init(dev);
	return 0;

vblank_err:
	disp->dtor(dev);
disp_create_err:
	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);
	return ret;
}

void
nouveau_display_destroy(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);

	nouveau_backlight_exit(dev);
	nouveau_display_vblank_fini(dev);

	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);

	if (disp->dtor)
		disp->dtor(dev);

	nouveau_object_del(nv_object(drm), NVDRM_DEVICE, NVDRM_DISPLAY);

	nouveau_drm(dev)->display = NULL;
	kfree(disp);
}
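
/*
 * On suspend, scanout framebuffers and cursor buffers are unpinned so
 * their backing VRAM can be evicted while the device is asleep;
 * nouveau_display_repin() is the counterpart that pins them again.
 */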

int
nouveau_display_suspend(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_crtc *crtc;

	nouveau_display_fini(dev);

	NV_INFO(drm, "unpinning framebuffer(s)...\n");
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_framebuffer *nouveau_fb;

		nouveau_fb = nouveau_framebuffer(crtc->fb);
		if (!nouveau_fb || !nouveau_fb->nvbo)
			continue;

		nouveau_bo_unpin(nouveau_fb->nvbo);
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		nouveau_bo_unmap(nv_crtc->cursor.nvbo);
		nouveau_bo_unpin(nv_crtc->cursor.nvbo);
	}

	return 0;
}

void
nouveau_display_repin(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_crtc *crtc;
	int ret;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_framebuffer *nouveau_fb;

		nouveau_fb = nouveau_framebuffer(crtc->fb);
		if (!nouveau_fb || !nouveau_fb->nvbo)
			continue;

		nouveau_bo_pin(nouveau_fb->nvbo, TTM_PL_FLAG_VRAM);
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			NV_ERROR(drm, "Could not pin/map cursor.\n");
	}
}

void
nouveau_display_resume(struct drm_device *dev)
{
	struct drm_crtc *crtc;
	nouveau_display_init(dev);

	/* Force CLUT to get re-loaded during modeset */
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		nv_crtc->lut.depth = 0;
	}

	drm_helper_resume_force_mode(dev);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		u32 offset = nv_crtc->cursor.nvbo->bo.offset;

		nv_crtc->cursor.set_offset(nv_crtc, offset);
		nv_crtc->cursor.set_pos(nv_crtc, nv_crtc->cursor_saved_x,
						 nv_crtc->cursor_saved_y);
	}
}
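
/*
 * Queue a pending-flip state on the channel's flip list, synchronise
 * with rendering on the old framebuffer, then emit an NV_SW_PAGE_FLIP
 * software method; nouveau_flip_complete() picks the state back up when
 * the flip completes.
 */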

static int
nouveau_page_flip_emit(struct nouveau_channel *chan,
		       struct nouveau_bo *old_bo,
		       struct nouveau_bo *new_bo,
		       struct nouveau_page_flip_state *s,
		       struct nouveau_fence **pfence)
{
	struct nouveau_fence_chan *fctx = chan->fence;
	struct nouveau_drm *drm = chan->drm;
	struct drm_device *dev = drm->dev;
	unsigned long flags;
	int ret;

	/* Queue it to the pending list */
	spin_lock_irqsave(&dev->event_lock, flags);
	list_add_tail(&s->head, &fctx->flip);
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/* Synchronize with the old framebuffer */
	ret = nouveau_fence_sync(old_bo->bo.sync_obj, chan);
	if (ret)
		goto fail;

	/* Emit the pageflip */
	ret = RING_SPACE(chan, 2);
	if (ret)
		goto fail;

	if (nv_device(drm->device)->card_type < NV_C0)
		BEGIN_NV04(chan, NvSubSw, NV_SW_PAGE_FLIP, 1);
	else
		BEGIN_NVC0(chan, FermiSw, NV_SW_PAGE_FLIP, 1);
	OUT_RING (chan, 0x00000000);
	FIRE_RING (chan);

	ret = nouveau_fence_new(chan, false, pfence);
	if (ret)
		goto fail;

	return 0;
fail:
	spin_lock_irqsave(&dev->event_lock, flags);
	list_del(&s->head);
	spin_unlock_irqrestore(&dev->event_lock, flags);
	return ret;
}
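
/*
 * DRM .page_flip entry point: pin the new framebuffer, synchronise the
 * kernel channel with rendering on it, reserve the old buffer, then
 * emit the flip (via EVO on NV50+, or a blit-subchannel method sequence
 * on older chips when a swap interval is requested).
 */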

int
nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct drm_pending_vblank_event *event, u32 flags)
{
	const int swap_interval = (flags & DRM_MODE_PAGE_FLIP_ASYNC) ? 0 : 1;
	struct drm_device *dev = crtc->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_bo *old_bo = nouveau_framebuffer(crtc->fb)->nvbo;
	struct nouveau_bo *new_bo = nouveau_framebuffer(fb)->nvbo;
	struct nouveau_page_flip_state *s;
	struct nouveau_channel *chan = drm->channel;
	struct nouveau_fence *fence;
	int ret;

	if (!drm->channel)
		return -ENODEV;

	s = kzalloc(sizeof(*s), GFP_KERNEL);
	if (!s)
		return -ENOMEM;

	if (new_bo != old_bo) {
		ret = nouveau_bo_pin(new_bo, TTM_PL_FLAG_VRAM);
		if (ret)
			goto fail_free;
	}

	mutex_lock(&chan->cli->mutex);

	/* synchronise rendering channel with the kernel's channel */
	spin_lock(&new_bo->bo.bdev->fence_lock);
	fence = nouveau_fence_ref(new_bo->bo.sync_obj);
	spin_unlock(&new_bo->bo.bdev->fence_lock);
	ret = nouveau_fence_sync(fence, chan);
	nouveau_fence_unref(&fence);
	if (ret)
		goto fail_unpin;

	ret = ttm_bo_reserve(&old_bo->bo, true, false, false, NULL);
	if (ret)
		goto fail_unpin;

	/* Initialize a page flip struct */
	*s = (struct nouveau_page_flip_state)
		{ { }, event, nouveau_crtc(crtc)->index,
		  fb->bits_per_pixel, fb->pitches[0], crtc->x, crtc->y,
		  new_bo->bo.offset };

	/* Emit a page flip */
	if (nv_device(drm->device)->card_type >= NV_50) {
		ret = nv50_display_flip_next(crtc, fb, chan, swap_interval);
		if (ret)
			goto fail_unreserve;
	} else {
		struct nv04_display *dispnv04 = nv04_display(dev);
		int head = nouveau_crtc(crtc)->index;

		if (swap_interval) {
			ret = RING_SPACE(chan, 8);
			if (ret)
				goto fail_unreserve;

			BEGIN_NV04(chan, NvSubImageBlit, 0x012c, 1);
			OUT_RING (chan, 0);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0134, 1);
			OUT_RING (chan, head);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0100, 1);
			OUT_RING (chan, 0);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0130, 1);
			OUT_RING (chan, 0);
		}

		nouveau_bo_ref(new_bo, &dispnv04->image[head]);
	}

	ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence);
	mutex_unlock(&chan->cli->mutex);
	if (ret)
		goto fail_unreserve;

	/* Update the crtc struct and cleanup */
	crtc->fb = fb;

	nouveau_bo_fence(old_bo, fence);
	ttm_bo_unreserve(&old_bo->bo);
	if (old_bo != new_bo)
		nouveau_bo_unpin(old_bo);
	nouveau_fence_unref(&fence);
	return 0;

fail_unreserve:
	ttm_bo_unreserve(&old_bo->bo);
fail_unpin:
	mutex_unlock(&chan->cli->mutex);
	if (old_bo != new_bo)
		nouveau_bo_unpin(new_bo);
fail_free:
	kfree(s);
	return ret;
}
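
/*
 * Complete the oldest pending flip on this channel: send the userspace
 * vblank event, if any, and hand the flip state back to the caller.
 */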

int
nouveau_finish_page_flip(struct nouveau_channel *chan,
			 struct nouveau_page_flip_state *ps)
{
	struct nouveau_fence_chan *fctx = chan->fence;
	struct nouveau_drm *drm = chan->drm;
	struct drm_device *dev = drm->dev;
	struct nouveau_page_flip_state *s;
	unsigned long flags;

	spin_lock_irqsave(&dev->event_lock, flags);

	if (list_empty(&fctx->flip)) {
		NV_ERROR(drm, "unexpected pageflip\n");
		spin_unlock_irqrestore(&dev->event_lock, flags);
		return -EINVAL;
	}

	s = list_first_entry(&fctx->flip, struct nouveau_page_flip_state, head);
	if (s->event)
		drm_send_vblank_event(dev, s->crtc, s->event);

	list_del(&s->head);
	if (ps)
		*ps = *s;
	kfree(s);

	spin_unlock_irqrestore(&dev->event_lock, flags);
	return 0;
}

int
nouveau_flip_complete(void *data)
{
	struct nouveau_channel *chan = data;
	struct nouveau_drm *drm = chan->drm;
	struct nouveau_page_flip_state state;

	if (!nouveau_finish_page_flip(chan, &state)) {
		if (nv_device(drm->device)->card_type < NV_50) {
			nv_set_crtc_base(drm->dev, state.crtc, state.offset +
					 state.y * state.pitch +
					 state.x * state.bpp / 8);
		}
	}

	return 0;
}
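
/*
 * Dumb BO allocation: the pitch is aligned to 256 bytes and the total
 * size rounded up to a whole page before the VRAM buffer is created.
 */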

int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
			    struct drm_mode_create_dumb *args)
{
	struct nouveau_bo *bo;
	int ret;

	args->pitch = roundup(args->width * (args->bpp / 8), 256);
	args->size = args->pitch * args->height;
	args->size = roundup(args->size, PAGE_SIZE);

	ret = nouveau_gem_new(dev, args->size, 0, NOUVEAU_GEM_DOMAIN_VRAM, 0, 0, &bo);
	if (ret)
		return ret;

	ret = drm_gem_handle_create(file_priv, &bo->gem, &args->handle);
	drm_gem_object_unreference_unlocked(&bo->gem);
	return ret;
}

int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
				struct drm_device *dev,
				uint32_t handle, uint64_t *poffset)
{
	struct drm_gem_object *gem;

	gem = drm_gem_object_lookup(dev, file_priv, handle);
	if (gem) {
		struct nouveau_bo *bo = nouveau_gem_object(gem);
		*poffset = drm_vma_node_offset_addr(&bo->bo.vma_node);
		drm_gem_object_unreference_unlocked(gem);
		return 0;
	}

	return -ENOENT;
}