2 * Copyright (C) 2008 Maarten Maathuis.
5 * Permission is hereby granted, free of charge, to any person obtaining
6 * a copy of this software and associated documentation files (the
7 * "Software"), to deal in the Software without restriction, including
8 * without limitation the rights to use, copy, modify, merge, publish,
9 * distribute, sublicense, and/or sell copies of the Software, and to
10 * permit persons to whom the Software is furnished to do so, subject to
11 * the following conditions:
13 * The above copyright notice and this permission notice (including the
14 * next paragraph) shall be included in all copies or substantial
15 * portions of the Software.
17 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
18 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
19 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
20 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
21 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
22 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
23 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
28 #include <drm/drm_crtc_helper.h>
30 #include "nouveau_fbcon.h"
31 #include "dispnv04/hw.h"
32 #include "nouveau_crtc.h"
33 #include "nouveau_dma.h"
34 #include "nouveau_gem.h"
35 #include "nouveau_connector.h"
36 #include "nv50_display.h"
38 #include "nouveau_fence.h"
40 #include <engine/disp.h>
42 #include <core/class.h>
/*
 * Vblank notifier callback: forwards the vblank interrupt for `head` to the
 * DRM core via drm_handle_vblank(), then returns NVKM_EVENT_KEEP so the
 * event subscription stays armed.
 * NOTE(review): this file is a garbled extraction — statements are split
 * across lines and some original lines (braces, storage class) are missing;
 * code below is kept verbatim.
 */
45 nouveau_display_vblank_handler(void *data
, u32 type
, int head
)
47 struct nouveau_drm
*drm
= data
;
48 drm_handle_vblank(drm
->dev
, head
);
49 return NVKM_EVENT_KEEP
;
/*
 * Enable vblank event delivery for crtc `head`: takes a reference on the
 * per-head nouveau event source (paired with nouveau_display_vblank_disable).
 * NOTE(review): garbled extraction — braces and the return value line are
 * missing from this view; code kept verbatim.
 */
53 nouveau_display_vblank_enable(struct drm_device
*dev
, int head
)
55 struct nouveau_display
*disp
= nouveau_display(dev
);
57 nouveau_event_get(disp
->vblank
[head
]);
/*
 * Disable vblank event delivery for crtc `head`: drops the event reference
 * taken by nouveau_display_vblank_enable().
 * NOTE(review): garbled extraction — surrounding braces missing from view.
 */
64 nouveau_display_vblank_disable(struct drm_device
*dev
, int head
)
66 struct nouveau_display
*disp
= nouveau_display(dev
);
68 nouveau_event_put(disp
->vblank
[head
]);
/*
 * Scanout-position helper: converts a raw scanline number into a position
 * relative to the active display region, given vblank start/end scanlines
 * and the total line count.
 * NOTE(review): only the `blanke >= blanks` branch head is visible here —
 * the body and any other branches were lost in extraction; do not infer
 * the full mapping from this fragment.
 */
72 calc(int blanks
, int blanke
, int total
, int line
)
74 if (blanke
>= blanks
) {
/*
 * Query the hardware scanout position for one crtc.
 * Issues the per-head NV04_DISP_SCANOUTPOS method on the display engine
 * object (nv_exec), converts the returned vblank/line data into *vpos via
 * calc(), and fills optional sample timestamps *stime/*etime from the
 * args.time[] pair. The return value accumulates DRM_SCANOUTPOS_* status
 * flags (ACCURATE, VALID, and INVBL when in vblank).
 * NOTE(review): garbled extraction — the retry loop structure, `ret`/`retry`
 * declarations, hpos assignment and several conditionals are missing from
 * this view; code kept verbatim.
 */
86 nouveau_display_scanoutpos_head(struct drm_crtc
*crtc
, int *vpos
, int *hpos
,
87 ktime_t
*stime
, ktime_t
*etime
)
89 const u32 mthd
= NV04_DISP_SCANOUTPOS
+ nouveau_crtc(crtc
)->index
;
90 struct nouveau_display
*disp
= nouveau_display(crtc
->dev
);
91 struct nv04_display_scanoutpos args
;
/* Ask the display engine for the current scanout position snapshot. */
95 ret
= nv_exec(disp
->core
, mthd
, &args
, sizeof(args
));
100 ret
|= DRM_SCANOUTPOS_ACCURATE
;
101 ret
|= DRM_SCANOUTPOS_VALID
;
/* Presumably waits one scanline before retrying the query — TODO confirm
 * against upstream nouveau_display.c. */
105 if (retry
) ndelay(crtc
->linedur_ns
);
/* Translate the raw vertical line into a position relative to active video. */
109 *vpos
= calc(args
.vblanks
, args
.vblanke
, args
.vtotal
, args
.vline
);
110 if (stime
) *stime
= ns_to_ktime(args
.time
[0]);
111 if (etime
) *etime
= ns_to_ktime(args
.time
[1]);
114 ret
|= DRM_SCANOUTPOS_INVBL
;
/*
 * DRM driver hook: find the crtc whose index matches `head` and delegate
 * to nouveau_display_scanoutpos_head() for the actual hardware query.
 * NOTE(review): garbled extraction — the trailing arguments of the delegated
 * call, the loop close and the not-found return are missing from this view.
 */
119 nouveau_display_scanoutpos(struct drm_device
*dev
, int head
, unsigned int flags
,
120 int *vpos
, int *hpos
, ktime_t
*stime
, ktime_t
*etime
)
122 struct drm_crtc
*crtc
;
124 list_for_each_entry(crtc
, &dev
->mode_config
.crtc_list
, head
) {
125 if (nouveau_crtc(crtc
)->index
== head
) {
126 return nouveau_display_scanoutpos_head(crtc
, vpos
, hpos
,
/*
 * DRM driver hook: compute a precise vblank timestamp for `head` by locating
 * the matching crtc and delegating to the DRM core helper
 * drm_calc_vbltimestamp_from_scanoutpos() (which in turn calls our
 * scanoutpos hook).
 * NOTE(review): garbled extraction — the helper call's final arguments, loop
 * close and fallback return are missing from this view.
 */
135 nouveau_display_vblstamp(struct drm_device
*dev
, int head
, int *max_error
,
136 struct timeval
*time
, unsigned flags
)
138 struct drm_crtc
*crtc
;
140 list_for_each_entry(crtc
, &dev
->mode_config
.crtc_list
, head
) {
141 if (nouveau_crtc(crtc
)->index
== head
) {
142 return drm_calc_vbltimestamp_from_scanoutpos(dev
,
143 head
, max_error
, time
, flags
, crtc
,
/*
 * Tear down vblank support: let the DRM core clean up its vblank state,
 * then drop the per-crtc event references taken at init time
 * (nouveau_event_ref(NULL, ...) releases each handler).
 * NOTE(review): garbled extraction — `int i;` declaration, any NULL check on
 * disp->vblank, and the kfree of the array are missing from this view.
 */
152 nouveau_display_vblank_fini(struct drm_device
*dev
)
154 struct nouveau_display
*disp
= nouveau_display(dev
);
157 drm_vblank_cleanup(dev
);
160 for (i
= 0; i
< dev
->mode_config
.num_crtc
; i
++)
161 nouveau_event_ref(NULL
, &disp
->vblank
[i
]);
/*
 * Set up vblank support: allocate one event slot per crtc, register
 * nouveau_display_vblank_handler for each head on the display engine's
 * vblank event source, then hand the crtc count to drm_vblank_init().
 * Both failure paths visible below unwind via nouveau_display_vblank_fini().
 * NOTE(review): garbled extraction — `int i, ret;`, the allocation-failure
 * check, the per-iteration error test, and the final return are missing
 * from this view; code kept verbatim.
 */
168 nouveau_display_vblank_init(struct drm_device
*dev
)
170 struct nouveau_display
*disp
= nouveau_display(dev
);
171 struct nouveau_drm
*drm
= nouveau_drm(dev
);
172 struct nouveau_disp
*pdisp
= nouveau_disp(drm
->device
);
/* One event pointer per crtc; zeroed so fini can safely unref unset slots. */
175 disp
->vblank
= kzalloc(dev
->mode_config
.num_crtc
*
176 sizeof(*disp
->vblank
), GFP_KERNEL
);
180 for (i
= 0; i
< dev
->mode_config
.num_crtc
; i
++) {
181 ret
= nouveau_event_new(pdisp
->vblank
, 1, i
,
182 nouveau_display_vblank_handler
,
183 drm
, &disp
->vblank
[i
]);
/* Error path: release whatever was registered so far. */
185 nouveau_display_vblank_fini(dev
);
190 ret
= drm_vblank_init(dev
, dev
->mode_config
.num_crtc
);
192 nouveau_display_vblank_fini(dev
);
/*
 * drm_framebuffer_funcs.destroy: drop the GEM reference on the backing
 * buffer object and unregister the framebuffer from the DRM core.
 * NOTE(review): garbled extraction — the kfree of the nouveau_framebuffer
 * wrapper is missing from this view.
 */
200 nouveau_user_framebuffer_destroy(struct drm_framebuffer
*drm_fb
)
202 struct nouveau_framebuffer
*fb
= nouveau_framebuffer(drm_fb
);
205 drm_gem_object_unreference_unlocked(&fb
->nvbo
->gem
);
207 drm_framebuffer_cleanup(drm_fb
);
/*
 * drm_framebuffer_funcs.create_handle: create a userspace GEM handle for
 * the framebuffer's backing buffer object, returned via *handle.
 */
212 nouveau_user_framebuffer_create_handle(struct drm_framebuffer
*drm_fb
,
213 struct drm_file
*file_priv
,
214 unsigned int *handle
)
216 struct nouveau_framebuffer
*fb
= nouveau_framebuffer(drm_fb
);
218 return drm_gem_handle_create(file_priv
, &fb
->nvbo
->gem
, handle
);
/* Framebuffer vtable wired into every nouveau framebuffer by
 * nouveau_framebuffer_init(). */
221 static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs
= {
222 .destroy
= nouveau_user_framebuffer_destroy
,
223 .create_handle
= nouveau_user_framebuffer_create_handle
,
/*
 * Initialize a nouveau framebuffer wrapper around a buffer object.
 * Fills the DRM framebuffer fields from mode_cmd, then (for NV50+ cards)
 * derives the EVO push-buffer parameters: r_dma (DMA object selected from
 * the bo's tiling layout), r_format (hardware format code switched on
 * depth), and r_pitch (pitch plus a card-generation-dependent flag, or a
 * tile-mode encoding). Finally registers the fb with the DRM core using
 * nouveau_framebuffer_funcs.
 * NOTE(review): garbled extraction — switch statements, else branches,
 * error returns and closing braces are missing; the depth/format table and
 * pitch encodings are hardware register values, kept verbatim.
 */
227 nouveau_framebuffer_init(struct drm_device
*dev
,
228 struct nouveau_framebuffer
*nv_fb
,
229 struct drm_mode_fb_cmd2
*mode_cmd
,
230 struct nouveau_bo
*nvbo
)
232 struct nouveau_drm
*drm
= nouveau_drm(dev
);
233 struct drm_framebuffer
*fb
= &nv_fb
->base
;
236 drm_helper_mode_fill_fb_struct(fb
, mode_cmd
);
/* NV50+: pick the EVO DMA object from the bo's tiling layout. */
239 if (nv_device(drm
->device
)->card_type
>= NV_50
) {
240 u32 tile_flags
= nouveau_bo_tile_layout(nvbo
);
241 if (tile_flags
== 0x7a00 ||
242 tile_flags
== 0xfe00)
243 nv_fb
->r_dma
= NvEvoFB32
;
245 if (tile_flags
== 0x7000)
246 nv_fb
->r_dma
= NvEvoFB16
;
248 nv_fb
->r_dma
= NvEvoVRAM_LP
;
/* Hardware format code per fb depth (switch body; cases kept verbatim). */
251 case 8: nv_fb
->r_format
= 0x1e00; break;
252 case 15: nv_fb
->r_format
= 0xe900; break;
253 case 16: nv_fb
->r_format
= 0xe800; break;
255 case 32: nv_fb
->r_format
= 0xcf00; break;
256 case 30: nv_fb
->r_format
= 0xd100; break;
258 NV_ERROR(drm
, "unknown depth %d\n", fb
->depth
);
/* Scanout requires contiguous VRAM; reject NONCONTIG-tiled bos. */
262 if (nvbo
->tile_flags
& NOUVEAU_GEM_TILE_NONCONTIG
) {
263 NV_ERROR(drm
, "framebuffer requires contiguous bo\n");
/* Original NV50 folds the tile flags into the format word. */
267 if (nv_device(drm
->device
)->chipset
== 0x50)
268 nv_fb
->r_format
|= (tile_flags
<< 8);
271 if (nv_device(drm
->device
)->card_type
< NV_D0
)
272 nv_fb
->r_pitch
= 0x00100000 | fb
->pitches
[0];
274 nv_fb
->r_pitch
= 0x01000000 | fb
->pitches
[0];
/* Tiled case: encode pitch in tile units together with the tile mode. */
276 u32 mode
= nvbo
->tile_mode
;
277 if (nv_device(drm
->device
)->card_type
>= NV_C0
)
279 nv_fb
->r_pitch
= ((fb
->pitches
[0] / 4) << 4) | mode
;
283 ret
= drm_framebuffer_init(dev
, fb
, &nouveau_framebuffer_funcs
);
/*
 * drm_mode_config_funcs.fb_create: build a nouveau framebuffer for a
 * userspace GEM handle. Looks up the GEM object, allocates the wrapper,
 * initializes it via nouveau_framebuffer_init(), and returns the embedded
 * drm_framebuffer. The visible tail drops the GEM reference on the error
 * path; the ERR_PTR(-ENOENT) return covers a failed handle lookup.
 * NOTE(review): garbled extraction — `int ret;`, the kzalloc-failure check,
 * kfree on error and the goto labels are missing from this view.
 */
291 static struct drm_framebuffer
*
292 nouveau_user_framebuffer_create(struct drm_device
*dev
,
293 struct drm_file
*file_priv
,
294 struct drm_mode_fb_cmd2
*mode_cmd
)
296 struct nouveau_framebuffer
*nouveau_fb
;
297 struct drm_gem_object
*gem
;
300 gem
= drm_gem_object_lookup(dev
, file_priv
, mode_cmd
->handles
[0]);
302 return ERR_PTR(-ENOENT
);
304 nouveau_fb
= kzalloc(sizeof(struct nouveau_framebuffer
), GFP_KERNEL
);
308 ret
= nouveau_framebuffer_init(dev
, nouveau_fb
, mode_cmd
, nouveau_gem_object(gem
));
312 return &nouveau_fb
->base
;
/* Error path: release the lookup reference taken above. */
317 drm_gem_object_unreference(gem
);
/* Mode-config vtable installed on dev->mode_config.funcs in
 * nouveau_display_create(). */
321 static const struct drm_mode_config_funcs nouveau_mode_config_funcs
= {
322 .fb_create
= nouveau_user_framebuffer_create
,
323 .output_poll_changed
= nouveau_fbcon_output_poll_changed
,
/* Table entry for the PROP_ENUM property builders below.
 * NOTE(review): the member declarations were lost in extraction; from usage
 * they appear to be a generation mask (gen_mask), an enum value (type) and
 * a name string — TODO confirm against upstream. */
327 struct nouveau_drm_prop_enum_list
{
/* "underscan" connector property values; first field is the hardware
 * generation mask consumed by PROP_ENUM. */
333 static struct nouveau_drm_prop_enum_list underscan
[] = {
334 { 6, UNDERSCAN_AUTO
, "auto" },
335 { 6, UNDERSCAN_OFF
, "off" },
336 { 6, UNDERSCAN_ON
, "on" },
/* "dithering mode" property values; the per-entry generation masks differ
 * because not every mode exists on every hardware generation. */
340 static struct nouveau_drm_prop_enum_list dither_mode
[] = {
341 { 7, DITHERING_MODE_AUTO
, "auto" },
342 { 7, DITHERING_MODE_OFF
, "off" },
343 { 1, DITHERING_MODE_ON
, "on" },
344 { 6, DITHERING_MODE_STATIC2X2
, "static 2x2" },
345 { 6, DITHERING_MODE_DYNAMIC2X2
, "dynamic 2x2" },
346 { 4, DITHERING_MODE_TEMPORAL
, "temporal" },
/* "dithering depth" property values (auto / 6 bpc / 8 bpc). */
350 static struct nouveau_drm_prop_enum_list dither_depth
[] = {
351 { 6, DITHERING_DEPTH_AUTO
, "auto" },
352 { 6, DITHERING_DEPTH_6BPC
, "6 bpc" },
353 { 6, DITHERING_DEPTH_8BPC
, "8 bpc" },
/*
 * PROP_ENUM(p, gen, n, list): build a DRM enum property named `n` from the
 * entries of `list` whose gen_mask includes generation bit `gen`.
 * First pass counts the applicable entries, drm_property_create() allocates
 * the property with that count, second pass adds each matching enum value.
 * Relies on `dev` and a counter `c` being in scope at the expansion site.
 * NOTE(review): garbled extraction — several continuation lines (counter
 * increments, list advance, do/while close) are missing; no comments are
 * inserted inside the macro body to avoid breaking line continuation.
 */
357 #define PROP_ENUM(p,gen,n,list) do { \
358 struct nouveau_drm_prop_enum_list *l = (list); \
360 while (l->gen_mask) { \
361 if (l->gen_mask & (1 << (gen))) \
366 p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c); \
369 while (p && l->gen_mask) { \
370 if (l->gen_mask & (1 << (gen))) { \
371 drm_property_add_enum(p, c, l->type, l->name); \
/*
 * Bring the display up: run the backend's init hook, re-enable connector
 * polling, and arm the hotplug interrupt event on every connector that
 * has one.
 * NOTE(review): garbled extraction — `int ret;`, the error check after
 * disp->init(), the loop close and the final return are missing from view.
 */
380 nouveau_display_init(struct drm_device
*dev
)
382 struct nouveau_display
*disp
= nouveau_display(dev
);
383 struct drm_connector
*connector
;
386 ret
= disp
->init(dev
);
390 /* enable polling for external displays */
391 drm_kms_helper_poll_enable(dev
);
393 /* enable hotplug interrupts */
394 list_for_each_entry(connector
, &dev
->mode_config
.connector_list
, head
) {
395 struct nouveau_connector
*conn
= nouveau_connector(connector
);
396 if (conn
->hpd
) nouveau_event_get(conn
->hpd
);
/*
 * Shut the display down: disarm every connector's hotplug event, stop
 * connector polling, and (beyond this view) call the backend fini hook.
 * Mirrors nouveau_display_init() in reverse order.
 * NOTE(review): garbled extraction — the loop close and the disp->fini()
 * call are missing from this view.
 */
403 nouveau_display_fini(struct drm_device
*dev
)
405 struct nouveau_display
*disp
= nouveau_display(dev
);
406 struct drm_connector
*connector
;
408 /* disable hotplug interrupts */
409 list_for_each_entry(connector
, &dev
->mode_config
.connector_list
, head
) {
410 struct nouveau_connector
*conn
= nouveau_connector(connector
);
411 if (conn
->hpd
) nouveau_event_put(conn
->hpd
);
414 drm_kms_helper_poll_disable(dev
);
/*
 * One-time display subsystem construction for a device.
 * Allocates struct nouveau_display, initializes DRM mode config and the
 * standard + nouveau-specific connector properties (selected by hardware
 * generation), configures mode limits per card type, creates the display
 * engine object (trying a list of display classes newest-first), creates
 * the NV04 or NV50 backend, sets up vblank support when crtcs exist, and
 * registers backlight support. The visible tail is the error-unwind path.
 * NOTE(review): garbled extraction — `int ret, i, gen;`, the gen selection
 * assignments, the oclass[] contents, many error checks, else branches and
 * braces are missing from this view; code kept verbatim.
 */
419 nouveau_display_create(struct drm_device
*dev
)
421 struct nouveau_drm
*drm
= nouveau_drm(dev
);
422 struct nouveau_device
*device
= nouveau_dev(dev
);
423 struct nouveau_display
*disp
;
426 disp
= drm
->display
= kzalloc(sizeof(*disp
), GFP_KERNEL
);
430 drm_mode_config_init(dev
);
431 drm_mode_create_scaling_mode_property(dev
);
432 drm_mode_create_dvi_i_properties(dev
);
/* Pick a property-generation index from the card type (assignments to
 * `gen` were lost in extraction — TODO confirm values upstream). */
434 if (nv_device(drm
->device
)->card_type
< NV_50
)
437 if (nv_device(drm
->device
)->card_type
< NV_D0
)
442 PROP_ENUM(disp
->dithering_mode
, gen
, "dithering mode", dither_mode
);
443 PROP_ENUM(disp
->dithering_depth
, gen
, "dithering depth", dither_depth
);
444 PROP_ENUM(disp
->underscan_property
, gen
, "underscan", underscan
);
446 disp
->underscan_hborder_property
=
447 drm_property_create_range(dev
, 0, "underscan hborder", 0, 128);
449 disp
->underscan_vborder_property
=
450 drm_property_create_range(dev
, 0, "underscan vborder", 0, 128);
454 disp
->vibrant_hue_property
=
455 drm_property_create_range(dev
, 0, "vibrant hue", 0, 180);
458 disp
->color_vibrance_property
=
459 drm_property_create_range(dev
, 0, "color vibrance", 0, 200);
462 dev
->mode_config
.funcs
= &nouveau_mode_config_funcs
;
463 dev
->mode_config
.fb_base
= nv_device_resource_start(device
, 1);
465 dev
->mode_config
.min_width
= 0;
466 dev
->mode_config
.min_height
= 0;
/* Mode size limits grow with hardware generation. */
467 if (nv_device(drm
->device
)->card_type
< NV_10
) {
468 dev
->mode_config
.max_width
= 2048;
469 dev
->mode_config
.max_height
= 2048;
471 if (nv_device(drm
->device
)->card_type
< NV_50
) {
472 dev
->mode_config
.max_width
= 4096;
473 dev
->mode_config
.max_height
= 4096;
475 dev
->mode_config
.max_width
= 8192;
476 dev
->mode_config
.max_height
= 8192;
479 dev
->mode_config
.preferred_depth
= 24;
480 dev
->mode_config
.prefer_shadow
= 1;
/* Async flips are only reliable from NV11 onwards. */
482 if (nv_device(drm
->device
)->chipset
< 0x11)
483 dev
->mode_config
.async_page_flip
= false;
485 dev
->mode_config
.async_page_flip
= true;
487 drm_kms_helper_poll_init(dev
);
488 drm_kms_helper_poll_disable(dev
);
/* Only create the display engine when the VBIOS describes outputs. */
490 if (drm
->vbios
.dcb
.entries
) {
491 static const u16 oclass
[] = {
/* Try each display class until one is accepted by the device. */
505 for (i
= 0, ret
= -ENODEV
; ret
&& i
< ARRAY_SIZE(oclass
); i
++) {
506 ret
= nouveau_object_new(nv_object(drm
), NVDRM_DEVICE
,
507 NVDRM_DISPLAY
, oclass
[i
],
508 NULL
, 0, &disp
->core
);
/* Pre-NV50 classes get the legacy backend; newer ones the NV50 path. */
512 if (nv_mclass(disp
->core
) < NV50_DISP_CLASS
)
513 ret
= nv04_display_create(dev
);
515 ret
= nv50_display_create(dev
);
522 goto disp_create_err
;
524 if (dev
->mode_config
.num_crtc
) {
525 ret
= nouveau_display_vblank_init(dev
);
530 nouveau_backlight_init(dev
);
/* Error unwind: undo poll init and mode config. */
536 drm_kms_helper_poll_fini(dev
);
537 drm_mode_config_cleanup(dev
);
/*
 * Full teardown, reverse of nouveau_display_create(): backlight, vblank
 * state, poll workers and mode config are released, the display engine
 * object is deleted, and the drm->display pointer is cleared.
 * NOTE(review): garbled extraction — the backend dtor call and the kfree
 * of `disp` are missing from this view.
 */
542 nouveau_display_destroy(struct drm_device
*dev
)
544 struct nouveau_display
*disp
= nouveau_display(dev
);
545 struct nouveau_drm
*drm
= nouveau_drm(dev
);
547 nouveau_backlight_exit(dev
);
548 nouveau_display_vblank_fini(dev
);
550 drm_kms_helper_poll_fini(dev
);
551 drm_mode_config_cleanup(dev
);
556 nouveau_object_del(nv_object(drm
), NVDRM_DEVICE
, NVDRM_DISPLAY
);
558 nouveau_drm(dev
)->display
= NULL
;
/*
 * Suspend path: stop the display (nouveau_display_fini), then unpin every
 * scanout framebuffer bo and unmap/unpin every crtc's cursor bo so VRAM
 * contents may be evicted. Paired with nouveau_display_repin()/resume.
 * NOTE(review): garbled extraction — `continue` on the NULL-fb check, any
 * NULL check on cursor bos, loop closes and the return are missing.
 */
563 nouveau_display_suspend(struct drm_device
*dev
)
565 struct nouveau_drm
*drm
= nouveau_drm(dev
);
566 struct drm_crtc
*crtc
;
568 nouveau_display_fini(dev
);
570 NV_INFO(drm
, "unpinning framebuffer(s)...\n");
571 list_for_each_entry(crtc
, &dev
->mode_config
.crtc_list
, head
) {
572 struct nouveau_framebuffer
*nouveau_fb
;
574 nouveau_fb
= nouveau_framebuffer(crtc
->primary
->fb
);
/* Skip crtcs without a bound framebuffer. */
575 if (!nouveau_fb
|| !nouveau_fb
->nvbo
)
578 nouveau_bo_unpin(nouveau_fb
->nvbo
);
/* Second pass: release each crtc's cursor buffer. */
581 list_for_each_entry(crtc
, &dev
->mode_config
.crtc_list
, head
) {
582 struct nouveau_crtc
*nv_crtc
= nouveau_crtc(crtc
);
584 nouveau_bo_unmap(nv_crtc
->cursor
.nvbo
);
585 nouveau_bo_unpin(nv_crtc
->cursor
.nvbo
);
/*
 * Resume helper, inverse of the unpin work in nouveau_display_suspend():
 * re-pin every scanout framebuffer bo into VRAM, then re-pin and re-map
 * each crtc's cursor bo, logging a single error if the cursor work fails.
 * NOTE(review): garbled extraction — `int ret;`, skip/`continue`
 * statements, the `if (!ret)` chaining between pin and map, loop closes
 * and the return are missing from this view.
 */
592 nouveau_display_repin(struct drm_device
*dev
)
594 struct nouveau_drm
*drm
= nouveau_drm(dev
);
595 struct drm_crtc
*crtc
;
598 list_for_each_entry(crtc
, &dev
->mode_config
.crtc_list
, head
) {
599 struct nouveau_framebuffer
*nouveau_fb
;
601 nouveau_fb
= nouveau_framebuffer(crtc
->primary
->fb
);
602 if (!nouveau_fb
|| !nouveau_fb
->nvbo
)
605 nouveau_bo_pin(nouveau_fb
->nvbo
, TTM_PL_FLAG_VRAM
);
608 list_for_each_entry(crtc
, &dev
->mode_config
.crtc_list
, head
) {
609 struct nouveau_crtc
*nv_crtc
= nouveau_crtc(crtc
);
611 ret
= nouveau_bo_pin(nv_crtc
->cursor
.nvbo
, TTM_PL_FLAG_VRAM
);
613 ret
= nouveau_bo_map(nv_crtc
->cursor
.nvbo
);
615 NV_ERROR(drm
, "Could not pin/map cursor.\n");
/*
 * Resume path: restart the display, invalidate each crtc's LUT depth so
 * the gamma table is re-uploaded by the forced modeset, run
 * drm_helper_resume_force_mode(), then restore each crtc's hardware
 * cursor image offset and saved position.
 * NOTE(review): garbled extraction — loop closes and any NULL check on
 * the cursor bo before dereferencing are missing from this view.
 */
620 nouveau_display_resume(struct drm_device
*dev
)
622 struct drm_crtc
*crtc
;
623 nouveau_display_init(dev
);
625 /* Force CLUT to get re-loaded during modeset */
626 list_for_each_entry(crtc
, &dev
->mode_config
.crtc_list
, head
) {
627 struct nouveau_crtc
*nv_crtc
= nouveau_crtc(crtc
);
629 nv_crtc
->lut
.depth
= 0;
632 drm_helper_resume_force_mode(dev
);
/* Restore hardware cursors from the state saved across suspend. */
634 list_for_each_entry(crtc
, &dev
->mode_config
.crtc_list
, head
) {
635 struct nouveau_crtc
*nv_crtc
= nouveau_crtc(crtc
);
636 u32 offset
= nv_crtc
->cursor
.nvbo
->bo
.offset
;
638 nv_crtc
->cursor
.set_offset(nv_crtc
, offset
);
639 nv_crtc
->cursor
.set_pos(nv_crtc
, nv_crtc
->cursor_saved_x
,
640 nv_crtc
->cursor_saved_y
);
/*
 * Queue a page flip on a channel: add the flip state to the channel's
 * pending-flip list (under dev->event_lock), synchronize the channel with
 * the old framebuffer's fence, emit the software page-flip method into the
 * ring (NV04 vs Fermi method encoding chosen by card type), and create a
 * fence returned through *pfence. The trailing locked section is the error
 * path that removes the state from the list again.
 * NOTE(review): garbled extraction — `unsigned long flags; int ret;`, the
 * error `goto fail` checks, FIRE_RING, list_del and returns are missing
 * from this view; code kept verbatim.
 */
645 nouveau_page_flip_emit(struct nouveau_channel
*chan
,
646 struct nouveau_bo
*old_bo
,
647 struct nouveau_bo
*new_bo
,
648 struct nouveau_page_flip_state
*s
,
649 struct nouveau_fence
**pfence
)
651 struct nouveau_fence_chan
*fctx
= chan
->fence
;
652 struct nouveau_drm
*drm
= chan
->drm
;
653 struct drm_device
*dev
= drm
->dev
;
657 /* Queue it to the pending list */
658 spin_lock_irqsave(&dev
->event_lock
, flags
);
659 list_add_tail(&s
->head
, &fctx
->flip
);
660 spin_unlock_irqrestore(&dev
->event_lock
, flags
);
662 /* Synchronize with the old framebuffer */
663 ret
= nouveau_fence_sync(old_bo
->bo
.sync_obj
, chan
);
667 /* Emit the pageflip */
668 ret
= RING_SPACE(chan
, 2);
/* Pre-Fermi and Fermi+ use different software-method subchannels. */
672 if (nv_device(drm
->device
)->card_type
< NV_C0
)
673 BEGIN_NV04(chan
, NvSubSw
, NV_SW_PAGE_FLIP
, 1);
675 BEGIN_NVC0(chan
, FermiSw
, NV_SW_PAGE_FLIP
, 1);
676 OUT_RING (chan
, 0x00000000);
679 ret
= nouveau_fence_new(chan
, false, pfence
);
/* Error path: unlink the queued flip state again under the event lock. */
685 spin_lock_irqsave(&dev
->event_lock
, flags
);
687 spin_unlock_irqrestore(&dev
->event_lock
, flags
);
/*
 * drm_crtc_funcs.page_flip implementation.
 * Sequence visible here: allocate a nouveau_page_flip_state; pin the new
 * bo into VRAM (when it differs from the current one); take the client
 * mutex; synchronize the kernel channel with any rendering fence on the
 * new bo; reserve the old bo; fill in the flip state (event, crtc index,
 * bpp, pitch, x/y); hold a vblank reference for the target crtc during the
 * flip; emit the flip either through the NV50+ EVO path
 * (nv50_display_flip_next) or, pre-NV50, by poking ImageBlit methods
 * directly and stashing the new bo in dispnv04->image[head]; then call
 * nouveau_page_flip_emit(), update crtc->primary->fb, fence the old bo,
 * unreserve and unpin. The trailing statements are the error-unwind path
 * (drop vblank ref, unreserve, unlock, unpin).
 * NOTE(review): garbled extraction — DRM_MODE_PAGE_FLIP_ASYNC gives swap
 * interval 0; `int ret;` declarations, error gotos, several OUT_RINGs,
 * kfree of `s`, braces and returns are missing from this view.
 */
692 nouveau_crtc_page_flip(struct drm_crtc
*crtc
, struct drm_framebuffer
*fb
,
693 struct drm_pending_vblank_event
*event
, u32 flags
)
695 const int swap_interval
= (flags
& DRM_MODE_PAGE_FLIP_ASYNC
) ? 0 : 1;
696 struct drm_device
*dev
= crtc
->dev
;
697 struct nouveau_drm
*drm
= nouveau_drm(dev
);
698 struct nouveau_bo
*old_bo
= nouveau_framebuffer(crtc
->primary
->fb
)->nvbo
;
699 struct nouveau_bo
*new_bo
= nouveau_framebuffer(fb
)->nvbo
;
700 struct nouveau_page_flip_state
*s
;
701 struct nouveau_channel
*chan
= drm
->channel
;
702 struct nouveau_fence
*fence
;
708 s
= kzalloc(sizeof(*s
), GFP_KERNEL
);
/* Pin the incoming scanout buffer unless it is already the current one. */
712 if (new_bo
!= old_bo
) {
713 ret
= nouveau_bo_pin(new_bo
, TTM_PL_FLAG_VRAM
);
718 mutex_lock(&chan
->cli
->mutex
);
720 /* synchronise rendering channel with the kernel's channel */
721 spin_lock(&new_bo
->bo
.bdev
->fence_lock
);
722 fence
= nouveau_fence_ref(new_bo
->bo
.sync_obj
);
723 spin_unlock(&new_bo
->bo
.bdev
->fence_lock
);
724 ret
= nouveau_fence_sync(fence
, chan
);
725 nouveau_fence_unref(&fence
);
729 ret
= ttm_bo_reserve(&old_bo
->bo
, true, false, false, NULL
);
733 /* Initialize a page flip struct */
734 *s
= (struct nouveau_page_flip_state
)
735 { { }, event
, nouveau_crtc(crtc
)->index
,
736 fb
->bits_per_pixel
, fb
->pitches
[0], crtc
->x
, crtc
->y
,
739 /* Keep vblanks on during flip, for the target crtc of this flip */
740 drm_vblank_get(dev
, nouveau_crtc(crtc
)->index
);
742 /* Emit a page flip */
743 if (nv_device(drm
->device
)->card_type
>= NV_50
) {
744 ret
= nv50_display_flip_next(crtc
, fb
, chan
, swap_interval
);
/* Pre-NV50 path: drive the flip with ImageBlit software methods. */
748 struct nv04_display
*dispnv04
= nv04_display(dev
);
749 int head
= nouveau_crtc(crtc
)->index
;
752 ret
= RING_SPACE(chan
, 8);
756 BEGIN_NV04(chan
, NvSubImageBlit
, 0x012c, 1);
758 BEGIN_NV04(chan
, NvSubImageBlit
, 0x0134, 1);
759 OUT_RING (chan
, head
);
760 BEGIN_NV04(chan
, NvSubImageBlit
, 0x0100, 1);
762 BEGIN_NV04(chan
, NvSubImageBlit
, 0x0130, 1);
/* Remember the bo scanned out on this head for later modesets. */
766 nouveau_bo_ref(new_bo
, &dispnv04
->image
[head
]);
769 ret
= nouveau_page_flip_emit(chan
, old_bo
, new_bo
, s
, &fence
);
772 mutex_unlock(&chan
->cli
->mutex
);
774 /* Update the crtc struct and cleanup */
775 crtc
->primary
->fb
= fb
;
777 nouveau_bo_fence(old_bo
, fence
);
778 ttm_bo_unreserve(&old_bo
->bo
);
779 if (old_bo
!= new_bo
)
780 nouveau_bo_unpin(old_bo
);
781 nouveau_fence_unref(&fence
);
/* Error unwind: release vblank ref, reservation, mutex and pin. */
785 drm_vblank_put(dev
, nouveau_crtc(crtc
)->index
);
786 ttm_bo_unreserve(&old_bo
->bo
);
788 mutex_unlock(&chan
->cli
->mutex
);
789 if (old_bo
!= new_bo
)
790 nouveau_bo_unpin(new_bo
);
/*
 * Complete the oldest pending page flip on a channel. Under dev->event_lock:
 * if the channel's flip list is empty this is a spurious completion (logged,
 * unlocked, and — beyond this view — returned as an error); otherwise the
 * first queued state is popped, its completion event is sent to userspace
 * (the crtc id passed to drm_send_vblank_event is only meaningful on NV50+,
 * per the comment kept below), the vblank reference taken at flip time is
 * dropped, and — beyond this view — the state is copied to *ps and freed.
 * NOTE(review): garbled extraction — `unsigned long flags;`, the `crtcid`
 * computation, list_del, the *ps copy, kfree and returns are missing.
 */
797 nouveau_finish_page_flip(struct nouveau_channel
*chan
,
798 struct nouveau_page_flip_state
*ps
)
800 struct nouveau_fence_chan
*fctx
= chan
->fence
;
801 struct nouveau_drm
*drm
= chan
->drm
;
802 struct drm_device
*dev
= drm
->dev
;
803 struct nouveau_page_flip_state
*s
;
807 spin_lock_irqsave(&dev
->event_lock
, flags
);
809 if (list_empty(&fctx
->flip
)) {
810 NV_ERROR(drm
, "unexpected pageflip\n");
811 spin_unlock_irqrestore(&dev
->event_lock
, flags
);
815 s
= list_first_entry(&fctx
->flip
, struct nouveau_page_flip_state
, head
);
817 /* Vblank timestamps/counts are only correct on >= NV-50 */
818 if (nv_device(drm
->device
)->card_type
>= NV_50
)
821 drm_send_vblank_event(dev
, crtcid
, s
->event
);
824 /* Give up ownership of vblank for page-flipped crtc */
825 drm_vblank_put(dev
, s
->crtc
);
832 spin_unlock_irqrestore(&dev
->event_lock
, flags
);
/*
 * Software-method handler invoked when the NV_SW_PAGE_FLIP method emitted
 * by nouveau_page_flip_emit() executes. Completes the pending flip; on
 * pre-NV50 hardware it also programs the new scanout base address directly
 * (base offset + y * pitch + x * bytes-per-pixel).
 * NOTE(review): garbled extraction — the closing braces and the handler's
 * return value are missing from this view.
 */
837 nouveau_flip_complete(void *data
)
839 struct nouveau_channel
*chan
= data
;
840 struct nouveau_drm
*drm
= chan
->drm
;
841 struct nouveau_page_flip_state state
;
843 if (!nouveau_finish_page_flip(chan
, &state
)) {
844 if (nv_device(drm
->device
)->card_type
< NV_50
) {
845 nv_set_crtc_base(drm
->dev
, state
.crtc
, state
.offset
+
846 state
.y
* state
.pitch
+
847 state
.x
* state
.bpp
/ 8);
/*
 * DRM dumb-buffer ioctl: compute a 256-byte-aligned pitch and a
 * page-aligned size from the requested width/height/bpp, allocate a GEM
 * buffer in VRAM, and return a handle for it. The unreference releases
 * the allocation reference regardless of handle-creation success, leaving
 * the handle (if created) as the sole reference.
 * NOTE(review): garbled extraction — `int ret;`, the error check between
 * allocation and handle creation, and the final return are missing.
 */
855 nouveau_display_dumb_create(struct drm_file
*file_priv
, struct drm_device
*dev
,
856 struct drm_mode_create_dumb
*args
)
858 struct nouveau_bo
*bo
;
861 args
->pitch
= roundup(args
->width
* (args
->bpp
/ 8), 256);
862 args
->size
= args
->pitch
* args
->height
;
863 args
->size
= roundup(args
->size
, PAGE_SIZE
);
865 ret
= nouveau_gem_new(dev
, args
->size
, 0, NOUVEAU_GEM_DOMAIN_VRAM
, 0, 0, &bo
);
869 ret
= drm_gem_handle_create(file_priv
, &bo
->gem
, &args
->handle
);
870 drm_gem_object_unreference_unlocked(&bo
->gem
);
875 nouveau_display_dumb_map_offset(struct drm_file
*file_priv
,
876 struct drm_device
*dev
,
877 uint32_t handle
, uint64_t *poffset
)
879 struct drm_gem_object
*gem
;
881 gem
= drm_gem_object_lookup(dev
, file_priv
, handle
);
883 struct nouveau_bo
*bo
= nouveau_gem_object(gem
);
884 *poffset
= drm_vma_node_offset_addr(&bo
->bo
.vma_node
);
885 drm_gem_object_unreference_unlocked(gem
);