/*
 * Copyright (C) 2008 Maarten Maathuis.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
#include <drm/drm_crtc_helper.h>

#include <nvif/class.h>

#include "nouveau_fbcon.h"
#include "dispnv04/hw.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

#include "nouveau_fence.h"

#include <nvif/cl0046.h>
#include <nvif/event.h>
static int
nouveau_display_vblank_handler(struct nvif_notify *notify)
{
	struct nouveau_crtc *nv_crtc =
		container_of(notify, typeof(*nv_crtc), vblank);
	drm_handle_vblank(nv_crtc->base.dev, nv_crtc->index);
	return NVIF_NOTIFY_KEEP;
}
int
nouveau_display_vblank_enable(struct drm_device *dev, unsigned int pipe)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (nv_crtc->index == pipe) {
			nvif_notify_get(&nv_crtc->vblank);
			return 0;
		}
	}

	return -EINVAL;
}
void
nouveau_display_vblank_disable(struct drm_device *dev, unsigned int pipe)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (nv_crtc->index == pipe) {
			nvif_notify_put(&nv_crtc->vblank);
			return;
		}
	}
}
static inline int
calc(int blanks, int blanke, int total, int line)
{
	if (blanke >= blanks) {
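/*
 * calc() above feeds nouveau_display_scanoutpos_head() below: it maps the
 * hardware's raw vertical line counter (reported relative to the programmed
 * vblank start/end) onto a position relative to the start of active scanout.
 * Presumably a negative result therefore means the head is currently inside
 * vblank, which is what the DRM_SCANOUTPOS_IN_VBLANK flag reports further on.
 */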
static int
nouveau_display_scanoutpos_head(struct drm_crtc *crtc, int *vpos, int *hpos,
				ktime_t *stime, ktime_t *etime)
{
	struct {
		struct nv04_disp_mthd_v0 base;
		struct nv04_disp_scanoutpos_v0 scan;
	} args = {
		.base.method = NV04_DISP_SCANOUTPOS,
		.base.head = nouveau_crtc(crtc)->index,
	};
	struct nouveau_display *disp = nouveau_display(crtc->dev);
	struct drm_vblank_crtc *vblank = &crtc->dev->vblank[drm_crtc_index(crtc)];
	int ret, retry = 1;

	do {
		ret = nvif_mthd(&disp->disp, 0, &args, sizeof(args));
		if (ret != 0)
			return 0;

		if (args.scan.vline) {
			ret |= DRM_SCANOUTPOS_ACCURATE;
			ret |= DRM_SCANOUTPOS_VALID;
			break;
		}

		if (retry) ndelay(vblank->linedur_ns);
	} while (retry--);

	*hpos = args.scan.hline;
	*vpos = calc(args.scan.vblanks, args.scan.vblanke,
		     args.scan.vtotal, args.scan.vline);
	if (stime) *stime = ns_to_ktime(args.scan.time[0]);
	if (etime) *etime = ns_to_ktime(args.scan.time[1]);

	if (*vpos < 0)
		ret |= DRM_SCANOUTPOS_IN_VBLANK;

	return ret;
}
int
nouveau_display_scanoutpos(struct drm_device *dev, unsigned int pipe,
			   unsigned int flags, int *vpos, int *hpos,
			   ktime_t *stime, ktime_t *etime,
			   const struct drm_display_mode *mode)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (nouveau_crtc(crtc)->index == pipe) {
			return nouveau_display_scanoutpos_head(crtc, vpos, hpos,
							       stime, etime);
		}
	}

	return 0;
}
int
nouveau_display_vblstamp(struct drm_device *dev, unsigned int pipe,
			 int *max_error, struct timeval *time, unsigned flags)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (nouveau_crtc(crtc)->index == pipe) {
			return drm_calc_vbltimestamp_from_scanoutpos(dev,
					pipe, max_error, time, flags,
					&crtc->hwmode);
		}
	}

	return -EINVAL;
}
static void
nouveau_display_vblank_fini(struct drm_device *dev)
{
	struct drm_crtc *crtc;

	drm_vblank_cleanup(dev);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		nvif_notify_fini(&nv_crtc->vblank);
	}
}
static int
nouveau_display_vblank_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_crtc *crtc;
	int ret;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		ret = nvif_notify_init(&disp->disp,
				       nouveau_display_vblank_handler, false,
				       NV04_DISP_NTFY_VBLANK,
				       &(struct nvif_notify_head_req_v0) {
					.head = nv_crtc->index,
				       },
				       sizeof(struct nvif_notify_head_req_v0),
				       sizeof(struct nvif_notify_head_rep_v0),
				       &nv_crtc->vblank);
		if (ret) {
			nouveau_display_vblank_fini(dev);
			return ret;
		}
	}

	ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
	if (ret) {
		nouveau_display_vblank_fini(dev);
		return ret;
	}

	return 0;
}
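/*
 * The per-head vblank notifiers set up above are only armed on demand:
 * nouveau_display_vblank_enable()/_disable() call nvif_notify_get()/_put()
 * when DRM turns vblank interrupts on or off for a head, and
 * nouveau_display_vblank_fini() tears them down again with nvif_notify_fini().
 */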
static void
nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);
	struct nouveau_display *disp = nouveau_display(drm_fb->dev);

	if (disp->fb_dtor)
		disp->fb_dtor(drm_fb);

	if (fb->nvbo)
		drm_gem_object_unreference_unlocked(&fb->nvbo->gem);

	drm_framebuffer_cleanup(drm_fb);
	kfree(fb);
}
static int
nouveau_user_framebuffer_create_handle(struct drm_framebuffer *drm_fb,
				       struct drm_file *file_priv,
				       unsigned int *handle)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	return drm_gem_handle_create(file_priv, &fb->nvbo->gem, handle);
}
static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
	.destroy = nouveau_user_framebuffer_destroy,
	.create_handle = nouveau_user_framebuffer_create_handle,
};
int
nouveau_framebuffer_init(struct drm_device *dev,
			 struct nouveau_framebuffer *nv_fb,
			 const struct drm_mode_fb_cmd2 *mode_cmd,
			 struct nouveau_bo *nvbo)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_framebuffer *fb = &nv_fb->base;
	int ret;

	drm_helper_mode_fill_fb_struct(fb, mode_cmd);
	nv_fb->nvbo = nvbo;

	ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs);
	if (ret)
		return ret;

	if (disp->fb_ctor) {
		ret = disp->fb_ctor(fb);
		if (ret)
			disp->fb_dtor(fb);
	}

	return ret;
}
static struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
				struct drm_file *file_priv,
				const struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct nouveau_framebuffer *nouveau_fb;
	struct drm_gem_object *gem;
	int ret = -ENOMEM;

	gem = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
	if (!gem)
		return ERR_PTR(-ENOENT);

	nouveau_fb = kzalloc(sizeof(struct nouveau_framebuffer), GFP_KERNEL);
	if (!nouveau_fb)
		goto unreference;

	ret = nouveau_framebuffer_init(dev, nouveau_fb, mode_cmd, nouveau_gem_object(gem));
	if (ret)
		goto err;

	return &nouveau_fb->base;

err:
	kfree(nouveau_fb);
unreference:
	drm_gem_object_unreference(gem);
	return ERR_PTR(ret);
}
static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
};
struct nouveau_drm_prop_enum_list {
	u8 gen_mask;
	int type;
	char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
	{ 6, UNDERSCAN_AUTO, "auto" },
	{ 6, UNDERSCAN_OFF, "off" },
	{ 6, UNDERSCAN_ON, "on" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
	{ 7, DITHERING_MODE_AUTO, "auto" },
	{ 7, DITHERING_MODE_OFF, "off" },
	{ 1, DITHERING_MODE_ON, "on" },
	{ 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
	{ 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
	{ 4, DITHERING_MODE_TEMPORAL, "temporal" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
	{ 6, DITHERING_DEPTH_AUTO, "auto" },
	{ 6, DITHERING_DEPTH_6BPC, "6 bpc" },
	{ 6, DITHERING_DEPTH_8BPC, "8 bpc" },
	{}
};
#define PROP_ENUM(p,gen,n,list) do { \
	struct nouveau_drm_prop_enum_list *l = (list); \
	int c = 0; \
	while (l->gen_mask) { \
		if (l->gen_mask & (1 << (gen))) \
			c++; \
		l++; \
	} \
	if (c) { \
		p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c); \
		l = (list); \
		c = 0; \
		while (p && l->gen_mask) { \
			if (l->gen_mask & (1 << (gen))) { \
				drm_property_add_enum(p, c, l->type, l->name); \
				c++; \
			} \
			l++; \
		} \
	} \
} while(0)
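/*
 * PROP_ENUM() first counts the entries of a nouveau_drm_prop_enum_list whose
 * gen_mask contains the current display generation bit, creates an enum
 * property with that many values, and then registers each matching entry via
 * drm_property_add_enum().  nouveau_display_create_properties() below invokes
 * it as, e.g., PROP_ENUM(disp->dithering_mode, gen, "dithering mode",
 * dither_mode).
 */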
int
nouveau_display_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_connector *connector;
	int ret;

	ret = disp->init(dev);
	if (ret)
		return ret;

	/* enable polling for external displays */
	drm_kms_helper_poll_enable(dev);

	/* enable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_get(&conn->hpd);
	}

	/* enable flip completion events */
	nvif_notify_get(&drm->flip);

	return ret;
}
void
nouveau_display_fini(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_connector *connector;
	int head;

	/* Make sure that drm and hw vblank irqs get properly disabled. */
	for (head = 0; head < dev->mode_config.num_crtc; head++)
		drm_vblank_off(dev, head);

	/* disable flip completion events */
	nvif_notify_put(&drm->flip);

	/* disable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_put(&conn->hpd);
	}

	drm_kms_helper_poll_disable(dev);
	disp->fini(dev);
}
static void
nouveau_display_create_properties(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	int gen;

	if (disp->disp.oclass < NV50_DISP)
		gen = 0;
	else
	if (disp->disp.oclass < GF110_DISP)
		gen = 1;
	else
		gen = 2;

	PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
	PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
	PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

	disp->underscan_hborder_property =
		drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

	disp->underscan_vborder_property =
		drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

	if (gen < 1)
		return;

	disp->vibrant_hue_property =
		drm_property_create_range(dev, 0, "vibrant hue", 0, 180);

	disp->color_vibrance_property =
		drm_property_create_range(dev, 0, "color vibrance", 0, 200);
}
int
nouveau_display_create(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvkm_device *device = nvxx_device(&drm->device);
	struct nouveau_display *disp;
	int ret;

	disp = drm->display = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	drm_mode_config_init(dev);
	drm_mode_create_scaling_mode_property(dev);
	drm_mode_create_dvi_i_properties(dev);

	dev->mode_config.funcs = &nouveau_mode_config_funcs;
	dev->mode_config.fb_base = device->func->resource_addr(device, 1);

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;
	if (drm->device.info.family < NV_DEVICE_INFO_V0_CELSIUS) {
		dev->mode_config.max_width = 2048;
		dev->mode_config.max_height = 2048;
	} else
	if (drm->device.info.family < NV_DEVICE_INFO_V0_TESLA) {
		dev->mode_config.max_width = 4096;
		dev->mode_config.max_height = 4096;
	} else
	if (drm->device.info.family < NV_DEVICE_INFO_V0_FERMI) {
		dev->mode_config.max_width = 8192;
		dev->mode_config.max_height = 8192;
	} else {
		dev->mode_config.max_width = 16384;
		dev->mode_config.max_height = 16384;
	}

	dev->mode_config.preferred_depth = 24;
	dev->mode_config.prefer_shadow = 1;

	if (drm->device.info.chipset < 0x11)
		dev->mode_config.async_page_flip = false;
	else
		dev->mode_config.async_page_flip = true;

	drm_kms_helper_poll_init(dev);
	drm_kms_helper_poll_disable(dev);

	if (nouveau_modeset != 2 && drm->vbios.dcb.entries) {
		static const u16 oclass[] = {
			/* display object classes supported by the driver,
			 * probed newest first */
		};
		int i;

		for (i = 0, ret = -ENODEV; ret && i < ARRAY_SIZE(oclass); i++) {
			ret = nvif_object_init(&drm->device.object, 0,
					       oclass[i], NULL, 0, &disp->disp);
		}

		if (ret == 0) {
			nouveau_display_create_properties(dev);
			if (disp->disp.oclass < NV50_DISP)
				ret = nv04_display_create(dev);
			else
				ret = nv50_display_create(dev);
		}
	} else {
		ret = 0;
	}

	if (ret)
		goto disp_create_err;

	if (dev->mode_config.num_crtc) {
		ret = nouveau_display_vblank_init(dev);
		if (ret)
			goto vblank_err;
	}

	nouveau_backlight_init(dev);
	return 0;

vblank_err:
	disp->dtor(dev);
disp_create_err:
	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);
	return ret;
}
void
nouveau_display_destroy(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);

	nouveau_backlight_exit(dev);
	nouveau_display_vblank_fini(dev);

	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);

	if (disp->dtor)
		disp->dtor(dev);

	nvif_object_fini(&disp->disp);

	nouveau_drm(dev)->display = NULL;
	kfree(disp);
}
int
nouveau_display_suspend(struct drm_device *dev, bool runtime)
{
	struct drm_crtc *crtc;

	nouveau_display_fini(dev);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_framebuffer *nouveau_fb;

		nouveau_fb = nouveau_framebuffer(crtc->primary->fb);
		if (!nouveau_fb || !nouveau_fb->nvbo)
			continue;

		nouveau_bo_unpin(nouveau_fb->nvbo);
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (nv_crtc->cursor.nvbo) {
			if (nv_crtc->cursor.set_offset)
				nouveau_bo_unmap(nv_crtc->cursor.nvbo);
			nouveau_bo_unpin(nv_crtc->cursor.nvbo);
		}
	}

	return 0;
}
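/*
 * nouveau_display_resume() below undoes the suspend path above: scanout and
 * cursor buffers are pinned back into VRAM (and cursors re-mapped where a
 * set_offset hook exists) before the display engine is re-initialised and the
 * saved cursor position is restored.
 */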
void
nouveau_display_resume(struct drm_device *dev, bool runtime)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_crtc *crtc;
	int ret, head;

	/* re-pin fb/cursors */
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_framebuffer *nouveau_fb;

		nouveau_fb = nouveau_framebuffer(crtc->primary->fb);
		if (!nouveau_fb || !nouveau_fb->nvbo)
			continue;

		ret = nouveau_bo_pin(nouveau_fb->nvbo, TTM_PL_FLAG_VRAM, true);
		if (ret)
			NV_ERROR(drm, "Could not pin framebuffer\n");
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (!nv_crtc->cursor.nvbo)
			continue;

		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret && nv_crtc->cursor.set_offset)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			NV_ERROR(drm, "Could not pin/map cursor.\n");
	}

	nouveau_display_init(dev);

	/* Force CLUT to get re-loaded during modeset */
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		nv_crtc->lut.depth = 0;
	}

	/* This should ensure we don't hit a locking problem when someone
	 * wakes us up via a connector. We should never go into suspend
	 * while the display is on anyways.
	 */
	if (runtime)
		return;

	drm_helper_resume_force_mode(dev);

	/* Make sure that drm and hw vblank irqs get resumed if needed. */
	for (head = 0; head < dev->mode_config.num_crtc; head++)
		drm_vblank_on(dev, head);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		if (!nv_crtc->cursor.nvbo)
			continue;

		if (nv_crtc->cursor.set_offset)
			nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset);
		nv_crtc->cursor.set_pos(nv_crtc, nv_crtc->cursor_saved_x,
						 nv_crtc->cursor_saved_y);
	}
}
static int
nouveau_page_flip_emit(struct nouveau_channel *chan,
		       struct nouveau_bo *old_bo,
		       struct nouveau_bo *new_bo,
		       struct nouveau_page_flip_state *s,
		       struct nouveau_fence **pfence)
{
	struct nouveau_fence_chan *fctx = chan->fence;
	struct nouveau_drm *drm = chan->drm;
	struct drm_device *dev = drm->dev;
	unsigned long flags;
	int ret;

	/* Queue it to the pending list */
	spin_lock_irqsave(&dev->event_lock, flags);
	list_add_tail(&s->head, &fctx->flip);
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/* Synchronize with the old framebuffer */
	ret = nouveau_fence_sync(old_bo, chan, false, false);
	if (ret)
		goto fail;

	/* Emit the pageflip */
	ret = RING_SPACE(chan, 2);
	if (ret)
		goto fail;

	if (drm->device.info.family < NV_DEVICE_INFO_V0_FERMI)
		BEGIN_NV04(chan, NvSubSw, NV_SW_PAGE_FLIP, 1);
	else
		BEGIN_NVC0(chan, FermiSw, NV_SW_PAGE_FLIP, 1);
	OUT_RING (chan, 0x00000000);
	FIRE_RING (chan);

	ret = nouveau_fence_new(chan, false, pfence);
	if (ret)
		goto fail;

	return 0;
fail:
	spin_lock_irqsave(&dev->event_lock, flags);
	list_del(&s->head);
	spin_unlock_irqrestore(&dev->event_lock, flags);
	return ret;
}
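/*
 * nouveau_page_flip_emit() is called by nouveau_crtc_page_flip() below with
 * cli->mutex held: the flip state is queued under dev->event_lock, the channel
 * is synchronised with the old framebuffer, a software NV_SW_PAGE_FLIP method
 * is emitted, and a fence is returned so the caller can attach it to the old
 * buffer before unpinning it.
 */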
int
nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct drm_pending_vblank_event *event, u32 flags)
{
	const int swap_interval = (flags & DRM_MODE_PAGE_FLIP_ASYNC) ? 0 : 1;
	struct drm_device *dev = crtc->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_bo *old_bo = nouveau_framebuffer(crtc->primary->fb)->nvbo;
	struct nouveau_bo *new_bo = nouveau_framebuffer(fb)->nvbo;
	struct nouveau_page_flip_state *s;
	struct nouveau_channel *chan;
	struct nouveau_cli *cli;
	struct nouveau_fence *fence;
	int ret;

	chan = drm->channel;
	if (!chan)
		return -ENODEV;
	cli = (void *)chan->user.client;

	s = kzalloc(sizeof(*s), GFP_KERNEL);
	if (!s)
		return -ENOMEM;

	if (new_bo != old_bo) {
		ret = nouveau_bo_pin(new_bo, TTM_PL_FLAG_VRAM, true);
		if (ret)
			goto fail_free;
	}

	mutex_lock(&cli->mutex);
	ret = ttm_bo_reserve(&new_bo->bo, true, false, false, NULL);
	if (ret)
		goto fail_unpin;

	/* synchronise rendering channel with the kernel's channel */
	ret = nouveau_fence_sync(new_bo, chan, false, true);
	if (ret) {
		ttm_bo_unreserve(&new_bo->bo);
		goto fail_unpin;
	}

	if (new_bo != old_bo) {
		ttm_bo_unreserve(&new_bo->bo);

		ret = ttm_bo_reserve(&old_bo->bo, true, false, false, NULL);
		if (ret)
			goto fail_unpin;
	}

	/* Initialize a page flip struct */
	*s = (struct nouveau_page_flip_state)
		{ { }, event, nouveau_crtc(crtc)->index,
		  fb->bits_per_pixel, fb->pitches[0], crtc->x, crtc->y,
		  new_bo->bo.offset };

	/* Keep vblanks on during flip, for the target crtc of this flip */
	drm_vblank_get(dev, nouveau_crtc(crtc)->index);

	/* Emit a page flip */
	if (drm->device.info.family >= NV_DEVICE_INFO_V0_TESLA) {
		ret = nv50_display_flip_next(crtc, fb, chan, swap_interval);
		if (ret)
			goto fail_unreserve;
	} else {
		struct nv04_display *dispnv04 = nv04_display(dev);
		int head = nouveau_crtc(crtc)->index;

		if (swap_interval) {
			ret = RING_SPACE(chan, 8);
			if (ret)
				goto fail_unreserve;

			BEGIN_NV04(chan, NvSubImageBlit, 0x012c, 1);
			OUT_RING (chan, 0);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0134, 1);
			OUT_RING (chan, head);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0100, 1);
			OUT_RING (chan, 0);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0130, 1);
			OUT_RING (chan, 0);
		}

		nouveau_bo_ref(new_bo, &dispnv04->image[head]);
	}

	ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence);
	if (ret)
		goto fail_unreserve;
	mutex_unlock(&cli->mutex);

	/* Update the crtc struct and cleanup */
	crtc->primary->fb = fb;

	nouveau_bo_fence(old_bo, fence, false);
	ttm_bo_unreserve(&old_bo->bo);
	if (old_bo != new_bo)
		nouveau_bo_unpin(old_bo);
	nouveau_fence_unref(&fence);
	return 0;

fail_unreserve:
	drm_vblank_put(dev, nouveau_crtc(crtc)->index);
	ttm_bo_unreserve(&old_bo->bo);
fail_unpin:
	mutex_unlock(&cli->mutex);
	if (old_bo != new_bo)
		nouveau_bo_unpin(new_bo);
fail_free:
	kfree(s);
	return ret;
}
int
nouveau_finish_page_flip(struct nouveau_channel *chan,
			 struct nouveau_page_flip_state *ps)
{
	struct nouveau_fence_chan *fctx = chan->fence;
	struct nouveau_drm *drm = chan->drm;
	struct drm_device *dev = drm->dev;
	struct nouveau_page_flip_state *s;
	unsigned long flags;

	spin_lock_irqsave(&dev->event_lock, flags);

	if (list_empty(&fctx->flip)) {
		NV_ERROR(drm, "unexpected pageflip\n");
		spin_unlock_irqrestore(&dev->event_lock, flags);
		return -EINVAL;
	}

	s = list_first_entry(&fctx->flip, struct nouveau_page_flip_state, head);
	if (s->event) {
		if (drm->device.info.family < NV_DEVICE_INFO_V0_TESLA) {
			drm_arm_vblank_event(dev, s->crtc, s->event);
		} else {
			drm_send_vblank_event(dev, s->crtc, s->event);

			/* Give up ownership of vblank for page-flipped crtc */
			drm_vblank_put(dev, s->crtc);
		}
	} else {
		/* Give up ownership of vblank for page-flipped crtc */
		drm_vblank_put(dev, s->crtc);
	}

	list_del(&s->head);
	if (ps)
		*ps = *s;
	kfree(s);

	spin_unlock_irqrestore(&dev->event_lock, flags);
	return 0;
}
int
nouveau_flip_complete(struct nvif_notify *notify)
{
	struct nouveau_drm *drm = container_of(notify, typeof(*drm), flip);
	struct nouveau_channel *chan = drm->channel;
	struct nouveau_page_flip_state state;

	if (!nouveau_finish_page_flip(chan, &state)) {
		if (drm->device.info.family < NV_DEVICE_INFO_V0_TESLA) {
			nv_set_crtc_base(drm->dev, state.crtc, state.offset +
					 state.y * state.pitch +
					 state.x * state.bpp / 8);
		}
	}

	return NVIF_NOTIFY_KEEP;
}
int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
			    struct drm_mode_create_dumb *args)
{
	struct nouveau_bo *bo;
	uint32_t domain;
	int ret;

	args->pitch = roundup(args->width * (args->bpp / 8), 256);
	args->size = args->pitch * args->height;
	args->size = roundup(args->size, PAGE_SIZE);

	/* Use VRAM if there is any; otherwise fall back to system memory */
	if (nouveau_drm(dev)->device.info.ram_size != 0)
		domain = NOUVEAU_GEM_DOMAIN_VRAM;
	else
		domain = NOUVEAU_GEM_DOMAIN_GART;

	ret = nouveau_gem_new(dev, args->size, 0, domain, 0, 0, &bo);
	if (ret)
		return ret;

	ret = drm_gem_handle_create(file_priv, &bo->gem, &args->handle);
	drm_gem_object_unreference_unlocked(&bo->gem);
	return ret;
}
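/*
 * Worked example for nouveau_display_dumb_create() above: a 1920x1080, 32 bpp
 * dumb buffer gets pitch = roundup(1920 * 4, 256) = 7680 bytes and
 * size = roundup(7680 * 1080, PAGE_SIZE) = 8294400 bytes (already a multiple
 * of a 4 KiB page).  The buffer lands in VRAM when the board reports any
 * dedicated memory and in GART otherwise.
 */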
int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
				struct drm_device *dev,
				uint32_t handle, uint64_t *poffset)
{
	struct drm_gem_object *gem;

	gem = drm_gem_object_lookup(dev, file_priv, handle);
	if (gem) {
		struct nouveau_bo *bo = nouveau_gem_object(gem);
		*poffset = drm_vma_node_offset_addr(&bo->bo.vma_node);
		drm_gem_object_unreference_unlocked(gem);
		return 0;
	}

	return -ENOENT;
}