/*
 * Copyright (C) 2008 Maarten Maathuis.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include "drmP.h"
#include "drm_crtc_helper.h"
#include "nouveau_drv.h"
#include "nouveau_fb.h"
#include "nouveau_fbcon.h"
#include "nouveau_hw.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_connector.h"
#include "nouveau_software.h"
#include "nouveau_gpio.h"
#include "nouveau_fence.h"
#include "nv50_display.h"

static void
nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	if (fb->nvbo)
		drm_gem_object_unreference_unlocked(fb->nvbo->gem);

	drm_framebuffer_cleanup(drm_fb);
	kfree(fb);
}

static int
nouveau_user_framebuffer_create_handle(struct drm_framebuffer *drm_fb,
				       struct drm_file *file_priv,
				       unsigned int *handle)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	return drm_gem_handle_create(file_priv, fb->nvbo->gem, handle);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
	.destroy = nouveau_user_framebuffer_destroy,
	.create_handle = nouveau_user_framebuffer_create_handle,
};

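/*
 * Fill in a nouveau_framebuffer for a bound nouveau_bo.  On NV50 and later
 * this also precomputes the EVO DMA object (r_dma), surface format
 * (r_format) and pitch/tiling word (r_pitch) that the display code uses,
 * based on the buffer's tile flags and the framebuffer depth.
 */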
int
nouveau_framebuffer_init(struct drm_device *dev,
			 struct nouveau_framebuffer *nv_fb,
			 struct drm_mode_fb_cmd2 *mode_cmd,
			 struct nouveau_bo *nvbo)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct drm_framebuffer *fb = &nv_fb->base;
	int ret;

	ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs);
	if (ret)
		return ret;

	drm_helper_mode_fill_fb_struct(fb, mode_cmd);
	nv_fb->nvbo = nvbo;

	if (dev_priv->card_type >= NV_50) {
		u32 tile_flags = nouveau_bo_tile_layout(nvbo);
		if (tile_flags == 0x7a00 ||
		    tile_flags == 0xfe00)
			nv_fb->r_dma = NvEvoFB32;
		else
		if (tile_flags == 0x7000)
			nv_fb->r_dma = NvEvoFB16;
		else
			nv_fb->r_dma = NvEvoVRAM_LP;

		switch (fb->depth) {
		case  8: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_8; break;
		case 15: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_15; break;
		case 16: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_16; break;
		case 24:
		case 32: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_24; break;
		case 30: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_30; break;
		default:
			 NV_ERROR(dev, "unknown depth %d\n", fb->depth);
			 return -EINVAL;
		}

		if (dev_priv->chipset == 0x50)
			nv_fb->r_format |= (tile_flags << 8);

		if (!tile_flags) {
			if (dev_priv->card_type < NV_D0)
				nv_fb->r_pitch = 0x00100000 | fb->pitches[0];
			else
				nv_fb->r_pitch = 0x01000000 | fb->pitches[0];
		} else {
			u32 mode = nvbo->tile_mode;
			if (dev_priv->card_type >= NV_C0)
				mode >>= 4;
			nv_fb->r_pitch = ((fb->pitches[0] / 4) << 4) | mode;
		}
	}

	return 0;
}

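/*
 * .fb_create hook: look up the GEM object named by the userspace handle
 * and wrap it in a new nouveau_framebuffer.
 */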
static struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
				struct drm_file *file_priv,
				struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct nouveau_framebuffer *nouveau_fb;
	struct drm_gem_object *gem;
	int ret;

	gem = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
	if (!gem)
		return ERR_PTR(-ENOENT);

	nouveau_fb = kzalloc(sizeof(struct nouveau_framebuffer), GFP_KERNEL);
	if (!nouveau_fb)
		return ERR_PTR(-ENOMEM);

	ret = nouveau_framebuffer_init(dev, nouveau_fb, mode_cmd, nouveau_gem_object(gem));
	if (ret) {
		drm_gem_object_unreference(gem);
		return ERR_PTR(ret);
	}

	return &nouveau_fb->base;
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
};

struct nouveau_drm_prop_enum_list {
	u8 gen_mask;
	int type;
	char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
	{ 6, UNDERSCAN_AUTO, "auto" },
	{ 6, UNDERSCAN_OFF, "off" },
	{ 6, UNDERSCAN_ON, "on" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
	{ 7, DITHERING_MODE_AUTO, "auto" },
	{ 7, DITHERING_MODE_OFF, "off" },
	{ 1, DITHERING_MODE_ON, "on" },
	{ 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
	{ 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
	{ 4, DITHERING_MODE_TEMPORAL, "temporal" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
	{ 6, DITHERING_DEPTH_AUTO, "auto" },
	{ 6, DITHERING_DEPTH_6BPC, "6 bpc" },
	{ 6, DITHERING_DEPTH_8BPC, "8 bpc" },
	{}
};

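/*
 * gen_mask in the tables above is a bitmask of display generations the
 * entry applies to (bit 0: pre-NV50, bit 1: NV50 family up to NVD0,
 * bit 2: NVD0 and newer).  PROP_ENUM() first counts the entries valid for
 * the given generation, then creates the enum property and adds them to it.
 */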
#define PROP_ENUM(p,gen,n,list) do {                                           \
	struct nouveau_drm_prop_enum_list *l = (list);                         \
	int c = 0;                                                             \
	while (l->gen_mask) {                                                  \
		if (l->gen_mask & (1 << (gen)))                                \
			c++;                                                   \
		l++;                                                           \
	}                                                                      \
	if (c) {                                                               \
		p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c);       \
		l = (list);                                                    \
		c = 0;                                                         \
		while (p && l->gen_mask) {                                     \
			if (l->gen_mask & (1 << (gen))) {                      \
				drm_property_add_enum(p, c, l->type, l->name); \
				c++;                                           \
			}                                                      \
			l++;                                                   \
		}                                                              \
	}                                                                      \
} while(0)

int
nouveau_display_init(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_display_engine *disp = &dev_priv->engine.display;
	struct drm_connector *connector;
	int ret;

	ret = disp->init(dev);
	if (ret)
		return ret;

	/* power on internal panel if it's not already.  the init tables of
	 * some vbios default this to off for some reason, causing the
	 * panel to not work after resume
	 */
	if (nouveau_gpio_func_get(dev, DCB_GPIO_PANEL_POWER) == 0) {
		nouveau_gpio_func_set(dev, DCB_GPIO_PANEL_POWER, true);
		msleep(300);
	}

	/* enable polling for external displays */
	drm_kms_helper_poll_enable(dev);

	/* enable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nouveau_gpio_irq(dev, 0, conn->hpd, 0xff, true);
	}

	return ret;
}

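/*
 * Tear-down counterpart to nouveau_display_init(), likely used on the
 * suspend/unload paths: hotplug interrupts and connector polling are
 * stopped before the display engine itself is shut down.
 */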
void
nouveau_display_fini(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_display_engine *disp = &dev_priv->engine.display;
	struct drm_connector *connector;

	/* disable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nouveau_gpio_irq(dev, 0, conn->hpd, 0xff, false);
	}

	drm_kms_helper_poll_disable(dev);
	disp->fini(dev);
}

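/*
 * One-time KMS setup: initialise the mode config, create the connector
 * properties, set the resolution limits for the card generation and set up
 * vblank handling before the hardware-specific display engine is created.
 */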
int
nouveau_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_display_engine *disp = &dev_priv->engine.display;
	int ret, gen;

	drm_mode_config_init(dev);
	drm_mode_create_scaling_mode_property(dev);
	drm_mode_create_dvi_i_properties(dev);

	if (dev_priv->card_type < NV_50)
		gen = 0;
	else
	if (dev_priv->card_type < NV_D0)
		gen = 1;
	else
		gen = 2;

	PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
	PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
	PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

	disp->underscan_hborder_property =
		drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

	disp->underscan_vborder_property =
		drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

	if (gen >= 1) {
		disp->vibrant_hue_property =
			drm_property_create(dev, DRM_MODE_PROP_RANGE,
					    "vibrant hue", 2);
		disp->vibrant_hue_property->values[0] = 0;
		disp->vibrant_hue_property->values[1] = 180; /* -90..+90 */

		disp->color_vibrance_property =
			drm_property_create(dev, DRM_MODE_PROP_RANGE,
					    "color vibrance", 2);
		disp->color_vibrance_property->values[0] = 0;
		disp->color_vibrance_property->values[1] = 200; /* -100..+100 */
	}

	dev->mode_config.funcs = &nouveau_mode_config_funcs;
	dev->mode_config.fb_base = pci_resource_start(dev->pdev, 1);

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;
	if (dev_priv->card_type < NV_10) {
		dev->mode_config.max_width = 2048;
		dev->mode_config.max_height = 2048;
	} else
	if (dev_priv->card_type < NV_50) {
		dev->mode_config.max_width = 4096;
		dev->mode_config.max_height = 4096;
	} else {
		dev->mode_config.max_width = 8192;
		dev->mode_config.max_height = 8192;
	}

	dev->mode_config.preferred_depth = 24;
	dev->mode_config.prefer_shadow = 1;

	drm_kms_helper_poll_init(dev);
	drm_kms_helper_poll_disable(dev);

	ret = disp->create(dev);
	if (ret)
		goto disp_create_err;

	if (dev->mode_config.num_crtc) {
		ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
		if (ret)
			goto vblank_err;
	}

	return 0;

vblank_err:
	disp->destroy(dev);
disp_create_err:
	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);
	return ret;
}

void
nouveau_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_display_engine *disp = &dev_priv->engine.display;

	drm_vblank_cleanup(dev);

	disp->destroy(dev);

	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);
}

int
nouveau_vblank_enable(struct drm_device *dev, int crtc)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;

	if (dev_priv->card_type >= NV_50)
		nv_mask(dev, NV50_PDISPLAY_INTR_EN_1, 0,
			NV50_PDISPLAY_INTR_EN_1_VBLANK_CRTC_(crtc));
	else
		NVWriteCRTC(dev, crtc, NV_PCRTC_INTR_EN_0,
			    NV_PCRTC_INTR_0_VBLANK);

	return 0;
}

void
nouveau_vblank_disable(struct drm_device *dev, int crtc)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;

	if (dev_priv->card_type >= NV_50)
		nv_mask(dev, NV50_PDISPLAY_INTR_EN_1,
			NV50_PDISPLAY_INTR_EN_1_VBLANK_CRTC_(crtc), 0);
	else
		NVWriteCRTC(dev, crtc, NV_PCRTC_INTR_EN_0, 0);
}

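/*
 * Pin the new framebuffer's buffer object into VRAM and reserve both the
 * old and new buffer objects so neither can be moved or freed while the
 * flip is outstanding.
 */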
static int
nouveau_page_flip_reserve(struct nouveau_bo *old_bo,
			  struct nouveau_bo *new_bo)
{
	int ret;

	ret = nouveau_bo_pin(new_bo, TTM_PL_FLAG_VRAM);
	if (ret)
		return ret;

	ret = ttm_bo_reserve(&new_bo->bo, false, false, false, 0);
	if (ret)
		goto fail;

	ret = ttm_bo_reserve(&old_bo->bo, false, false, false, 0);
	if (ret)
		goto fail_unreserve;

	return 0;

fail_unreserve:
	ttm_bo_unreserve(&new_bo->bo);
fail:
	nouveau_bo_unpin(new_bo);
	return ret;
}

static void
nouveau_page_flip_unreserve(struct nouveau_bo *old_bo,
			    struct nouveau_bo *new_bo,
			    struct nouveau_fence *fence)
{
	nouveau_bo_fence(new_bo, fence);
	ttm_bo_unreserve(&new_bo->bo);

	nouveau_bo_fence(old_bo, fence);
	ttm_bo_unreserve(&old_bo->bo);

	nouveau_bo_unpin(old_bo);
}

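/*
 * Queue the flip state on the channel's software-object flip list, wait on
 * the old framebuffer's fence, then emit the generation-specific page-flip
 * methods and a fence the caller can use to track completion.
 */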
static int
nouveau_page_flip_emit(struct nouveau_channel *chan,
		       struct nouveau_bo *old_bo,
		       struct nouveau_bo *new_bo,
		       struct nouveau_page_flip_state *s,
		       struct nouveau_fence **pfence)
{
	struct nouveau_software_chan *swch = chan->engctx[NVOBJ_ENGINE_SW];
	struct drm_nouveau_private *dev_priv = chan->dev->dev_private;
	struct drm_device *dev = chan->dev;
	unsigned long flags;
	int ret;

	/* Queue it to the pending list */
	spin_lock_irqsave(&dev->event_lock, flags);
	list_add_tail(&s->head, &swch->flip);
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/* Synchronize with the old framebuffer */
	ret = nouveau_fence_sync(old_bo->bo.sync_obj, chan);
	if (ret)
		goto fail;

	/* Emit the pageflip */
	ret = RING_SPACE(chan, 3);
	if (ret)
		goto fail;

	if (dev_priv->card_type < NV_C0) {
		BEGIN_NV04(chan, NvSubSw, NV_SW_PAGE_FLIP, 1);
		OUT_RING  (chan, 0x00000000);
		OUT_RING  (chan, 0x00000000);
	} else {
		BEGIN_NVC0(chan, 0, NV10_SUBCHAN_REF_CNT, 1);
		OUT_RING  (chan, 0);
		BEGIN_IMC0(chan, 0, NVSW_SUBCHAN_PAGE_FLIP, 0x0000);
	}
	FIRE_RING (chan);

	ret = nouveau_fence_new(chan, pfence);
	if (ret)
		goto fail;

	return 0;
fail:
	spin_lock_irqsave(&dev->event_lock, flags);
	list_del(&s->head);
	spin_unlock_irqrestore(&dev->event_lock, flags);
	return ret;
}

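/*
 * drm_crtc_funcs.page_flip entry point: reserve the buffers, pick a channel
 * to emit the flip on (preferring the channel that last touched the new
 * buffer), and queue the flip for completion in nouveau_finish_page_flip().
 */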
int
nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct drm_pending_vblank_event *event)
{
	struct drm_device *dev = crtc->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_bo *old_bo = nouveau_framebuffer(crtc->fb)->nvbo;
	struct nouveau_bo *new_bo = nouveau_framebuffer(fb)->nvbo;
	struct nouveau_page_flip_state *s;
	struct nouveau_channel *chan = NULL;
	struct nouveau_fence *fence;
	int ret;

	if (!dev_priv->channel)
		return -ENODEV;

	s = kzalloc(sizeof(*s), GFP_KERNEL);
	if (!s)
		return -ENOMEM;

	/* Don't let the buffers go away while we flip */
	ret = nouveau_page_flip_reserve(old_bo, new_bo);
	if (ret)
		goto fail_free;

	/* Initialize a page flip struct */
	*s = (struct nouveau_page_flip_state)
		{ { }, event, nouveau_crtc(crtc)->index,
		  fb->bits_per_pixel, fb->pitches[0], crtc->x, crtc->y,
		  new_bo->bo.offset };

	/* Choose the channel the flip will be handled in */
	fence = new_bo->bo.sync_obj;
	if (fence)
		chan = nouveau_channel_get_unlocked(fence->channel);
	if (!chan)
		chan = nouveau_channel_get_unlocked(dev_priv->channel);
	mutex_lock(&chan->mutex);

	/* Emit a page flip */
	if (dev_priv->card_type >= NV_50) {
		if (dev_priv->card_type >= NV_D0)
			ret = nvd0_display_flip_next(crtc, fb, chan, 0);
		else
			ret = nv50_display_flip_next(crtc, fb, chan);
		if (ret) {
			nouveau_channel_put(&chan);
			goto fail_unreserve;
		}
	}

	ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence);
	nouveau_channel_put(&chan);
	if (ret)
		goto fail_unreserve;

	/* Update the crtc struct and cleanup */
	crtc->fb = fb;

	nouveau_page_flip_unreserve(old_bo, new_bo, fence);
	nouveau_fence_unref(&fence);
	return 0;

fail_unreserve:
	nouveau_page_flip_unreserve(old_bo, new_bo, NULL);
fail_free:
	kfree(s);
	return ret;
}

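/*
 * Called once the channel reaches the page-flip method queued above:
 * deliver the vblank event to userspace (if one was requested) and hand
 * the dequeued flip state back to the caller.
 */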
int
nouveau_finish_page_flip(struct nouveau_channel *chan,
			 struct nouveau_page_flip_state *ps)
{
	struct nouveau_software_chan *swch = chan->engctx[NVOBJ_ENGINE_SW];
	struct drm_device *dev = chan->dev;
	struct nouveau_page_flip_state *s;
	unsigned long flags;

	spin_lock_irqsave(&dev->event_lock, flags);

	if (list_empty(&swch->flip)) {
		NV_ERROR(dev, "Unexpected pageflip in channel %d.\n", chan->id);
		spin_unlock_irqrestore(&dev->event_lock, flags);
		return -EINVAL;
	}

	s = list_first_entry(&swch->flip, struct nouveau_page_flip_state, head);
	if (s->event) {
		struct drm_pending_vblank_event *e = s->event;
		struct timeval now;

		do_gettimeofday(&now);
		e->event.sequence = 0;
		e->event.tv_sec = now.tv_sec;
		e->event.tv_usec = now.tv_usec;
		list_add_tail(&e->base.link, &e->base.file_priv->event_list);
		wake_up_interruptible(&e->base.file_priv->event_wait);
	}

	list_del(&s->head);
	if (ps)
		*ps = *s;
	kfree(s);

	spin_unlock_irqrestore(&dev->event_lock, flags);
	return 0;
}

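/*
 * Dumb buffer support: the pitch is aligned to 256 bytes and the total
 * size rounded up to a whole page before the backing object is allocated
 * in VRAM.
 */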
int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
			    struct drm_mode_create_dumb *args)
{
	struct nouveau_bo *bo;
	int ret;

	args->pitch = roundup(args->width * (args->bpp / 8), 256);
	args->size = args->pitch * args->height;
	args->size = roundup(args->size, PAGE_SIZE);

	ret = nouveau_gem_new(dev, args->size, 0, NOUVEAU_GEM_DOMAIN_VRAM, 0, 0, &bo);
	if (ret)
		return ret;

	ret = drm_gem_handle_create(file_priv, bo->gem, &args->handle);
	drm_gem_object_unreference_unlocked(bo->gem);
	return ret;
}

int
nouveau_display_dumb_destroy(struct drm_file *file_priv, struct drm_device *dev,
			     uint32_t handle)
{
	return drm_gem_handle_delete(file_priv, handle);
}

int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
				struct drm_device *dev,
				uint32_t handle, uint64_t *poffset)
{
	struct drm_gem_object *gem;

	gem = drm_gem_object_lookup(dev, file_priv, handle);
	if (gem) {
		struct nouveau_bo *bo = gem->driver_private;
		*poffset = bo->bo.addr_space_offset;
		drm_gem_object_unreference_unlocked(gem);
		return 0;
	}

	return -ENOENT;
}