/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include "drmP.h"
#include "drm_crtc_helper.h"
#include "nouveau_drv.h"
#include "nouveau_fb.h"
#include "nouveau_fbcon.h"
#include "nouveau_hw.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_connector.h"
#include "nouveau_gpio.h"
#include "nv50_display.h"

static void
nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb)
{
        struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

        if (fb->nvbo)
                drm_gem_object_unreference_unlocked(fb->nvbo->gem);

        drm_framebuffer_cleanup(drm_fb);
        kfree(fb);
}

static int
nouveau_user_framebuffer_create_handle(struct drm_framebuffer *drm_fb,
                                       struct drm_file *file_priv,
                                       unsigned int *handle)
{
        struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

        return drm_gem_handle_create(file_priv, fb->nvbo->gem, handle);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
        .destroy = nouveau_user_framebuffer_destroy,
        .create_handle = nouveau_user_framebuffer_create_handle,
};

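/*
 * Wrap a GEM-backed nouveau_bo in a DRM framebuffer.  On NV50 and newer
 * cards this also precomputes the EVO DMA object, colour format and pitch
 * words that the display engine expects for this surface.
 */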
int
nouveau_framebuffer_init(struct drm_device *dev,
                         struct nouveau_framebuffer *nv_fb,
                         struct drm_mode_fb_cmd2 *mode_cmd,
                         struct nouveau_bo *nvbo)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct drm_framebuffer *fb = &nv_fb->base;
        int ret;

        ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs);
        if (ret)
                return ret;

        drm_helper_mode_fill_fb_struct(fb, mode_cmd);
        nv_fb->nvbo = nvbo;

        if (dev_priv->card_type >= NV_50) {
                u32 tile_flags = nouveau_bo_tile_layout(nvbo);
                if (tile_flags == 0x7a00 ||
                    tile_flags == 0xfe00)
                        nv_fb->r_dma = NvEvoFB32;
                else
                if (tile_flags == 0x7000)
                        nv_fb->r_dma = NvEvoFB16;
                else
                        nv_fb->r_dma = NvEvoVRAM_LP;

                switch (fb->depth) {
                case  8: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_8; break;
                case 15: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_15; break;
                case 16: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_16; break;
                case 24:
                case 32: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_24; break;
                case 30: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_30; break;
                default:
                         NV_ERROR(dev, "unknown depth %d\n", fb->depth);
                         return -EINVAL;
                }

                if (dev_priv->chipset == 0x50)
                        nv_fb->r_format |= (tile_flags << 8);

                if (!tile_flags) {
                        if (dev_priv->card_type < NV_D0)
                                nv_fb->r_pitch = 0x00100000 | fb->pitches[0];
                        else
                                nv_fb->r_pitch = 0x01000000 | fb->pitches[0];
                } else {
                        u32 mode = nvbo->tile_mode;
                        if (dev_priv->card_type >= NV_C0)
                                mode >>= 4;
                        nv_fb->r_pitch = ((fb->pitches[0] / 4) << 4) | mode;
                }
        }

        return 0;
}

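/*
 * .fb_create hook: look up the GEM object named by userspace and build a
 * nouveau_framebuffer around it.
 */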
static struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
                                struct drm_file *file_priv,
                                struct drm_mode_fb_cmd2 *mode_cmd)
{
        struct nouveau_framebuffer *nouveau_fb;
        struct drm_gem_object *gem;
        int ret;

        gem = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
        if (!gem)
                return ERR_PTR(-ENOENT);

        nouveau_fb = kzalloc(sizeof(struct nouveau_framebuffer), GFP_KERNEL);
        if (!nouveau_fb)
                return ERR_PTR(-ENOMEM);

        ret = nouveau_framebuffer_init(dev, nouveau_fb, mode_cmd, nouveau_gem_object(gem));
        if (ret) {
                drm_gem_object_unreference(gem);
                return ERR_PTR(ret);
        }

        return &nouveau_fb->base;
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
        .fb_create = nouveau_user_framebuffer_create,
        .output_poll_changed = nouveau_fbcon_output_poll_changed,
};

struct drm_prop_enum_list {
        u8 gen_mask;
        int type;
        char *name;
};

static struct drm_prop_enum_list underscan[] = {
        { 6, UNDERSCAN_AUTO, "auto" },
        { 6, UNDERSCAN_OFF, "off" },
        { 6, UNDERSCAN_ON, "on" },
        {}
};

static struct drm_prop_enum_list dither_mode[] = {
        { 7, DITHERING_MODE_AUTO, "auto" },
        { 7, DITHERING_MODE_OFF, "off" },
        { 1, DITHERING_MODE_ON, "on" },
        { 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
        { 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
        { 4, DITHERING_MODE_TEMPORAL, "temporal" },
        {}
};

static struct drm_prop_enum_list dither_depth[] = {
        { 6, DITHERING_DEPTH_AUTO, "auto" },
        { 6, DITHERING_DEPTH_6BPC, "6 bpc" },
        { 6, DITHERING_DEPTH_8BPC, "8 bpc" },
        {}
};

#define PROP_ENUM(p,gen,n,list) do { \
        struct drm_prop_enum_list *l = (list); \
        int c = 0; \
        while (l->gen_mask) { \
                if (l->gen_mask & (1 << (gen))) \
                        c++; \
                l++; \
        } \
        l = (list); \
        p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c); \
        c = 0; \
        while (p && l->gen_mask) { \
                if (l->gen_mask & (1 << (gen))) { \
                        drm_property_add_enum(p, c, l->type, l->name); \
                        c++; \
                } \
                l++; \
        } \
} while(0)

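/*
 * Bring the display engine up: run the per-generation init hook, make sure
 * the internal panel has power, then re-enable output polling and hotplug
 * interrupts.
 */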
int
nouveau_display_init(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_display_engine *disp = &dev_priv->engine.display;
        struct drm_connector *connector;
        int ret;

        ret = disp->init(dev);
        if (ret)
                return ret;

        /* power on internal panel if it's not already.  the init tables of
         * some vbios default this to off for some reason, causing the
         * panel to not work after resume
         */
        if (nouveau_gpio_func_get(dev, DCB_GPIO_PANEL_POWER) == 0)
                nouveau_gpio_func_set(dev, DCB_GPIO_PANEL_POWER, true);

        /* enable polling for external displays */
        drm_kms_helper_poll_enable(dev);

        /* enable hotplug interrupts */
        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
                struct nouveau_connector *conn = nouveau_connector(connector);
                nouveau_gpio_irq(dev, 0, conn->hpd, 0xff, true);
        }

        return ret;
}

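/*
 * Quiesce the display engine: disable hotplug interrupts and output
 * polling, then call the per-generation teardown hook.
 */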
void
nouveau_display_fini(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_display_engine *disp = &dev_priv->engine.display;
        struct drm_connector *connector;

        /* disable hotplug interrupts */
        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
                struct nouveau_connector *conn = nouveau_connector(connector);
                nouveau_gpio_irq(dev, 0, conn->hpd, 0xff, false);
        }

        drm_kms_helper_poll_disable(dev);
        disp->fini(dev);
}

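/*
 * One-time KMS setup: initialise the mode config, create the connector
 * properties appropriate for the card's generation, set framebuffer size
 * limits and call into the per-generation display code.
 */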
int
nouveau_display_create(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_display_engine *disp = &dev_priv->engine.display;
        int ret, gen;

        drm_mode_config_init(dev);
        drm_mode_create_scaling_mode_property(dev);
        drm_mode_create_dvi_i_properties(dev);

        if (dev_priv->card_type < NV_50)
                gen = 0;
        else
        if (dev_priv->card_type < NV_D0)
                gen = 1;
        else
                gen = 2;

        PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
        PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
        PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

        disp->underscan_hborder_property =
                drm_property_create(dev, DRM_MODE_PROP_RANGE,
                                    "underscan hborder", 2);
        disp->underscan_hborder_property->values[0] = 0;
        disp->underscan_hborder_property->values[1] = 128;

        disp->underscan_vborder_property =
                drm_property_create(dev, DRM_MODE_PROP_RANGE,
                                    "underscan vborder", 2);
        disp->underscan_vborder_property->values[0] = 0;
        disp->underscan_vborder_property->values[1] = 128;

        dev->mode_config.funcs = (void *)&nouveau_mode_config_funcs;
        dev->mode_config.fb_base = pci_resource_start(dev->pdev, 1);

        dev->mode_config.min_width = 0;
        dev->mode_config.min_height = 0;
        if (dev_priv->card_type < NV_10) {
                dev->mode_config.max_width = 2048;
                dev->mode_config.max_height = 2048;
        } else
        if (dev_priv->card_type < NV_50) {
                dev->mode_config.max_width = 4096;
                dev->mode_config.max_height = 4096;
        } else {
                dev->mode_config.max_width = 8192;
                dev->mode_config.max_height = 8192;
        }

        drm_kms_helper_poll_init(dev);
        drm_kms_helper_poll_disable(dev);

        ret = disp->create(dev);
        if (ret)
                return ret;

        if (dev->mode_config.num_crtc) {
                ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
                if (ret)
                        return ret;
        }

        return ret;
}

void
nouveau_display_destroy(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_display_engine *disp = &dev_priv->engine.display;

        drm_vblank_cleanup(dev);

        disp->destroy(dev);

        drm_kms_helper_poll_fini(dev);
        drm_mode_config_cleanup(dev);
}

int
nouveau_vblank_enable(struct drm_device *dev, int crtc)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;

        if (dev_priv->card_type >= NV_50)
                nv_mask(dev, NV50_PDISPLAY_INTR_EN_1, 0,
                        NV50_PDISPLAY_INTR_EN_1_VBLANK_CRTC_(crtc));
        else
                NVWriteCRTC(dev, crtc, NV_PCRTC_INTR_EN_0,
                            NV_PCRTC_INTR_0_VBLANK);

        return 0;
}

void
nouveau_vblank_disable(struct drm_device *dev, int crtc)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;

        if (dev_priv->card_type >= NV_50)
                nv_mask(dev, NV50_PDISPLAY_INTR_EN_1,
                        NV50_PDISPLAY_INTR_EN_1_VBLANK_CRTC_(crtc), 0);
        else
                NVWriteCRTC(dev, crtc, NV_PCRTC_INTR_EN_0, 0);
}

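/*
 * Pin the new framebuffer's buffer object into VRAM and reserve both the
 * old and new buffer objects so neither can move while the flip is pending.
 */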
static int
nouveau_page_flip_reserve(struct nouveau_bo *old_bo,
                          struct nouveau_bo *new_bo)
{
        int ret;

        ret = nouveau_bo_pin(new_bo, TTM_PL_FLAG_VRAM);
        if (ret)
                return ret;

        ret = ttm_bo_reserve(&new_bo->bo, false, false, false, 0);
        if (ret)
                goto fail;

        ret = ttm_bo_reserve(&old_bo->bo, false, false, false, 0);
        if (ret)
                goto fail_unreserve;

        return 0;

fail_unreserve:
        ttm_bo_unreserve(&new_bo->bo);
fail:
        nouveau_bo_unpin(new_bo);
        return ret;
}

static void
nouveau_page_flip_unreserve(struct nouveau_bo *old_bo,
                            struct nouveau_bo *new_bo,
                            struct nouveau_fence *fence)
{
        nouveau_bo_fence(new_bo, fence);
        ttm_bo_unreserve(&new_bo->bo);

        nouveau_bo_fence(old_bo, fence);
        ttm_bo_unreserve(&old_bo->bo);

        nouveau_bo_unpin(old_bo);
}

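/*
 * Queue the flip state on the channel's pending list, synchronize against
 * rendering to the old framebuffer, then emit the software page-flip method
 * and a fence that signals once the flip has been executed.
 */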
static int
nouveau_page_flip_emit(struct nouveau_channel *chan,
                       struct nouveau_bo *old_bo,
                       struct nouveau_bo *new_bo,
                       struct nouveau_page_flip_state *s,
                       struct nouveau_fence **pfence)
{
        struct drm_nouveau_private *dev_priv = chan->dev->dev_private;
        struct drm_device *dev = chan->dev;
        unsigned long flags;
        int ret;

        /* Queue it to the pending list */
        spin_lock_irqsave(&dev->event_lock, flags);
        list_add_tail(&s->head, &chan->nvsw.flip);
        spin_unlock_irqrestore(&dev->event_lock, flags);

        /* Synchronize with the old framebuffer */
        ret = nouveau_fence_sync(old_bo->bo.sync_obj, chan);
        if (ret)
                goto fail;

        /* Emit the pageflip */
        ret = RING_SPACE(chan, 2);
        if (ret)
                goto fail;

        if (dev_priv->card_type < NV_C0)
                BEGIN_RING(chan, NvSubSw, NV_SW_PAGE_FLIP, 1);
        else
                BEGIN_NVC0(chan, 2, NvSubM2MF, 0x0500, 1);
        OUT_RING  (chan, 0);
        FIRE_RING (chan);

        ret = nouveau_fence_new(chan, pfence, true);
        if (ret)
                goto fail;

        return 0;
fail:
        spin_lock_irqsave(&dev->event_lock, flags);
        list_del(&s->head);
        spin_unlock_irqrestore(&dev->event_lock, flags);
        return ret;
}

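/*
 * DRM .page_flip entry point: reserve the old and new buffers, pick the
 * channel that last touched the new buffer (falling back to the kernel
 * channel), program the flip on NV50+ hardware, emit the software flip
 * method, and attach the resulting fence to both buffers.
 */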
int
nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
                       struct drm_pending_vblank_event *event)
{
        struct drm_device *dev = crtc->dev;
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_bo *old_bo = nouveau_framebuffer(crtc->fb)->nvbo;
        struct nouveau_bo *new_bo = nouveau_framebuffer(fb)->nvbo;
        struct nouveau_page_flip_state *s;
        struct nouveau_channel *chan;
        struct nouveau_fence *fence;
        int ret;

        if (!dev_priv->channel)
                return -ENODEV;

        s = kzalloc(sizeof(*s), GFP_KERNEL);
        if (!s)
                return -ENOMEM;

        /* Don't let the buffers go away while we flip */
        ret = nouveau_page_flip_reserve(old_bo, new_bo);
        if (ret)
                goto fail_free;

        /* Initialize a page flip struct */
        *s = (struct nouveau_page_flip_state)
                { { }, event, nouveau_crtc(crtc)->index,
                  fb->bits_per_pixel, fb->pitches[0], crtc->x, crtc->y,
                  new_bo->bo.offset };

        /* Choose the channel the flip will be handled in */
        chan = nouveau_fence_channel(new_bo->bo.sync_obj);
        if (!chan)
                chan = nouveau_channel_get_unlocked(dev_priv->channel);
        mutex_lock(&chan->mutex);

        /* Emit a page flip */
        if (dev_priv->card_type >= NV_50) {
                if (dev_priv->card_type >= NV_D0)
                        ret = nvd0_display_flip_next(crtc, fb, chan, 0);
                else
                        ret = nv50_display_flip_next(crtc, fb, chan);
                if (ret) {
                        nouveau_channel_put(&chan);
                        goto fail_unreserve;
                }
        }

        ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence);
        nouveau_channel_put(&chan);
        if (ret)
                goto fail_unreserve;

        /* Update the crtc struct and cleanup */
        crtc->fb = fb;
        nouveau_page_flip_unreserve(old_bo, new_bo, fence);
        nouveau_fence_unref(&fence);
        return 0;

fail_unreserve:
        nouveau_page_flip_unreserve(old_bo, new_bo, NULL);
fail_free:
        kfree(s);
        return ret;
}

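/*
 * Complete the oldest pending flip on a channel: deliver the vblank event
 * to userspace if one was requested, and copy the flip state back to the
 * caller before freeing it.
 */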
int
nouveau_finish_page_flip(struct nouveau_channel *chan,
                         struct nouveau_page_flip_state *ps)
{
        struct drm_device *dev = chan->dev;
        struct nouveau_page_flip_state *s;
        unsigned long flags;

        spin_lock_irqsave(&dev->event_lock, flags);

        if (list_empty(&chan->nvsw.flip)) {
                NV_ERROR(dev, "Unexpected pageflip in channel %d.\n", chan->id);
                spin_unlock_irqrestore(&dev->event_lock, flags);
                return -EINVAL;
        }

        s = list_first_entry(&chan->nvsw.flip,
                             struct nouveau_page_flip_state, head);
        if (s->event) {
                struct drm_pending_vblank_event *e = s->event;
                struct timeval now;

                do_gettimeofday(&now);
                e->event.sequence = 0;
                e->event.tv_sec = now.tv_sec;
                e->event.tv_usec = now.tv_usec;
                list_add_tail(&e->base.link, &e->base.file_priv->event_list);
                wake_up_interruptible(&e->base.file_priv->event_wait);
        }

        list_del(&s->head);
        if (ps)
                *ps = *s;
        kfree(s);

        spin_unlock_irqrestore(&dev->event_lock, flags);
        return 0;
}

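/*
 * Dumb-buffer ioctls: allocate a linear VRAM buffer with a 256-byte aligned
 * pitch and hand a GEM handle back to userspace.
 */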
int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
                            struct drm_mode_create_dumb *args)
{
        struct nouveau_bo *bo;
        int ret;

        args->pitch = roundup(args->width * (args->bpp / 8), 256);
        args->size = args->pitch * args->height;
        args->size = roundup(args->size, PAGE_SIZE);

        ret = nouveau_gem_new(dev, args->size, 0, TTM_PL_FLAG_VRAM, 0, 0, &bo);
        if (ret)
                return ret;

        ret = drm_gem_handle_create(file_priv, bo->gem, &args->handle);
        drm_gem_object_unreference_unlocked(bo->gem);
        return ret;
}

int
nouveau_display_dumb_destroy(struct drm_file *file_priv, struct drm_device *dev,
                             uint32_t handle)
{
        return drm_gem_handle_delete(file_priv, handle);
}

int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
                                struct drm_device *dev,
                                uint32_t handle, uint64_t *poffset)
{
        struct drm_gem_object *gem;

        gem = drm_gem_object_lookup(dev, file_priv, handle);
        if (gem) {
                struct nouveau_bo *bo = gem->driver_private;
                *poffset = bo->bo.addr_space_offset;
                drm_gem_object_unreference_unlocked(gem);
                return 0;
        }

        return -ENOENT;
}