/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#include <linux/dma-mapping.h>

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_dp_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_plane_helper.h>

#include <nvif/class.h>
#include <nvif/cl0002.h>
#include <nvif/cl5070.h>
#include <nvif/cl507a.h>
#include <nvif/cl507b.h>
#include <nvif/cl507c.h>
#include <nvif/cl507d.h>
#include <nvif/cl507e.h>
#include <nvif/event.h>

#include "nouveau_drv.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fence.h"
#include "nouveau_fbcon.h"
#include "nv50_display.h"
#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
#define EVO_FLIP_NTFY0(c) EVO_SYNC((c) + 1, 0x20)
#define EVO_FLIP_NTFY1(c) EVO_SYNC((c) + 1, 0x30)
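
/* Worked example of the layout above: the core (master) channel notifier
 * sits at offset 0x000 of the sync bo, head 0's flip semaphores at
 * EVO_SYNC(1, 0x00) = 0x100 and EVO_SYNC(1, 0x10) = 0x110, head 1's at
 * 0x200/0x210, i.e. one 0x100-byte slot per flip channel.
 */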
/******************************************************************************
 * Atomic state
 *****************************************************************************/
#define nv50_atom(p) container_of((p), struct nv50_atom, state)

struct nv50_atom {
	struct drm_atomic_state state;

	struct list_head outp;
	bool lock_core;
};

struct nv50_outp_atom {
	struct list_head head;

	struct drm_encoder *encoder;
};

#define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state)

struct nv50_head_atom {
	struct drm_crtc_state state;

	struct {
		u16 iW;
		u16 iH;
		u16 oW;
		u16 oH;
	} view;

	struct nv50_head_mode {
		bool interlace;
		u32 clock;
		struct {
			u16 active;
			u16 synce;
			u16 blanke;
			u16 blanks;
		} h;
		struct {
			u16 active;
			u16 synce;
			u16 blanke;
			u16 blanks;
			u16 blank2s;
			u16 blank2e;
			u16 blankus;
		} v;
	} mode;

	struct {
		u32 handle;
		u64 offset:40;
	} lut;

	struct {
		bool visible;
		u32 handle;
		u64 offset:40;
		u8  format;
		u8  kind:7;
		u8  layout:1;
		u8  block:4;
		u32 pitch:20;
		u16 x;
		u16 y;
		u16 w;
		u16 h;
	} core;

	struct {
		bool visible;
		u32 handle;
		u64 offset:40;
		u8  layout:1;
		u8  format:1;
	} curs;

	struct {
		u8  depth;
		u8  cpp;
		u16 x;
		u16 y;
		u16 w;
		u16 h;
	} base;

	struct {
		u8 cpp;
	} ovly;

	struct {
		bool enable:1;
		u8 bits:2;
		u8 mode:4;
	} dither;

	struct {
		struct {
			u16 cos:12;
			u16 sin:12;
		} sat;
	} procamp;

	union {
		struct {
			bool core:1;
			bool curs:1;
		};
		u8 mask;
	} clr;

	union {
		struct {
			bool core:1;
			bool curs:1;
			bool view:1;
			bool mode:1;
			bool base:1;
			bool ovly:1;
			bool dither:1;
			bool procamp:1;
		};
		u16 mask;
	} set;
};
static inline struct nv50_head_atom *
nv50_head_atom_get(struct drm_atomic_state *state, struct drm_crtc *crtc)
{
	struct drm_crtc_state *statec = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(statec))
		return (void *)statec;
	return nv50_head_atom(statec);
}
#define nv50_wndw_atom(p) container_of((p), struct nv50_wndw_atom, state)

struct nv50_wndw_atom {
	struct drm_plane_state state;
	u8 interval;

	struct drm_rect clip;

	struct {
		u32  handle;
		u16  offset:12;
		u32  acquire;
		u32  release;
	} sema;

	struct {
		u32  handle;
		u16  offset:12;
		bool awaken:1;
	} ntfy;

	struct {
		u32 handle;
		u64 offset:40;
		u8  mode:2;
		u8  interval:4;
		u8  format;
		u8  kind:7;
		u8  layout:1;
		u8  block:4;
		u32 pitch:20;
		u16 w;
		u16 h;
	} image;

	struct {
		u16 x;
		u16 y;
	} point;

	struct {
		u8 enable:2;
	} lut;

	union {
		struct {
			bool sema:1;
			bool ntfy:1;
			bool image:1;
			bool lut:1;
			bool point:1;
		};
		u8 mask;
	} set;

	union {
		struct {
			bool sema:1;
			bool ntfy:1;
			bool image:1;
		};
		u8 mask;
	} clr;
};
/******************************************************************************
 * EVO channel
 *****************************************************************************/

struct nv50_chan {
	struct nvif_object user;
	struct nvif_device *device;
};

static int
nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_chan *chan)
{
	struct nvif_sclass *sclass;
	int ret, i, n;

	chan->device = device;

	ret = n = nvif_object_sclass_get(disp, &sclass);
	if (ret < 0)
		return ret;

	while (oclass[0]) {
		for (i = 0; i < n; i++) {
			if (sclass[i].oclass == oclass[0]) {
				ret = nvif_object_init(disp, 0, oclass[0],
						       data, size, &chan->user);
				if (ret == 0)
					nvif_object_map(&chan->user);
				nvif_object_sclass_put(&sclass);
				return ret;
			}
		}
		oclass++;
	}

	nvif_object_sclass_put(&sclass);
	return -ENOSYS;
}

static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}
/******************************************************************************
 * PIO EVO channel
 *****************************************************************************/

struct nv50_pioc {
	struct nv50_chan base;
};

static void
nv50_pioc_destroy(struct nv50_pioc *pioc)
{
	nv50_chan_destroy(&pioc->base);
}

static int
nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_pioc *pioc)
{
	return nv50_chan_create(device, disp, oclass, head, data, size,
				&pioc->base);
}
/******************************************************************************
 * Overlay Immediate
 *****************************************************************************/

struct nv50_oimm {
	struct nv50_pioc base;
};

static int
nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, struct nv50_oimm *oimm)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY,
		GF110_DISP_OVERLAY,
		GT214_DISP_OVERLAY,
		G82_DISP_OVERLAY,
		NV50_DISP_OVERLAY,
		0
	};

	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
				&oimm->base);
}
/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/

struct nv50_dmac_ctxdma {
	struct list_head head;
	struct nvif_object object;
};

struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;
	u32 *ptr;

	struct nvif_object sync;
	struct nvif_object vram;
	struct list_head ctxdma;

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};

static void
nv50_dmac_ctxdma_del(struct nv50_dmac_ctxdma *ctxdma)
{
	nvif_object_fini(&ctxdma->object);
	list_del(&ctxdma->head);
	kfree(ctxdma);
}
static struct nv50_dmac_ctxdma *
nv50_dmac_ctxdma_new(struct nv50_dmac *dmac, struct nouveau_framebuffer *fb)
{
	struct nouveau_drm *drm = nouveau_drm(fb->base.dev);
	struct nv50_dmac_ctxdma *ctxdma;
	const u8    kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;
	const u32 handle = 0xfb000000 | kind;
	struct {
		struct nv_dma_v0 base;
		union {
			struct nv50_dma_v0 nv50;
			struct gf100_dma_v0 gf100;
			struct gf119_dma_v0 gf119;
		};
	} args = {};
	u32 argc = sizeof(args.base);
	int ret;

	list_for_each_entry(ctxdma, &dmac->ctxdma, head) {
		if (ctxdma->object.handle == handle)
			return ctxdma;
	}

	if (!(ctxdma = kzalloc(sizeof(*ctxdma), GFP_KERNEL)))
		return ERR_PTR(-ENOMEM);
	list_add(&ctxdma->head, &dmac->ctxdma);

	args.base.target = NV_DMA_V0_TARGET_VRAM;
	args.base.access = NV_DMA_V0_ACCESS_RDWR;
	args.base.start  = 0;
	args.base.limit  = drm->client.device.info.ram_user - 1;

	if (drm->client.device.info.chipset < 0x80) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		argc += sizeof(args.nv50);
	} else
	if (drm->client.device.info.chipset < 0xc0) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		args.nv50.kind = kind;
		argc += sizeof(args.nv50);
	} else
	if (drm->client.device.info.chipset < 0xd0) {
		args.gf100.kind = kind;
		argc += sizeof(args.gf100);
	} else {
		args.gf119.page = GF119_DMA_V0_PAGE_LP;
		args.gf119.kind = kind;
		argc += sizeof(args.gf119);
	}

	ret = nvif_object_init(&dmac->base.user, handle, NV_DMA_IN_MEMORY,
			       &args, argc, &ctxdma->object);
	if (ret) {
		nv50_dmac_ctxdma_del(ctxdma);
		return ERR_PTR(ret);
	}

	return ctxdma;
}
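
/* The object handle doubles as the cache key for the lookup above: a
 * pitch-linear fb (kind 0x00) maps to handle 0xfb000000 while, say, a
 * tiled fb of kind 0x70 maps to 0xfb000070, so one ctxdma per memory
 * kind is shared by every framebuffer of that kind (the kind value is
 * just an illustration).
 */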
static void
nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
{
	struct nvif_device *device = dmac->base.device;
	struct nv50_dmac_ctxdma *ctxdma, *ctxtmp;

	list_for_each_entry_safe(ctxdma, ctxtmp, &dmac->ctxdma, head) {
		nv50_dmac_ctxdma_del(ctxdma);
	}

	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	if (dmac->ptr) {
		struct device *dev = nvxx_device(device)->dev;
		dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}
}
static int
nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nv50_disp_core_channel_dma_v0 *args = data;
	struct nvif_object pushbuf;
	int ret;

	mutex_init(&dmac->lock);

	dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
				       &dmac->handle, GFP_KERNEL);
	if (!dmac->ptr)
		return -ENOMEM;

	ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_PCI_US,
					.access = NV_DMA_V0_ACCESS_RD,
					.start = dmac->handle + 0x0000,
					.limit = dmac->handle + 0x0fff,
			       }, sizeof(struct nv_dma_v0), &pushbuf);
	if (ret)
		return ret;

	args->pushbuf = nvif_handle(&pushbuf);

	ret = nv50_chan_create(device, disp, oclass, head, data, size,
			       &dmac->base);
	nvif_object_fini(&pushbuf);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->sync);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = 0,
					.limit = device->info.ram_user - 1,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->vram);
	if (ret)
		return ret;

	INIT_LIST_HEAD(&dmac->ctxdma);
	return ret;
}
/******************************************************************************
 * Core
 *****************************************************************************/

struct nv50_mast {
	struct nv50_dmac base;
};

static int
nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
		 u64 syncbuf, struct nv50_mast *core)
{
	struct nv50_disp_core_channel_dma_v0 args = {
		.pushbuf = 0xb0007d00,
	};
	static const s32 oclass[] = {
		GP102_DISP_CORE_CHANNEL_DMA,
		GP100_DISP_CORE_CHANNEL_DMA,
		GM200_DISP_CORE_CHANNEL_DMA,
		GM107_DISP_CORE_CHANNEL_DMA,
		GK110_DISP_CORE_CHANNEL_DMA,
		GK104_DISP_CORE_CHANNEL_DMA,
		GF110_DISP_CORE_CHANNEL_DMA,
		GT214_DISP_CORE_CHANNEL_DMA,
		GT206_DISP_CORE_CHANNEL_DMA,
		GT200_DISP_CORE_CHANNEL_DMA,
		G82_DISP_CORE_CHANNEL_DMA,
		NV50_DISP_CORE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
				syncbuf, &core->base);
}
/******************************************************************************
 * Base
 *****************************************************************************/

struct nv50_sync {
	struct nv50_dmac base;
	u32 addr;
	u32 data;
};

static int
nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_sync *base)
{
	struct nv50_disp_base_channel_dma_v0 args = {
		.pushbuf = 0xb0007c00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK110_DISP_BASE_CHANNEL_DMA,
		GK104_DISP_BASE_CHANNEL_DMA,
		GF110_DISP_BASE_CHANNEL_DMA,
		GT214_DISP_BASE_CHANNEL_DMA,
		GT200_DISP_BASE_CHANNEL_DMA,
		G82_DISP_BASE_CHANNEL_DMA,
		NV50_DISP_BASE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &base->base);
}
/******************************************************************************
 * Overlay
 *****************************************************************************/

struct nv50_ovly {
	struct nv50_dmac base;
};

static int
nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_ovly *ovly)
{
	struct nv50_disp_overlay_channel_dma_v0 args = {
		.pushbuf = 0xb0007e00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY_CONTROL_DMA,
		GF110_DISP_OVERLAY_CONTROL_DMA,
		GT214_DISP_OVERLAY_CHANNEL_DMA,
		GT200_DISP_OVERLAY_CHANNEL_DMA,
		G82_DISP_OVERLAY_CHANNEL_DMA,
		NV50_DISP_OVERLAY_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &ovly->base);
}

struct nv50_head {
	struct nouveau_crtc base;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv50_chan(c)->user.oclass
struct nv50_disp {
	struct nvif_object *disp;
	struct nv50_mast mast;

	struct nouveau_bo *sync;
};

static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

#define nv50_mast(d) (&nv50_disp(d)->mast)
/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	struct nvif_device *device = dmac->base.device;
	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;

	mutex_lock(&dmac->lock);
	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		dmac->ptr[put] = 0x20000000;

		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
		if (nvif_msec(device, 2000,
			if (!nvif_rd32(&dmac->base.user, 0x0004))
				break;
		) < 0) {
			mutex_unlock(&dmac->lock);
			pr_err("nouveau: evo channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}
static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}
#define evo_mthd(p, m, s) do {						\
	const u32 _m = (m), _s = (s);					\
	if (drm_debug & DRM_UT_KMS)					\
		pr_err("%04x %d %s\n", _m, _s, __func__);		\
	*((p)++) = ((_s << 18) | _m);					\
} while(0)

#define evo_data(p, d) do {						\
	const u32 _d = (d);						\
	if (drm_debug & DRM_UT_KMS)					\
		pr_err("\t%08x\n", _d);					\
	*((p)++) = _d;							\
} while(0)
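
/* Typical submission pattern, mirroring the callers below: reserve space
 * with evo_wait() (which takes dmac->lock), emit a method header plus its
 * data words, then evo_kick() to advance the put pointer and drop the
 * lock.  Sketch:
 *
 *	u32 *push;
 *	if ((push = evo_wait(&base->chan, 2))) {
 *		evo_mthd(push, 0x0080, 1);
 *		evo_data(push, 0x00000000);
 *		evo_kick(push, &base->chan);
 *	}
 */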
/******************************************************************************
 * Plane
 *****************************************************************************/
#define nv50_wndw(p) container_of((p), struct nv50_wndw, plane)

struct nv50_wndw {
	const struct nv50_wndw_func *func;
	struct nv50_dmac *dmac;

	struct drm_plane plane;

	struct nvif_notify notify;
	u16 ntfy;
	u16 sema;
	u32 data;
};

struct nv50_wndw_func {
	void *(*dtor)(struct nv50_wndw *);
	int (*acquire)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
		       struct nv50_head_atom *asyh);
	void (*release)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
			struct nv50_head_atom *asyh);
	void (*prepare)(struct nv50_wndw *, struct nv50_head_atom *asyh,
			struct nv50_wndw_atom *asyw);

	void (*sema_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*sema_clr)(struct nv50_wndw *);
	void (*ntfy_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*ntfy_clr)(struct nv50_wndw *);
	int (*ntfy_wait_begun)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_clr)(struct nv50_wndw *);
	void (*lut)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*point)(struct nv50_wndw *, struct nv50_wndw_atom *);

	u32 (*update)(struct nv50_wndw *, u32 interlock);
};
static int
nv50_wndw_wait_armed(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	if (asyw->set.ntfy)
		return wndw->func->ntfy_wait_begun(wndw, asyw);
	return 0;
}

static u32
nv50_wndw_flush_clr(struct nv50_wndw *wndw, u32 interlock, bool flush,
		    struct nv50_wndw_atom *asyw)
{
	if (asyw->clr.sema && (!asyw->set.sema || flush))
		wndw->func->sema_clr(wndw);
	if (asyw->clr.ntfy && (!asyw->set.ntfy || flush))
		wndw->func->ntfy_clr(wndw);
	if (asyw->clr.image && (!asyw->set.image || flush))
		wndw->func->image_clr(wndw);

	return flush ? wndw->func->update(wndw, interlock) : 0;
}
static u32
nv50_wndw_flush_set(struct nv50_wndw *wndw, u32 interlock,
		    struct nv50_wndw_atom *asyw)
{
	if (interlock) {
		asyw->image.mode = 0;
		asyw->image.interval = 1;
	}

	if (asyw->set.sema ) wndw->func->sema_set (wndw, asyw);
	if (asyw->set.ntfy ) wndw->func->ntfy_set (wndw, asyw);
	if (asyw->set.image) wndw->func->image_set(wndw, asyw);
	if (asyw->set.lut  ) wndw->func->lut      (wndw, asyw);
	if (asyw->set.point) wndw->func->point    (wndw, asyw);

	return wndw->func->update(wndw, interlock);
}
static void
nv50_wndw_atomic_check_release(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	NV_ATOMIC(drm, "%s release\n", wndw->plane.name);
	wndw->func->release(wndw, asyw, asyh);
	asyw->ntfy.handle = 0;
	asyw->sema.handle = 0;
}
static int
nv50_wndw_atomic_check_acquire(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(asyw->state.fb);
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	int ret;

	NV_ATOMIC(drm, "%s acquire\n", wndw->plane.name);
	asyw->clip.x1 = 0;
	asyw->clip.y1 = 0;
	asyw->clip.x2 = asyh->state.mode.hdisplay;
	asyw->clip.y2 = asyh->state.mode.vdisplay;

	asyw->image.w = fb->base.width;
	asyw->image.h = fb->base.height;
	asyw->image.kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;

	if (asyh->state.pageflip_flags & DRM_MODE_PAGE_FLIP_ASYNC)
		asyw->interval = 0;
	else
		asyw->interval = 1;

	if (asyw->image.kind) {
		asyw->image.layout = 0;
		if (drm->client.device.info.chipset >= 0xc0)
			asyw->image.block = fb->nvbo->tile_mode >> 4;
		else
			asyw->image.block = fb->nvbo->tile_mode;
		asyw->image.pitch = (fb->base.pitches[0] / 4) << 4;
	} else {
		asyw->image.layout = 1;
		asyw->image.block  = 0;
		asyw->image.pitch  = fb->base.pitches[0];
	}

	ret = wndw->func->acquire(wndw, asyw, asyh);
	if (ret)
		return ret;

	if (asyw->set.image) {
		if (!(asyw->image.mode = asyw->interval ? 0 : 1))
			asyw->image.interval = asyw->interval;
		else
			asyw->image.interval = 0;
	}

	return 0;
}
static int
nv50_wndw_atomic_check(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *armw = nv50_wndw_atom(wndw->plane.state);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *harm = NULL, *asyh = NULL;
	bool varm = false, asyv = false, asym = false;
	int ret;

	NV_ATOMIC(drm, "%s atomic_check\n", plane->name);
	if (asyw->state.crtc) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);
		asym = drm_atomic_crtc_needs_modeset(&asyh->state);
		asyv = asyh->state.active;
	}

	if (armw->state.crtc) {
		harm = nv50_head_atom_get(asyw->state.state, armw->state.crtc);
		if (IS_ERR(harm))
			return PTR_ERR(harm);
		varm = harm->state.crtc->state->active;
	}

	if (asyv) {
		asyw->point.x = asyw->state.crtc_x;
		asyw->point.y = asyw->state.crtc_y;
		if (memcmp(&armw->point, &asyw->point, sizeof(asyw->point)))
			asyw->set.point = true;

		ret = nv50_wndw_atomic_check_acquire(wndw, asyw, asyh);
		if (ret)
			return ret;
	} else
	if (varm) {
		nv50_wndw_atomic_check_release(wndw, asyw, harm);
	} else {
		return 0;
	}

	if (!asyv || asym) {
		asyw->clr.ntfy = armw->ntfy.handle != 0;
		asyw->clr.sema = armw->sema.handle != 0;
		if (wndw->func->image_clr)
			asyw->clr.image = armw->image.handle != 0;
		asyw->set.lut = wndw->func->lut && asyv;
	}

	return 0;
}
static void
nv50_wndw_cleanup_fb(struct drm_plane *plane, struct drm_plane_state *old_state)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(old_state->fb);
	struct nouveau_drm *drm = nouveau_drm(plane->dev);

	NV_ATOMIC(drm, "%s cleanup: %p\n", plane->name, old_state->fb);
	if (!old_state->fb)
		return;

	nouveau_bo_unpin(fb->nvbo);
}

static int
nv50_wndw_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(state->fb);
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *asyh;
	struct nv50_dmac_ctxdma *ctxdma;
	int ret;

	NV_ATOMIC(drm, "%s prepare: %p\n", plane->name, state->fb);
	if (!asyw->state.fb)
		return 0;

	ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM, true);
	if (ret)
		return ret;

	ctxdma = nv50_dmac_ctxdma_new(wndw->dmac, fb);
	if (IS_ERR(ctxdma)) {
		nouveau_bo_unpin(fb->nvbo);
		return PTR_ERR(ctxdma);
	}

	asyw->state.fence = reservation_object_get_excl_rcu(fb->nvbo->bo.resv);
	asyw->image.handle = ctxdma->object.handle;
	asyw->image.offset = fb->nvbo->bo.offset;

	if (wndw->func->prepare) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);

		wndw->func->prepare(wndw, asyh, asyw);
	}

	return 0;
}
static const struct drm_plane_helper_funcs
nv50_wndw_helper = {
	.prepare_fb = nv50_wndw_prepare_fb,
	.cleanup_fb = nv50_wndw_cleanup_fb,
	.atomic_check = nv50_wndw_atomic_check,
};
static void
nv50_wndw_atomic_destroy_state(struct drm_plane *plane,
			       struct drm_plane_state *state)
{
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	__drm_atomic_helper_plane_destroy_state(&asyw->state);
	kfree(asyw);
}

static struct drm_plane_state *
nv50_wndw_atomic_duplicate_state(struct drm_plane *plane)
{
	struct nv50_wndw_atom *armw = nv50_wndw_atom(plane->state);
	struct nv50_wndw_atom *asyw;
	if (!(asyw = kmalloc(sizeof(*asyw), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_plane_duplicate_state(plane, &asyw->state);
	asyw->interval = 1;
	asyw->sema = armw->sema;
	asyw->ntfy = armw->ntfy;
	asyw->image = armw->image;
	asyw->point = armw->point;
	asyw->lut = armw->lut;
	asyw->clr.mask = 0;
	asyw->set.mask = 0;
	return &asyw->state;
}
static void
nv50_wndw_reset(struct drm_plane *plane)
{
	struct nv50_wndw_atom *asyw;

	if (WARN_ON(!(asyw = kzalloc(sizeof(*asyw), GFP_KERNEL))))
		return;

	if (plane->state)
		plane->funcs->atomic_destroy_state(plane, plane->state);
	plane->state = &asyw->state;
	plane->state->plane = plane;
	plane->state->rotation = DRM_ROTATE_0;
}

static void
nv50_wndw_destroy(struct drm_plane *plane)
{
	struct nv50_wndw *wndw = nv50_wndw(plane);
	void *data;
	nvif_notify_fini(&wndw->notify);
	data = wndw->func->dtor(wndw);
	drm_plane_cleanup(&wndw->plane);
	kfree(data);
}
1052 .update_plane
= drm_atomic_helper_update_plane
,
1053 .disable_plane
= drm_atomic_helper_disable_plane
,
1054 .destroy
= nv50_wndw_destroy
,
1055 .reset
= nv50_wndw_reset
,
1056 .set_property
= drm_atomic_helper_plane_set_property
,
1057 .atomic_duplicate_state
= nv50_wndw_atomic_duplicate_state
,
1058 .atomic_destroy_state
= nv50_wndw_atomic_destroy_state
,
1062 nv50_wndw_fini(struct nv50_wndw
*wndw
)
1064 nvif_notify_put(&wndw
->notify
);
1068 nv50_wndw_init(struct nv50_wndw
*wndw
)
1070 nvif_notify_get(&wndw
->notify
);
static int
nv50_wndw_ctor(const struct nv50_wndw_func *func, struct drm_device *dev,
	       enum drm_plane_type type, const char *name, int index,
	       struct nv50_dmac *dmac, const u32 *format, int nformat,
	       struct nv50_wndw *wndw)
{
	int ret;

	wndw->func = func;
	wndw->dmac = dmac;

	ret = drm_universal_plane_init(dev, &wndw->plane, 0, &nv50_wndw, format,
				       nformat, type, "%s-%d", name, index);
	if (ret)
		return ret;

	drm_plane_helper_add(&wndw->plane, &nv50_wndw_helper);
	return 0;
}
/******************************************************************************
 * Cursor plane
 *****************************************************************************/
#define nv50_curs(p) container_of((p), struct nv50_curs, wndw)

struct nv50_curs {
	struct nv50_wndw wndw;
	struct nvif_object chan;
};

static u32
nv50_curs_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0080, 0x00000000);
	return 0;
}

static void
nv50_curs_point(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0084, (asyw->point.y << 16) | asyw->point.x);
}

static void
nv50_curs_prepare(struct nv50_wndw *wndw, struct nv50_head_atom *asyh,
		  struct nv50_wndw_atom *asyw)
{
	u32 handle = nv50_disp(wndw->plane.dev)->mast.base.vram.handle;
	u32 offset = asyw->image.offset;
	if (asyh->curs.handle != handle || asyh->curs.offset != offset) {
		asyh->curs.handle = handle;
		asyh->curs.offset = offset;
		asyh->set.curs = asyh->curs.visible;
	}
}

static void
nv50_curs_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->curs.visible = false;
}

static int
nv50_curs_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	int ret;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   true, true);
	asyh->curs.visible = asyw->state.visible;
	if (ret || !asyh->curs.visible)
		return ret;

	switch (asyw->state.fb->width) {
	case 32: asyh->curs.layout = 0; break;
	case 64: asyh->curs.layout = 1; break;
	default:
		return -EINVAL;
	}

	if (asyw->state.fb->width != asyw->state.fb->height)
		return -EINVAL;

	switch (asyw->state.fb->format->format) {
	case DRM_FORMAT_ARGB8888: asyh->curs.format = 1; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	return 0;
}

static void *
nv50_curs_dtor(struct nv50_wndw *wndw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_object_fini(&curs->chan);
	return curs;
}

static const u32
nv50_curs_format[] = {
	DRM_FORMAT_ARGB8888,
};

static const struct nv50_wndw_func
nv50_curs = {
	.dtor = nv50_curs_dtor,
	.acquire = nv50_curs_acquire,
	.release = nv50_curs_release,
	.prepare = nv50_curs_prepare,
	.point = nv50_curs_point,
	.update = nv50_curs_update,
};
static int
nv50_curs_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_curs **pcurs)
{
	static const struct nvif_mclass curses[] = {
		{ GK104_DISP_CURSOR, 0 },
		{ GF110_DISP_CURSOR, 0 },
		{ GT214_DISP_CURSOR, 0 },
		{ G82_DISP_CURSOR, 0 },
		{ NV50_DISP_CURSOR, 0 },
		{}
	};
	struct nv50_disp_cursor_v0 args = {
		.head = head->base.index,
	};
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_curs *curs;
	int cid, ret;

	cid = nvif_mclass(disp->disp, curses);
	if (cid < 0) {
		NV_ERROR(drm, "No supported cursor immediate class\n");
		return cid;
	}

	if (!(curs = *pcurs = kzalloc(sizeof(*curs), GFP_KERNEL)))
		return -ENOMEM;

	ret = nv50_wndw_ctor(&nv50_curs, drm->dev, DRM_PLANE_TYPE_CURSOR,
			     "curs", head->base.index, &disp->mast.base,
			     nv50_curs_format, ARRAY_SIZE(nv50_curs_format),
			     &curs->wndw);
	if (ret) {
		kfree(curs);
		return ret;
	}

	ret = nvif_object_init(disp->disp, 0, curses[cid].oclass, &args,
			       sizeof(args), &curs->chan);
	if (ret) {
		NV_ERROR(drm, "curs%04x allocation failed: %d\n",
			 curses[cid].oclass, ret);
		return ret;
	}

	return 0;
}
1245 *****************************************************************************/
1246 #define nv50_base(p) container_of((p), struct nv50_base, wndw)
1249 struct nv50_wndw wndw
;
1250 struct nv50_sync chan
;
1255 nv50_base_notify(struct nvif_notify
*notify
)
1257 return NVIF_NOTIFY_KEEP
;
1261 nv50_base_lut(struct nv50_wndw
*wndw
, struct nv50_wndw_atom
*asyw
)
1263 struct nv50_base
*base
= nv50_base(wndw
);
1265 if ((push
= evo_wait(&base
->chan
, 2))) {
1266 evo_mthd(push
, 0x00e0, 1);
1267 evo_data(push
, asyw
->lut
.enable
<< 30);
1268 evo_kick(push
, &base
->chan
);
1273 nv50_base_image_clr(struct nv50_wndw
*wndw
)
1275 struct nv50_base
*base
= nv50_base(wndw
);
1277 if ((push
= evo_wait(&base
->chan
, 4))) {
1278 evo_mthd(push
, 0x0084, 1);
1279 evo_data(push
, 0x00000000);
1280 evo_mthd(push
, 0x00c0, 1);
1281 evo_data(push
, 0x00000000);
1282 evo_kick(push
, &base
->chan
);
static void
nv50_base_image_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	const s32 oclass = base->chan.base.base.user.oclass;
	u32 *push;
	if ((push = evo_wait(&base->chan, 10))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, (asyw->image.mode << 8) |
			       (asyw->image.interval << 4));
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, asyw->image.handle);
		if (oclass < G82_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0800, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 20) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, (asyw->image.kind << 16) |
				       (asyw->image.format << 8));
		} else
		if (oclass < GF110_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0800, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 20) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, asyw->image.format << 8);
		} else {
			evo_mthd(push, 0x0400, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 24) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, asyw->image.format << 8);
		}
		evo_kick(push, &base->chan);
	}
}
static void
nv50_base_ntfy_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x00a4, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_ntfy_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 3))) {
		evo_mthd(push, 0x00a0, 2);
		evo_data(push, (asyw->ntfy.awaken << 30) | asyw->ntfy.offset);
		evo_data(push, asyw->ntfy.handle);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_sema_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_sema_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 5))) {
		evo_mthd(push, 0x0088, 4);
		evo_data(push, asyw->sema.offset);
		evo_data(push, asyw->sema.acquire);
		evo_data(push, asyw->sema.release);
		evo_data(push, asyw->sema.handle);
		evo_kick(push, &base->chan);
	}
}
static u32
nv50_base_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;

	if (!(push = evo_wait(&base->chan, 2)))
		return 0;
	evo_mthd(push, 0x0080, 1);
	evo_data(push, interlock);
	evo_kick(push, &base->chan);

	if (base->chan.base.base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA)
		return interlock ? 2 << (base->id * 8) : 0;
	return interlock ? 2 << (base->id * 4) : 0;
}
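
/* Worked example: for head 1 on pre-GF110 hardware the update above
 * reports interlock flag 2 << (1 * 8) = 0x200; on GF110+ the per-head
 * spacing shrinks to 4 bits, giving 2 << (1 * 4) = 0x20.
 */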
static int
nv50_base_ntfy_wait_begun(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	if (nvif_msec(&drm->client.device, 2000ULL,
		u32 data = nouveau_bo_rd32(disp->sync, asyw->ntfy.offset / 4);
		if ((data & 0xc0000000) == 0x40000000)
			break;
		usleep_range(1, 2);
	) < 0)
		return -ETIMEDOUT;
	return 0;
}

static void
nv50_base_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->base.cpp = 0;
}
static int
nv50_base_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	const struct drm_framebuffer *fb = asyw->state.fb;
	int ret;

	if (!fb->format->depth)
		return -EINVAL;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   false, true);
	if (ret)
		return ret;

	asyh->base.depth = fb->format->depth;
	asyh->base.cpp = fb->format->cpp[0];
	asyh->base.x = asyw->state.src.x1 >> 16;
	asyh->base.y = asyw->state.src.y1 >> 16;
	asyh->base.w = asyw->state.fb->width;
	asyh->base.h = asyw->state.fb->height;

	switch (fb->format->format) {
	case DRM_FORMAT_C8         : asyw->image.format = 0x1e; break;
	case DRM_FORMAT_RGB565     : asyw->image.format = 0xe8; break;
	case DRM_FORMAT_XRGB1555   :
	case DRM_FORMAT_ARGB1555   : asyw->image.format = 0xe9; break;
	case DRM_FORMAT_XRGB8888   :
	case DRM_FORMAT_ARGB8888   : asyw->image.format = 0xcf; break;
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ABGR2101010: asyw->image.format = 0xd1; break;
	case DRM_FORMAT_XBGR8888   :
	case DRM_FORMAT_ABGR8888   : asyw->image.format = 0xd5; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	asyw->lut.enable = 1;
	asyw->set.image = true;
	return 0;
}
static void *
nv50_base_dtor(struct nv50_wndw *wndw)
{
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	struct nv50_base *base = nv50_base(wndw);
	nv50_dmac_destroy(&base->chan.base, disp->disp);
	return base;
}

static const u32
nv50_base_format[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB1555,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_XBGR2101010,
	DRM_FORMAT_ABGR2101010,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ABGR8888,
};

static const struct nv50_wndw_func
nv50_base = {
	.dtor = nv50_base_dtor,
	.acquire = nv50_base_acquire,
	.release = nv50_base_release,
	.sema_set = nv50_base_sema_set,
	.sema_clr = nv50_base_sema_clr,
	.ntfy_set = nv50_base_ntfy_set,
	.ntfy_clr = nv50_base_ntfy_clr,
	.ntfy_wait_begun = nv50_base_ntfy_wait_begun,
	.image_set = nv50_base_image_set,
	.image_clr = nv50_base_image_clr,
	.lut = nv50_base_lut,
	.update = nv50_base_update,
};
static int
nv50_base_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_base **pbase)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_base *base;
	int ret;

	if (!(base = *pbase = kzalloc(sizeof(*base), GFP_KERNEL)))
		return -ENOMEM;

	base->id = head->base.index;
	base->wndw.ntfy = EVO_FLIP_NTFY0(base->id);
	base->wndw.sema = EVO_FLIP_SEM0(base->id);
	base->wndw.data = 0x00000000;

	ret = nv50_wndw_ctor(&nv50_base, drm->dev, DRM_PLANE_TYPE_PRIMARY,
			     "base", base->id, &base->chan.base,
			     nv50_base_format, ARRAY_SIZE(nv50_base_format),
			     &base->wndw);
	if (ret) {
		kfree(base);
		return ret;
	}

	ret = nv50_base_create(&drm->client.device, disp->disp, base->id,
			       disp->sync->bo.offset, &base->chan);
	if (ret)
		return ret;

	return nvif_notify_init(&base->chan.base.base.user, nv50_base_notify,
				false,
				NV50_DISP_BASE_CHANNEL_DMA_V0_NTFY_UEVENT,
				&(struct nvif_notify_uevent_req) {},
				sizeof(struct nvif_notify_uevent_req),
				sizeof(struct nvif_notify_uevent_rep),
				&base->wndw.notify);
}
/******************************************************************************
 * Head
 *****************************************************************************/
static void
nv50_head_procamp(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a8 + (head->base.index * 0x400), 1);
		else
			evo_mthd(push, 0x0498 + (head->base.index * 0x300), 1);
		evo_data(push, (asyh->procamp.sat.sin << 20) |
			       (asyh->procamp.sat.cos << 8));
		evo_kick(push, core);
	}
}
static void
nv50_head_dither(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a0 + (head->base.index * 0x0400), 1);
		else
		if (core->base.user.oclass < GK104_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0490 + (head->base.index * 0x0300), 1);
		else
			evo_mthd(push, 0x04a0 + (head->base.index * 0x0300), 1);
		evo_data(push, (asyh->dither.mode << 3) |
			       (asyh->dither.bits << 1) |
			        asyh->dither.enable);
		evo_kick(push, core);
	}
}
static void
nv50_head_ovly(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0904 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d4 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}

static void
nv50_head_base(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		case 1: bounds |= 0x00000000; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0900 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d0 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}
static void
nv50_head_curs_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_curs_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 5))) {
		if (core->base.user.oclass < G82_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, asyh->curs.handle);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, asyh->curs.handle);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_core_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0874 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x0474 + head->base.index * 0x300, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, core);
	}
}
static void
nv50_head_core_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 9))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.kind << 16 |
				       asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
			/* EVO will complain with INVALID_STATE if we have an
			 * active cursor and (re)specify HeadSetContextDmaIso
			 * without also updating HeadSetOffsetCursor.
			 */
			asyh->set.curs = asyh->curs.visible;
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		} else {
			evo_mthd(push, 0x0460 + head->base.index * 0x300, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0468 + head->base.index * 0x300, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 24 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x04b0 + head->base.index * 0x300, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_lut_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_lut_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 7))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, asyh->lut.handle);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, asyh->lut.handle);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	struct nv50_head_mode *m = &asyh->mode;
	u32 *push;
	if ((push = evo_wait(core, 14))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0804 + (head->base.index * 0x400), 2);
			evo_data(push, 0x00800000 | m->clock);
			evo_data(push, m->interlace ? 0x00000002 : 0x00000000);
			evo_mthd(push, 0x0810 + (head->base.index * 0x400), 7);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active << 16) | m->h.active);
			evo_data(push, (m->v.synce << 16) | m->h.synce);
			evo_data(push, (m->v.blanke << 16) | m->h.blanke);
			evo_data(push, (m->v.blanks << 16) | m->h.blanks);
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_data(push, asyh->mode.v.blankus);
			evo_mthd(push, 0x082c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0410 + (head->base.index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active << 16) | m->h.active);
			evo_data(push, (m->v.synce << 16) | m->h.synce);
			evo_data(push, (m->v.blanke << 16) | m->h.blanke);
			evo_data(push, (m->v.blanks << 16) | m->h.blanks);
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_mthd(push, 0x042c + (head->base.index * 0x300), 2);
			evo_data(push, 0x00000000); /* ??? */
			evo_data(push, 0xffffff00);
			evo_mthd(push, 0x0450 + (head->base.index * 0x300), 3);
			evo_data(push, m->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, m->clock * 1000);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_view(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 10))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a4 + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (head->base.index * 0x400), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x08d8 + (head->base.index * 0x400), 2);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		} else {
			evo_mthd(push, 0x0494 + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (head->base.index * 0x300), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x04c0 + (head->base.index * 0x300), 3);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		}
		evo_kick(push, core);
	}
}
*head
, struct nv50_head_atom
*asyh
, bool y
)
1895 if (asyh
->clr
.core
&& (!asyh
->set
.core
|| y
))
1896 nv50_head_lut_clr(head
);
1897 if (asyh
->clr
.core
&& (!asyh
->set
.core
|| y
))
1898 nv50_head_core_clr(head
);
1899 if (asyh
->clr
.curs
&& (!asyh
->set
.curs
|| y
))
1900 nv50_head_curs_clr(head
);
1904 nv50_head_flush_set(struct nv50_head
*head
, struct nv50_head_atom
*asyh
)
1906 if (asyh
->set
.view
) nv50_head_view (head
, asyh
);
1907 if (asyh
->set
.mode
) nv50_head_mode (head
, asyh
);
1908 if (asyh
->set
.core
) nv50_head_lut_set (head
, asyh
);
1909 if (asyh
->set
.core
) nv50_head_core_set(head
, asyh
);
1910 if (asyh
->set
.curs
) nv50_head_curs_set(head
, asyh
);
1911 if (asyh
->set
.base
) nv50_head_base (head
, asyh
);
1912 if (asyh
->set
.ovly
) nv50_head_ovly (head
, asyh
);
1913 if (asyh
->set
.dither
) nv50_head_dither (head
, asyh
);
1914 if (asyh
->set
.procamp
) nv50_head_procamp (head
, asyh
);
static void
nv50_head_atomic_check_procamp(struct nv50_head_atom *armh,
			       struct nv50_head_atom *asyh,
			       struct nouveau_conn_atom *asyc)
{
	const int vib = asyc->procamp.color_vibrance - 100;
	const int hue = asyc->procamp.vibrant_hue - 90;
	const int adj = (vib > 0) ? 50 : 0;
	asyh->procamp.sat.cos = ((vib * 2047 + adj) / 100) & 0xfff;
	asyh->procamp.sat.sin = ((hue * 2047) / 100) & 0xfff;
	asyh->set.procamp = true;
}
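
/* Worked example (property values illustrative): color_vibrance=150 and
 * vibrant_hue=90 give vib=50, hue=0 and adj=50, so sat.cos =
 * (50 * 2047 + 50) / 100 = 1024 (0x400) and sat.sin = 0.
 */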
static void
nv50_head_atomic_check_dither(struct nv50_head_atom *armh,
			      struct nv50_head_atom *asyh,
			      struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	u32 mode = 0x00;

	if (asyc->dither.mode == DITHERING_MODE_AUTO) {
		if (asyh->base.depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = asyc->dither.mode;
	}

	if (asyc->dither.depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= asyc->dither.depth;
	}

	asyh->dither.enable = mode;
	asyh->dither.bits = mode >> 1;
	asyh->dither.mode = mode >> 3;
	asyh->set.dither = true;
}
static void
nv50_head_atomic_check_view(struct nv50_head_atom *armh,
			    struct nv50_head_atom *asyh,
			    struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	struct drm_display_mode *omode = &asyh->state.adjusted_mode;
	struct drm_display_mode *umode = &asyh->state.mode;
	int mode = asyc->scaler.mode;
	struct edid *edid;

	if (connector->edid_blob_ptr)
		edid = (struct edid *)connector->edid_blob_ptr->data;
	else
		edid = NULL;

	if (!asyc->scaler.full) {
		if (mode == DRM_MODE_SCALE_NONE)
			omode = umode;
	} else {
		/* Non-EDID LVDS/eDP mode. */
		mode = DRM_MODE_SCALE_FULLSCREEN;
	}

	asyh->view.iW = umode->hdisplay;
	asyh->view.iH = umode->vdisplay;
	asyh->view.oW = omode->hdisplay;
	asyh->view.oH = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		asyh->view.oH *= 2;

	/* Add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if ((asyc->scaler.underscan.mode == UNDERSCAN_ON ||
	    (asyc->scaler.underscan.mode == UNDERSCAN_AUTO &&
	     drm_detect_hdmi_monitor(edid)))) {
		u32 bX = asyc->scaler.underscan.hborder;
		u32 bY = asyc->scaler.underscan.vborder;
		u32 r = (asyh->view.oH << 19) / asyh->view.oW;

		if (bX) {
			asyh->view.oW -= (bX * 2);
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		} else {
			asyh->view.oW -= (asyh->view.oW >> 4) + 32;
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
	}

	/* Handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation.
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		asyh->view.oW = min((u16)umode->hdisplay, asyh->view.oW);
		asyh->view.oH = min((u16)umode->vdisplay, asyh->view.oH);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (asyh->view.oH < asyh->view.oW) {
			u32 r = (asyh->view.iW << 19) / asyh->view.iH;
			asyh->view.oW = ((asyh->view.oH * r) + (r / 2)) >> 19;
		} else {
			u32 r = (asyh->view.iH << 19) / asyh->view.iW;
			asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	asyh->set.view = true;
}
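
/* The ratios above are fixed-point with 19 fractional bits.  Worked
 * example (sizes illustrative): scaling a 1280x720 fb onto a 1920x1080
 * mode takes the ASPECT branch with r = (1280 << 19) / 720 = 932067,
 * giving oW = ((1080 * 932067) + 466033) >> 19 = 1920.
 */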
static void
nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hbackp = mode->htotal - mode->hsync_end;
	u32 vbackp = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	u32 hfrontp = mode->hsync_start - mode->hdisplay;
	u32 vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	u32 blankus;
	struct nv50_head_mode *m = &asyh->mode;

	m->h.active = mode->htotal;
	m->h.synce = mode->hsync_end - mode->hsync_start - 1;
	m->h.blanke = m->h.synce + hbackp;
	m->h.blanks = mode->htotal - hfrontp - 1;

	m->v.active = mode->vtotal * vscan / ilace;
	m->v.synce = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	m->v.blanke = m->v.synce + vbackp;
	m->v.blanks = m->v.active - vfrontp - 1;

	/*XXX: Safe underestimate, even "0" works */
	blankus = (m->v.active - mode->vdisplay - 2) * m->h.active;
	blankus *= 1000;
	blankus /= mode->clock;
	m->v.blankus = blankus;

	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		m->v.blank2e = m->v.active + m->v.synce + vbackp;
		m->v.blank2s = m->v.blank2e + (mode->vdisplay * vscan / ilace);
		m->v.active = (m->v.active * 2) + 1;
		m->interlace = true;
	} else {
		m->v.blank2e = 0;
		m->v.blank2s = 1;
		m->interlace = false;
	}
	m->clock = mode->clock;

	drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V);
	asyh->set.mode = true;
}
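
/* Worked example, using CEA-861 1920x1080@60 (htotal 2200, hsync
 * 2008-2052): h.active = 2200, h.synce = 2052 - 2008 - 1 = 43, hbackp =
 * 2200 - 2052 = 148, so h.blanke = 191; hfrontp = 2008 - 1920 = 88, so
 * h.blanks = 2200 - 88 - 1 = 2111.
 */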
static int
nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	struct nouveau_conn_atom *asyc = NULL;
	struct drm_connector_state *conns;
	struct drm_connector *conn;
	int i;

	NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active);
	if (asyh->state.active) {
		for_each_connector_in_state(asyh->state.state, conn, conns, i) {
			if (conns->crtc == crtc) {
				asyc = nouveau_conn_atom(conns);
				break;
			}
		}

		if (armh->state.active) {
			if (asyc) {
				if (asyh->state.mode_changed)
					asyc->set.scaler = true;
				if (armh->base.depth != asyh->base.depth)
					asyc->set.dither = true;
			}
		} else {
			if (asyc)
				asyc->set.mask = ~0;
			asyh->set.mask = ~0;
		}

		if (asyh->state.mode_changed)
			nv50_head_atomic_check_mode(head, asyh);

		if (asyc) {
			if (asyc->set.scaler)
				nv50_head_atomic_check_view(armh, asyh, asyc);
			if (asyc->set.dither)
				nv50_head_atomic_check_dither(armh, asyh, asyc);
			if (asyc->set.procamp)
				nv50_head_atomic_check_procamp(armh, asyh, asyc);
		}

		if ((asyh->core.visible = (asyh->base.cpp != 0))) {
			asyh->core.x = asyh->base.x;
			asyh->core.y = asyh->base.y;
			asyh->core.w = asyh->base.w;
			asyh->core.h = asyh->base.h;
		} else
		if ((asyh->core.visible = asyh->curs.visible)) {
			/*XXX: We need to either find some way of having the
			 *     primary base layer appear black, while still
			 *     being able to display the other layers, or we
			 *     need to allocate a dummy black surface here.
			 */
			asyh->core.x = 0;
			asyh->core.y = 0;
			asyh->core.w = asyh->state.mode.hdisplay;
			asyh->core.h = asyh->state.mode.vdisplay;
		}
		asyh->core.handle = disp->mast.base.vram.handle;
		asyh->core.offset = 0;
		asyh->core.format = 0xcf;
		asyh->core.kind = 0;
		asyh->core.layout = 1;
		asyh->core.block = 0;
		asyh->core.pitch = ALIGN(asyh->core.w, 64) * 4;
		asyh->lut.handle = disp->mast.base.vram.handle;
		asyh->lut.offset = head->base.lut.nvbo->bo.offset;
		asyh->set.base = armh->base.cpp != asyh->base.cpp;
		asyh->set.ovly = armh->ovly.cpp != asyh->ovly.cpp;
	} else {
		asyh->core.visible = false;
		asyh->curs.visible = false;
		asyh->base.cpp = 0;
		asyh->ovly.cpp = 0;
	}

	if (!drm_atomic_crtc_needs_modeset(&asyh->state)) {
		if (asyh->core.visible) {
			if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core)))
				asyh->set.core = true;
		} else
		if (armh->core.visible) {
			asyh->clr.core = true;
		}

		if (asyh->curs.visible) {
			if (memcmp(&armh->curs, &asyh->curs, sizeof(asyh->curs)))
				asyh->set.curs = true;
		} else
		if (armh->curs.visible) {
			asyh->clr.curs = true;
		}
	} else {
		asyh->clr.core = armh->core.visible;
		asyh->clr.curs = armh->curs.visible;
		asyh->set.core = asyh->core.visible;
		asyh->set.curs = asyh->curs.visible;
	}

	if (asyh->clr.mask || asyh->set.mask)
		nv50_atom(asyh->state.state)->lock_core = true;
	return 0;
}
static void
nv50_head_lut_load(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		u16 r = nv_crtc->lut.r[i] >> 2;
		u16 g = nv_crtc->lut.g[i] >> 2;
		u16 b = nv_crtc->lut.b[i] >> 2;

		if (disp->disp->oclass < GF110_DISP) {
			writew(r + 0x0000, lut + (i * 0x08) + 0);
			writew(g + 0x0000, lut + (i * 0x08) + 2);
			writew(b + 0x0000, lut + (i * 0x08) + 4);
		} else {
			writew(r + 0x6000, lut + (i * 0x20) + 0);
			writew(g + 0x6000, lut + (i * 0x20) + 2);
			writew(b + 0x6000, lut + (i * 0x20) + 4);
		}
	}
}

static const struct drm_crtc_helper_funcs
nv50_head_help = {
	.load_lut = nv50_head_lut_load,
	.atomic_check = nv50_head_atomic_check,
};
2221 nv50_head_gamma_set(struct drm_crtc
*crtc
, u16
*r
, u16
*g
, u16
*b
,
2223 struct drm_modeset_acquire_ctx
*ctx
)
2225 struct nouveau_crtc
*nv_crtc
= nouveau_crtc(crtc
);
2228 for (i
= 0; i
< size
; i
++) {
2229 nv_crtc
->lut
.r
[i
] = r
[i
];
2230 nv_crtc
->lut
.g
[i
] = g
[i
];
2231 nv_crtc
->lut
.b
[i
] = b
[i
];
2234 nv50_head_lut_load(crtc
);
2239 nv50_head_atomic_destroy_state(struct drm_crtc
*crtc
,
2240 struct drm_crtc_state
*state
)
2242 struct nv50_head_atom
*asyh
= nv50_head_atom(state
);
2243 __drm_atomic_helper_crtc_destroy_state(&asyh
->state
);
2247 static struct drm_crtc_state
*
2248 nv50_head_atomic_duplicate_state(struct drm_crtc
*crtc
)
2250 struct nv50_head_atom
*armh
= nv50_head_atom(crtc
->state
);
2251 struct nv50_head_atom
*asyh
;
2252 if (!(asyh
= kmalloc(sizeof(*asyh
), GFP_KERNEL
)))
2254 __drm_atomic_helper_crtc_duplicate_state(crtc
, &asyh
->state
);
2255 asyh
->view
= armh
->view
;
2256 asyh
->mode
= armh
->mode
;
2257 asyh
->lut
= armh
->lut
;
2258 asyh
->core
= armh
->core
;
2259 asyh
->curs
= armh
->curs
;
2260 asyh
->base
= armh
->base
;
2261 asyh
->ovly
= armh
->ovly
;
2262 asyh
->dither
= armh
->dither
;
2263 asyh
->procamp
= armh
->procamp
;
2266 return &asyh
->state
;
2270 __drm_atomic_helper_crtc_reset(struct drm_crtc
*crtc
,
2271 struct drm_crtc_state
*state
)
2274 crtc
->funcs
->atomic_destroy_state(crtc
, crtc
->state
);
2275 crtc
->state
= state
;
2276 crtc
->state
->crtc
= crtc
;
2280 nv50_head_reset(struct drm_crtc
*crtc
)
2282 struct nv50_head_atom
*asyh
;
2284 if (WARN_ON(!(asyh
= kzalloc(sizeof(*asyh
), GFP_KERNEL
))))
2287 __drm_atomic_helper_crtc_reset(crtc
, &asyh
->state
);
2291 nv50_head_destroy(struct drm_crtc
*crtc
)
2293 struct nouveau_crtc
*nv_crtc
= nouveau_crtc(crtc
);
2294 struct nv50_disp
*disp
= nv50_disp(crtc
->dev
);
2295 struct nv50_head
*head
= nv50_head(crtc
);
2297 nv50_dmac_destroy(&head
->ovly
.base
, disp
->disp
);
2298 nv50_pioc_destroy(&head
->oimm
.base
);
2300 nouveau_bo_unmap(nv_crtc
->lut
.nvbo
);
2301 if (nv_crtc
->lut
.nvbo
)
2302 nouveau_bo_unpin(nv_crtc
->lut
.nvbo
);
2303 nouveau_bo_ref(NULL
, &nv_crtc
->lut
.nvbo
);
2305 drm_crtc_cleanup(crtc
);
2309 static const struct drm_crtc_funcs
2311 .reset
= nv50_head_reset
,
2312 .gamma_set
= nv50_head_gamma_set
,
2313 .destroy
= nv50_head_destroy
,
2314 .set_config
= drm_atomic_helper_set_config
,
2315 .page_flip
= drm_atomic_helper_page_flip
,
2316 .set_property
= drm_atomic_helper_crtc_set_property
,
2317 .atomic_duplicate_state
= nv50_head_atomic_duplicate_state
,
2318 .atomic_destroy_state
= nv50_head_atomic_destroy_state
,
static int
nv50_head_create(struct drm_device *dev, int index)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvif_device *device = &drm->client.device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct nv50_base *base;
	struct nv50_curs *curs;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	ret = nv50_base_new(drm, head, &base);
	if (ret == 0)
		ret = nv50_curs_new(drm, head, &curs);
	if (ret) {
		kfree(head);
		return ret;
	}

	crtc = &head->base.base;
	drm_crtc_init_with_planes(dev, crtc, &base->wndw.plane,
				  &curs->wndw.plane, &nv50_head_func,
				  "head-%d", head->base.index);
	drm_crtc_helper_add(crtc, &nv50_head_help);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(&drm->client, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(head->base.lut.nvbo);
			if (ret)
				nouveau_bo_unpin(head->base.lut.nvbo);
		}
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	/* allocate overlay resources */
	ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
	if (ret)
		goto out;

	ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;

out:
	if (ret)
		nv50_head_destroy(crtc);
	return ret;
}
/******************************************************************************
 * Output path helpers
 *****************************************************************************/
static int
nv50_outp_atomic_check_view(struct drm_encoder *encoder,
			    struct drm_crtc_state *crtc_state,
			    struct drm_connector_state *conn_state,
			    struct drm_display_mode *native_mode)
{
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);

	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
	asyc->scaler.full = false;
	if (!native_mode)
		return 0;

	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
		switch (connector->connector_type) {
		case DRM_MODE_CONNECTOR_LVDS:
		case DRM_MODE_CONNECTOR_eDP:
			/* Force use of scaler for non-EDID modes. */
			if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
				break;
			mode = native_mode;
			asyc->scaler.full = true;
			break;
		default:
			break;
		}
	} else {
		mode = native_mode;
	}

	if (!drm_mode_equal(adjusted_mode, mode)) {
		drm_mode_copy(adjusted_mode, mode);
		crtc_state->mode_changed = true;
	}

	return 0;
}

static int
nv50_outp_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nouveau_connector *nv_connector =
		nouveau_connector(conn_state->connector);
	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   nv_connector->native_mode);
}
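/* Note: for fixed-resolution panels (LVDS/eDP) any mode that did not come
 * from the panel's EDID has to go through the GPU scaler even when scaling
 * is nominally disabled, since the panel only accepts its native timings;
 * that appears to be what the DRM_MODE_TYPE_DRIVER test above is guarding.
 */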
/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nv50_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = 1,
		.pwr.data  = 1,
		.pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
			      mode != DRM_MODE_DPMS_OFF),
		.pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
			      mode != DRM_MODE_DPMS_OFF),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

static void
nv50_dac_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}

static void
nv50_dac_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u32 *push;

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}
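/* Note on the evo_*() helpers used above and throughout this file: as far
 * as the surrounding code shows, evo_wait() reserves space in the channel's
 * push buffer, evo_mthd() writes a method header (offset and word count),
 * evo_data() appends data words, and evo_kick() submits the block to the
 * display engine.  The <GF110 and GF110+ branches exist because the two
 * display classes use different core channel method layouts.
 */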
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}

static const struct drm_encoder_helper_funcs
nv50_dac_help = {
	.dpms = nv50_dac_dpms,
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_dac_enable,
	.disable = nv50_dac_disable,
	.detect = nv50_dac_detect
};

static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};

static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_dac_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
/******************************************************************************
 * Audio
 *****************************************************************************/
static void
nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

static void
nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht   = nv_encoder->dcb->hasht,
		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				     (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	nvif_mthd(disp->disp, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
}
/******************************************************************************
 * HDMI
 *****************************************************************************/
static void
nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

static void
nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
	nv50_audio_enable(encoder, mode);
}
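/* Note: max_ac_packet above is derived from the horizontal blanking period
 * (htotal - hdisplay) minus the HDCP rekey window and an 18-unit margin,
 * then scaled to 32-pixel units.  Per the comments carried over from the
 * original code, both constants simply match the binary driver and tegra
 * rather than anything publicly documented.
 */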
/******************************************************************************
 * MST
 *****************************************************************************/
#define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
#define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
#define nv50_msto(p) container_of((p), struct nv50_msto, encoder)

struct nv50_mstm {
	struct nouveau_encoder *outp;

	struct drm_dp_mst_topology_mgr mgr;
	struct nv50_msto *msto[4];

	bool modified;
};

struct nv50_mstc {
	struct nv50_mstm *mstm;
	struct drm_dp_mst_port *port;
	struct drm_connector connector;

	struct drm_display_mode *native;
	struct edid *edid;

	int pbn;
};

struct nv50_msto {
	struct drm_encoder encoder;

	struct nv50_head *head;
	struct nv50_mstc *mstc;
	bool disabled;
};
static struct drm_dp_payload *
nv50_msto_payload(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	int vcpi = mstc->port->vcpi.vcpi, i;

	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
			  mstm->outp->base.base.name, i, payload->vcpi,
			  payload->start_slot, payload->num_slots);
	}

	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		if (payload->vcpi == vcpi)
			return payload;
	}

	return NULL;
}
static void
nv50_msto_cleanup(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0 && !nv50_msto_payload(msto))
		drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
	if (msto->disabled) {
		msto->mstc = NULL;
		msto->head = NULL;
		msto->disabled = false;
	}
}
static void
nv50_msto_prepare(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
		.base.hasht  = mstm->outp->dcb->hasht,
		.base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
			       (0x0100 << msto->head->base.index),
	};

	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0) {
		struct drm_dp_payload *payload = nv50_msto_payload(msto);
		if (payload) {
			args.vcpi.start_slot = payload->start_slot;
			args.vcpi.num_slots = payload->num_slots;
			args.vcpi.pbn = mstc->port->vcpi.pbn;
			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
		}
	}

	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
		  msto->encoder.name, msto->head->base.base.name,
		  args.vcpi.start_slot, args.vcpi.num_slots,
		  args.vcpi.pbn, args.vcpi.aligned_pbn);
	nvif_mthd(&drm->display->disp, 0, &args, sizeof(args));
}
static int
nv50_msto_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nv50_mstc *mstc = nv50_mstc(conn_state->connector);
	struct nv50_mstm *mstm = mstc->mstm;
	int bpp = conn_state->connector->display_info.bpc * 3;
	int slots;

	mstc->pbn = drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock, bpp);

	slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
	if (slots < 0)
		return slots;

	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   mstc->native);
}
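/* Note: drm_dp_calc_pbn_mode() converts the adjusted mode's pixel clock
 * and the connector's bits per pixel (bpc * 3 components) into PBN units,
 * the bandwidth currency of DP MST.  The slot count for that PBN is then
 * checked against the topology before the view itself is validated.
 */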
static void
nv50_msto_enable(struct drm_encoder *encoder)
{
	struct nv50_head *head = nv50_head(encoder->crtc);
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = NULL;
	struct nv50_mstm *mstm = NULL;
	struct drm_connector *connector;
	u8 proto, depth;
	int slots;
	bool r;

	drm_for_each_connector(connector, encoder->dev) {
		if (connector->state->best_encoder == &msto->encoder) {
			mstc = nv50_mstc(connector);
			mstm = mstc->mstm;
			break;
		}
	}

	if (WARN_ON(!mstc))
		return;

	slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
	r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, mstc->pbn, slots);
	WARN_ON(!r);

	if (mstm->outp->dcb->sorconf.link & 1)
		proto = 0x8;
	else
		proto = 0x9;

	switch (mstc->connector.display_info.bpc) {
	case  6: depth = 0x2; break;
	case  8: depth = 0x5; break;
	case 10:
	default: depth = 0x6; break;
	}

	mstm->outp->update(mstm->outp, head->base.index,
			   &head->base.base.state->adjusted_mode, proto, depth);

	msto->head = head;
	msto->mstc = mstc;
	mstm->modified = true;
}
static void
nv50_msto_disable(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	if (mstc->port)
		drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);

	mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
	mstm->modified = true;
	msto->disabled = true;
}
static const struct drm_encoder_helper_funcs
nv50_msto_help = {
	.disable = nv50_msto_disable,
	.enable = nv50_msto_enable,
	.atomic_check = nv50_msto_atomic_check,
};

static void
nv50_msto_destroy(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	drm_encoder_cleanup(&msto->encoder);
	kfree(msto);
}

static const struct drm_encoder_funcs
nv50_msto = {
	.destroy = nv50_msto_destroy,
};

static int
nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id,
	      struct nv50_msto **pmsto)
{
	struct nv50_msto *msto;
	int ret;

	if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL)))
		return -ENOMEM;

	ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
			       DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id);
	if (ret) {
		kfree(*pmsto);
		*pmsto = NULL;
		return ret;
	}

	drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
	msto->encoder.possible_crtcs = heads;
	return 0;
}
static struct drm_encoder *
nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
			      struct drm_connector_state *connector_state)
{
	struct nv50_head *head = nv50_head(connector_state->crtc);
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (mstc->port) {
		struct nv50_mstm *mstm = mstc->mstm;
		return &mstm->msto[head->base.index]->encoder;
	}
	return NULL;
}

static struct drm_encoder *
nv50_mstc_best_encoder(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (mstc->port) {
		struct nv50_mstm *mstm = mstc->mstm;
		return &mstm->msto[0]->encoder;
	}
	return NULL;
}
static enum drm_mode_status
nv50_mstc_mode_valid(struct drm_connector *connector,
		     struct drm_display_mode *mode)
{
	return MODE_OK;
}

static int
nv50_mstc_get_modes(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	int ret = 0;

	mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
	drm_mode_connector_update_edid_property(&mstc->connector, mstc->edid);
	if (mstc->edid) {
		ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
		drm_edid_to_eld(&mstc->connector, mstc->edid);
	}

	if (!mstc->connector.display_info.bpc)
		mstc->connector.display_info.bpc = 8;

	if (mstc->native)
		drm_mode_destroy(mstc->connector.dev, mstc->native);
	mstc->native = nouveau_conn_native_mode(&mstc->connector);
	return ret;
}

static const struct drm_connector_helper_funcs
nv50_mstc_help = {
	.get_modes = nv50_mstc_get_modes,
	.mode_valid = nv50_mstc_mode_valid,
	.best_encoder = nv50_mstc_best_encoder,
	.atomic_best_encoder = nv50_mstc_atomic_best_encoder,
};

static enum drm_connector_status
nv50_mstc_detect(struct drm_connector *connector, bool force)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (!mstc->port)
		return connector_status_disconnected;
	return drm_dp_mst_detect_port(connector, mstc->port->mgr, mstc->port);
}

static void
nv50_mstc_destroy(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	drm_connector_cleanup(&mstc->connector);
	kfree(mstc);
}

static const struct drm_connector_funcs
nv50_mstc = {
	.dpms = drm_atomic_helper_connector_dpms,
	.reset = nouveau_conn_reset,
	.detect = nv50_mstc_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = drm_atomic_helper_connector_set_property,
	.destroy = nv50_mstc_destroy,
	.atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
	.atomic_destroy_state = nouveau_conn_atomic_destroy_state,
	.atomic_set_property = nouveau_conn_atomic_set_property,
	.atomic_get_property = nouveau_conn_atomic_get_property,
};
static int
nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
	      const char *path, struct nv50_mstc **pmstc)
{
	struct drm_device *dev = mstm->outp->base.base.dev;
	struct nv50_mstc *mstc;
	int ret, i;

	if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
		return -ENOMEM;
	mstc->mstm = mstm;
	mstc->port = port;

	ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
				 DRM_MODE_CONNECTOR_DisplayPort);
	if (ret) {
		kfree(*pmstc);
		*pmstc = NULL;
		return ret;
	}

	drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);

	mstc->connector.funcs->reset(&mstc->connector);
	nouveau_conn_attach_properties(&mstc->connector);

	for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto[i]; i++)
		drm_mode_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder);

	drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
	drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
	drm_mode_connector_set_path_property(&mstc->connector, path);
	return 0;
}
static void
nv50_mstm_cleanup(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
	ret = drm_dp_check_act_status(&mstm->mgr);

	ret = drm_dp_update_payload_part2(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_cleanup(msto);
		}
	}

	mstm->modified = false;
}

static void
nv50_mstm_prepare(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
	ret = drm_dp_update_payload_part1(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_prepare(msto);
		}
	}
}
static void
nv50_mstm_hotplug(struct drm_dp_mst_topology_mgr *mgr)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	drm_kms_helper_hotplug_event(mstm->outp->base.base.dev);
}

static void
nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr,
			    struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nv50_mstc *mstc = nv50_mstc(connector);

	drm_connector_unregister(&mstc->connector);

	drm_modeset_lock_all(drm->dev);
	drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector);
	mstc->port = NULL;
	drm_modeset_unlock_all(drm->dev);

	drm_connector_unreference(&mstc->connector);
}

static void
nv50_mstm_register_connector(struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);

	drm_modeset_lock_all(drm->dev);
	drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector);
	drm_modeset_unlock_all(drm->dev);

	drm_connector_register(connector);
}

static struct drm_connector *
nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
			struct drm_dp_mst_port *port, const char *path)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	struct nv50_mstc *mstc;
	int ret;

	ret = nv50_mstc_new(mstm, port, path, &mstc);
	if (ret) {
		if (mstc)
			mstc->connector.funcs->destroy(&mstc->connector);
		return NULL;
	}

	return &mstc->connector;
}

static const struct drm_dp_mst_topology_cbs
nv50_mstm = {
	.add_connector = nv50_mstm_add_connector,
	.register_connector = nv50_mstm_register_connector,
	.destroy_connector = nv50_mstm_destroy_connector,
	.hotplug = nv50_mstm_hotplug,
};
void
nv50_mstm_service(struct nv50_mstm *mstm)
{
	struct drm_dp_aux *aux = mstm->mgr.aux;
	bool handled = true;
	int ret;
	u8 esi[8] = {};

	while (handled) {
		ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
		if (ret != 8) {
			drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
			return;
		}

		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
		if (!handled)
			break;

		drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
	}
}

void
nv50_mstm_remove(struct nv50_mstm *mstm)
{
	if (mstm)
		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
}
static int
nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
{
	struct nouveau_encoder *outp = mstm->outp;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_link_v0 mst;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
		.base.hasht = outp->dcb->hasht,
		.base.hashm = outp->dcb->hashm,
		.mst.state = state,
	};
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nvif_object *disp = &drm->display->disp;
	int ret;

	if (dpcd >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
		if (ret < 0)
			return ret;

		dpcd &= ~DP_MST_EN;
		if (state)
			dpcd |= DP_MST_EN;

		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
		if (ret < 0)
			return ret;
	}

	return nvif_mthd(disp, 0, &args, sizeof(args));
}

int
nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
{
	int ret, state = 0;

	if (!mstm)
		return 0;

	if (dpcd[0] >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
		if (ret < 0)
			return ret;

		if (!(dpcd[1] & DP_MST_CAP))
			dpcd[0] = 0x11;
		else
			state = allow;
	}

	ret = nv50_mstm_enable(mstm, dpcd[0], state);
	if (ret)
		return ret;

	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
	if (ret)
		return nv50_mstm_enable(mstm, dpcd[0], 0);

	return mstm->mgr.mst_state;
}
static void
nv50_mstm_fini(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
}

static void
nv50_mstm_init(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_resume(&mstm->mgr);
}

static void
nv50_mstm_del(struct nv50_mstm **pmstm)
{
	struct nv50_mstm *mstm = *pmstm;
	if (mstm) {
		kfree(*pmstm);
		*pmstm = NULL;
	}
}
static int
nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
	      int conn_base_id, struct nv50_mstm **pmstm)
{
	const int max_payloads = hweight8(outp->dcb->heads);
	struct drm_device *dev = outp->base.base.dev;
	struct nv50_mstm *mstm;
	int ret, i;
	u8 dpcd;

	/* This is a workaround for some monitors not functioning
	 * correctly in MST mode on initial module load.  I think
	 * some bad interaction with the VBIOS may be responsible.
	 *
	 * A good ol' off and on again seems to work here ;)
	 */
	ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
	if (ret >= 0 && dpcd >= 0x12)
		drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);

	if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
		return -ENOMEM;
	mstm->outp = outp;
	mstm->mgr.cbs = &nv50_mstm;

	ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
					   max_payloads, conn_base_id);
	if (ret)
		return ret;

	for (i = 0; i < max_payloads; i++) {
		ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name,
				    i, &mstm->msto[i]);
		if (ret)
			return ret;
	}

	return 0;
}
/******************************************************************************
 * SOR
 *****************************************************************************/
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
static void
nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
		struct drm_display_mode *mode, u8 proto, u8 depth)
{
	struct nv50_dmac *core = &nv50_mast(nv_encoder->base.base.dev)->base;
	u32 *push;

	if (!mode) {
		nv_encoder->ctrl &= ~BIT(head);
		if (!(nv_encoder->ctrl & 0x0000000f))
			nv_encoder->ctrl = 0;
	} else {
		nv_encoder->ctrl |= proto << 8;
		nv_encoder->ctrl |= BIT(head);
	}

	if ((push = evo_wait(core, 6))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			if (mode) {
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					nv_encoder->ctrl |= 0x00001000;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					nv_encoder->ctrl |= 0x00002000;
				nv_encoder->ctrl |= depth << 16;
			}
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
		} else {
			if (mode) {
				u32 magic = 0x31ec6000 | (head << 25);
				u32 syncs = 0x00000001;
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					syncs |= 0x00000008;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					syncs |= 0x00000010;
				if (mode->flags & DRM_MODE_FLAG_INTERLACE)
					magic |= 0x00000001;

				evo_mthd(push, 0x0404 + (head * 0x300), 2);
				evo_data(push, syncs | (depth << 6));
				evo_data(push, magic);
			}
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		}
		evo_data(push, nv_encoder->ctrl);
		evo_kick(push, core);
	}
}
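/* Note on nv_encoder->ctrl above: reading the code, the low nibble tracks
 * which heads the SOR drives (one bit per head), the protocol sits in bits
 * 8 and up, and on the pre-GF110 path sync polarity and depth are folded
 * into the same word.  The word is only fully cleared once no head uses
 * the output anymore.
 */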
static void
nv50_sor_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		struct nvkm_i2c_aux *aux = nv_encoder->aux;
		u8 pwr;

		if (aux) {
			int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
			if (ret == 0) {
				pwr &= ~DP_SET_POWER_MASK;
				pwr |=  DP_SET_POWER_D3;
				nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
			}
		}

		nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
		nv50_audio_disable(encoder, nv_crtc);
		nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
	}
}
static void
nv50_sor_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->dcb->sorconf.link & 1) {
			proto = 0x1;
			/* Only enable dual-link if:
			 *  - Need to (i.e. rate > 165MHz)
			 *  - DCB says we can
			 *  - Not an HDMI monitor, since there's no dual-link
			 *    on HDMI.
			 */
			if (mode->clock >= 165000 &&
			    nv_encoder->dcb->duallink_possible &&
			    !drm_detect_hdmi_monitor(nv_connector->edid))
				proto |= 0x4;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_enable(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		if (nv_connector->base.display_info.bpc == 6)
			depth = 0x2;
		else
		if (nv_connector->base.display_info.bpc == 8)
			depth = 0x5;
		else
			depth = 0x6;

		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;
		else
			proto = 0x9;

		nv50_audio_enable(encoder, mode);
		break;
	default:
		BUG();
		break;
	}

	nv_encoder->update(nv_encoder, nv_crtc->index, mode, proto, depth);
}
static const struct drm_encoder_helper_funcs
nv50_sor_help = {
	.dpms = nv50_sor_dpms,
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_sor_enable,
	.disable = nv50_sor_disable,
};

static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	nv50_mstm_del(&nv_encoder->dp.mstm);
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type, ret;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->update = nv50_sor_update;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_sor_help);

	drm_mode_connector_attach_encoder(connector, encoder);

	if (dcbe->type == DCB_OUTPUT_DP) {
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
		if (aux) {
			nv_encoder->i2c = &nv_connector->aux.ddc;
			nv_encoder->aux = aux;
		}

		/*TODO: Use DP Info Table to check for support. */
		if (nv50_disp(encoder->dev)->disp->oclass >= GF110_DISP) {
			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
					    nv_connector->base.base.id,
					    &nv_encoder->dp.mstm);
			if (ret)
				return ret;
		}
	} else {
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	return 0;
}
/******************************************************************************
 * PIOR
 *****************************************************************************/
static void
nv50_pior_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_pior_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_PIOR_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
		.pwr.type = nv_encoder->dcb->type,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

static int
nv50_pior_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
	if (ret)
		return ret;
	crtc_state->adjusted_mode.clock *= 2;
	return 0;
}
static void
nv50_pior_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0700 + (or * 0x040), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}

static void
nv50_pior_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u8 owner = 1 << nv_crtc->index;
	u8 proto, depth;
	u32 *push;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_connector->base.display_info.bpc) {
	case 10: depth = 0x6; break;
	case  8: depth = 0x5; break;
	case  6: depth = 0x2; break;
	default: depth = 0x0; break;
	}

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
		proto = 0x0;
		break;
	default:
		BUG();
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 ctrl = (depth << 16) | (proto << 8) | owner;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				ctrl |= 0x00000100;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				ctrl |= 0x00000200;
			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
			evo_data(push, ctrl);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}
static const struct drm_encoder_helper_funcs
nv50_pior_help = {
	.dpms = nv50_pior_dpms,
	.atomic_check = nv50_pior_atomic_check,
	.enable = nv50_pior_enable,
	.disable = nv50_pior_disable,
};

static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};
static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc  = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc  = aux ? &nv_connector->aux.ddc : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_pior_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
/******************************************************************************
 * Atomic
 *****************************************************************************/

static void
nv50_disp_atomic_commit_core(struct nouveau_drm *drm, u32 interlock)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_dmac *core = &disp->mast.base;
	struct nv50_mstm *mstm;
	struct drm_encoder *encoder;
	u32 *push;

	NV_ATOMIC(drm, "commit core %08x\n", interlock);

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_prepare(mstm);
		}
	}

	if ((push = evo_wait(core, 5))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, interlock);
		evo_data(push, 0x00000000);
		nouveau_bo_wr32(disp->sync, 0, 0x00000000);
		evo_kick(push, core);
		if (nvif_msec(&drm->client.device, 2000ULL,
			if (nouveau_bo_rd32(disp->sync, 0))
				break;
			usleep_range(1, 2);
		) < 0)
			NV_ERROR(drm, "EVO timeout\n");
	}

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_cleanup(mstm);
		}
	}
}
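/* Note: the push sequence above submits a core channel update; method
 * 0x0084 requests a completion notification into the shared sync buffer,
 * and the first data word of 0x0080 carries the interlock mask so that
 * any flushed satellite channels flip together with the core update.
 * nvif_msec() then polls the sync word for up to two seconds before
 * declaring an EVO timeout.
 */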
static void
nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
{
	struct drm_device *dev = state->dev;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct drm_plane_state *plane_state;
	struct drm_plane *plane;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;
	u32 interlock_core = 0;
	u32 interlock_chan = 0;
	int i;

	NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
	drm_atomic_helper_wait_for_fences(dev, state, false);
	drm_atomic_helper_wait_for_dependencies(state);
	drm_atomic_helper_update_legacy_modeset_state(dev, state);

	if (atom->lock_core)
		mutex_lock(&disp->mutex);

	/* Disable head(s). */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(crtc->state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
			  asyh->clr.mask, asyh->set.mask);

		if (asyh->clr.mask) {
			nv50_head_flush_clr(head, asyh, atom->flush_disable);
			interlock_core |= 1;
		}
	}

	/* Disable plane(s). */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
			  asyw->clr.mask, asyw->set.mask);
		if (!asyw->clr.mask)
			continue;

		interlock_chan |= nv50_wndw_flush_clr(wndw, interlock_core,
						      atom->flush_disable,
						      asyw);
	}

	/* Disable output path(s). */
	list_for_each_entry(outp, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
			  outp->clr.mask, outp->set.mask);

		if (outp->clr.mask) {
			help->disable(encoder);
			interlock_core |= 1;
			if (outp->flush_disable) {
				nv50_disp_atomic_commit_core(drm, interlock_chan);
				interlock_core = 0;
				interlock_chan = 0;
			}
		}
	}

	/* Flush disable. */
	if (interlock_core) {
		if (atom->flush_disable) {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
			interlock_core = 0;
			interlock_chan = 0;
		}
	}

	/* Update output path(s). */
	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
			  outp->set.mask, outp->clr.mask);

		if (outp->set.mask) {
			help->enable(encoder);
			interlock_core = 1;
		}

		list_del(&outp->head);
		kfree(outp);
	}

	/* Update head(s). */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(crtc->state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
			  asyh->set.mask, asyh->clr.mask);

		if (asyh->set.mask) {
			nv50_head_flush_set(head, asyh);
			interlock_core = 1;
		}
	}

	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		if (crtc->state->event)
			drm_crtc_vblank_get(crtc);
	}

	/* Update plane(s). */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
			  asyw->set.mask, asyw->clr.mask);
		if ( !asyw->set.mask &&
		    (!asyw->clr.mask || atom->flush_disable))
			continue;

		interlock_chan |= nv50_wndw_flush_set(wndw, interlock_core, asyw);
	}

	/* Flush update. */
	if (interlock_core) {
		if (!interlock_chan && atom->state.legacy_cursor_update) {
			u32 *push = evo_wait(&disp->mast, 2);
			if (push) {
				evo_mthd(push, 0x0080, 1);
				evo_data(push, 0x00000000);
				evo_kick(push, &disp->mast);
			}
		} else {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
		}
	}

	if (atom->lock_core)
		mutex_unlock(&disp->mutex);

	/* Wait for HW to signal completion. */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		int ret = nv50_wndw_wait_armed(wndw, asyw);
		if (ret)
			NV_ERROR(drm, "%s: timeout\n", plane->name);
	}

	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		if (crtc->state->event) {
			unsigned long flags;
			/* Get correct count/ts if racing with vblank irq */
			drm_accurate_vblank_count(crtc);
			spin_lock_irqsave(&crtc->dev->event_lock, flags);
			drm_crtc_send_vblank_event(crtc, crtc->state->event);
			spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
			crtc->state->event = NULL;
			drm_crtc_vblank_put(crtc);
		}
	}

	drm_atomic_helper_commit_hw_done(state);
	drm_atomic_helper_cleanup_planes(dev, state);
	drm_atomic_helper_commit_cleanup_done(state);
	drm_atomic_state_put(state);
}
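/* Note on the ordering above: heads, planes and output paths are torn
 * down first (with an optional flush of the disables), then brought back
 * up in the opposite order, with interlock_core/interlock_chan
 * accumulating which channels must flip atomically in the final core
 * update.
 */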
static void
nv50_disp_atomic_commit_work(struct work_struct *work)
{
	struct drm_atomic_state *state =
		container_of(work, typeof(*state), commit_work);
	nv50_disp_atomic_commit_tail(state);
}
static int
nv50_disp_atomic_commit(struct drm_device *dev,
			struct drm_atomic_state *state, bool nonblock)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_plane_state *plane_state;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	bool active = false;
	int ret, i;

	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0 && ret != -EACCES)
		return ret;

	ret = drm_atomic_helper_setup_commit(state, nonblock);
	if (ret)
		goto done;

	INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);

	ret = drm_atomic_helper_prepare_planes(dev, state);
	if (ret)
		goto done;

	if (!nonblock) {
		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
		if (ret)
			goto done;
	}

	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (asyw->set.image) {
			asyw->ntfy.handle = wndw->dmac->sync.handle;
			asyw->ntfy.offset = wndw->ntfy;
			asyw->ntfy.awaken = false;
			asyw->set.ntfy = true;
			nouveau_bo_wr32(disp->sync, wndw->ntfy / 4, 0x00000000);
			wndw->ntfy ^= 0x10;
		}
	}

	drm_atomic_helper_swap_state(state, true);
	drm_atomic_state_get(state);

	if (nonblock)
		queue_work(system_unbound_wq, &state->commit_work);
	else
		nv50_disp_atomic_commit_tail(state);

	drm_for_each_crtc(crtc, dev) {
		if (crtc->state->enable) {
			if (!drm->have_disp_power_ref) {
				drm->have_disp_power_ref = true;
				return 0;
			}
			active = true;
			break;
		}
	}

	if (!active && drm->have_disp_power_ref) {
		pm_runtime_put_autosuspend(dev->dev);
		drm->have_disp_power_ref = false;
	}

done:
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}
static struct nv50_outp_atom *
nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
{
	struct nv50_outp_atom *outp;

	list_for_each_entry(outp, &atom->outp, head) {
		if (outp->encoder == encoder)
			return outp;
	}

	outp = kzalloc(sizeof(*outp), GFP_KERNEL);
	if (!outp)
		return ERR_PTR(-ENOMEM);

	list_add(&outp->head, &atom->outp);
	outp->encoder = encoder;
	return outp;
}

static int
nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
				struct drm_connector *connector)
{
	struct drm_encoder *encoder = connector->state->best_encoder;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = connector->state->crtc))
		return 0;

	crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc);
	if (crtc->state->active && drm_atomic_crtc_needs_modeset(crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			outp->flush_disable = true;
			atom->flush_disable = true;
		}
		outp->clr.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}

static int
nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
				struct drm_connector_state *connector_state)
{
	struct drm_encoder *encoder = connector_state->best_encoder;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = connector_state->crtc))
		return 0;

	crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc);
	if (crtc_state->active && drm_atomic_crtc_needs_modeset(crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		outp->set.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}
static int
nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct drm_connector_state *connector_state;
	struct drm_connector *connector;
	int ret, i;

	ret = drm_atomic_helper_check(dev, state);
	if (ret)
		return ret;

	for_each_connector_in_state(state, connector, connector_state, i) {
		ret = nv50_disp_outp_atomic_check_clr(atom, connector);
		if (ret)
			return ret;

		ret = nv50_disp_outp_atomic_check_set(atom, connector_state);
		if (ret)
			return ret;
	}

	return 0;
}

static void
nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;

	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		list_del(&outp->head);
		kfree(outp);
	}

	drm_atomic_state_default_clear(state);
	atom->lock_core = false;
	atom->flush_disable = false;
}

static void
nv50_disp_atomic_state_free(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	drm_atomic_state_default_release(&atom->state);
	kfree(atom);
}

static struct drm_atomic_state *
nv50_disp_atomic_state_alloc(struct drm_device *dev)
{
	struct nv50_atom *atom;
	if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
	    drm_atomic_state_init(dev, &atom->state) < 0) {
		kfree(atom);
		return NULL;
	}
	INIT_LIST_HEAD(&atom->outp);
	return &atom->state;
}

static const struct drm_mode_config_funcs
nv50_disp_func = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
	.atomic_check = nv50_disp_atomic_check,
	.atomic_commit = nv50_disp_atomic_commit,
	.atomic_state_alloc = nv50_disp_atomic_state_alloc,
	.atomic_state_clear = nv50_disp_atomic_state_clear,
	.atomic_state_free = nv50_disp_atomic_state_free,
};
/******************************************************************************
 * Init
 *****************************************************************************/

void
nv50_display_fini(struct drm_device *dev)
{
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	struct drm_plane *plane;

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_fini(wndw);
	}

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			nv_encoder = nouveau_encoder(encoder);
			nv50_mstm_fini(nv_encoder->dp.mstm);
		}
	}
}

int
nv50_display_init(struct drm_device *dev)
{
	struct drm_encoder *encoder;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	u32 *push;

	push = evo_wait(nv50_mast(dev), 32);
	if (!push)
		return -EBUSY;

	evo_mthd(push, 0x0088, 1);
	evo_data(push, nv50_mast(dev)->base.sync.handle);
	evo_kick(push, nv50_mast(dev));

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			const struct drm_encoder_helper_funcs *help;
			struct nouveau_encoder *nv_encoder;

			nv_encoder = nouveau_encoder(encoder);
			help = encoder->helper_private;
			if (help && help->dpms)
				help->dpms(encoder, DRM_MODE_DPMS_ON);

			nv50_mstm_init(nv_encoder->dp.mstm);
		}
	}

	drm_for_each_crtc(crtc, dev) {
		nv50_head_lut_load(crtc);
	}

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_init(wndw);
	}

	return 0;
}
void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);

	nv50_dmac_destroy(&disp->mast.base, disp->disp);

	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}

MODULE_PARM_DESC(atomic, "Expose atomic ioctl (default: disabled)");
static int nouveau_atomic = 0;
module_param_named(atomic, nouveau_atomic, int, 0400);
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->client.device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	mutex_init(&disp->mutex);

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->disp = &nouveau_display(dev)->disp;
	dev->mode_config.funcs = &nv50_disp_func;
	if (nouveau_atomic)
		dev->driver->driver_features |= DRIVER_ATOMIC;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
			       &disp->mast);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x022448);
	else
		crtcs = 2;

	for (i = 0; i < crtcs; i++) {
		ret = nv50_head_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			ret = nv50_pior_create(connector, dcbe);
		}

		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				dcbe->location, dcbe->type,
				ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}