/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#include <linux/dma-mapping.h>
#include <linux/hdmi.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_dp_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_scdc_helper.h>
#include <drm/drm_edid.h>

#include <nvif/class.h>
#include <nvif/cl0002.h>
#include <nvif/cl5070.h>
#include <nvif/cl507d.h>
#include <nvif/event.h>

#include "nouveau_drv.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_fence.h"
#include "nouveau_fbcon.h"

#include <subdev/bios/dp.h>
/******************************************************************************
 * Atomic state
 *****************************************************************************/

struct nv50_outp_atom {
	struct list_head head;

	struct drm_encoder *encoder;
	bool flush_disable;

	union nv50_outp_atom_mask {
		struct {
			bool ctrl:1;
		};
		u8 mask;
	} set, clr;
};
/******************************************************************************
 * EVO channel
 *****************************************************************************/

static int
nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_chan *chan)
{
	struct nvif_sclass *sclass;
	int ret, i, n;

	chan->device = device;

	ret = n = nvif_object_sclass_get(disp, &sclass);
	if (ret < 0)
		return ret;

	while (oclass[0]) {
		for (i = 0; i < n; i++) {
			if (sclass[i].oclass == oclass[0]) {
				ret = nvif_object_init(disp, 0, oclass[0],
						       data, size, &chan->user);
				if (ret == 0)
					nvif_object_map(&chan->user, NULL, 0);
				nvif_object_sclass_put(&sclass);
				return ret;
			}
		}
		oclass++;
	}

	nvif_object_sclass_put(&sclass);
	return -ENOSYS;
}
static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}
/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/

void
nv50_dmac_destroy(struct nv50_dmac *dmac)
{
	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	nvif_mem_fini(&dmac->push);
}
int
nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nouveau_cli *cli = (void *)device->object.client;
	struct nv50_disp_core_channel_dma_v0 *args = data;
	u8 type = NVIF_MEM_COHERENT;
	int ret;

	mutex_init(&dmac->lock);

	/* Pascal added support for 47-bit physical addresses, but some
	 * parts of EVO still only accept 40-bit PAs.
	 *
	 * To avoid issues on systems with large amounts of RAM, and on
	 * systems where an IOMMU maps pages at a high address, we need
	 * to allocate push buffers in VRAM instead.
	 *
	 * This appears to match NVIDIA's behaviour on Pascal.
	 */
	if (device->info.family == NV_DEVICE_INFO_V0_PASCAL)
		type |= NVIF_MEM_VRAM;

	ret = nvif_mem_init_map(&cli->mmu, type, 0x1000, &dmac->push);
	if (ret)
		return ret;

	dmac->ptr = dmac->push.object.map.ptr;

	args->pushbuf = nvif_handle(&dmac->push.object);

	ret = nv50_chan_create(device, disp, oclass, head, data, size,
			       &dmac->base);
	if (ret)
		return ret;

	if (!syncbuf)
		return 0;

	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->sync);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = 0,
					.limit = device->info.ram_user - 1,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->vram);

	return ret;
}
/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/
static void
evo_flush(struct nv50_dmac *dmac)
{
	/* Push buffer fetches are not coherent with BAR1, we need to ensure
	 * writes have been flushed right through to VRAM before writing PUT.
	 */
	if (dmac->push.type & NVIF_MEM_VRAM) {
		struct nvif_device *device = dmac->base.device;
		nvif_wr32(&device->object, 0x070000, 0x00000001);
		nvif_msec(device, 2000,
			if (!(nvif_rd32(&device->object, 0x070000) & 0x00000002))
				break;
		);
	}
}
u32 *
evo_wait(struct nv50_dmac *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	struct nvif_device *device = dmac->base.device;
	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;

	mutex_lock(&dmac->lock);
	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		dmac->ptr[put] = 0x20000000;

		evo_flush(dmac);

		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
		if (nvif_msec(device, 2000,
			if (!nvif_rd32(&dmac->base.user, 0x0004))
				break;
		) < 0) {
			mutex_unlock(&dmac->lock);
			pr_err("nouveau: evo channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}
void
evo_kick(u32 *push, struct nv50_dmac *evoc)
{
	struct nv50_dmac *dmac = evoc;

	evo_flush(dmac);

	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}
/******************************************************************************
 * Output path helpers
 *****************************************************************************/
static void
nv50_outp_release(struct nouveau_encoder *nv_encoder)
{
	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
	struct {
		struct nv50_disp_mthd_v1 base;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_RELEASE,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};

	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
	nv_encoder->or = -1;
	nv_encoder->link = 0;
}
static int
nv50_outp_acquire(struct nouveau_encoder *nv_encoder)
{
	struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_acquire_v0 info;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_ACQUIRE,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	ret = nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
	if (ret) {
		NV_ERROR(drm, "error acquiring output path: %d\n", ret);
		return ret;
	}

	nv_encoder->or = args.info.or;
	nv_encoder->link = args.info.link;
	return 0;
}
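
/* Added commentary (not part of the original source): acquire/release
 * bracket an encoder's use of a hardware OR (output resource).  The ACQUIRE
 * method lets the display firmware choose the OR and link, which the driver
 * then caches in nv_encoder->or / nv_encoder->link until the encoder's
 * disable path calls nv50_outp_release().
 */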
static int
nv50_outp_atomic_check_view(struct drm_encoder *encoder,
			    struct drm_crtc_state *crtc_state,
			    struct drm_connector_state *conn_state,
			    struct drm_display_mode *native_mode)
{
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);

	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
	asyc->scaler.full = false;
	if (!native_mode)
		return 0;

	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
		switch (connector->connector_type) {
		case DRM_MODE_CONNECTOR_LVDS:
		case DRM_MODE_CONNECTOR_eDP:
			/* Force use of scaler for non-EDID modes. */
			if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
				break;
			mode = native_mode;
			asyc->scaler.full = true;
			break;
		default:
			break;
		}
	} else {
		mode = native_mode;
	}

	if (!drm_mode_equal(adjusted_mode, mode)) {
		drm_mode_copy(adjusted_mode, mode);
		crtc_state->mode_changed = true;
	}

	return 0;
}
static int
nv50_outp_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nouveau_connector *nv_connector =
		nouveau_connector(conn_state->connector);
	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   nv_connector->native_mode);
}
/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nv50_dac_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_core *core = nv50_disp(encoder->dev)->core;
	if (nv_encoder->crtc)
		core->func->dac->ctrl(core, nv_encoder->or, 0x00000000, NULL);
	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}
static void
nv50_dac_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
	struct nv50_core *core = nv50_disp(encoder->dev)->core;

	nv50_outp_acquire(nv_encoder);

	core->func->dac->ctrl(core, nv_encoder->or, 1 << nv_crtc->index, asyh);
	asyh->or.depth = 0;

	nv_encoder->crtc = encoder->crtc;
}
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}
static const struct drm_encoder_helper_funcs
nv50_dac_help = {
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_dac_enable,
	.disable = nv50_dac_disable,
	.detect = nv50_dac_detect
};

static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;

	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_dac_help);

	drm_connector_attach_encoder(connector, encoder);
	return 0;
}
/******************************************************************************
 * Audio
 *****************************************************************************/
static void
nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
	};

	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
}
static void
nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht   = nv_encoder->dcb->hasht,
		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				     (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	nvif_mthd(&disp->disp->object, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
}
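
/* Added commentary (not part of the original source): the buffer copied
 * above is the connector's ELD (EDID-Like Data), which the HDA codec later
 * exposes to the audio driver.  The method length is trimmed with
 * drm_eld_size() so only the valid portion of the ELD is handed to firmware.
 */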
/******************************************************************************
 * HDMI
 *****************************************************************************/
static void
nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};

	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
}
static void
nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
		u8 infoframes[2 * 17]; /* two frames, up to 17 bytes each */
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	struct drm_hdmi_info *hdmi;
	u32 max_ac_packet;
	union hdmi_infoframe avi_frame;
	union hdmi_infoframe vendor_frame;
	bool high_tmds_clock_ratio = false, scrambling = false;
	u8 config;
	int ret;
	int size;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	hdmi = &nv_connector->base.display_info.hdmi;

	ret = drm_hdmi_avi_infoframe_from_display_mode(&avi_frame.avi,
						       &nv_connector->base, mode);
	if (!ret) {
		/* We have an AVI InfoFrame, populate it to the display */
		args.pwr.avi_infoframe_length
			= hdmi_infoframe_pack(&avi_frame, args.infoframes, 17);
	}

	ret = drm_hdmi_vendor_infoframe_from_display_mode(&vendor_frame.vendor.hdmi,
							  &nv_connector->base, mode);
	if (!ret) {
		/* We have a Vendor InfoFrame, populate it to the display */
		args.pwr.vendor_infoframe_length
			= hdmi_infoframe_pack(&vendor_frame,
					      args.infoframes
					      + args.pwr.avi_infoframe_length,
					      17);
	}

	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;
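
	/* Added worked example (not part of the original source): for a
	 * 1920x1080@60 CEA mode, htotal=2200 and hdisplay=1920, so hblank is
	 * 280 pixels; subtracting the 56-pixel rekey and the constant 18
	 * leaves 206, and 206 / 32 = 6 audio/aux (data island) packets per
	 * horizontal blanking period.
	 */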

	if (hdmi->scdc.scrambling.supported) {
		high_tmds_clock_ratio = mode->clock > 340000;
		scrambling = high_tmds_clock_ratio ||
			hdmi->scdc.scrambling.low_rates;
	}

	args.pwr.scdc =
		NV50_DISP_SOR_HDMI_PWR_V0_SCDC_SCRAMBLE * scrambling |
		NV50_DISP_SOR_HDMI_PWR_V0_SCDC_DIV_BY_4 * high_tmds_clock_ratio;

	size = sizeof(args.base)
		+ sizeof(args.pwr)
		+ args.pwr.avi_infoframe_length
		+ args.pwr.vendor_infoframe_length;
	nvif_mthd(&disp->disp->object, 0, &args, size);

	nv50_audio_enable(encoder, mode);

	/* If SCDC is supported by the downstream monitor, update
	 * divider / scrambling settings to what we programmed above.
	 */
	if (!hdmi->scdc.scrambling.supported)
		return;

	ret = drm_scdc_readb(nv_encoder->i2c, SCDC_TMDS_CONFIG, &config);
	if (ret < 0) {
		NV_ERROR(drm, "Failure to read SCDC_TMDS_CONFIG: %d\n", ret);
		return;
	}

	config &= ~(SCDC_TMDS_BIT_CLOCK_RATIO_BY_40 | SCDC_SCRAMBLING_ENABLE);
	config |= SCDC_TMDS_BIT_CLOCK_RATIO_BY_40 * high_tmds_clock_ratio;
	config |= SCDC_SCRAMBLING_ENABLE * scrambling;
	ret = drm_scdc_writeb(nv_encoder->i2c, SCDC_TMDS_CONFIG, config);
	if (ret < 0)
		NV_ERROR(drm, "Failure to write SCDC_TMDS_CONFIG = 0x%02x: %d\n",
			 config, ret);
}
/******************************************************************************
 * MST
 *****************************************************************************/
#define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
#define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
#define nv50_msto(p) container_of((p), struct nv50_msto, encoder)

struct nv50_mstm {
	struct nouveau_encoder *outp;

	struct drm_dp_mst_topology_mgr mgr;
	struct nv50_msto *msto[4];

	bool modified;
	bool disabled;
	int links;
};

struct nv50_mstc {
	struct nv50_mstm *mstm;
	struct drm_dp_mst_port *port;
	struct drm_connector connector;

	struct drm_display_mode *native;
	struct edid *edid;
};

struct nv50_msto {
	struct drm_encoder encoder;

	struct nv50_head *head;
	struct nv50_mstc *mstc;
	bool disabled;
};
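
/* Added commentary (not part of the original source): the three structures
 * above split an MST link into manageable pieces -- nv50_mstm wraps the DP
 * MST topology manager for one physical SOR, nv50_mstc is the DRM connector
 * created for each MST port discovered in the topology, and nv50_msto is a
 * per-head "fake" encoder, since every head driving a stream into the
 * topology needs its own encoder object.
 */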
static struct drm_dp_payload *
nv50_msto_payload(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	int vcpi = mstc->port->vcpi.vcpi, i;

	WARN_ON(!mutex_is_locked(&mstm->mgr.payload_lock));

	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
			  mstm->outp->base.base.name, i, payload->vcpi,
			  payload->start_slot, payload->num_slots);
	}

	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		if (payload->vcpi == vcpi)
			return payload;
	}

	return NULL;
}
static void
nv50_msto_cleanup(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	if (!msto->disabled)
		return;

	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);

	drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);

	msto->mstc = NULL;
	msto->head = NULL;
	msto->disabled = false;
}
static void
nv50_msto_prepare(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
		.base.hasht  = mstm->outp->dcb->hasht,
		.base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
			       (0x0100 << msto->head->base.index),
	};

	mutex_lock(&mstm->mgr.payload_lock);

	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
	if (mstc->port->vcpi.vcpi > 0) {
		struct drm_dp_payload *payload = nv50_msto_payload(msto);
		if (payload) {
			args.vcpi.start_slot = payload->start_slot;
			args.vcpi.num_slots = payload->num_slots;
			args.vcpi.pbn = mstc->port->vcpi.pbn;
			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
		}
	}

	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
		  msto->encoder.name, msto->head->base.base.name,
		  args.vcpi.start_slot, args.vcpi.num_slots,
		  args.vcpi.pbn, args.vcpi.aligned_pbn);

	nvif_mthd(&drm->display->disp.object, 0, &args, sizeof(args));
	mutex_unlock(&mstm->mgr.payload_lock);
}
static int
nv50_msto_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct drm_atomic_state *state = crtc_state->state;
	struct drm_connector *connector = conn_state->connector;
	struct nv50_mstc *mstc = nv50_mstc(connector);
	struct nv50_mstm *mstm = mstc->mstm;
	struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
	int slots;

	/* When restoring duplicated states, we need to make sure that the
	 * bw remains the same and avoid recalculating it, as the connector's
	 * bpc may have changed after the state was duplicated
	 */
	if (!state->duplicated)
		asyh->dp.pbn =
			drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock,
					     connector->display_info.bpc * 3);

	if (drm_atomic_crtc_needs_modeset(crtc_state)) {
		slots = drm_dp_atomic_find_vcpi_slots(state, &mstm->mgr,
						      mstc->port,
						      asyh->dp.pbn);
		if (slots < 0)
			return slots;

		asyh->dp.tu = slots;
	}

	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   mstc->native);
}
static void
nv50_msto_enable(struct drm_encoder *encoder)
{
	struct nv50_head *head = nv50_head(encoder->crtc);
	struct nv50_head_atom *armh = nv50_head_atom(head->base.base.state);
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = NULL;
	struct nv50_mstm *mstm = NULL;
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	u8 proto, depth;
	bool r;

	drm_connector_list_iter_begin(encoder->dev, &conn_iter);
	drm_for_each_connector_iter(connector, &conn_iter) {
		if (connector->state->best_encoder == &msto->encoder) {
			mstc = nv50_mstc(connector);
			mstm = mstc->mstm;
			break;
		}
	}
	drm_connector_list_iter_end(&conn_iter);

	if (WARN_ON(!mstc))
		return;

	r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, armh->dp.pbn,
				     armh->dp.tu);
	if (!r)
		DRM_DEBUG_KMS("Failed to allocate VCPI\n");

	if (!mstm->links++)
		nv50_outp_acquire(mstm->outp);

	if (mstm->outp->link & 1)
		proto = 0x8;
	else
		proto = 0x9;

	switch (mstc->connector.display_info.bpc) {
	case  6: depth = 0x2; break;
	case  8: depth = 0x5; break;
	case 10:
	default: depth = 0x6; break;
	}

	mstm->outp->update(mstm->outp, head->base.index, armh, proto, depth);

	msto->head = head;
	msto->mstc = mstc;
	mstm->modified = true;
}
static void
nv50_msto_disable(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);

	mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
	mstm->modified = true;
	if (!--mstm->links)
		mstm->disabled = true;
	msto->disabled = true;
}
static const struct drm_encoder_helper_funcs
nv50_msto_help = {
	.disable = nv50_msto_disable,
	.enable = nv50_msto_enable,
	.atomic_check = nv50_msto_atomic_check,
};

static void
nv50_msto_destroy(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	drm_encoder_cleanup(&msto->encoder);
	kfree(msto);
}

static const struct drm_encoder_funcs
nv50_msto = {
	.destroy = nv50_msto_destroy,
};
static int
nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id,
	      struct nv50_msto **pmsto)
{
	struct nv50_msto *msto;
	int ret;

	if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL)))
		return -ENOMEM;

	ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
			       DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id);
	if (ret) {
		kfree(*pmsto);
		*pmsto = NULL;
		return ret;
	}

	drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
	msto->encoder.possible_crtcs = heads;
	return 0;
}
static struct drm_encoder *
nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
			      struct drm_connector_state *connector_state)
{
	struct nv50_head *head = nv50_head(connector_state->crtc);
	struct nv50_mstc *mstc = nv50_mstc(connector);

	return &mstc->mstm->msto[head->base.index]->encoder;
}

static struct drm_encoder *
nv50_mstc_best_encoder(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);

	return &mstc->mstm->msto[0]->encoder;
}

static enum drm_mode_status
nv50_mstc_mode_valid(struct drm_connector *connector,
		     struct drm_display_mode *mode)
{
	return MODE_OK;
}
static int
nv50_mstc_get_modes(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	int ret = 0;

	mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
	drm_connector_update_edid_property(&mstc->connector, mstc->edid);
	if (mstc->edid)
		ret = drm_add_edid_modes(&mstc->connector, mstc->edid);

	if (!mstc->connector.display_info.bpc)
		mstc->connector.display_info.bpc = 8;

	if (mstc->native)
		drm_mode_destroy(mstc->connector.dev, mstc->native);
	mstc->native = nouveau_conn_native_mode(&mstc->connector);
	return ret;
}
static int
nv50_mstc_atomic_check(struct drm_connector *connector,
		       struct drm_connector_state *new_conn_state)
{
	struct drm_atomic_state *state = new_conn_state->state;
	struct nv50_mstc *mstc = nv50_mstc(connector);
	struct drm_dp_mst_topology_mgr *mgr = &mstc->mstm->mgr;
	struct drm_connector_state *old_conn_state =
		drm_atomic_get_old_connector_state(state, connector);
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *new_crtc = new_conn_state->crtc;

	if (!old_conn_state->crtc)
		return 0;

	/* We only want to free VCPI if this state disables the CRTC on this
	 * connector
	 */
	if (new_crtc) {
		crtc_state = drm_atomic_get_new_crtc_state(state, new_crtc);

		if (!crtc_state ||
		    !drm_atomic_crtc_needs_modeset(crtc_state) ||
		    crtc_state->enable)
			return 0;
	}

	return drm_dp_atomic_release_vcpi_slots(state, mgr, mstc->port);
}

static const struct drm_connector_helper_funcs
nv50_mstc_help = {
	.get_modes = nv50_mstc_get_modes,
	.mode_valid = nv50_mstc_mode_valid,
	.best_encoder = nv50_mstc_best_encoder,
	.atomic_best_encoder = nv50_mstc_atomic_best_encoder,
	.atomic_check = nv50_mstc_atomic_check,
};
static enum drm_connector_status
nv50_mstc_detect(struct drm_connector *connector, bool force)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	enum drm_connector_status conn_status;
	int ret;

	if (drm_connector_is_unregistered(connector))
		return connector_status_disconnected;

	ret = pm_runtime_get_sync(connector->dev->dev);
	if (ret < 0 && ret != -EACCES)
		return connector_status_disconnected;

	conn_status = drm_dp_mst_detect_port(connector, mstc->port->mgr,
					     mstc->port);

	pm_runtime_mark_last_busy(connector->dev->dev);
	pm_runtime_put_autosuspend(connector->dev->dev);
	return conn_status;
}

static void
nv50_mstc_destroy(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);

	drm_connector_cleanup(&mstc->connector);
	drm_dp_mst_put_port_malloc(mstc->port);

	kfree(mstc);
}
static const struct drm_connector_funcs
nv50_mstc = {
	.reset = nouveau_conn_reset,
	.detect = nv50_mstc_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = nv50_mstc_destroy,
	.atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
	.atomic_destroy_state = nouveau_conn_atomic_destroy_state,
	.atomic_set_property = nouveau_conn_atomic_set_property,
	.atomic_get_property = nouveau_conn_atomic_get_property,
};
static int
nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
	      const char *path, struct nv50_mstc **pmstc)
{
	struct drm_device *dev = mstm->outp->base.base.dev;
	struct nv50_mstc *mstc;
	int ret, i;

	if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
		return -ENOMEM;
	mstc->mstm = mstm;
	mstc->port = port;

	ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
				 DRM_MODE_CONNECTOR_DisplayPort);
	if (ret) {
		kfree(*pmstc);
		*pmstc = NULL;
		return ret;
	}

	drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);

	mstc->connector.funcs->reset(&mstc->connector);
	nouveau_conn_attach_properties(&mstc->connector);

	for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto[i]; i++)
		drm_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder);

	drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
	drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
	drm_connector_set_path_property(&mstc->connector, path);
	drm_dp_mst_get_port_malloc(port);
	return 0;
}
static void
nv50_mstm_cleanup(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
	ret = drm_dp_check_act_status(&mstm->mgr);

	ret = drm_dp_update_payload_part2(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_cleanup(msto);
		}
	}

	mstm->modified = false;
}
static void
nv50_mstm_prepare(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
	ret = drm_dp_update_payload_part1(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_prepare(msto);
		}
	}

	if (mstm->disabled) {
		if (!mstm->links)
			nv50_outp_release(mstm->outp);
		mstm->disabled = false;
	}
}
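
/* Added commentary (not part of the original source): prepare/cleanup appear
 * to mirror the two halves of DP MST payload programming -- part 1 updates
 * the payload table before the core channel update is committed, and part 2
 * (from nv50_mstm_cleanup()) completes the allocation once the ACT handshake
 * has been checked.
 */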
static void
nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr,
			    struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nv50_mstc *mstc = nv50_mstc(connector);

	drm_connector_unregister(&mstc->connector);

	drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector);

	drm_connector_put(&mstc->connector);
}

static void
nv50_mstm_register_connector(struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);

	drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector);

	drm_connector_register(connector);
}
static struct drm_connector *
nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
			struct drm_dp_mst_port *port, const char *path)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	struct nv50_mstc *mstc;
	int ret;

	ret = nv50_mstc_new(mstm, port, path, &mstc);
	if (ret)
		return NULL;

	return &mstc->connector;
}

static const struct drm_dp_mst_topology_cbs
nv50_mstm = {
	.add_connector = nv50_mstm_add_connector,
	.register_connector = nv50_mstm_register_connector,
	.destroy_connector = nv50_mstm_destroy_connector,
};
void
nv50_mstm_service(struct nv50_mstm *mstm)
{
	struct drm_dp_aux *aux = mstm ? mstm->mgr.aux : NULL;
	bool handled = true;
	int ret;
	u8 esi[8] = {};

	if (!aux)
		return;

	while (handled) {
		ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
		if (ret != 8) {
			drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
			return;
		}

		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
		if (!handled)
			break;

		drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
	}
}

void
nv50_mstm_remove(struct nv50_mstm *mstm)
{
	if (mstm)
		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
}
static int
nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
{
	struct nouveau_encoder *outp = mstm->outp;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_link_v0 mst;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
		.base.hasht = outp->dcb->hasht,
		.base.hashm = outp->dcb->hashm,
		.mst.state = state,
	};
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nvif_object *disp = &drm->display->disp.object;
	int ret;

	if (dpcd >= 0x12) {
		/* Even if we're enabling MST, start with disabling the
		 * branching unit to clear any sink-side MST topology state
		 * that wasn't set by us
		 */
		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, 0);
		if (ret < 0)
			return ret;

		if (state) {
			/* Now, start initializing */
			ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL,
						 DP_MST_EN);
			if (ret < 0)
				return ret;
		}
	}

	return nvif_mthd(disp, 0, &args, sizeof(args));
}
int
nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
{
	struct drm_dp_aux *aux;
	int ret;
	bool old_state, new_state;
	u8 mstm_ctrl;

	if (!mstm)
		return 0;

	mutex_lock(&mstm->mgr.lock);

	old_state = mstm->mgr.mst_state;
	new_state = old_state;
	aux = mstm->mgr.aux;

	if (old_state) {
		/* Just check that the MST hub is still as we expect it */
		ret = drm_dp_dpcd_readb(aux, DP_MSTM_CTRL, &mstm_ctrl);
		if (ret < 0 || !(mstm_ctrl & DP_MST_EN)) {
			DRM_DEBUG_KMS("Hub gone, disabling MST topology\n");
			new_state = false;
		}
	} else if (dpcd[0] >= 0x12) {
		ret = drm_dp_dpcd_readb(aux, DP_MSTM_CAP, &dpcd[1]);
		if (ret < 0)
			goto probe_error;

		if (!(dpcd[1] & DP_MST_CAP))
			dpcd[0] = 0x11;
		else
			new_state = allow;
	}

	if (new_state == old_state) {
		mutex_unlock(&mstm->mgr.lock);
		return new_state;
	}

	ret = nv50_mstm_enable(mstm, dpcd[0], new_state);
	if (ret)
		goto probe_error;

	mutex_unlock(&mstm->mgr.lock);

	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, new_state);
	if (ret)
		return nv50_mstm_enable(mstm, dpcd[0], 0);

	return new_state;

probe_error:
	mutex_unlock(&mstm->mgr.lock);
	return ret;
}
static void
nv50_mstm_fini(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
}

static void
nv50_mstm_init(struct nv50_mstm *mstm)
{
	int ret;

	if (!mstm || !mstm->mgr.mst_state)
		return;

	ret = drm_dp_mst_topology_mgr_resume(&mstm->mgr);
	if (ret == -1) {
		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
		drm_kms_helper_hotplug_event(mstm->mgr.dev);
	}
}

static void
nv50_mstm_del(struct nv50_mstm **pmstm)
{
	struct nv50_mstm *mstm = *pmstm;
	if (mstm) {
		drm_dp_mst_topology_mgr_destroy(&mstm->mgr);
		kfree(*pmstm);
		*pmstm = NULL;
	}
}
static int
nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
	      int conn_base_id, struct nv50_mstm **pmstm)
{
	const int max_payloads = hweight8(outp->dcb->heads);
	struct drm_device *dev = outp->base.base.dev;
	struct nv50_mstm *mstm;
	int ret, i;
	u8 dpcd;

	/* This is a workaround for some monitors not functioning
	 * correctly in MST mode on initial module load.  I think
	 * some bad interaction with the VBIOS may be responsible.
	 *
	 * A good ol' off and on again seems to work here ;)
	 */
	ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
	if (ret >= 0 && dpcd >= 0x12)
		drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);

	if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
		return -ENOMEM;
	mstm->outp = outp;
	mstm->mgr.cbs = &nv50_mstm;

	ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
					   max_payloads, conn_base_id);
	if (ret)
		return ret;

	for (i = 0; i < max_payloads; i++) {
		ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name,
				    i, &mstm->msto[i]);
		if (ret)
			return ret;
	}

	return 0;
}
/******************************************************************************
 * SOR
 *****************************************************************************/
static void
nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
		struct nv50_head_atom *asyh, u8 proto, u8 depth)
{
	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
	struct nv50_core *core = disp->core;

	if (!asyh) {
		nv_encoder->ctrl &= ~BIT(head);
		if (!(nv_encoder->ctrl & 0x0000000f))
			nv_encoder->ctrl = 0;
	} else {
		nv_encoder->ctrl |= proto << 8;
		nv_encoder->ctrl |= BIT(head);
		asyh->or.depth = depth;
	}

	core->func->sor->ctrl(core, nv_encoder->or, nv_encoder->ctrl, asyh);
}
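
/* Added commentary (not part of the original source): nv_encoder->ctrl
 * accumulates the SOR control word across heads -- bits 0..3 form a mask of
 * heads currently driven by this OR and the protocol sits at bits 8 and up.
 * Clearing the last head bit zeroes the whole word, so the next core update
 * shuts the OR off.
 */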
static void
nv50_sor_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		struct nvkm_i2c_aux *aux = nv_encoder->aux;
		u8 pwr;

		if (aux) {
			int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
			if (ret == 0) {
				pwr &= ~DP_SET_POWER_MASK;
				pwr |=  DP_SET_POWER_D3;
				nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
			}
		}

		nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
		nv50_audio_disable(encoder, nv_crtc);
		nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
		nv50_outp_release(nv_encoder);
	}
}
static void
nv50_sor_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u8 proto = 0;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;
	nv50_outp_acquire(nv_encoder);

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->link & 1) {
			proto = 0x1;
			/* Only enable dual-link if:
			 *  - Need to (i.e. rate > 165MHz)
			 *  - DCB says we can
			 *  - Not an HDMI monitor, since there's no dual-link
			 *    on HDMI.
			 */
			if (mode->clock >= 165000 &&
			    nv_encoder->dcb->duallink_possible &&
			    !drm_detect_hdmi_monitor(nv_connector->edid))
				proto |= 0x4;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_enable(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(&disp->disp->object, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		if (nv_connector->base.display_info.bpc == 6)
			depth = 0x2;
		else
		if (nv_connector->base.display_info.bpc == 8)
			depth = 0x5;
		else
			depth = 0x6;

		if (nv_encoder->link & 1)
			proto = 0x8;
		else
			proto = 0x9;

		nv50_audio_enable(encoder, mode);
		break;
	default:
		BUG();
		break;
	}

	nv_encoder->update(nv_encoder, nv_crtc->index, asyh, proto, depth);
}
static const struct drm_encoder_helper_funcs
nv50_sor_help = {
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_sor_enable,
	.disable = nv50_sor_disable,
};

static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	nv50_mstm_del(&nv_encoder->dp.mstm);
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_bios *bios = nvxx_bios(&drm->client.device);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	u8 ver, hdr, cnt, len;
	u32 data;
	int type, ret;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->update = nv50_sor_update;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_sor_help);

	drm_connector_attach_encoder(connector, encoder);

	if (dcbe->type == DCB_OUTPUT_DP) {
		struct nv50_disp *disp = nv50_disp(encoder->dev);
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
		if (aux) {
			if (disp->disp->object.oclass < GF110_DISP) {
				/* HW has no support for address-only
				 * transactions, so we're required to
				 * use custom I2C-over-AUX code.
				 */
				nv_encoder->i2c = &aux->i2c;
			} else {
				nv_encoder->i2c = &nv_connector->aux.ddc;
			}
			nv_encoder->aux = aux;
		}

		if ((data = nvbios_dp_table(bios, &ver, &hdr, &cnt, &len)) &&
		    ver >= 0x40 && (nvbios_rd08(bios, data + 0x08) & 0x04)) {
			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
					    nv_connector->base.base.id,
					    &nv_encoder->dp.mstm);
			if (ret)
				return ret;
		}
	} else {
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	return 0;
}
/******************************************************************************
 * PIOR
 *****************************************************************************/
static int
nv50_pior_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
	if (ret)
		return ret;
	crtc_state->adjusted_mode.clock *= 2;
	return 0;
}
static void
nv50_pior_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_core *core = nv50_disp(encoder->dev)->core;
	if (nv_encoder->crtc)
		core->func->pior->ctrl(core, nv_encoder->or, 0x00000000, NULL);
	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}
static void
nv50_pior_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
	struct nv50_core *core = nv50_disp(encoder->dev)->core;
	u8 owner = 1 << nv_crtc->index;
	u8 proto;

	nv50_outp_acquire(nv_encoder);

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_connector->base.display_info.bpc) {
	case 10: asyh->or.depth = 0x6; break;
	case  8: asyh->or.depth = 0x5; break;
	case  6: asyh->or.depth = 0x2; break;
	default: asyh->or.depth = 0x0; break;
	}

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
		proto = 0x0;
		break;
	default:
		BUG();
		break;
	}

	core->func->pior->ctrl(core, nv_encoder->or, (proto << 8) | owner, asyh);
	nv_encoder->crtc = encoder->crtc;
}
static const struct drm_encoder_helper_funcs
nv50_pior_help = {
	.atomic_check = nv50_pior_atomic_check,
	.enable = nv50_pior_enable,
	.disable = nv50_pior_disable,
};

static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};
static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc  = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc  = aux ? &aux->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_pior_help);

	drm_connector_attach_encoder(connector, encoder);
	return 0;
}
/******************************************************************************
 * Atomic
 *****************************************************************************/

static void
nv50_disp_atomic_commit_core(struct drm_atomic_state *state, u32 *interlock)
{
	struct nouveau_drm *drm = nouveau_drm(state->dev);
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_core *core = disp->core;
	struct nv50_mstm *mstm;
	struct drm_encoder *encoder;

	NV_ATOMIC(drm, "commit core %08x\n", interlock[NV50_DISP_INTERLOCK_BASE]);

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_prepare(mstm);
		}
	}

	core->func->ntfy_init(disp->sync, NV50_DISP_CORE_NTFY);
	core->func->update(core, interlock, true);
	if (core->func->ntfy_wait_done(disp->sync, NV50_DISP_CORE_NTFY,
				       disp->core->chan.base.device))
		NV_ERROR(drm, "core notifier timeout\n");

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_cleanup(mstm);
		}
	}
}
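
/* Added commentary (not part of the original source): interlock[] carries
 * one bitmask per channel class (core, base, overlay, window, ...).  Passing
 * it to core->func->update() makes the core channel update wait on the
 * listed satellite channels, so every channel latches its new state in the
 * same vblank.
 */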
static void
nv50_disp_atomic_commit_wndw(struct drm_atomic_state *state, u32 *interlock)
{
	struct drm_plane_state *new_plane_state;
	struct drm_plane *plane;
	int i;

	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (interlock[wndw->interlock.type] & wndw->interlock.data) {
			if (wndw->func->update)
				wndw->func->update(wndw, interlock);
		}
	}
}
static void
nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
{
	struct drm_device *dev = state->dev;
	struct drm_crtc_state *new_crtc_state, *old_crtc_state;
	struct drm_crtc *crtc;
	struct drm_plane_state *new_plane_state;
	struct drm_plane *plane;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;
	u32 interlock[NV50_DISP_INTERLOCK__SIZE] = {};
	int i;

	NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
	drm_atomic_helper_wait_for_fences(dev, state, false);
	drm_atomic_helper_wait_for_dependencies(state);
	drm_atomic_helper_update_legacy_modeset_state(dev, state);

	if (atom->lock_core)
		mutex_lock(&disp->mutex);

	/* Disable head(s). */
	for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
			  asyh->clr.mask, asyh->set.mask);
		if (old_crtc_state->active && !new_crtc_state->active)
			drm_crtc_vblank_off(crtc);

		if (asyh->clr.mask) {
			nv50_head_flush_clr(head, asyh, atom->flush_disable);
			interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
		}
	}

	/* Disable plane(s). */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
			  asyw->clr.mask, asyw->set.mask);
		if (!asyw->clr.mask)
			continue;

		nv50_wndw_flush_clr(wndw, interlock, atom->flush_disable, asyw);
	}

	/* Disable output path(s). */
	list_for_each_entry(outp, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
			  outp->clr.mask, outp->set.mask);

		if (outp->clr.mask) {
			help->disable(encoder);
			interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
			if (outp->flush_disable) {
				nv50_disp_atomic_commit_wndw(state, interlock);
				nv50_disp_atomic_commit_core(state, interlock);
				memset(interlock, 0x00, sizeof(interlock));
			}
		}
	}

	/* Flush disable. */
	if (interlock[NV50_DISP_INTERLOCK_CORE]) {
		if (atom->flush_disable) {
			nv50_disp_atomic_commit_wndw(state, interlock);
			nv50_disp_atomic_commit_core(state, interlock);
			memset(interlock, 0x00, sizeof(interlock));
		}
	}

	/* Update output path(s). */
	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
			  outp->set.mask, outp->clr.mask);

		if (outp->set.mask) {
			help->enable(encoder);
			interlock[NV50_DISP_INTERLOCK_CORE] = 1;
		}

		list_del(&outp->head);
		kfree(outp);
	}

	/* Update head(s). */
	for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
			  asyh->set.mask, asyh->clr.mask);

		if (asyh->set.mask) {
			nv50_head_flush_set(head, asyh);
			interlock[NV50_DISP_INTERLOCK_CORE] = 1;
		}

		if (new_crtc_state->active) {
			if (!old_crtc_state->active)
				drm_crtc_vblank_on(crtc);
			if (new_crtc_state->event)
				drm_crtc_vblank_get(crtc);
		}
	}

	/* Update plane(s). */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
			  asyw->set.mask, asyw->clr.mask);
		if ( !asyw->set.mask &&
		    (!asyw->clr.mask || atom->flush_disable))
			continue;

		nv50_wndw_flush_set(wndw, interlock, asyw);
	}

	/* Flush update. */
	nv50_disp_atomic_commit_wndw(state, interlock);

	if (interlock[NV50_DISP_INTERLOCK_CORE]) {
		if (interlock[NV50_DISP_INTERLOCK_BASE] ||
		    interlock[NV50_DISP_INTERLOCK_OVLY] ||
		    interlock[NV50_DISP_INTERLOCK_WNDW] ||
		    !atom->state.legacy_cursor_update)
			nv50_disp_atomic_commit_core(state, interlock);
		else
			disp->core->func->update(disp->core, interlock, false);
	}

	if (atom->lock_core)
		mutex_unlock(&disp->mutex);

	/* Wait for HW to signal completion. */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		int ret = nv50_wndw_wait_armed(wndw, asyw);
		if (ret)
			NV_ERROR(drm, "%s: timeout\n", plane->name);
	}

	for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
		if (new_crtc_state->event) {
			unsigned long flags;
			/* Get correct count/ts if racing with vblank irq */
			if (new_crtc_state->active)
				drm_crtc_accurate_vblank_count(crtc);
			spin_lock_irqsave(&crtc->dev->event_lock, flags);
			drm_crtc_send_vblank_event(crtc, new_crtc_state->event);
			spin_unlock_irqrestore(&crtc->dev->event_lock, flags);

			new_crtc_state->event = NULL;
			if (new_crtc_state->active)
				drm_crtc_vblank_put(crtc);
		}
	}

	drm_atomic_helper_commit_hw_done(state);
	drm_atomic_helper_cleanup_planes(dev, state);
	drm_atomic_helper_commit_cleanup_done(state);
	drm_atomic_state_put(state);
}
static void
nv50_disp_atomic_commit_work(struct work_struct *work)
{
	struct drm_atomic_state *state =
		container_of(work, typeof(*state), commit_work);
	nv50_disp_atomic_commit_tail(state);
}
static int
nv50_disp_atomic_commit(struct drm_device *dev,
			struct drm_atomic_state *state, bool nonblock)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_plane_state *new_plane_state;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	bool active = false;
	int ret, i;

	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0 && ret != -EACCES)
		return ret;

	ret = drm_atomic_helper_setup_commit(state, nonblock);
	if (ret)
		goto done;

	INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);

	ret = drm_atomic_helper_prepare_planes(dev, state);
	if (ret)
		goto done;

	if (!nonblock) {
		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
		if (ret)
			goto err_cleanup;
	}

	ret = drm_atomic_helper_swap_state(state, true);
	if (ret)
		goto err_cleanup;

	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		if (asyw->set.image)
			nv50_wndw_ntfy_enable(wndw, asyw);
	}

	drm_atomic_state_get(state);

	if (nonblock)
		queue_work(system_unbound_wq, &state->commit_work);
	else
		nv50_disp_atomic_commit_tail(state);

	drm_for_each_crtc(crtc, dev) {
		if (crtc->state->active) {
			if (!drm->have_disp_power_ref) {
				drm->have_disp_power_ref = true;
				return 0;
			}
			active = true;
			break;
		}
	}

	if (!active && drm->have_disp_power_ref) {
		pm_runtime_put_autosuspend(dev->dev);
		drm->have_disp_power_ref = false;
	}

err_cleanup:
	if (ret)
		drm_atomic_helper_cleanup_planes(dev, state);
done:
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}
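
/* Added commentary (not part of the original source): the runtime-PM dance
 * above keeps the device awake while any CRTC is active -- the first commit
 * that enables a head converts its temporary pm_runtime reference into the
 * long-lived have_disp_power_ref, and the commit that disables the last head
 * drops that reference again so the GPU may runtime-suspend.
 */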
static struct nv50_outp_atom *
nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
{
	struct nv50_outp_atom *outp;

	list_for_each_entry(outp, &atom->outp, head) {
		if (outp->encoder == encoder)
			return outp;
	}

	outp = kzalloc(sizeof(*outp), GFP_KERNEL);
	if (!outp)
		return ERR_PTR(-ENOMEM);

	list_add(&outp->head, &atom->outp);
	outp->encoder = encoder;
	return outp;
}
static int
nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
				struct drm_connector_state *old_connector_state)
{
	struct drm_encoder *encoder = old_connector_state->best_encoder;
	struct drm_crtc_state *old_crtc_state, *new_crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = old_connector_state->crtc))
		return 0;

	old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc);
	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
	if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			outp->flush_disable = true;
			atom->flush_disable = true;
		}
		outp->clr.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}
static int
nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
				struct drm_connector_state *connector_state)
{
	struct drm_encoder *encoder = connector_state->best_encoder;
	struct drm_crtc_state *new_crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = connector_state->crtc))
		return 0;

	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
	if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		outp->set.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}
static int
nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct drm_connector_state *old_connector_state, *new_connector_state;
	struct drm_connector *connector;
	struct drm_crtc_state *new_crtc_state;
	struct drm_crtc *crtc;
	int ret, i;

	/* We need to handle colour management on a per-plane basis. */
	for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
		if (new_crtc_state->color_mgmt_changed) {
			ret = drm_atomic_add_affected_planes(state, crtc);
			if (ret)
				return ret;
		}
	}

	ret = drm_atomic_helper_check(dev, state);
	if (ret)
		return ret;

	for_each_oldnew_connector_in_state(state, connector, old_connector_state, new_connector_state, i) {
		ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
		if (ret)
			return ret;

		ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
		if (ret)
			return ret;
	}

	ret = drm_dp_mst_atomic_check(state);
	if (ret)
		return ret;

	return 0;
}
static void
nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;

	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		list_del(&outp->head);
		kfree(outp);
	}

	drm_atomic_state_default_clear(state);
}

static void
nv50_disp_atomic_state_free(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	drm_atomic_state_default_release(&atom->state);
	kfree(atom);
}

static struct drm_atomic_state *
nv50_disp_atomic_state_alloc(struct drm_device *dev)
{
	struct nv50_atom *atom;
	if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
	    drm_atomic_state_init(dev, &atom->state) < 0) {
		kfree(atom);
		return NULL;
	}
	INIT_LIST_HEAD(&atom->outp);
	return &atom->state;
}
static const struct drm_mode_config_funcs
nv50_disp_func = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
	.atomic_check = nv50_disp_atomic_check,
	.atomic_commit = nv50_disp_atomic_commit,
	.atomic_state_alloc = nv50_disp_atomic_state_alloc,
	.atomic_state_clear = nv50_disp_atomic_state_clear,
	.atomic_state_free = nv50_disp_atomic_state_free,
};
/******************************************************************************
 * Init
 *****************************************************************************/

static void
nv50_display_fini(struct drm_device *dev, bool suspend)
{
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	struct drm_plane *plane;

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_fini(wndw);
	}

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			nv_encoder = nouveau_encoder(encoder);
			nv50_mstm_fini(nv_encoder->dp.mstm);
		}
	}
}
static int
nv50_display_init(struct drm_device *dev, bool resume, bool runtime)
{
	struct nv50_core *core = nv50_disp(dev)->core;
	struct drm_encoder *encoder;
	struct drm_plane *plane;

	core->func->init(core);

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			struct nouveau_encoder *nv_encoder =
				nouveau_encoder(encoder);
			nv50_mstm_init(nv_encoder->dp.mstm);
		}
	}

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_init(wndw);
	}

	return 0;
}
static void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);

	nv50_core_del(&disp->core);

	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->client.device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	mutex_init(&disp->mutex);

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->disp = &nouveau_display(dev)->disp;
	dev->mode_config.funcs = &nv50_disp_func;
	dev->mode_config.quirk_addfb_prefer_xbgr_30bpp = true;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_new(drm, &disp->core);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->object.oclass >= GV100_DISP)
		crtcs = nvif_rd32(&device->object, 0x610060) & 0xff;
	else
	if (disp->disp->object.oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x612004) & 0xf;
	else
		crtcs = 0x3;

	for (i = 0; i < fls(crtcs); i++) {
		if (!(crtcs & (1 << i)))
			continue;
		ret = nv50_head_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			ret = nv50_pior_create(connector, dcbe);
		}

		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				dcbe->location, dcbe->type,
				ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

	/* Disable vblank irqs aggressively for power-saving, safe on nv50+ */
	dev->vblank_disable_immediate = true;

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}