/*
 * Copyright © 2008 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Keith Packard <keithp@keithp.com>
 */

#include <linux/i2c.h>
#include <linux/slab.h>
#include <linux/export.h>
#include "drm_crtc_helper.h"
#include "intel_drv.h"
#include "drm_dp_helper.h"

#define DP_RECEIVER_CAP_SIZE		0xf
#define DP_LINK_STATUS_SIZE		6
#define DP_LINK_CHECK_TIMEOUT		(10 * 1000)

#define DP_LINK_CONFIGURATION_SIZE	9
struct intel_dp {
	struct intel_encoder base;
	uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
	enum hdmi_force_audio force_audio;
	uint8_t dpcd[DP_RECEIVER_CAP_SIZE];
	struct i2c_adapter adapter;
	struct i2c_algo_dp_aux_data algo;
	int panel_power_up_delay;
	int panel_power_down_delay;
	int panel_power_cycle_delay;
	int backlight_on_delay;
	int backlight_off_delay;
	struct drm_display_mode *panel_fixed_mode;  /* for eDP */
	struct delayed_work panel_vdd_work;
	struct edid *edid; /* cached EDID for eDP */
};
/**
 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
 * @intel_dp: DP struct
 *
 * If a CPU or PCH DP output is attached to an eDP panel, this function
 * will return true, and false otherwise.
 */
static bool is_edp(struct intel_dp *intel_dp)
{
	return intel_dp->base.type == INTEL_OUTPUT_EDP;
}

/**
 * is_pch_edp - is the port on the PCH and attached to an eDP panel?
 * @intel_dp: DP struct
 *
 * Returns true if the given DP struct corresponds to a PCH DP port attached
 * to an eDP panel, false otherwise.  Helpful for determining whether we
 * may need FDI resources for a given DP output or not.
 */
static bool is_pch_edp(struct intel_dp *intel_dp)
{
	return intel_dp->is_pch_edp;
}

/**
 * is_cpu_edp - is the port on the CPU and attached to an eDP panel?
 * @intel_dp: DP struct
 *
 * Returns true if the given DP struct corresponds to a CPU eDP port.
 */
static bool is_cpu_edp(struct intel_dp *intel_dp)
{
	return is_edp(intel_dp) && !is_pch_edp(intel_dp);
}

static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
{
	return container_of(encoder, struct intel_dp, base.base);
}

static struct intel_dp *intel_attached_dp(struct drm_connector *connector)
{
	return container_of(intel_attached_encoder(connector),
			    struct intel_dp, base);
}

/**
 * intel_encoder_is_pch_edp - is the given encoder a PCH attached eDP?
 * @encoder: DRM encoder
 *
 * Return true if @encoder corresponds to a PCH attached eDP panel.  Needed
 * by intel_display.c.
 */
bool intel_encoder_is_pch_edp(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp;

	intel_dp = enc_to_intel_dp(encoder);

	return is_pch_edp(intel_dp);
}

static void intel_dp_start_link_train(struct intel_dp *intel_dp);
static void intel_dp_complete_link_train(struct intel_dp *intel_dp);
static void intel_dp_link_down(struct intel_dp *intel_dp);
void
intel_edp_link_config(struct intel_encoder *intel_encoder,
		      int *lane_num, int *link_bw)
{
	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);

	*lane_num = intel_dp->lane_count;
	if (intel_dp->link_bw == DP_LINK_BW_1_62)
		*link_bw = 162000;
	else if (intel_dp->link_bw == DP_LINK_BW_2_7)
		*link_bw = 270000;
}

int
intel_edp_target_clock(struct intel_encoder *intel_encoder,
		       struct drm_display_mode *mode)
{
	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);

	if (intel_dp->panel_fixed_mode)
		return intel_dp->panel_fixed_mode->clock;
	else
		return mode->clock;
}

static int
intel_dp_max_lane_count(struct intel_dp *intel_dp)
{
	int max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
	switch (max_lane_count) {
	case 1: case 2: case 4:
		break;
	default:
		max_lane_count = 4;
	}
	return max_lane_count;
}

static int
intel_dp_max_link_bw(struct intel_dp *intel_dp)
{
	int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];

	switch (max_link_bw) {
	case DP_LINK_BW_1_62:
	case DP_LINK_BW_2_7:
		break;
	default:
		max_link_bw = DP_LINK_BW_1_62;
		break;
	}
	return max_link_bw;
}

static int
intel_dp_link_clock(uint8_t link_bw)
{
	if (link_bw == DP_LINK_BW_2_7)
		return 270000;
	else
		return 162000;
}
/*
 * The units on the numbers in the next two are... bizarre.  Examples will
 * make it clearer; this one parallels an example in the eDP spec.
 *
 * intel_dp_max_data_rate for one lane of 2.7GHz evaluates as:
 *
 *     270000 * 1 * 8 / 10 == 216000
 *
 * The actual data capacity of that configuration is 2.16Gbit/s, so the
 * units are decakilobits.  ->clock in a drm_display_mode is in kilohertz -
 * or equivalently, kilopixels per second - so for 1680x1050R it'd be
 * 119000.  At 18bpp that's 2142000 kilobits per second.
 *
 * Thus the strange-looking division by 10 in intel_dp_link_required, to
 * get the result in decakilobits instead of kilobits.
 */

static int
intel_dp_link_required(int pixel_clock, int bpp)
{
	return (pixel_clock * bpp + 9) / 10;
}

static int
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
	return (max_link_clock * max_lanes * 8) / 10;
}
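/*
 * Illustrative sketch, not part of the original driver: a hedged example of
 * how the two helpers above combine into a bandwidth check, using the
 * 1680x1050R numbers from the comment.  The helper name below is made up
 * purely for illustration.
 */
#if 0
static bool example_mode_fits_one_lane_2_7ghz(void)
{
	/* 119000 kHz pixel clock at 18bpp -> 214200 decakilobits/s */
	int mode_rate = intel_dp_link_required(119000, 18);
	/* one lane at 270000 (2.7 GHz link) -> 216000 decakilobits/s */
	int max_rate = intel_dp_max_data_rate(intel_dp_link_clock(DP_LINK_BW_2_7), 1);

	return mode_rate <= max_rate;	/* 214200 <= 216000, so it fits */
}
#endif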
static bool
intel_dp_adjust_dithering(struct intel_dp *intel_dp,
			  struct drm_display_mode *mode,
			  bool adjust_mode)
{
	int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
	int max_lanes = intel_dp_max_lane_count(intel_dp);
	int max_rate, mode_rate;

	mode_rate = intel_dp_link_required(mode->clock, 24);
	max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);

	if (mode_rate > max_rate) {
		mode_rate = intel_dp_link_required(mode->clock, 18);
		if (mode_rate > max_rate)
			return false;

		if (adjust_mode)
			mode->private_flags
				|= INTEL_MODE_DP_FORCE_6BPC;
	}

	return true;
}

static int
intel_dp_mode_valid(struct drm_connector *connector,
		    struct drm_display_mode *mode)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);

	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
		if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
			return MODE_PANEL;

		if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
			return MODE_PANEL;
	}

	if (!intel_dp_adjust_dithering(intel_dp, mode, false))
		return MODE_CLOCK_HIGH;

	if (mode->clock < 10000)
		return MODE_CLOCK_LOW;

	if (mode->flags & DRM_MODE_FLAG_DBLCLK)
		return MODE_H_ILLEGAL;

	return MODE_OK;
}
static uint32_t
pack_aux(uint8_t *src, int src_bytes)
{
	int i;
	uint32_t v = 0;

	if (src_bytes > 4)
		src_bytes = 4;
	for (i = 0; i < src_bytes; i++)
		v |= ((uint32_t) src[i]) << ((3-i) * 8);
	return v;
}

static void
unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
{
	int i;

	if (dst_bytes > 4)
		dst_bytes = 4;
	for (i = 0; i < dst_bytes; i++)
		dst[i] = src >> ((3-i) * 8);
}
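/*
 * Illustrative sketch, not from the original file: pack_aux()/unpack_aux()
 * marshal AUX payload bytes into the 32-bit data registers MSB-first, so a
 * round trip preserves byte order.  The values below are arbitrary.
 */
#if 0
static void example_pack_unpack_roundtrip(void)
{
	uint8_t msg[4] = { 0x10, 0x00, 0x0a, 0x03 };
	uint8_t out[4];

	/* 0x10 lands in bits 31:24, 0x03 in bits 7:0 -> 0x10000a03 */
	uint32_t packed = pack_aux(msg, 4);

	unpack_aux(packed, out, 4);	/* out[] now equals msg[] again */
}
#endif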
/* hrawclock is 1/4 the FSB frequency */
static int
intel_hrawclk(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t clkcfg;

	clkcfg = I915_READ(CLKCFG);
	switch (clkcfg & CLKCFG_FSB_MASK) {
	case CLKCFG_FSB_1067:
		return 266;
	case CLKCFG_FSB_1333:
		return 333;
	/* these two are just a guess; one of them might be right */
	case CLKCFG_FSB_1600:
	case CLKCFG_FSB_1600_ALT:
		return 400;
	default:
		return 133;
	}
}

static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0;
}

static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0;
}

static void
intel_dp_check_edp(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (!is_edp(intel_dp))
		return;
	if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) {
		WARN(1, "eDP powered off while attempting aux channel communication.\n");
		DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n",
			      I915_READ(PCH_PP_STATUS),
			      I915_READ(PCH_PP_CONTROL));
	}
}
static int
intel_dp_aux_ch(struct intel_dp *intel_dp,
		uint8_t *send, int send_bytes,
		uint8_t *recv, int recv_size)
{
	uint32_t output_reg = intel_dp->output_reg;
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t ch_ctl = output_reg + 0x10;
	uint32_t ch_data = ch_ctl + 4;
	int i;
	int recv_bytes;
	uint32_t status;
	uint32_t aux_clock_divider;
	int try, precharge;

	intel_dp_check_edp(intel_dp);
	/* The clock divider is based off the hrawclk,
	 * and would like to run at 2MHz. So, take the
	 * hrawclk value and divide by 2 and use that
	 *
	 * Note that PCH attached eDP panels should use a 125MHz input
	 * clock divider.
	 */
	if (is_cpu_edp(intel_dp)) {
		if (IS_GEN6(dev) || IS_GEN7(dev))
			aux_clock_divider = 200; /* SNB & IVB eDP input clock at 400Mhz */
		else
			aux_clock_divider = 225; /* eDP input clock at 450Mhz */
	} else if (HAS_PCH_SPLIT(dev))
		aux_clock_divider = 63; /* IRL input clock fixed at 125Mhz */
	else
		aux_clock_divider = intel_hrawclk(dev) / 2;

	if (IS_GEN6(dev))
		precharge = 3;
	else
		precharge = 5;

	/* Try to wait for any previous AUX channel activity */
	for (try = 0; try < 3; try++) {
		status = I915_READ(ch_ctl);
		if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
			break;
		msleep(1);
	}

	if (try == 3) {
		WARN(1, "dp_aux_ch not started status 0x%08x\n",
		     I915_READ(ch_ctl));
		return -EBUSY;
	}

	/* Must try at least 3 times according to DP spec */
	for (try = 0; try < 5; try++) {
		/* Load the send data into the aux channel data registers */
		for (i = 0; i < send_bytes; i += 4)
			I915_WRITE(ch_data + i,
				   pack_aux(send + i, send_bytes - i));

		/* Send the command and wait for it to complete */
		I915_WRITE(ch_ctl,
			   DP_AUX_CH_CTL_SEND_BUSY |
			   DP_AUX_CH_CTL_TIME_OUT_400us |
			   (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
			   (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
			   (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
			   DP_AUX_CH_CTL_DONE |
			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
			   DP_AUX_CH_CTL_RECEIVE_ERROR);
		for (;;) {
			status = I915_READ(ch_ctl);
			if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
				break;
			udelay(100);
		}

		/* Clear done status and any errors */
		I915_WRITE(ch_ctl,
			   status |
			   DP_AUX_CH_CTL_DONE |
			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
			   DP_AUX_CH_CTL_RECEIVE_ERROR);

		if (status & (DP_AUX_CH_CTL_TIME_OUT_ERROR |
			      DP_AUX_CH_CTL_RECEIVE_ERROR))
			continue;
		if (status & DP_AUX_CH_CTL_DONE)
			break;
	}

	if ((status & DP_AUX_CH_CTL_DONE) == 0) {
		DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
		return -EBUSY;
	}

	/* Check for timeout or receive error.
	 * Timeouts occur when the sink is not connected
	 */
	if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
		DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
		return -EIO;
	}

	/* Timeouts occur when the device isn't connected, so they're
	 * "normal" -- don't fill the kernel log with these */
	if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
		DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
		return -ETIMEDOUT;
	}

	/* Unload any bytes sent back from the other side */
	recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
		      DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	for (i = 0; i < recv_bytes; i += 4)
		unpack_aux(I915_READ(ch_data + i),
			   recv + i, recv_bytes - i);

	return recv_bytes;
}
/* Write data to the aux channel in native mode */
static int
intel_dp_aux_native_write(struct intel_dp *intel_dp,
			  uint16_t address, uint8_t *send, int send_bytes)
{
	int ret;
	uint8_t msg[20];
	int msg_bytes;
	uint8_t ack;

	intel_dp_check_edp(intel_dp);
	if (send_bytes > 16)
		return -1;
	msg[0] = AUX_NATIVE_WRITE << 4;
	msg[1] = address >> 8;
	msg[2] = address & 0xff;
	msg[3] = send_bytes - 1;
	memcpy(&msg[4], send, send_bytes);
	msg_bytes = send_bytes + 4;
	for (;;) {
		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1);
		if (ret < 0)
			return ret;
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			break;
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(100);
		else
			return -EIO;
	}
	return send_bytes;
}

/* Write a single byte to the aux channel in native mode */
static int
intel_dp_aux_native_write_1(struct intel_dp *intel_dp,
			    uint16_t address, uint8_t byte)
{
	return intel_dp_aux_native_write(intel_dp, address, &byte, 1);
}
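/*
 * Illustrative sketch, not part of the original file: a typical use of the
 * native-AUX helpers is a one-byte DPCD write followed by a read-back.  The
 * DPCD register picked here (DP_SET_POWER) is just an example; the helper
 * name is made up for illustration.
 */
#if 0
static void example_native_aux_usage(struct intel_dp *intel_dp)
{
	uint8_t val;

	/* ask the sink to enter the normal operating (D0) power state */
	intel_dp_aux_native_write_1(intel_dp, DP_SET_POWER, DP_SET_POWER_D0);

	/* read the byte back; a return of 1 means one byte was ACKed */
	if (intel_dp_aux_native_read(intel_dp, DP_SET_POWER, &val, 1) == 1)
		DRM_DEBUG_KMS("DP_SET_POWER is now 0x%02x\n", val);
}
#endif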
/* read bytes from a native aux channel */
static int
intel_dp_aux_native_read(struct intel_dp *intel_dp,
			 uint16_t address, uint8_t *recv, int recv_bytes)
{
	uint8_t msg[4];
	int msg_bytes;
	uint8_t reply[20];
	int reply_bytes;
	uint8_t ack;
	int ret;

	intel_dp_check_edp(intel_dp);
	msg[0] = AUX_NATIVE_READ << 4;
	msg[1] = address >> 8;
	msg[2] = address & 0xff;
	msg[3] = recv_bytes - 1;

	msg_bytes = 4;
	reply_bytes = recv_bytes + 1;

	for (;;) {
		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes,
				      reply, reply_bytes);
		if (ret == 0)
			return -EPROTO;
		if (ret < 0)
			return ret;
		ack = reply[0];
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) {
			memcpy(recv, reply + 1, ret - 1);
			return ret - 1;
		}
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(100);
		else
			return -EIO;
	}
}

static int
intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
		    uint8_t write_byte, uint8_t *read_byte)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
	struct intel_dp *intel_dp = container_of(adapter,
						 struct intel_dp,
						 adapter);
	uint16_t address = algo_data->address;
	uint8_t msg[5];
	uint8_t reply[2];
	unsigned retry;
	int msg_bytes;
	int reply_bytes;
	int ret;

	intel_dp_check_edp(intel_dp);
	/* Set up the command byte */
	if (mode & MODE_I2C_READ)
		msg[0] = AUX_I2C_READ << 4;
	else
		msg[0] = AUX_I2C_WRITE << 4;

	if (!(mode & MODE_I2C_STOP))
		msg[0] |= AUX_I2C_MOT << 4;

	msg[1] = address >> 8;
	msg[2] = address;

	switch (mode) {
	case MODE_I2C_WRITE:
		msg_bytes = 5;
		reply_bytes = 1;
		msg[3] = 0;
		msg[4] = write_byte;
		break;
	case MODE_I2C_READ:
		msg_bytes = 4;
		reply_bytes = 2;
		msg[3] = 0;
		break;
	default:
		msg_bytes = 3;
		reply_bytes = 1;
		break;
	}

	for (retry = 0; retry < 5; retry++) {
		ret = intel_dp_aux_ch(intel_dp,
				      msg, msg_bytes,
				      reply, reply_bytes);
		if (ret < 0) {
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
			return ret;
		}

		switch (reply[0] & AUX_NATIVE_REPLY_MASK) {
		case AUX_NATIVE_REPLY_ACK:
			/* I2C-over-AUX Reply field is only valid
			 * when paired with AUX ACK.
			 */
			break;
		case AUX_NATIVE_REPLY_NACK:
			DRM_DEBUG_KMS("aux_ch native nack\n");
			return -EREMOTEIO;
		case AUX_NATIVE_REPLY_DEFER:
			udelay(100);
			continue;
		default:
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
				  reply[0]);
			return -EREMOTEIO;
		}

		switch (reply[0] & AUX_I2C_REPLY_MASK) {
		case AUX_I2C_REPLY_ACK:
			if (mode == MODE_I2C_READ) {
				*read_byte = reply[1];
			}
			return reply_bytes - 1;
		case AUX_I2C_REPLY_NACK:
			DRM_DEBUG_KMS("aux_i2c nack\n");
			return -EREMOTEIO;
		case AUX_I2C_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_i2c defer\n");
			udelay(100);
			break;
		default:
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
			return -EREMOTEIO;
		}
	}

	DRM_ERROR("too many retries, giving up\n");
	return -EREMOTEIO;
}
static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp);
static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync);

static int
intel_dp_i2c_init(struct intel_dp *intel_dp,
		  struct intel_connector *intel_connector, const char *name)
{
	int ret;

	DRM_DEBUG_KMS("i2c_init %s\n", name);
	intel_dp->algo.running = false;
	intel_dp->algo.address = 0;
	intel_dp->algo.aux_ch = intel_dp_i2c_aux_ch;

	memset(&intel_dp->adapter, '\0', sizeof(intel_dp->adapter));
	intel_dp->adapter.owner = THIS_MODULE;
	intel_dp->adapter.class = I2C_CLASS_DDC;
	strncpy(intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
	intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
	intel_dp->adapter.algo_data = &intel_dp->algo;
	intel_dp->adapter.dev.parent = &intel_connector->base.kdev;

	ironlake_edp_panel_vdd_on(intel_dp);
	ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
	ironlake_edp_panel_vdd_off(intel_dp, false);
	return ret;
}
static bool
intel_dp_mode_fixup(struct drm_encoder *encoder,
		    const struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = encoder->dev;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	int lane_count, clock;
	int max_lane_count = intel_dp_max_lane_count(intel_dp);
	int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0;
	int bpp, mode_rate;
	static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };

	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
		intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
		intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN,
					mode, adjusted_mode);
	}

	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLCLK)
		return false;

	DRM_DEBUG_KMS("DP link computation with max lane count %i "
		      "max bw %02x pixel clock %iKHz\n",
		      max_lane_count, bws[max_clock], adjusted_mode->clock);

	if (!intel_dp_adjust_dithering(intel_dp, adjusted_mode, true))
		return false;

	bpp = adjusted_mode->private_flags & INTEL_MODE_DP_FORCE_6BPC ? 18 : 24;
	mode_rate = intel_dp_link_required(adjusted_mode->clock, bpp);

	for (clock = 0; clock <= max_clock; clock++) {
		for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
			int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count);

			if (mode_rate <= link_avail) {
				intel_dp->link_bw = bws[clock];
				intel_dp->lane_count = lane_count;
				adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
				DRM_DEBUG_KMS("DP link bw %02x lane "
					      "count %d clock %d bpp %d\n",
					      intel_dp->link_bw, intel_dp->lane_count,
					      adjusted_mode->clock, bpp);
				DRM_DEBUG_KMS("DP link bw required %i available %i\n",
					      mode_rate, link_avail);
				return true;
			}
		}
	}

	return false;
}
struct intel_dp_m_n {
	uint32_t	tu;
	uint32_t	gmch_m;
	uint32_t	gmch_n;
	uint32_t	link_m;
	uint32_t	link_n;
};

static void
intel_reduce_ratio(uint32_t *num, uint32_t *den)
{
	while (*num > 0xffffff || *den > 0xffffff) {
		*num >>= 1;
		*den >>= 1;
	}
}

static void
intel_dp_compute_m_n(int bpp,
		     int nlanes,
		     int pixel_clock,
		     int link_clock,
		     struct intel_dp_m_n *m_n)
{
	m_n->tu = 64;
	m_n->gmch_m = (pixel_clock * bpp) >> 3;
	m_n->gmch_n = link_clock * nlanes;
	intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
	m_n->link_m = pixel_clock;
	m_n->link_n = link_clock;
	intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
}
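/*
 * Illustrative sketch, not from the original file: a worked example of the
 * M/N computation above for a 1920x1080@60 panel (148500 kHz pixel clock),
 * 24bpp, two lanes at 2.7 GHz.  The numbers in the comments are just the
 * arithmetic spelled out.
 */
#if 0
static void example_compute_m_n(void)
{
	struct intel_dp_m_n m_n;

	intel_dp_compute_m_n(24, 2, 148500, 270000, &m_n);
	/*
	 * gmch_m = 148500 * 24 / 8 = 445500, gmch_n = 270000 * 2 = 540000;
	 * both already fit in 24 bits, so intel_reduce_ratio() leaves them
	 * untouched.  link_m = 148500, link_n = 270000.
	 */
}
#endif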
void
intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
		 struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct intel_encoder *encoder;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	int lane_count = 4;
	struct intel_dp_m_n m_n;
	int pipe = intel_crtc->pipe;

	/*
	 * Find the lane count in the intel_encoder private
	 */
	for_each_encoder_on_crtc(dev, crtc, encoder) {
		struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);

		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
		    intel_dp->base.type == INTEL_OUTPUT_EDP)
		{
			lane_count = intel_dp->lane_count;
			break;
		}
	}

	/*
	 * Compute the GMCH and Link ratios. The '3' here is
	 * the number of bytes_per_pixel post-LUT, which we always
	 * set up for 8-bits of R/G/B, or 3 bytes total.
	 */
	intel_dp_compute_m_n(intel_crtc->bpp, lane_count,
			     mode->clock, adjusted_mode->clock, &m_n);

	if (HAS_PCH_SPLIT(dev)) {
		I915_WRITE(TRANSDATA_M1(pipe),
			   ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
			   m_n.gmch_m);
		I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n);
		I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m);
		I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n);
	} else {
		I915_WRITE(PIPE_GMCH_DATA_M(pipe),
			   ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
			   m_n.gmch_m);
		I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
		I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
		I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
	}
}

static void ironlake_edp_pll_on(struct drm_encoder *encoder);
static void ironlake_edp_pll_off(struct drm_encoder *encoder);
static void
intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_crtc *crtc = intel_dp->base.base.crtc;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);

	/* Turn on the eDP PLL if needed */
	if (is_edp(intel_dp)) {
		if (!is_pch_edp(intel_dp))
			ironlake_edp_pll_on(encoder);
		else
			ironlake_edp_pll_off(encoder);
	}

	/*
	 * There are four kinds of DP registers:
	 *
	 * IBX PCH and CPU are the same for almost everything,
	 * except that the CPU DP PLL is configured in this
	 * register
	 *
	 * CPT PCH is quite different, having many bits moved
	 * to the TRANS_DP_CTL register instead.  That
	 * configuration happens (oddly) in ironlake_pch_enable
	 */

	/* Preserve the BIOS-computed detected bit. This is
	 * supposed to be read-only.
	 */
	intel_dp->DP = I915_READ(intel_dp->output_reg) & DP_DETECTED;
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;

	/* Handle DP bits in common between all three register formats */

	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;

	switch (intel_dp->lane_count) {
	case 1:
		intel_dp->DP |= DP_PORT_WIDTH_1;
		break;
	case 2:
		intel_dp->DP |= DP_PORT_WIDTH_2;
		break;
	case 4:
		intel_dp->DP |= DP_PORT_WIDTH_4;
		break;
	}
	if (intel_dp->has_audio) {
		DRM_DEBUG_DRIVER("Enabling DP audio on pipe %c\n",
				 pipe_name(intel_crtc->pipe));
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
		intel_write_eld(encoder, adjusted_mode);
	}
	memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
	intel_dp->link_configuration[0] = intel_dp->link_bw;
	intel_dp->link_configuration[1] = intel_dp->lane_count;
	intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B;
	/*
	 * Check for DPCD version > 1.1 and enhanced framing support
	 */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
		intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
	}

	/* Split out the IBX/CPU vs CPT settings */

	if (is_cpu_edp(intel_dp) && IS_GEN7(dev)) {
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		intel_dp->DP |= intel_crtc->pipe << 29;

		/* don't miss out required setting for eDP */
		intel_dp->DP |= DP_PLL_ENABLE;
		if (adjusted_mode->clock < 200000)
			intel_dp->DP |= DP_PLL_FREQ_160MHZ;
		else
			intel_dp->DP |= DP_PLL_FREQ_270MHZ;
	} else if (!HAS_PCH_CPT(dev) || is_cpu_edp(intel_dp)) {
		intel_dp->DP |= intel_dp->color_range;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (intel_crtc->pipe == 1)
			intel_dp->DP |= DP_PIPEB_SELECT;

		if (is_cpu_edp(intel_dp)) {
			/* don't miss out required setting for eDP */
			intel_dp->DP |= DP_PLL_ENABLE;
			if (adjusted_mode->clock < 200000)
				intel_dp->DP |= DP_PLL_FREQ_160MHZ;
			else
				intel_dp->DP |= DP_PLL_FREQ_270MHZ;
		}
	} else {
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
	}
}
#define IDLE_ON_MASK		(PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK)
#define IDLE_ON_VALUE		(PP_ON | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_ON_IDLE)

#define IDLE_OFF_MASK		(PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK)
#define IDLE_OFF_VALUE		(0 | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE)

#define IDLE_CYCLE_MASK		(PP_ON | 0 | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK)
#define IDLE_CYCLE_VALUE	(0 | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE)

static void ironlake_wait_panel_status(struct intel_dp *intel_dp,
				       u32 mask,
				       u32 value)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
		      mask, value,
		      I915_READ(PCH_PP_STATUS),
		      I915_READ(PCH_PP_CONTROL));

	if (_wait_for((I915_READ(PCH_PP_STATUS) & mask) == value, 5000, 10)) {
		DRM_ERROR("Panel status timeout: status %08x control %08x\n",
			  I915_READ(PCH_PP_STATUS),
			  I915_READ(PCH_PP_CONTROL));
	}
}

static void ironlake_wait_panel_on(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power on\n");
	ironlake_wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE);
}

static void ironlake_wait_panel_off(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power off time\n");
	ironlake_wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE);
}

static void ironlake_wait_panel_power_cycle(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power cycle\n");
	ironlake_wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE);
}

/* Read the current pp_control value, unlocking the register if it
 * is locked
 */
static u32 ironlake_get_pp_control(struct drm_i915_private *dev_priv)
{
	u32 control = I915_READ(PCH_PP_CONTROL);

	control &= ~PANEL_UNLOCK_MASK;
	control |= PANEL_UNLOCK_REGS;
	return control;
}
static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;
	DRM_DEBUG_KMS("Turn eDP VDD on\n");

	WARN(intel_dp->want_panel_vdd,
	     "eDP VDD already requested on\n");

	intel_dp->want_panel_vdd = true;

	if (ironlake_edp_have_panel_vdd(intel_dp)) {
		DRM_DEBUG_KMS("eDP VDD already on\n");
		return;
	}

	if (!ironlake_edp_have_panel_power(intel_dp))
		ironlake_wait_panel_power_cycle(intel_dp);

	pp = ironlake_get_pp_control(dev_priv);
	pp |= EDP_FORCE_VDD;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
	DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
		      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));

	/*
	 * If the panel wasn't on, delay before accessing aux channel
	 */
	if (!ironlake_edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP was not running\n");
		msleep(intel_dp->panel_power_up_delay);
	}
}

static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) {
		pp = ironlake_get_pp_control(dev_priv);
		pp &= ~EDP_FORCE_VDD;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);

		/* Make sure sequencer is idle before allowing subsequent activity */
		DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
			      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));

		msleep(intel_dp->panel_power_down_delay);
	}
}

static void ironlake_panel_vdd_work(struct work_struct *__work)
{
	struct intel_dp *intel_dp = container_of(to_delayed_work(__work),
						 struct intel_dp, panel_vdd_work);
	struct drm_device *dev = intel_dp->base.base.dev;

	mutex_lock(&dev->mode_config.mutex);
	ironlake_panel_vdd_off_sync(intel_dp);
	mutex_unlock(&dev->mode_config.mutex);
}

static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
{
	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd);
	WARN(!intel_dp->want_panel_vdd, "eDP VDD not forced on");

	intel_dp->want_panel_vdd = false;

	if (sync) {
		ironlake_panel_vdd_off_sync(intel_dp);
	} else {
		/*
		 * Queue the timer to fire a long
		 * time from now (relative to the power down delay)
		 * to keep the panel power up across a sequence of operations
		 */
		schedule_delayed_work(&intel_dp->panel_vdd_work,
				      msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5));
	}
}
static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP power on\n");

	if (ironlake_edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP power already on\n");
		return;
	}

	ironlake_wait_panel_power_cycle(intel_dp);

	pp = ironlake_get_pp_control(dev_priv);
	if (IS_GEN5(dev)) {
		/* ILK workaround: disable reset around power sequence */
		pp &= ~PANEL_POWER_RESET;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}

	pp |= POWER_TARGET_ON;
	if (!IS_GEN5(dev))
		pp |= PANEL_POWER_RESET;

	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);

	ironlake_wait_panel_on(intel_dp);

	if (IS_GEN5(dev)) {
		pp |= PANEL_POWER_RESET; /* restore panel reset bit */
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}
}

static void ironlake_edp_panel_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP power off\n");

	WARN(!intel_dp->want_panel_vdd, "Need VDD to turn off panel\n");

	pp = ironlake_get_pp_control(dev_priv);
	/* We need to switch off panel power _and_ force vdd, for otherwise some
	 * panels get very unhappy and cease to work. */
	pp &= ~(POWER_TARGET_ON | EDP_FORCE_VDD | PANEL_POWER_RESET | EDP_BLC_ENABLE);
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);

	intel_dp->want_panel_vdd = false;

	ironlake_wait_panel_off(intel_dp);
}
static void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");
	/*
	 * If we enable the backlight right away following a panel power
	 * on, we may see slight flicker as the panel syncs with the eDP
	 * link.  So delay a bit to make sure the image is solid before
	 * allowing it to appear.
	 */
	msleep(intel_dp->backlight_on_delay);
	pp = ironlake_get_pp_control(dev_priv);
	pp |= EDP_BLC_ENABLE;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
}

static void ironlake_edp_backlight_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");
	pp = ironlake_get_pp_control(dev_priv);
	pp &= ~EDP_BLC_ENABLE;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
	msleep(intel_dp->backlight_off_delay);
}

static void ironlake_edp_pll_on(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 dpa_ctl;

	DRM_DEBUG_KMS("\n");
	dpa_ctl = I915_READ(DP_A);
	dpa_ctl |= DP_PLL_ENABLE;
	I915_WRITE(DP_A, dpa_ctl);
	POSTING_READ(DP_A);
	udelay(200);
}

static void ironlake_edp_pll_off(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 dpa_ctl;

	dpa_ctl = I915_READ(DP_A);
	dpa_ctl &= ~DP_PLL_ENABLE;
	I915_WRITE(DP_A, dpa_ctl);
	POSTING_READ(DP_A);
	udelay(200);
}
/* If the sink supports it, try to set the power state appropriately */
static void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode)
{
	int ret, i;

	/* Should have a valid DPCD by this point */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (mode != DRM_MODE_DPMS_ON) {
		ret = intel_dp_aux_native_write_1(intel_dp, DP_SET_POWER,
						  DP_SET_POWER_D3);
		if (ret != 1)
			DRM_DEBUG_DRIVER("failed to write sink power state\n");
	} else {
		/*
		 * When turning on, we need to retry for 1ms to give the sink
		 * time to wake up.
		 */
		for (i = 0; i < 3; i++) {
			ret = intel_dp_aux_native_write_1(intel_dp,
							  DP_SET_POWER,
							  DP_SET_POWER_D0);
			if (ret == 1)
				break;
			msleep(1);
		}
	}
}

static void intel_dp_prepare(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	/* Make sure the panel is off before trying to change the mode. But also
	 * ensure that we have vdd while we switch off the panel. */
	ironlake_edp_panel_vdd_on(intel_dp);
	ironlake_edp_backlight_off(intel_dp);
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	ironlake_edp_panel_off(intel_dp);
	intel_dp_link_down(intel_dp);
}

static void intel_dp_commit(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);

	ironlake_edp_panel_vdd_on(intel_dp);
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_start_link_train(intel_dp);
	ironlake_edp_panel_on(intel_dp);
	ironlake_edp_panel_vdd_off(intel_dp, true);
	intel_dp_complete_link_train(intel_dp);
	ironlake_edp_backlight_on(intel_dp);

	intel_dp->dpms_mode = DRM_MODE_DPMS_ON;

	if (HAS_PCH_CPT(dev))
		intel_cpt_verify_modeset(dev, intel_crtc->pipe);
}
static void
intel_dp_dpms(struct drm_encoder *encoder, int mode)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t dp_reg = I915_READ(intel_dp->output_reg);

	if (mode != DRM_MODE_DPMS_ON) {
		/* Switching the panel off requires vdd. */
		ironlake_edp_panel_vdd_on(intel_dp);
		ironlake_edp_backlight_off(intel_dp);
		intel_dp_sink_dpms(intel_dp, mode);
		ironlake_edp_panel_off(intel_dp);
		intel_dp_link_down(intel_dp);

		if (is_cpu_edp(intel_dp))
			ironlake_edp_pll_off(encoder);
	} else {
		if (is_cpu_edp(intel_dp))
			ironlake_edp_pll_on(encoder);

		ironlake_edp_panel_vdd_on(intel_dp);
		intel_dp_sink_dpms(intel_dp, mode);
		if (!(dp_reg & DP_PORT_EN)) {
			intel_dp_start_link_train(intel_dp);
			ironlake_edp_panel_on(intel_dp);
			ironlake_edp_panel_vdd_off(intel_dp, true);
			intel_dp_complete_link_train(intel_dp);
		} else
			ironlake_edp_panel_vdd_off(intel_dp, false);
		ironlake_edp_backlight_on(intel_dp);
	}
	intel_dp->dpms_mode = mode;
}
/*
 * Native read with retry for link status and receiver capability reads for
 * cases where the sink may still be asleep.
 */
static bool
intel_dp_aux_native_read_retry(struct intel_dp *intel_dp, uint16_t address,
			       uint8_t *recv, int recv_bytes)
{
	int ret, i;

	/*
	 * Sinks are *supposed* to come up within 1ms from an off state,
	 * but we're also supposed to retry 3 times per the spec.
	 */
	for (i = 0; i < 3; i++) {
		ret = intel_dp_aux_native_read(intel_dp, address, recv,
					       recv_bytes);
		if (ret == recv_bytes)
			return true;
		msleep(1);
	}

	return false;
}

/*
 * Fetch AUX CH registers 0x202 - 0x207 which contain
 * link status information
 */
static bool
intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
	return intel_dp_aux_native_read_retry(intel_dp,
					      DP_LANE0_1_STATUS,
					      link_status,
					      DP_LINK_STATUS_SIZE);
}

static uint8_t
intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
		     int r)
{
	return link_status[r - DP_LANE0_1_STATUS];
}

static uint8_t
intel_get_adjust_request_voltage(uint8_t adjust_request[2],
				 int lane)
{
	int s = ((lane & 1) ?
		 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
		 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
	uint8_t l = adjust_request[lane>>1];

	return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
}

static uint8_t
intel_get_adjust_request_pre_emphasis(uint8_t adjust_request[2],
				      int lane)
{
	int s = ((lane & 1) ?
		 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
		 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
	uint8_t l = adjust_request[lane>>1];

	return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
}
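/*
 * Illustrative sketch, not from the original file: each DPCD ADJUST_REQUEST
 * byte packs two lanes, two bits of voltage swing and two bits of
 * pre-emphasis per lane, which is what the two helpers above extract.  The
 * byte value below is arbitrary.
 */
#if 0
static void example_decode_adjust_request(void)
{
	/* lane 0: swing level 2, pre-emphasis level 1; lane 1: both zero */
	uint8_t adjust_request[2] = { 0x06, 0x00 };

	uint8_t v = intel_get_adjust_request_voltage(adjust_request, 0);
	uint8_t p = intel_get_adjust_request_pre_emphasis(adjust_request, 0);

	/* v == DP_TRAIN_VOLTAGE_SWING_800, p == DP_TRAIN_PRE_EMPHASIS_3_5 */
}
#endif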
static char *voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
static char *pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};
static char *link_train_names[] = {
	"pattern 1", "pattern 2", "idle", "off"
};

/*
 * These are source-specific values; current Intel hardware supports
 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
 */

static uint8_t
intel_dp_voltage_max(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;

	if (IS_GEN7(dev) && is_cpu_edp(intel_dp))
		return DP_TRAIN_VOLTAGE_SWING_800;
	else if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
		return DP_TRAIN_VOLTAGE_SWING_1200;
	else
		return DP_TRAIN_VOLTAGE_SWING_800;
}

static uint8_t
intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
{
	struct drm_device *dev = intel_dp->base.base.dev;

	if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_400:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_600:
		case DP_TRAIN_VOLTAGE_SWING_800:
			return DP_TRAIN_PRE_EMPHASIS_3_5;
		default:
			return DP_TRAIN_PRE_EMPHASIS_0;
		}
	} else {
		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_400:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_600:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_800:
			return DP_TRAIN_PRE_EMPHASIS_3_5;
		case DP_TRAIN_VOLTAGE_SWING_1200:
		default:
			return DP_TRAIN_PRE_EMPHASIS_0;
		}
	}
}
static void
intel_get_adjust_train(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
	uint8_t v = 0;
	uint8_t p = 0;
	int lane;
	uint8_t *adjust_request = link_status + (DP_ADJUST_REQUEST_LANE0_1 - DP_LANE0_1_STATUS);
	uint8_t voltage_max;
	uint8_t preemph_max;

	for (lane = 0; lane < intel_dp->lane_count; lane++) {
		uint8_t this_v = intel_get_adjust_request_voltage(adjust_request, lane);
		uint8_t this_p = intel_get_adjust_request_pre_emphasis(adjust_request, lane);

		if (this_v > v)
			v = this_v;
		if (this_p > p)
			p = this_p;
	}

	voltage_max = intel_dp_voltage_max(intel_dp);
	if (v >= voltage_max)
		v = voltage_max | DP_TRAIN_MAX_SWING_REACHED;

	preemph_max = intel_dp_pre_emphasis_max(intel_dp, v);
	if (p >= preemph_max)
		p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

	for (lane = 0; lane < 4; lane++)
		intel_dp->train_set[lane] = v | p;
}
static uint32_t
intel_dp_signal_levels(uint8_t train_set)
{
	uint32_t signal_levels = 0;

	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_400:
	default:
		signal_levels |= DP_VOLTAGE_0_4;
		break;
	case DP_TRAIN_VOLTAGE_SWING_600:
		signal_levels |= DP_VOLTAGE_0_6;
		break;
	case DP_TRAIN_VOLTAGE_SWING_800:
		signal_levels |= DP_VOLTAGE_0_8;
		break;
	case DP_TRAIN_VOLTAGE_SWING_1200:
		signal_levels |= DP_VOLTAGE_1_2;
		break;
	}
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPHASIS_0:
	default:
		signal_levels |= DP_PRE_EMPHASIS_0;
		break;
	case DP_TRAIN_PRE_EMPHASIS_3_5:
		signal_levels |= DP_PRE_EMPHASIS_3_5;
		break;
	case DP_TRAIN_PRE_EMPHASIS_6:
		signal_levels |= DP_PRE_EMPHASIS_6;
		break;
	case DP_TRAIN_PRE_EMPHASIS_9_5:
		signal_levels |= DP_PRE_EMPHASIS_9_5;
		break;
	}
	return signal_levels;
}

/* Gen6's DP voltage swing and pre-emphasis control */
static uint32_t
intel_gen6_edp_signal_levels(uint8_t train_set)
{
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					 DP_TRAIN_PRE_EMPHASIS_MASK);
	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
	case DP_TRAIN_VOLTAGE_SWING_1200 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}

/* Gen7's DP voltage swing and pre-emphasis control */
static uint32_t
intel_gen7_edp_signal_levels(uint8_t train_set)
{
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					 DP_TRAIN_PRE_EMPHASIS_MASK);
	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}
static uint8_t
intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
		      int lane)
{
	int s = (lane & 1) * 4;
	uint8_t l = link_status[lane>>1];

	return (l >> s) & 0xf;
}

/* Check for clock recovery is done on all channels */
static bool
intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
{
	int lane;
	uint8_t lane_status;

	for (lane = 0; lane < lane_count; lane++) {
		lane_status = intel_get_lane_status(link_status, lane);
		if ((lane_status & DP_LANE_CR_DONE) == 0)
			return false;
	}
	return true;
}

/* Check to see if channel eq is done on all channels */
#define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
			 DP_LANE_CHANNEL_EQ_DONE|\
			 DP_LANE_SYMBOL_LOCKED)
static bool
intel_channel_eq_ok(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
	uint8_t lane_align;
	uint8_t lane_status;
	int lane;

	lane_align = intel_dp_link_status(link_status,
					  DP_LANE_ALIGN_STATUS_UPDATED);
	if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
		return false;
	for (lane = 0; lane < intel_dp->lane_count; lane++) {
		lane_status = intel_get_lane_status(link_status, lane);
		if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
			return false;
	}
	return true;
}

static bool
intel_dp_set_link_train(struct intel_dp *intel_dp,
			uint32_t dp_reg_value,
			uint8_t dp_train_pat)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int ret;

	I915_WRITE(intel_dp->output_reg, dp_reg_value);
	POSTING_READ(intel_dp->output_reg);

	intel_dp_aux_native_write_1(intel_dp,
				    DP_TRAINING_PATTERN_SET,
				    dp_train_pat);

	ret = intel_dp_aux_native_write(intel_dp,
					DP_TRAINING_LANE0_SET,
					intel_dp->train_set,
					intel_dp->lane_count);
	if (ret != intel_dp->lane_count)
		return false;

	return true;
}
/* Enable corresponding port and start training pattern 1 */
static void
intel_dp_start_link_train(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);
	int i;
	uint8_t voltage;
	bool clock_recovery = false;
	int voltage_tries, loop_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/*
	 * On CPT we have to enable the port in training pattern 1, which
	 * will happen below in intel_dp_set_link_train.  Otherwise, enable
	 * the port and wait for it to become active.
	 */
	if (!HAS_PCH_CPT(dev)) {
		I915_WRITE(intel_dp->output_reg, intel_dp->DP);
		POSTING_READ(intel_dp->output_reg);
		intel_wait_for_vblank(dev, intel_crtc->pipe);
	}

	/* Write the link configuration data */
	intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET,
				  intel_dp->link_configuration,
				  DP_LINK_CONFIGURATION_SIZE);

	DP |= DP_PORT_EN;

	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
	else
		DP &= ~DP_LINK_TRAIN_MASK;
	memset(intel_dp->train_set, 0, 4);
	voltage = 0xff;
	voltage_tries = 0;
	loop_tries = 0;
	clock_recovery = false;
	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
		uint8_t link_status[DP_LINK_STATUS_SIZE];
		uint32_t signal_levels;

		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
		} else {
			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
			DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", signal_levels);
			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
		}

		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			reg = DP | DP_LINK_TRAIN_PAT_1_CPT;
		else
			reg = DP | DP_LINK_TRAIN_PAT_1;

		if (!intel_dp_set_link_train(intel_dp, reg,
					     DP_TRAINING_PATTERN_1 |
					     DP_LINK_SCRAMBLING_DISABLE))
			break;
		/* Set training pattern 1 */

		udelay(100);
		if (!intel_dp_get_link_status(intel_dp, link_status)) {
			DRM_ERROR("failed to get link status\n");
			break;
		}

		if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
			DRM_DEBUG_KMS("clock recovery OK\n");
			clock_recovery = true;
			break;
		}

		/* Check to see if we've tried the max voltage */
		for (i = 0; i < intel_dp->lane_count; i++)
			if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		if (i == intel_dp->lane_count && voltage_tries == 5) {
			++loop_tries;
			if (loop_tries == 5) {
				DRM_DEBUG_KMS("too many full retries, give up\n");
				break;
			}
			memset(intel_dp->train_set, 0, 4);
			voltage_tries = 0;
			continue;
		}

		/* Check to see if we've tried the same voltage 5 times */
		if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++voltage_tries;
			if (voltage_tries == 5) {
				DRM_DEBUG_KMS("too many voltage retries, give up\n");
				break;
			}
		} else
			voltage_tries = 0;
		voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new intel_dp->train_set as requested by target */
		intel_get_adjust_train(intel_dp, link_status);
	}

	intel_dp->DP = DP;
}
static void
intel_dp_complete_link_train(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	bool channel_eq = false;
	int tries, cr_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/* channel equalization */
	tries = 0;
	cr_tries = 0;
	channel_eq = false;
	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
		uint32_t signal_levels;
		uint8_t link_status[DP_LINK_STATUS_SIZE];

		if (cr_tries > 5) {
			DRM_ERROR("failed to train DP, aborting\n");
			intel_dp_link_down(intel_dp);
			return;
		}

		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
		} else {
			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
		}

		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			reg = DP | DP_LINK_TRAIN_PAT_2_CPT;
		else
			reg = DP | DP_LINK_TRAIN_PAT_2;

		/* channel eq pattern */
		if (!intel_dp_set_link_train(intel_dp, reg,
					     DP_TRAINING_PATTERN_2 |
					     DP_LINK_SCRAMBLING_DISABLE))
			break;

		udelay(400);
		if (!intel_dp_get_link_status(intel_dp, link_status))
			break;

		/* Make sure clock is still ok */
		if (!intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
			intel_dp_start_link_train(intel_dp);
			cr_tries++;
			continue;
		}

		if (intel_channel_eq_ok(intel_dp, link_status)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times, then try clock recovery if that fails */
		if (tries > 5) {
			intel_dp_link_down(intel_dp);
			intel_dp_start_link_train(intel_dp);
			tries = 0;
			cr_tries++;
			continue;
		}

		/* Compute new intel_dp->train_set as requested by target */
		intel_get_adjust_train(intel_dp, link_status);
		++tries;
	}

	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
		reg = DP | DP_LINK_TRAIN_OFF_CPT;
	else
		reg = DP | DP_LINK_TRAIN_OFF;

	I915_WRITE(intel_dp->output_reg, reg);
	POSTING_READ(intel_dp->output_reg);
	intel_dp_aux_native_write_1(intel_dp,
				    DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
}
static void
intel_dp_link_down(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t DP = intel_dp->DP;

	if ((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
		return;

	DRM_DEBUG_KMS("\n");

	if (is_edp(intel_dp)) {
		DP &= ~DP_PLL_ENABLE;
		I915_WRITE(intel_dp->output_reg, DP);
		POSTING_READ(intel_dp->output_reg);
		udelay(100);
	}

	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) {
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
	} else {
		DP &= ~DP_LINK_TRAIN_MASK;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
	}
	POSTING_READ(intel_dp->output_reg);

	msleep(17);

	if (is_edp(intel_dp)) {
		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			DP |= DP_LINK_TRAIN_OFF_CPT;
		else
			DP |= DP_LINK_TRAIN_OFF;
	}

	if (HAS_PCH_IBX(dev) &&
	    I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;

		/* Hardware workaround: leaving our transcoder select
		 * set to transcoder B while it's off will prevent the
		 * corresponding HDMI output on transcoder A.
		 *
		 * Combine this with another hardware workaround:
		 * transcoder select bit can only be cleared while the
		 * pipe is running.
		 */
		DP &= ~DP_PIPEB_SELECT;
		I915_WRITE(intel_dp->output_reg, DP);

		/* Changes to enable or select take place the vblank
		 * after being written.
		 */
		if (crtc == NULL) {
			/* We can arrive here never having been attached
			 * to a CRTC, for instance, due to inheriting
			 * random state from the BIOS.
			 *
			 * If the pipe is not running, play safe and
			 * wait for the clocks to stabilise before
			 * continuing.
			 */
			POSTING_READ(intel_dp->output_reg);
			msleep(50);
		} else
			intel_wait_for_vblank(dev, to_intel_crtc(crtc)->pipe);
	}

	DP &= ~DP_AUDIO_OUTPUT_ENABLE;
	I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
	POSTING_READ(intel_dp->output_reg);
	msleep(intel_dp->panel_power_down_delay);
}
static bool
intel_dp_get_dpcd(struct intel_dp *intel_dp)
{
	if (intel_dp_aux_native_read_retry(intel_dp, 0x000, intel_dp->dpcd,
					   sizeof(intel_dp->dpcd)) &&
	    (intel_dp->dpcd[DP_DPCD_REV] != 0)) {
		return true;
	}

	return false;
}

static void
intel_dp_probe_oui(struct intel_dp *intel_dp)
{
	u8 buf[3];

	if (!(intel_dp->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
		return;

	ironlake_edp_panel_vdd_on(intel_dp);

	if (intel_dp_aux_native_read_retry(intel_dp, DP_SINK_OUI, buf, 3))
		DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
			      buf[0], buf[1], buf[2]);

	if (intel_dp_aux_native_read_retry(intel_dp, DP_BRANCH_OUI, buf, 3))
		DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
			      buf[0], buf[1], buf[2]);

	ironlake_edp_panel_vdd_off(intel_dp, false);
}

static bool
intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector)
{
	int ret;

	ret = intel_dp_aux_native_read_retry(intel_dp,
					     DP_DEVICE_SERVICE_IRQ_VECTOR,
					     sink_irq_vector, 1);
	if (!ret)
		return false;

	return true;
}

static void
intel_dp_handle_test_request(struct intel_dp *intel_dp)
{
	/* NAK by default */
	intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_ACK);
}
/*
 * According to DP spec
 * 1. Read DPCD
 * 2. Configure link according to Receiver Capabilities
 * 3. Use Link Training from 2.5.3.3 and 3.5.1.3
 * 4. Check link status on receipt of hot-plug interrupt
 */
static void
intel_dp_check_link_status(struct intel_dp *intel_dp)
{
	u8 sink_irq_vector;
	u8 link_status[DP_LINK_STATUS_SIZE];

	if (intel_dp->dpms_mode != DRM_MODE_DPMS_ON)
		return;

	if (!intel_dp->base.base.crtc)
		return;

	/* Try to read receiver status if the link appears to be up */
	if (!intel_dp_get_link_status(intel_dp, link_status)) {
		intel_dp_link_down(intel_dp);
		return;
	}

	/* Now read the DPCD to see if it's actually running */
	if (!intel_dp_get_dpcd(intel_dp)) {
		intel_dp_link_down(intel_dp);
		return;
	}

	/* Try to read the source of the interrupt */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) {
		/* Clear interrupt source */
		intel_dp_aux_native_write_1(intel_dp,
					    DP_DEVICE_SERVICE_IRQ_VECTOR,
					    sink_irq_vector);

		if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST)
			intel_dp_handle_test_request(intel_dp);
		if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ))
			DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
	}

	if (!intel_channel_eq_ok(intel_dp, link_status)) {
		DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
			      drm_get_encoder_name(&intel_dp->base.base));
		intel_dp_start_link_train(intel_dp);
		intel_dp_complete_link_train(intel_dp);
	}
}

static enum drm_connector_status
intel_dp_detect_dpcd(struct intel_dp *intel_dp)
{
	if (intel_dp_get_dpcd(intel_dp))
		return connector_status_connected;
	return connector_status_disconnected;
}

static enum drm_connector_status
ironlake_dp_detect(struct intel_dp *intel_dp)
{
	enum drm_connector_status status;

	/* Can't disconnect eDP, but you can close the lid... */
	if (is_edp(intel_dp)) {
		status = intel_panel_detect(intel_dp->base.base.dev);
		if (status == connector_status_unknown)
			status = connector_status_connected;
		return status;
	}

	return intel_dp_detect_dpcd(intel_dp);
}

static enum drm_connector_status
g4x_dp_detect(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t bit;

	switch (intel_dp->output_reg) {
	case DP_B:
		bit = DPB_HOTPLUG_LIVE_STATUS;
		break;
	case DP_C:
		bit = DPC_HOTPLUG_LIVE_STATUS;
		break;
	case DP_D:
		bit = DPD_HOTPLUG_LIVE_STATUS;
		break;
	default:
		return connector_status_unknown;
	}

	if ((I915_READ(PORT_HOTPLUG_STAT) & bit) == 0)
		return connector_status_disconnected;

	return intel_dp_detect_dpcd(intel_dp);
}
static struct edid *
intel_dp_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct edid *edid;
	int size;

	if (is_edp(intel_dp)) {
		if (!intel_dp->edid)
			return NULL;

		size = (intel_dp->edid->extensions + 1) * EDID_LENGTH;
		edid = kmalloc(size, GFP_KERNEL);
		if (!edid)
			return NULL;

		memcpy(edid, intel_dp->edid, size);
		return edid;
	}

	edid = drm_get_edid(connector, adapter);
	return edid;
}

static int
intel_dp_get_edid_modes(struct drm_connector *connector, struct i2c_adapter *adapter)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int ret;

	if (is_edp(intel_dp)) {
		drm_mode_connector_update_edid_property(connector,
							intel_dp->edid);
		ret = drm_add_edid_modes(connector, intel_dp->edid);
		drm_edid_to_eld(connector,
				intel_dp->edid);
		connector->display_info.raw_edid = NULL;
		return intel_dp->edid_mode_count;
	}

	ret = intel_ddc_get_modes(connector, adapter);
	return ret;
}
2173 * Uses CRT_HOTPLUG_EN and CRT_HOTPLUG_STAT to detect DP connection.
2175 * \return true if DP port is connected.
2176 * \return false if DP port is disconnected.
2178 static enum drm_connector_status
2179 intel_dp_detect(struct drm_connector
*connector
, bool force
)
2181 struct intel_dp
*intel_dp
= intel_attached_dp(connector
);
2182 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
2183 enum drm_connector_status status
;
2184 struct edid
*edid
= NULL
;
2186 intel_dp
->has_audio
= false;
2188 if (HAS_PCH_SPLIT(dev
))
2189 status
= ironlake_dp_detect(intel_dp
);
2191 status
= g4x_dp_detect(intel_dp
);
2193 DRM_DEBUG_KMS("DPCD: %02hx%02hx%02hx%02hx%02hx%02hx%02hx%02hx\n",
2194 intel_dp
->dpcd
[0], intel_dp
->dpcd
[1], intel_dp
->dpcd
[2],
2195 intel_dp
->dpcd
[3], intel_dp
->dpcd
[4], intel_dp
->dpcd
[5],
2196 intel_dp
->dpcd
[6], intel_dp
->dpcd
[7]);
2198 if (status
!= connector_status_connected
)
2201 intel_dp_probe_oui(intel_dp
);
2203 if (intel_dp
->force_audio
!= HDMI_AUDIO_AUTO
) {
2204 intel_dp
->has_audio
= (intel_dp
->force_audio
== HDMI_AUDIO_ON
);
2206 edid
= intel_dp_get_edid(connector
, &intel_dp
->adapter
);
2208 intel_dp
->has_audio
= drm_detect_monitor_audio(edid
);
2209 connector
->display_info
.raw_edid
= NULL
;
2214 return connector_status_connected
;
static int intel_dp_get_modes(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int ret;

	/* We should parse the EDID data and find out if it has an audio sink
	 */

	ret = intel_dp_get_edid_modes(connector, &intel_dp->adapter);
	if (ret) {
		if (is_edp(intel_dp) && !intel_dp->panel_fixed_mode) {
			struct drm_display_mode *newmode;
			list_for_each_entry(newmode, &connector->probed_modes,
					    head) {
				if ((newmode->type & DRM_MODE_TYPE_PREFERRED)) {
					intel_dp->panel_fixed_mode =
						drm_mode_duplicate(dev, newmode);
					break;
				}
			}
		}
		return ret;
	}

	/* if eDP has no EDID, try to use fixed panel mode from VBT */
	if (is_edp(intel_dp)) {
		/* initialize panel mode from VBT if available for eDP */
		if (intel_dp->panel_fixed_mode == NULL &&
		    dev_priv->lfp_lvds_vbt_mode != NULL) {
			intel_dp->panel_fixed_mode =
				drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
			if (intel_dp->panel_fixed_mode) {
				intel_dp->panel_fixed_mode->type |=
					DRM_MODE_TYPE_PREFERRED;
			}
		}
		if (intel_dp->panel_fixed_mode) {
			struct drm_display_mode *mode;
			mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}
	return 0;
}
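/**
 * intel_dp_detect_audio - check whether the attached sink supports audio
 * @connector: DRM connector
 *
 * Reads the EDID and returns true if the sink advertises audio support.
 */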
static bool
intel_dp_detect_audio(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct edid *edid;
	bool has_audio = false;

	edid = intel_dp_get_edid(connector, &intel_dp->adapter);
	if (edid) {
		has_audio = drm_detect_monitor_audio(edid);

		connector->display_info.raw_edid = NULL;
		kfree(edid);
	}

	return has_audio;
}
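/**
 * intel_dp_set_property - handle the force_audio and broadcast_rgb properties
 * @connector: DRM connector
 * @property: property being changed
 * @val: new value
 *
 * If the change affects the active configuration, the attached CRTC is
 * modeset again so the new setting takes effect.
 */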
static int
intel_dp_set_property(struct drm_connector *connector,
		      struct drm_property *property,
		      uint64_t val)
{
	struct drm_i915_private *dev_priv = connector->dev->dev_private;
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int ret;

	ret = drm_connector_property_set_value(connector, property, val);
	if (ret)
		return ret;

	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		if (i == HDMI_AUDIO_AUTO)
			has_audio = intel_dp_detect_audio(connector);
		else
			has_audio = (i == HDMI_AUDIO_ON);

		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		if (val == !!intel_dp->color_range)
			return 0;

		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
		goto done;
	}

	return -EINVAL;

done:
	if (intel_dp->base.base.crtc) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;
		drm_crtc_helper_set_mode(crtc, &crtc->mode,
					 crtc->x, crtc->y,
					 crtc->fb);
	}

	return 0;
}
static void
intel_dp_destroy(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;

	if (intel_dpd_is_edp(dev))
		intel_panel_destroy_backlight(dev);

	drm_sysfs_connector_remove(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}
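/**
 * intel_dp_encoder_destroy - tear down the DP encoder
 * @encoder: DRM encoder
 *
 * Removes the AUX i2c adapter and, for eDP, drops the cached EDID and any
 * pending panel VDD work before freeing the intel_dp structure.
 */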
static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	i2c_del_adapter(&intel_dp->adapter);
	drm_encoder_cleanup(encoder);
	if (is_edp(intel_dp)) {
		kfree(intel_dp->edid);
		cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
		ironlake_panel_vdd_off_sync(intel_dp);
	}
	kfree(intel_dp);
}
static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
	.dpms = intel_dp_dpms,
	.mode_fixup = intel_dp_mode_fixup,
	.prepare = intel_dp_prepare,
	.mode_set = intel_dp_mode_set,
	.commit = intel_dp_commit,
};

static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.dpms = drm_helper_connector_dpms,
	.detect = intel_dp_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = intel_dp_set_property,
	.destroy = intel_dp_destroy,
};

static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
	.best_encoder = intel_best_encoder,
};

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.destroy = intel_dp_encoder_destroy,
};
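/**
 * intel_dp_hot_plug - hotplug handler for DP ports
 * @intel_encoder: encoder that received the hotplug event
 *
 * Re-checks the link status when a hotplug event comes in.
 */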
static void
intel_dp_hot_plug(struct intel_encoder *intel_encoder)
{
	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);

	intel_dp_check_link_status(intel_dp);
}
/* Return which DP Port should be selected for Transcoder DP control */
int
intel_trans_dp_port_sel(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct intel_encoder *encoder;

	for_each_encoder_on_crtc(dev, crtc, encoder) {
		struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);

		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
		    intel_dp->base.type == INTEL_OUTPUT_EDP)
			return intel_dp->output_reg;
	}

	return -1;
}
/* check the VBT to see whether the eDP is on DP-D port */
bool intel_dpd_is_edp(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct child_device_config *p_child;
	int i;

	if (!dev_priv->child_dev_num)
		return false;

	for (i = 0; i < dev_priv->child_dev_num; i++) {
		p_child = dev_priv->child_dev + i;

		if (p_child->dvo_port == PORT_IDPD &&
		    p_child->device_type == DEVICE_TYPE_eDP)
			return true;
	}
	return false;
}
static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
{
	intel_attach_force_audio_property(connector);
	intel_attach_broadcast_rgb_property(connector);
}
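/**
 * intel_dp_init - set up a DP or eDP output
 * @dev: DRM device
 * @output_reg: DP output register for this port
 *
 * Allocates and registers the encoder and connector, sets up the DDC/AUX
 * bus, and for eDP reads the panel power sequencing delays and caches the
 * DPCD and EDID.
 */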
void
intel_dp_init(struct drm_device *dev, int output_reg)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_connector *connector;
	struct intel_dp *intel_dp;
	struct intel_encoder *intel_encoder;
	struct intel_connector *intel_connector;
	const char *name = NULL;
	int type;

	intel_dp = kzalloc(sizeof(struct intel_dp), GFP_KERNEL);
	if (!intel_dp)
		return;

	intel_dp->output_reg = output_reg;
	intel_dp->dpms_mode = -1;

	intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
	if (!intel_connector) {
		kfree(intel_dp);
		return;
	}
	intel_encoder = &intel_dp->base;

	if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
		if (intel_dpd_is_edp(dev))
			intel_dp->is_pch_edp = true;

	if (output_reg == DP_A || is_pch_edp(intel_dp)) {
		type = DRM_MODE_CONNECTOR_eDP;
		intel_encoder->type = INTEL_OUTPUT_EDP;
	} else {
		type = DRM_MODE_CONNECTOR_DisplayPort;
		intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	}

	connector = &intel_connector->base;
	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;

	if (output_reg == DP_B || output_reg == PCH_DP_B)
		intel_encoder->clone_mask = (1 << INTEL_DP_B_CLONE_BIT);
	else if (output_reg == DP_C || output_reg == PCH_DP_C)
		intel_encoder->clone_mask = (1 << INTEL_DP_C_CLONE_BIT);
	else if (output_reg == DP_D || output_reg == PCH_DP_D)
		intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);

	if (is_edp(intel_dp)) {
		intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
		INIT_DELAYED_WORK(&intel_dp->panel_vdd_work,
				  ironlake_panel_vdd_work);
	}

	intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);

	connector->interlace_allowed = true;
	connector->doublescan_allowed = 0;

	drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
			 DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs);

	intel_connector_attach_encoder(intel_connector, intel_encoder);
	drm_sysfs_connector_add(connector);
	/* Set up the DDC bus. */
	switch (output_reg) {
	case DP_A:
		name = "DPDDC-A";
		break;
	case DP_B:
	case PCH_DP_B:
		dev_priv->hotplug_supported_mask |=
			DPB_HOTPLUG_INT_STATUS;
		name = "DPDDC-B";
		break;
	case DP_C:
	case PCH_DP_C:
		dev_priv->hotplug_supported_mask |=
			DPC_HOTPLUG_INT_STATUS;
		name = "DPDDC-C";
		break;
	case DP_D:
	case PCH_DP_D:
		dev_priv->hotplug_supported_mask |=
			DPD_HOTPLUG_INT_STATUS;
		name = "DPDDC-D";
		break;
	}
	/* Cache some DPCD data in the eDP case */
	if (is_edp(intel_dp)) {
		struct edp_power_seq cur, vbt;
		u32 pp_on, pp_off, pp_div;

		pp_on = I915_READ(PCH_PP_ON_DELAYS);
		pp_off = I915_READ(PCH_PP_OFF_DELAYS);
		pp_div = I915_READ(PCH_PP_DIVISOR);

		if (!pp_on || !pp_off || !pp_div) {
			DRM_INFO("bad panel power sequencing delays, disabling panel\n");
			intel_dp_encoder_destroy(&intel_dp->base.base);
			intel_dp_destroy(&intel_connector->base);
			return;
		}

		/* Pull timing values out of registers */
		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
			PANEL_POWER_UP_DELAY_SHIFT;

		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
			PANEL_LIGHT_ON_DELAY_SHIFT;

		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
			PANEL_LIGHT_OFF_DELAY_SHIFT;

		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
			PANEL_POWER_DOWN_DELAY_SHIFT;

		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
			       PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;

		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

		vbt = dev_priv->edp.pps;

		DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);

		/* Use the larger of the register and VBT values for each
		 * delay; get_delay() rounds the result up to milliseconds. */
#define get_delay(field)	((max(cur.field, vbt.field) + 9) / 10)

		intel_dp->panel_power_up_delay = get_delay(t1_t3);
		intel_dp->backlight_on_delay = get_delay(t8);
		intel_dp->backlight_off_delay = get_delay(t9);
		intel_dp->panel_power_down_delay = get_delay(t10);
		intel_dp->panel_power_cycle_delay = get_delay(t11_t12);

		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
			      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
			      intel_dp->panel_power_cycle_delay);

		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
			      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
	}
	intel_dp_i2c_init(intel_dp, intel_connector, name);

	if (is_edp(intel_dp)) {
		bool ret;
		struct edid *edid;

		/* Read the DPCD with the panel VDD forced on and cache it. */
		ironlake_edp_panel_vdd_on(intel_dp);
		ret = intel_dp_get_dpcd(intel_dp);
		ironlake_edp_panel_vdd_off(intel_dp, false);

		if (ret) {
			if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
				dev_priv->no_aux_handshake =
					intel_dp->dpcd[DP_MAX_DOWNSPREAD] &
					DP_NO_AUX_HANDSHAKE_LINK_TRAINING;
		} else {
			/* if this fails, presume the device is a ghost */
			DRM_INFO("failed to retrieve link info, disabling eDP\n");
			intel_dp_encoder_destroy(&intel_dp->base.base);
			intel_dp_destroy(&intel_connector->base);
			return;
		}

		/* Cache the panel EDID while VDD is up as well. */
		ironlake_edp_panel_vdd_on(intel_dp);
		edid = drm_get_edid(connector, &intel_dp->adapter);
		if (edid) {
			drm_mode_connector_update_edid_property(connector,
								edid);
			intel_dp->edid_mode_count =
				drm_add_edid_modes(connector, edid);
			drm_edid_to_eld(connector, edid);
			intel_dp->edid = edid;
		}
		ironlake_edp_panel_vdd_off(intel_dp, false);
	}

	intel_encoder->hot_plug = intel_dp_hot_plug;

	if (is_edp(intel_dp)) {
		dev_priv->int_edp_connector = connector;
		intel_panel_setup_backlight(dev);
	}

	intel_dp_add_properties(intel_dp, connector);

	/* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written
	 * 0xd.  Failure to do so will result in spurious interrupts being
	 * generated on the port when a cable is not attached.
	 */
	if (IS_G4X(dev) && !IS_GM45(dev)) {
		u32 temp = I915_READ(PEG_BAND_GAP_DATA);
		I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
	}
}