/*
 * Copyright © 2008 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Keith Packard <keithp@keithp.com>
 */

#include <linux/i2c.h>
#include <linux/slab.h>
#include <linux/export.h>
#include "drm_crtc_helper.h"
#include "intel_drv.h"
#include "drm_dp_helper.h"

#define DP_RECEIVER_CAP_SIZE		0xf
#define DP_LINK_STATUS_SIZE		6
#define DP_LINK_CHECK_TIMEOUT		(10 * 1000)

#define DP_LINK_CONFIGURATION_SIZE	9
struct intel_dp {
	struct intel_encoder base;
	uint32_t output_reg;
	uint32_t DP;
	uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
	bool has_audio;
	int force_audio;
	uint32_t color_range;
	int dpms_mode;
	uint8_t link_bw;
	uint8_t lane_count;
	uint8_t dpcd[DP_RECEIVER_CAP_SIZE];
	struct i2c_adapter adapter;
	struct i2c_algo_dp_aux_data algo;
	bool is_pch_edp;
	uint8_t train_set[4];
	bool want_panel_vdd;
	int panel_power_up_delay;
	int panel_power_down_delay;
	int panel_power_cycle_delay;
	int backlight_on_delay;
	int backlight_off_delay;
	struct drm_display_mode *panel_fixed_mode;  /* for eDP */
	struct delayed_work panel_vdd_work;
};
/**
 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
 * @intel_dp: DP struct
 *
 * If a CPU or PCH DP output is attached to an eDP panel, this function
 * will return true, and false otherwise.
 */
static bool is_edp(struct intel_dp *intel_dp)
{
	return intel_dp->base.type == INTEL_OUTPUT_EDP;
}

/**
 * is_pch_edp - is the port on the PCH and attached to an eDP panel?
 * @intel_dp: DP struct
 *
 * Returns true if the given DP struct corresponds to a PCH DP port attached
 * to an eDP panel, false otherwise.  Helpful for determining whether we
 * may need FDI resources for a given DP output or not.
 */
static bool is_pch_edp(struct intel_dp *intel_dp)
{
	return intel_dp->is_pch_edp;
}

/**
 * is_cpu_edp - is the port on the CPU and attached to an eDP panel?
 * @intel_dp: DP struct
 *
 * Returns true if the given DP struct corresponds to a CPU eDP port.
 */
static bool is_cpu_edp(struct intel_dp *intel_dp)
{
	return is_edp(intel_dp) && !is_pch_edp(intel_dp);
}

static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
{
	return container_of(encoder, struct intel_dp, base.base);
}

static struct intel_dp *intel_attached_dp(struct drm_connector *connector)
{
	return container_of(intel_attached_encoder(connector),
			    struct intel_dp, base);
}

/**
 * intel_encoder_is_pch_edp - is the given encoder a PCH attached eDP?
 * @encoder: DRM encoder
 *
 * Return true if @encoder corresponds to a PCH attached eDP panel.  Needed
 * by intel_display.c.
 */
bool intel_encoder_is_pch_edp(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp;

	intel_dp = enc_to_intel_dp(encoder);

	return is_pch_edp(intel_dp);
}

static void intel_dp_start_link_train(struct intel_dp *intel_dp);
static void intel_dp_complete_link_train(struct intel_dp *intel_dp);
static void intel_dp_link_down(struct intel_dp *intel_dp);
void
intel_edp_link_config(struct intel_encoder *intel_encoder,
		      int *lane_num, int *link_bw)
{
	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);

	*lane_num = intel_dp->lane_count;
	if (intel_dp->link_bw == DP_LINK_BW_1_62)
		*link_bw = 162000;
	else if (intel_dp->link_bw == DP_LINK_BW_2_7)
		*link_bw = 270000;
}

static int
intel_dp_max_lane_count(struct intel_dp *intel_dp)
{
	int max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;

	switch (max_lane_count) {
	case 1: case 2: case 4:
		break;
	default:
		max_lane_count = 4;
	}
	return max_lane_count;
}

static int
intel_dp_max_link_bw(struct intel_dp *intel_dp)
{
	int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];

	switch (max_link_bw) {
	case DP_LINK_BW_1_62:
	case DP_LINK_BW_2_7:
		break;
	default:
		max_link_bw = DP_LINK_BW_1_62;
		break;
	}
	return max_link_bw;
}

static int
intel_dp_link_clock(uint8_t link_bw)
{
	if (link_bw == DP_LINK_BW_2_7)
		return 270000;
	else
		return 162000;
}
/*
 * The units on the numbers in the next two are... bizarre.  Examples will
 * make it clearer; this one parallels an example in the eDP spec.
 *
 * intel_dp_max_data_rate for one lane of 2.7GHz evaluates as:
 *
 *     270000 * 1 * 8 / 10 == 216000
 *
 * The actual data capacity of that configuration is 2.16Gbit/s, so the
 * units are decakilobits.  ->clock in a drm_display_mode is in kilohertz -
 * or equivalently, kilopixels per second - so for 1680x1050R it'd be
 * 119000.  At 18bpp that's 2142000 kilobits per second.
 *
 * Thus the strange-looking division by 10 in intel_dp_link_required, to
 * get the result in decakilobits instead of kilobits.
 */

static int
intel_dp_link_required(int pixel_clock, int bpp)
{
	return (pixel_clock * bpp + 9) / 10;
}
static int
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
	return (max_link_clock * max_lanes * 8) / 10;
}
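
/*
 * Editorial worked example (not from the original source), finishing the
 * arithmetic in the comment above: 1680x1050R at 18bpp needs
 * intel_dp_link_required(119000, 18) == (119000 * 18 + 9) / 10 == 214200
 * decakilobits, which fits within the 216000 provided by a single 2.7GHz
 * lane per intel_dp_max_data_rate(270000, 1).
 */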
static int
intel_dp_mode_valid(struct drm_connector *connector,
		    struct drm_display_mode *mode)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
	int max_lanes = intel_dp_max_lane_count(intel_dp);
	int max_rate, mode_rate;

	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
		if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
			return MODE_PANEL;

		if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
			return MODE_PANEL;
	}

	mode_rate = intel_dp_link_required(mode->clock, 24);
	max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);

	if (mode_rate > max_rate) {
		mode_rate = intel_dp_link_required(mode->clock, 18);
		if (mode_rate > max_rate)
			return MODE_CLOCK_HIGH;
		else
			mode->private_flags |= INTEL_MODE_DP_FORCE_6BPC;
	}

	if (mode->clock < 10000)
		return MODE_CLOCK_LOW;

	return MODE_OK;
}
static uint32_t
pack_aux(uint8_t *src, int src_bytes)
{
	int i;
	uint32_t v = 0;

	if (src_bytes > 4)
		src_bytes = 4;
	for (i = 0; i < src_bytes; i++)
		v |= ((uint32_t) src[i]) << ((3-i) * 8);
	return v;
}

static void
unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
{
	int i;

	if (dst_bytes > 4)
		dst_bytes = 4;
	for (i = 0; i < dst_bytes; i++)
		dst[i] = src >> ((3-i) * 8);
}
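
/*
 * Editorial note (not from the original source): the AUX data registers hold
 * the message most-significant byte first, so pack_aux() of the four bytes
 * {0x11, 0x22, 0x33, 0x44} yields 0x11223344, and unpack_aux() reverses it.
 */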
/* hrawclock is 1/4 the FSB frequency */
static int
intel_hrawclk(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t clkcfg;

	clkcfg = I915_READ(CLKCFG);
	switch (clkcfg & CLKCFG_FSB_MASK) {
	case CLKCFG_FSB_1067:
		return 266;
	case CLKCFG_FSB_1333:
		return 333;
	/* these two are just a guess; one of them might be right */
	case CLKCFG_FSB_1600:
	case CLKCFG_FSB_1600_ALT:
		return 400;
	default:
		return 133;
	}
}
static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0;
}

static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0;
}

static void
intel_dp_check_edp(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (!is_edp(intel_dp))
		return;
	if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) {
		WARN(1, "eDP powered off while attempting aux channel communication.\n");
		DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n",
			      I915_READ(PCH_PP_STATUS),
			      I915_READ(PCH_PP_CONTROL));
	}
}
static int
intel_dp_aux_ch(struct intel_dp *intel_dp,
		uint8_t *send, int send_bytes,
		uint8_t *recv, int recv_size)
{
	uint32_t output_reg = intel_dp->output_reg;
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t ch_ctl = output_reg + 0x10;
	uint32_t ch_data = ch_ctl + 4;
	int i;
	int recv_bytes;
	uint32_t status;
	uint32_t aux_clock_divider;
	int try, precharge;

	intel_dp_check_edp(intel_dp);
	/* The clock divider is based off the hrawclk,
	 * and would like to run at 2MHz. So, take the
	 * hrawclk value and divide by 2 and use that
	 *
	 * Note that PCH attached eDP panels should use a 125MHz input
	 * clock divider.
	 */
	if (is_cpu_edp(intel_dp)) {
		if (IS_GEN6(dev) || IS_GEN7(dev))
			aux_clock_divider = 200; /* SNB & IVB eDP input clock at 400Mhz */
		else
			aux_clock_divider = 225; /* eDP input clock at 450Mhz */
	} else if (HAS_PCH_SPLIT(dev))
		aux_clock_divider = 62; /* IRL input clock fixed at 125Mhz */
	else
		aux_clock_divider = intel_hrawclk(dev) / 2;
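
	/*
	 * Illustrative arithmetic (an editorial note, not from the original
	 * source): each divider above is the input clock in MHz divided by
	 * two, so the resulting 2x bit clock lands near 2MHz in every case
	 * -- 400/200, 450/225, 125/62 (~2.02) -- which matches the roughly
	 * 1Mbps AUX channel rate the hardware expects.
	 */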
	if (IS_GEN6(dev))
		precharge = 3;
	else
		precharge = 5;

	/* Try to wait for any previous AUX channel activity */
	for (try = 0; try < 3; try++) {
		status = I915_READ(ch_ctl);
		if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
			break;
		msleep(1);
	}

	if (try == 3) {
		WARN(1, "dp_aux_ch not started status 0x%08x\n",
		     I915_READ(ch_ctl));
		return -EBUSY;
	}

	/* Must try at least 3 times according to DP spec */
	for (try = 0; try < 5; try++) {
		/* Load the send data into the aux channel data registers */
		for (i = 0; i < send_bytes; i += 4)
			I915_WRITE(ch_data + i,
				   pack_aux(send + i, send_bytes - i));

		/* Send the command and wait for it to complete */
		I915_WRITE(ch_ctl,
			   DP_AUX_CH_CTL_SEND_BUSY |
			   DP_AUX_CH_CTL_TIME_OUT_400us |
			   (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
			   (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
			   (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
			   DP_AUX_CH_CTL_DONE |
			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
			   DP_AUX_CH_CTL_RECEIVE_ERROR);
		for (;;) {
			status = I915_READ(ch_ctl);
			if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
				break;
			udelay(100);
		}

		/* Clear done status and any errors */
		I915_WRITE(ch_ctl,
			   status |
			   DP_AUX_CH_CTL_DONE |
			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
			   DP_AUX_CH_CTL_RECEIVE_ERROR);
		if (status & DP_AUX_CH_CTL_DONE)
			break;
	}

	if ((status & DP_AUX_CH_CTL_DONE) == 0) {
		DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
		return -EBUSY;
	}

	/* Check for timeout or receive error.
	 * Timeouts occur when the sink is not connected
	 */
	if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
		DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
		return -EIO;
	}

	/* Timeouts occur when the device isn't connected, so they're
	 * "normal" -- don't fill the kernel log with these */
	if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
		DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
		return -ETIMEDOUT;
	}

	/* Unload any bytes sent back from the other side */
	recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
		      DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	for (i = 0; i < recv_bytes; i += 4)
		unpack_aux(I915_READ(ch_data + i),
			   recv + i, recv_bytes - i);

	return recv_bytes;
}
/* Write data to the aux channel in native mode */
static int
intel_dp_aux_native_write(struct intel_dp *intel_dp,
			  uint16_t address, uint8_t *send, int send_bytes)
{
	int ret;
	uint8_t msg[20];
	int msg_bytes;
	uint8_t ack;

	intel_dp_check_edp(intel_dp);
	msg[0] = AUX_NATIVE_WRITE << 4;
	msg[1] = address >> 8;
	msg[2] = address & 0xff;
	msg[3] = send_bytes - 1;
	memcpy(&msg[4], send, send_bytes);
	msg_bytes = send_bytes + 4;
	for (;;) {
		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1);
		if (ret < 0)
			return ret;
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			break;
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(100);
		else
			return -EIO;
	}
	return send_bytes;
}
/* Write a single byte to the aux channel in native mode */
static int
intel_dp_aux_native_write_1(struct intel_dp *intel_dp,
			    uint16_t address, uint8_t byte)
{
	return intel_dp_aux_native_write(intel_dp, address, &byte, 1);
}
/* read bytes from a native aux channel */
static int
intel_dp_aux_native_read(struct intel_dp *intel_dp,
			 uint16_t address, uint8_t *recv, int recv_bytes)
{
	uint8_t msg[4];
	int msg_bytes;
	uint8_t reply[20];
	int reply_bytes;
	uint8_t ack;
	int ret;

	intel_dp_check_edp(intel_dp);
	msg[0] = AUX_NATIVE_READ << 4;
	msg[1] = address >> 8;
	msg[2] = address & 0xff;
	msg[3] = recv_bytes - 1;

	msg_bytes = 4;
	reply_bytes = recv_bytes + 1;

	for (;;) {
		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes,
				      reply, reply_bytes);
		if (ret < 0)
			return ret;
		ack = reply[0];
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) {
			memcpy(recv, reply + 1, ret - 1);
			return ret - 1;
		}
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(100);
		else
			return -EIO;
	}
}
static int
intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
		    uint8_t write_byte, uint8_t *read_byte)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
	struct intel_dp *intel_dp = container_of(adapter,
						 struct intel_dp,
						 adapter);
	uint16_t address = algo_data->address;
	uint8_t msg[5];
	uint8_t reply[2];
	unsigned retry;
	int msg_bytes;
	int reply_bytes;
	int ret;

	intel_dp_check_edp(intel_dp);
	/* Set up the command byte */
	if (mode & MODE_I2C_READ)
		msg[0] = AUX_I2C_READ << 4;
	else
		msg[0] = AUX_I2C_WRITE << 4;

	if (!(mode & MODE_I2C_STOP))
		msg[0] |= AUX_I2C_MOT << 4;

	msg[1] = address >> 8;
	msg[2] = address;

	switch (mode) {
	case MODE_I2C_WRITE:
		msg[3] = 0;
		msg[4] = write_byte;
		msg_bytes = 5;
		reply_bytes = 1;
		break;
	case MODE_I2C_READ:
		msg[3] = 0;
		msg_bytes = 4;
		reply_bytes = 2;
		break;
	default:
		msg_bytes = 3;
		reply_bytes = 1;
		break;
	}

	for (retry = 0; retry < 5; retry++) {
		ret = intel_dp_aux_ch(intel_dp,
				      msg, msg_bytes,
				      reply, reply_bytes);
		if (ret < 0) {
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
			return ret;
		}

		switch (reply[0] & AUX_NATIVE_REPLY_MASK) {
		case AUX_NATIVE_REPLY_ACK:
			/* I2C-over-AUX Reply field is only valid
			 * when paired with AUX ACK.
			 */
			break;
		case AUX_NATIVE_REPLY_NACK:
			DRM_DEBUG_KMS("aux_ch native nack\n");
			return -EREMOTEIO;
		case AUX_NATIVE_REPLY_DEFER:
			udelay(100);
			continue;
		default:
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
				  reply[0]);
			return -EREMOTEIO;
		}

		switch (reply[0] & AUX_I2C_REPLY_MASK) {
		case AUX_I2C_REPLY_ACK:
			if (mode == MODE_I2C_READ) {
				*read_byte = reply[1];
			}
			return reply_bytes - 1;
		case AUX_I2C_REPLY_NACK:
			DRM_DEBUG_KMS("aux_i2c nack\n");
			return -EREMOTEIO;
		case AUX_I2C_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_i2c defer\n");
			udelay(100);
			break;
		default:
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
			return -EREMOTEIO;
		}
	}

	DRM_ERROR("too many retries, giving up\n");
	return -EREMOTEIO;
}
static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp);
static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync);
static int
intel_dp_i2c_init(struct intel_dp *intel_dp,
		  struct intel_connector *intel_connector, const char *name)
{
	int ret;

	DRM_DEBUG_KMS("i2c_init %s\n", name);
	intel_dp->algo.running = false;
	intel_dp->algo.address = 0;
	intel_dp->algo.aux_ch = intel_dp_i2c_aux_ch;

	memset(&intel_dp->adapter, '\0', sizeof(intel_dp->adapter));
	intel_dp->adapter.owner = THIS_MODULE;
	intel_dp->adapter.class = I2C_CLASS_DDC;
	strncpy(intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
	intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
	intel_dp->adapter.algo_data = &intel_dp->algo;
	intel_dp->adapter.dev.parent = &intel_connector->base.kdev;

	ironlake_edp_panel_vdd_on(intel_dp);
	ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
	ironlake_edp_panel_vdd_off(intel_dp, false);
	return ret;
}
static bool
intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = encoder->dev;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	int lane_count, clock;
	int max_lane_count = intel_dp_max_lane_count(intel_dp);
	int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0;
	int bpp = mode->private_flags & INTEL_MODE_DP_FORCE_6BPC ? 18 : 24;
	static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };

	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
		intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
		intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN,
					mode, adjusted_mode);
		/*
		 * the mode->clock is used to calculate the Data&Link M/N
		 * of the pipe. For the eDP the fixed clock should be used.
		 */
		mode->clock = intel_dp->panel_fixed_mode->clock;
	}

	for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
		for (clock = 0; clock <= max_clock; clock++) {
			int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count);

			if (intel_dp_link_required(mode->clock, bpp)
					<= link_avail) {
				intel_dp->link_bw = bws[clock];
				intel_dp->lane_count = lane_count;
				adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
				DRM_DEBUG_KMS("Display port link bw %02x lane "
						"count %d clock %d\n",
				       intel_dp->link_bw, intel_dp->lane_count,
				       adjusted_mode->clock);
				return true;
			}
		}
	}

	return false;
}
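
/*
 * Editorial worked example (not from the original source): a 148500 kHz mode
 * at 24bpp needs intel_dp_link_required(148500, 24) == 356400.  One lane
 * offers 129600 (1.62GHz) or 216000 (2.7GHz), both too small; two lanes at
 * 2.7GHz offer 432000, so the search above settles on link_bw DP_LINK_BW_2_7
 * and lane_count 2, with adjusted_mode->clock set to 270000.
 */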
struct intel_dp_m_n {
	uint32_t	tu;
	uint32_t	gmch_m;
	uint32_t	gmch_n;
	uint32_t	link_m;
	uint32_t	link_n;
};

static void
intel_reduce_ratio(uint32_t *num, uint32_t *den)
{
	while (*num > 0xffffff || *den > 0xffffff) {
		*num >>= 1;
		*den >>= 1;
	}
}

static void
intel_dp_compute_m_n(int bpp,
		     int nlanes,
		     int pixel_clock,
		     int link_clock,
		     struct intel_dp_m_n *m_n)
{
	m_n->tu = 64;
	m_n->gmch_m = (pixel_clock * bpp) >> 3;
	m_n->gmch_n = link_clock * nlanes;
	intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
	m_n->link_m = pixel_clock;
	m_n->link_n = link_clock;
	intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
}
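
/*
 * Editorial worked example (not from the original source): for a 148500 kHz
 * pixel clock on two 2.7GHz lanes at 24bpp, intel_dp_compute_m_n(24, 2,
 * 148500, 270000, &m_n) gives gmch_m/gmch_n = 445500/540000 (payload bytes
 * vs. link bytes per unit time) and link_m/link_n = 148500/270000 (pixel
 * clock vs. link symbol clock); both fit in 24 bits, so no reduction occurs.
 */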
void
intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
		 struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct drm_mode_config *mode_config = &dev->mode_config;
	struct drm_encoder *encoder;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	int lane_count = 4;
	struct intel_dp_m_n m_n;
	int pipe = intel_crtc->pipe;

	/*
	 * Find the lane count in the intel_encoder private
	 */
	list_for_each_entry(encoder, &mode_config->encoder_list, head) {
		struct intel_dp *intel_dp;

		if (encoder->crtc != crtc)
			continue;

		intel_dp = enc_to_intel_dp(encoder);
		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
		    intel_dp->base.type == INTEL_OUTPUT_EDP) {
			lane_count = intel_dp->lane_count;
			break;
		}
	}

	/*
	 * Compute the GMCH and Link ratios. The '3' here is
	 * the number of bytes_per_pixel post-LUT, which we always
	 * set up for 8-bits of R/G/B, or 3 bytes total.
	 */
	intel_dp_compute_m_n(intel_crtc->bpp, lane_count,
			     mode->clock, adjusted_mode->clock, &m_n);

	if (HAS_PCH_SPLIT(dev)) {
		I915_WRITE(TRANSDATA_M1(pipe),
			   ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
			   m_n.gmch_m);
		I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n);
		I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m);
		I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n);
	} else {
		I915_WRITE(PIPE_GMCH_DATA_M(pipe),
			   ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
			   m_n.gmch_m);
		I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
		I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
		I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
	}
}
static void ironlake_edp_pll_on(struct drm_encoder *encoder);
static void ironlake_edp_pll_off(struct drm_encoder *encoder);
static void
intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_crtc *crtc = intel_dp->base.base.crtc;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);

	/* Turn on the eDP PLL if needed */
	if (is_edp(intel_dp)) {
		if (!is_pch_edp(intel_dp))
			ironlake_edp_pll_on(encoder);
		else
			ironlake_edp_pll_off(encoder);
	}

	/*
	 * There are four kinds of DP registers:
	 *
	 *	IBX PCH
	 *	SNB CPU
	 *	IVB CPU
	 *	CPT PCH
	 *
	 * IBX PCH and CPU are the same for almost everything,
	 * except that the CPU DP PLL is configured in this
	 * register
	 *
	 * CPT PCH is quite different, having many bits moved
	 * to the TRANS_DP_CTL register instead. That
	 * configuration happens (oddly) in ironlake_pch_enable
	 */

	/* Preserve the BIOS-computed detected bit. This is
	 * supposed to be read-only.
	 */
	intel_dp->DP = I915_READ(intel_dp->output_reg) & DP_DETECTED;
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;

	/* Handle DP bits in common between all three register formats */

	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;

	switch (intel_dp->lane_count) {
	case 1:
		intel_dp->DP |= DP_PORT_WIDTH_1;
		break;
	case 2:
		intel_dp->DP |= DP_PORT_WIDTH_2;
		break;
	case 4:
		intel_dp->DP |= DP_PORT_WIDTH_4;
		break;
	}
	if (intel_dp->has_audio) {
		DRM_DEBUG_DRIVER("Enabling DP audio on pipe %c\n",
				 pipe_name(intel_crtc->pipe));
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
		intel_write_eld(encoder, adjusted_mode);
	}
	memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
	intel_dp->link_configuration[0] = intel_dp->link_bw;
	intel_dp->link_configuration[1] = intel_dp->lane_count;
	intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B;
	/*
	 * Check for DPCD version > 1.1 and enhanced framing support
	 */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
		intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
	}

	/* Split out the IBX/CPU vs CPT settings */

	if (is_cpu_edp(intel_dp) && IS_GEN7(dev)) {
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		intel_dp->DP |= intel_crtc->pipe << 29;

		/* don't miss out required setting for eDP */
		intel_dp->DP |= DP_PLL_ENABLE;
		if (adjusted_mode->clock < 200000)
			intel_dp->DP |= DP_PLL_FREQ_160MHZ;
		else
			intel_dp->DP |= DP_PLL_FREQ_270MHZ;
	} else if (!HAS_PCH_CPT(dev) || is_cpu_edp(intel_dp)) {
		intel_dp->DP |= intel_dp->color_range;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (intel_crtc->pipe == 1)
			intel_dp->DP |= DP_PIPEB_SELECT;

		if (is_cpu_edp(intel_dp)) {
			/* don't miss out required setting for eDP */
			intel_dp->DP |= DP_PLL_ENABLE;
			if (adjusted_mode->clock < 200000)
				intel_dp->DP |= DP_PLL_FREQ_160MHZ;
			else
				intel_dp->DP |= DP_PLL_FREQ_270MHZ;
		}
	} else {
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
	}
}
#define IDLE_ON_MASK		(PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK)
#define IDLE_ON_VALUE		(PP_ON | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_ON_IDLE)

#define IDLE_OFF_MASK		(PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK)
#define IDLE_OFF_VALUE		(0     | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE)

#define IDLE_CYCLE_MASK		(PP_ON | 0 | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK)
#define IDLE_CYCLE_VALUE	(0     | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE)
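
/*
 * Editorial note (not from the original source): the three mask/value pairs
 * above describe the PCH panel power sequencer states that
 * ironlake_wait_panel_status() below polls for -- panel fully on and idle,
 * fully off and idle, and power-cycle delay complete, respectively.
 */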
static void ironlake_wait_panel_status(struct intel_dp *intel_dp,
				       u32 mask,
				       u32 value)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
		      mask, value,
		      I915_READ(PCH_PP_STATUS),
		      I915_READ(PCH_PP_CONTROL));

	if (_wait_for((I915_READ(PCH_PP_STATUS) & mask) == value, 5000, 10)) {
		DRM_ERROR("Panel status timeout: status %08x control %08x\n",
			  I915_READ(PCH_PP_STATUS),
			  I915_READ(PCH_PP_CONTROL));
	}
}
static void ironlake_wait_panel_on(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power on\n");
	ironlake_wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE);
}

static void ironlake_wait_panel_off(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power off time\n");
	ironlake_wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE);
}

static void ironlake_wait_panel_power_cycle(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power cycle\n");
	ironlake_wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE);
}
/* Read the current pp_control value, unlocking the register if it
 * is locked
 */
static u32 ironlake_get_pp_control(struct drm_i915_private *dev_priv)
{
	u32 control = I915_READ(PCH_PP_CONTROL);

	control &= ~PANEL_UNLOCK_MASK;
	control |= PANEL_UNLOCK_REGS;
	return control;
}
static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;
	DRM_DEBUG_KMS("Turn eDP VDD on\n");

	WARN(intel_dp->want_panel_vdd,
	     "eDP VDD already requested on\n");

	intel_dp->want_panel_vdd = true;

	if (ironlake_edp_have_panel_vdd(intel_dp)) {
		DRM_DEBUG_KMS("eDP VDD already on\n");
		return;
	}

	if (!ironlake_edp_have_panel_power(intel_dp))
		ironlake_wait_panel_power_cycle(intel_dp);

	pp = ironlake_get_pp_control(dev_priv);
	pp |= EDP_FORCE_VDD;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
	DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
		      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));

	/*
	 * If the panel wasn't on, delay before accessing aux channel
	 */
	if (!ironlake_edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP was not running\n");
		msleep(intel_dp->panel_power_up_delay);
	}
}

static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) {
		pp = ironlake_get_pp_control(dev_priv);
		pp &= ~EDP_FORCE_VDD;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);

		/* Make sure sequencer is idle before allowing subsequent activity */
		DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
			      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));

		msleep(intel_dp->panel_power_down_delay);
	}
}

static void ironlake_panel_vdd_work(struct work_struct *__work)
{
	struct intel_dp *intel_dp = container_of(to_delayed_work(__work),
						 struct intel_dp, panel_vdd_work);
	struct drm_device *dev = intel_dp->base.base.dev;

	mutex_lock(&dev->mode_config.mutex);
	ironlake_panel_vdd_off_sync(intel_dp);
	mutex_unlock(&dev->mode_config.mutex);
}
static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
{
	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd);
	WARN(!intel_dp->want_panel_vdd, "eDP VDD not forced on");

	intel_dp->want_panel_vdd = false;

	if (sync) {
		ironlake_panel_vdd_off_sync(intel_dp);
	} else {
		/*
		 * Queue the timer to fire a long
		 * time from now (relative to the power down delay)
		 * to keep the panel power up across a sequence of operations
		 */
		schedule_delayed_work(&intel_dp->panel_vdd_work,
				      msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5));
	}
}
static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP power on\n");

	if (ironlake_edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP power already on\n");
		return;
	}

	ironlake_wait_panel_power_cycle(intel_dp);

	pp = ironlake_get_pp_control(dev_priv);
	if (IS_GEN5(dev)) {
		/* ILK workaround: disable reset around power sequence */
		pp &= ~PANEL_POWER_RESET;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}

	pp |= POWER_TARGET_ON;
	if (!IS_GEN5(dev))
		pp |= PANEL_POWER_RESET;

	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);

	ironlake_wait_panel_on(intel_dp);

	if (IS_GEN5(dev)) {
		pp |= PANEL_POWER_RESET; /* restore panel reset bit */
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}
}
static void ironlake_edp_panel_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP power off\n");

	WARN(intel_dp->want_panel_vdd, "Cannot turn power off while VDD is on\n");

	pp = ironlake_get_pp_control(dev_priv);
	pp &= ~(POWER_TARGET_ON | EDP_FORCE_VDD | PANEL_POWER_RESET | EDP_BLC_ENABLE);
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);

	ironlake_wait_panel_off(intel_dp);
}

static void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");
	/*
	 * If we enable the backlight right away following a panel power
	 * on, we may see slight flicker as the panel syncs with the eDP
	 * link.  So delay a bit to make sure the image is solid before
	 * allowing it to appear.
	 */
	msleep(intel_dp->backlight_on_delay);
	pp = ironlake_get_pp_control(dev_priv);
	pp |= EDP_BLC_ENABLE;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
}

static void ironlake_edp_backlight_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");
	pp = ironlake_get_pp_control(dev_priv);
	pp &= ~EDP_BLC_ENABLE;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
	msleep(intel_dp->backlight_off_delay);
}
static void ironlake_edp_pll_on(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 dpa_ctl;

	DRM_DEBUG_KMS("\n");
	dpa_ctl = I915_READ(DP_A);
	dpa_ctl |= DP_PLL_ENABLE;
	I915_WRITE(DP_A, dpa_ctl);
	POSTING_READ(DP_A);
	udelay(200);
}

static void ironlake_edp_pll_off(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 dpa_ctl;

	dpa_ctl = I915_READ(DP_A);
	dpa_ctl &= ~DP_PLL_ENABLE;
	I915_WRITE(DP_A, dpa_ctl);
	POSTING_READ(DP_A);
	udelay(200);
}
/* If the sink supports it, try to set the power state appropriately */
static void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode)
{
	int ret, i;

	/* Should have a valid DPCD by this point */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (mode != DRM_MODE_DPMS_ON) {
		ret = intel_dp_aux_native_write_1(intel_dp, DP_SET_POWER,
						  DP_SET_POWER_D3);
		if (ret != 1)
			DRM_DEBUG_DRIVER("failed to write sink power state\n");
	} else {
		/*
		 * When turning on, we need to retry for 1ms to give the sink
		 * time to wake up.
		 */
		for (i = 0; i < 3; i++) {
			ret = intel_dp_aux_native_write_1(intel_dp,
							  DP_SET_POWER,
							  DP_SET_POWER_D0);
			if (ret == 1)
				break;
			msleep(1);
		}
	}
}

static void intel_dp_prepare(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	ironlake_edp_backlight_off(intel_dp);
	ironlake_edp_panel_off(intel_dp);

	/* Wake up the sink first */
	ironlake_edp_panel_vdd_on(intel_dp);
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_link_down(intel_dp);
	ironlake_edp_panel_vdd_off(intel_dp, false);

	/* Make sure the panel is off before trying to
	 * change the mode
	 */
}
static void intel_dp_commit(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);

	ironlake_edp_panel_vdd_on(intel_dp);
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_start_link_train(intel_dp);
	ironlake_edp_panel_on(intel_dp);
	ironlake_edp_panel_vdd_off(intel_dp, true);
	intel_dp_complete_link_train(intel_dp);
	ironlake_edp_backlight_on(intel_dp);

	intel_dp->dpms_mode = DRM_MODE_DPMS_ON;

	if (HAS_PCH_CPT(dev))
		intel_cpt_verify_modeset(dev, intel_crtc->pipe);
}
static void
intel_dp_dpms(struct drm_encoder *encoder, int mode)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t dp_reg = I915_READ(intel_dp->output_reg);

	if (mode != DRM_MODE_DPMS_ON) {
		ironlake_edp_backlight_off(intel_dp);
		ironlake_edp_panel_off(intel_dp);

		ironlake_edp_panel_vdd_on(intel_dp);
		intel_dp_sink_dpms(intel_dp, mode);
		intel_dp_link_down(intel_dp);
		ironlake_edp_panel_vdd_off(intel_dp, false);

		if (is_cpu_edp(intel_dp))
			ironlake_edp_pll_off(encoder);
	} else {
		if (is_cpu_edp(intel_dp))
			ironlake_edp_pll_on(encoder);

		ironlake_edp_panel_vdd_on(intel_dp);
		intel_dp_sink_dpms(intel_dp, mode);
		if (!(dp_reg & DP_PORT_EN)) {
			intel_dp_start_link_train(intel_dp);
			ironlake_edp_panel_on(intel_dp);
			ironlake_edp_panel_vdd_off(intel_dp, true);
			intel_dp_complete_link_train(intel_dp);
		} else
			ironlake_edp_panel_vdd_off(intel_dp, false);
		ironlake_edp_backlight_on(intel_dp);
	}
	intel_dp->dpms_mode = mode;
}
/*
 * Native read with retry for link status and receiver capability reads for
 * cases where the sink may still be asleep.
 */
static bool
intel_dp_aux_native_read_retry(struct intel_dp *intel_dp, uint16_t address,
			       uint8_t *recv, int recv_bytes)
{
	int ret, i;

	/*
	 * Sinks are *supposed* to come up within 1ms from an off state,
	 * but we're also supposed to retry 3 times per the spec.
	 */
	for (i = 0; i < 3; i++) {
		ret = intel_dp_aux_native_read(intel_dp, address, recv,
					       recv_bytes);
		if (ret == recv_bytes)
			return true;
		msleep(1);
	}

	return false;
}
/*
 * Fetch AUX CH registers 0x202 - 0x207 which contain
 * link status information
 */
static bool
intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
	return intel_dp_aux_native_read_retry(intel_dp,
					      DP_LANE0_1_STATUS,
					      link_status,
					      DP_LINK_STATUS_SIZE);
}
static uint8_t
intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
		     int r)
{
	return link_status[r - DP_LANE0_1_STATUS];
}

static uint8_t
intel_get_adjust_request_voltage(uint8_t adjust_request[2],
				 int lane)
{
	int s = ((lane & 1) ?
		 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
		 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
	uint8_t l = adjust_request[lane>>1];

	return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
}

static uint8_t
intel_get_adjust_request_pre_emphasis(uint8_t adjust_request[2],
				      int lane)
{
	int s = ((lane & 1) ?
		 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
		 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
	uint8_t l = adjust_request[lane>>1];

	return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
}
static char	*voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
static char	*pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};
static char	*link_train_names[] = {
	"pattern 1", "pattern 2", "idle", "off"
};
/*
 * These are source-specific values; current Intel hardware supports
 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
 */

static uint8_t
intel_dp_voltage_max(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;

	if (IS_GEN7(dev) && is_cpu_edp(intel_dp))
		return DP_TRAIN_VOLTAGE_SWING_800;
	else if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
		return DP_TRAIN_VOLTAGE_SWING_1200;
	else
		return DP_TRAIN_VOLTAGE_SWING_800;
}
static uint8_t
intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
{
	struct drm_device *dev = intel_dp->base.base.dev;

	if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_400:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_600:
		case DP_TRAIN_VOLTAGE_SWING_800:
			return DP_TRAIN_PRE_EMPHASIS_3_5;
		default:
			return DP_TRAIN_PRE_EMPHASIS_0;
		}
	} else {
		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_400:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_600:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_800:
			return DP_TRAIN_PRE_EMPHASIS_3_5;
		case DP_TRAIN_VOLTAGE_SWING_1200:
		default:
			return DP_TRAIN_PRE_EMPHASIS_0;
		}
	}
}
static void
intel_get_adjust_train(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
	uint8_t v = 0;
	uint8_t p = 0;
	int lane;
	uint8_t *adjust_request = link_status + (DP_ADJUST_REQUEST_LANE0_1 - DP_LANE0_1_STATUS);
	uint8_t voltage_max;
	uint8_t preemph_max;

	for (lane = 0; lane < intel_dp->lane_count; lane++) {
		uint8_t this_v = intel_get_adjust_request_voltage(adjust_request, lane);
		uint8_t this_p = intel_get_adjust_request_pre_emphasis(adjust_request, lane);

		if (this_v > v)
			v = this_v;
		if (this_p > p)
			p = this_p;
	}

	voltage_max = intel_dp_voltage_max(intel_dp);
	if (v >= voltage_max)
		v = voltage_max | DP_TRAIN_MAX_SWING_REACHED;

	preemph_max = intel_dp_pre_emphasis_max(intel_dp, v);
	if (p >= preemph_max)
		p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

	for (lane = 0; lane < 4; lane++)
		intel_dp->train_set[lane] = v | p;
}

static uint32_t
intel_dp_signal_levels(uint8_t train_set)
{
	uint32_t signal_levels = 0;

	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_400:
	default:
		signal_levels |= DP_VOLTAGE_0_4;
		break;
	case DP_TRAIN_VOLTAGE_SWING_600:
		signal_levels |= DP_VOLTAGE_0_6;
		break;
	case DP_TRAIN_VOLTAGE_SWING_800:
		signal_levels |= DP_VOLTAGE_0_8;
		break;
	case DP_TRAIN_VOLTAGE_SWING_1200:
		signal_levels |= DP_VOLTAGE_1_2;
		break;
	}
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPHASIS_0:
	default:
		signal_levels |= DP_PRE_EMPHASIS_0;
		break;
	case DP_TRAIN_PRE_EMPHASIS_3_5:
		signal_levels |= DP_PRE_EMPHASIS_3_5;
		break;
	case DP_TRAIN_PRE_EMPHASIS_6:
		signal_levels |= DP_PRE_EMPHASIS_6;
		break;
	case DP_TRAIN_PRE_EMPHASIS_9_5:
		signal_levels |= DP_PRE_EMPHASIS_9_5;
		break;
	}
	return signal_levels;
}
/* Gen6's DP voltage swing and pre-emphasis control */
static uint32_t
intel_gen6_edp_signal_levels(uint8_t train_set)
{
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					 DP_TRAIN_PRE_EMPHASIS_MASK);
	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
	case DP_TRAIN_VOLTAGE_SWING_1200 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}
/* Gen7's DP voltage swing and pre-emphasis control */
static uint32_t
intel_gen7_edp_signal_levels(uint8_t train_set)
{
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					 DP_TRAIN_PRE_EMPHASIS_MASK);
	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}
static uint8_t
intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
		      int lane)
{
	int s = (lane & 1) * 4;
	uint8_t l = link_status[lane>>1];

	return (l >> s) & 0xf;
}

/* Check for clock recovery is done on all channels */
static bool
intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
{
	int lane;
	uint8_t lane_status;

	for (lane = 0; lane < lane_count; lane++) {
		lane_status = intel_get_lane_status(link_status, lane);
		if ((lane_status & DP_LANE_CR_DONE) == 0)
			return false;
	}
	return true;
}

/* Check to see if channel eq is done on all channels */
#define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
			 DP_LANE_CHANNEL_EQ_DONE|\
			 DP_LANE_SYMBOL_LOCKED)
static bool
intel_channel_eq_ok(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
	uint8_t lane_align;
	uint8_t lane_status;
	int lane;

	lane_align = intel_dp_link_status(link_status,
					  DP_LANE_ALIGN_STATUS_UPDATED);
	if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
		return false;
	for (lane = 0; lane < intel_dp->lane_count; lane++) {
		lane_status = intel_get_lane_status(link_status, lane);
		if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
			return false;
	}
	return true;
}
static bool
intel_dp_set_link_train(struct intel_dp *intel_dp,
			uint32_t dp_reg_value,
			uint8_t dp_train_pat)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int ret;

	I915_WRITE(intel_dp->output_reg, dp_reg_value);
	POSTING_READ(intel_dp->output_reg);

	intel_dp_aux_native_write_1(intel_dp,
				    DP_TRAINING_PATTERN_SET,
				    dp_train_pat);

	ret = intel_dp_aux_native_write(intel_dp,
					DP_TRAINING_LANE0_SET,
					intel_dp->train_set,
					intel_dp->lane_count);
	if (ret != intel_dp->lane_count)
		return false;

	return true;
}
/* Enable corresponding port and start training pattern 1 */
static void
intel_dp_start_link_train(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);
	int i;
	uint8_t voltage;
	bool clock_recovery = false;
	int voltage_tries, loop_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/*
	 * On CPT we have to enable the port in training pattern 1, which
	 * will happen below in intel_dp_set_link_train.  Otherwise, enable
	 * the port and wait for it to become active.
	 */
	if (!HAS_PCH_CPT(dev)) {
		I915_WRITE(intel_dp->output_reg, intel_dp->DP);
		POSTING_READ(intel_dp->output_reg);
		intel_wait_for_vblank(dev, intel_crtc->pipe);
	}

	/* Write the link configuration data */
	intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET,
				  intel_dp->link_configuration,
				  DP_LINK_CONFIGURATION_SIZE);

	DP |= DP_PORT_EN;

	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
	else
		DP &= ~DP_LINK_TRAIN_MASK;
	memset(intel_dp->train_set, 0, 4);
	voltage = 0xff;
	voltage_tries = 0;
	loop_tries = 0;
	clock_recovery = false;
	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
		uint8_t link_status[DP_LINK_STATUS_SIZE];
		uint32_t signal_levels;

		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
		} else {
			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
			DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", signal_levels);
			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
		}

		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			reg = DP | DP_LINK_TRAIN_PAT_1_CPT;
		else
			reg = DP | DP_LINK_TRAIN_PAT_1;

		if (!intel_dp_set_link_train(intel_dp, reg,
					     DP_TRAINING_PATTERN_1 |
					     DP_LINK_SCRAMBLING_DISABLE))
			break;
		/* Set training pattern 1 */

		udelay(100);
		if (!intel_dp_get_link_status(intel_dp, link_status)) {
			DRM_ERROR("failed to get link status\n");
			break;
		}

		if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
			DRM_DEBUG_KMS("clock recovery OK\n");
			clock_recovery = true;
			break;
		}

		/* Check to see if we've tried the max voltage */
		for (i = 0; i < intel_dp->lane_count; i++)
			if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		if (i == intel_dp->lane_count) {
			++loop_tries;
			if (loop_tries == 5) {
				DRM_DEBUG_KMS("too many full retries, give up\n");
				break;
			}
			memset(intel_dp->train_set, 0, 4);
			voltage_tries = 0;
			continue;
		}

		/* Check to see if we've tried the same voltage 5 times */
		if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++voltage_tries;
			if (voltage_tries == 5) {
				DRM_DEBUG_KMS("too many voltage retries, give up\n");
				break;
			}
		} else
			voltage_tries = 0;
		voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new intel_dp->train_set as requested by target */
		intel_get_adjust_train(intel_dp, link_status);
	}

	intel_dp->DP = DP;
}
static void
intel_dp_complete_link_train(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	bool channel_eq = false;
	int tries, cr_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/* channel equalization */
	tries = 0;
	cr_tries = 0;
	channel_eq = false;
	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
		uint32_t signal_levels;
		uint8_t link_status[DP_LINK_STATUS_SIZE];

		if (cr_tries > 5) {
			DRM_ERROR("failed to train DP, aborting\n");
			intel_dp_link_down(intel_dp);
			break;
		}

		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
		} else {
			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
		}

		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			reg = DP | DP_LINK_TRAIN_PAT_2_CPT;
		else
			reg = DP | DP_LINK_TRAIN_PAT_2;

		/* channel eq pattern */
		if (!intel_dp_set_link_train(intel_dp, reg,
					     DP_TRAINING_PATTERN_2 |
					     DP_LINK_SCRAMBLING_DISABLE))
			break;

		udelay(400);
		if (!intel_dp_get_link_status(intel_dp, link_status))
			break;

		/* Make sure clock is still ok */
		if (!intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
			intel_dp_start_link_train(intel_dp);
			cr_tries++;
			continue;
		}

		if (intel_channel_eq_ok(intel_dp, link_status)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times, then try clock recovery if that fails */
		if (tries > 5) {
			intel_dp_link_down(intel_dp);
			intel_dp_start_link_train(intel_dp);
			tries = 0;
			cr_tries++;
			continue;
		}

		/* Compute new intel_dp->train_set as requested by target */
		intel_get_adjust_train(intel_dp, link_status);
		++tries;
	}

	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
		reg = DP | DP_LINK_TRAIN_OFF_CPT;
	else
		reg = DP | DP_LINK_TRAIN_OFF;

	I915_WRITE(intel_dp->output_reg, reg);
	POSTING_READ(intel_dp->output_reg);
	intel_dp_aux_native_write_1(intel_dp,
				    DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
}
static void
intel_dp_link_down(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t DP = intel_dp->DP;

	if ((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
		return;

	DRM_DEBUG_KMS("\n");

	if (is_edp(intel_dp)) {
		DP &= ~DP_PLL_ENABLE;
		I915_WRITE(intel_dp->output_reg, DP);
		POSTING_READ(intel_dp->output_reg);
		udelay(100);
	}

	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) {
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
	} else {
		DP &= ~DP_LINK_TRAIN_MASK;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
	}
	POSTING_READ(intel_dp->output_reg);

	msleep(17);

	if (is_edp(intel_dp)) {
		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			DP |= DP_LINK_TRAIN_OFF_CPT;
		else
			DP |= DP_LINK_TRAIN_OFF;
	}

	if (!HAS_PCH_CPT(dev) &&
	    I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;

		/* Hardware workaround: leaving our transcoder select
		 * set to transcoder B while it's off will prevent the
		 * corresponding HDMI output on transcoder A.
		 *
		 * Combine this with another hardware workaround:
		 * transcoder select bit can only be cleared while the
		 * port is enabled.
		 */
		DP &= ~DP_PIPEB_SELECT;
		I915_WRITE(intel_dp->output_reg, DP);

		/* Changes to enable or select take place the vblank
		 * after being written.
		 */
		if (crtc == NULL) {
			/* We can arrive here never having been attached
			 * to a CRTC, for instance, due to inheriting
			 * random state from the BIOS.
			 *
			 * If the pipe is not running, play safe and
			 * wait for the clocks to stabilise before
			 * continuing.
			 */
			POSTING_READ(intel_dp->output_reg);
			msleep(50);
		} else
			intel_wait_for_vblank(dev, to_intel_crtc(crtc)->pipe);
	}

	DP &= ~DP_AUDIO_OUTPUT_ENABLE;
	I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
	POSTING_READ(intel_dp->output_reg);
	msleep(intel_dp->panel_power_down_delay);
}
static bool
intel_dp_get_dpcd(struct intel_dp *intel_dp)
{
	if (intel_dp_aux_native_read_retry(intel_dp, 0x000, intel_dp->dpcd,
					   sizeof(intel_dp->dpcd)) &&
	    (intel_dp->dpcd[DP_DPCD_REV] != 0)) {
		return true;
	}

	return false;
}
static bool
intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector)
{
	int ret;

	ret = intel_dp_aux_native_read_retry(intel_dp,
					     DP_DEVICE_SERVICE_IRQ_VECTOR,
					     sink_irq_vector, 1);
	if (!ret)
		return false;

	return true;
}
static void
intel_dp_handle_test_request(struct intel_dp *intel_dp)
{
	/* NAK by default */
	intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_ACK);
}
/*
 * According to DP spec
 *  1. Read DPCD
 *  2. Configure link according to Receiver Capabilities
 *  3. Use Link Training from 2.5.3.3 and 3.5.1.3
 *  4. Check link status on receipt of hot-plug interrupt
 */

static void
intel_dp_check_link_status(struct intel_dp *intel_dp)
{
	u8 sink_irq_vector;
	u8 link_status[DP_LINK_STATUS_SIZE];

	if (intel_dp->dpms_mode != DRM_MODE_DPMS_ON)
		return;

	if (!intel_dp->base.base.crtc)
		return;

	/* Try to read receiver status if the link appears to be up */
	if (!intel_dp_get_link_status(intel_dp, link_status)) {
		intel_dp_link_down(intel_dp);
		return;
	}

	/* Now read the DPCD to see if it's actually running */
	if (!intel_dp_get_dpcd(intel_dp)) {
		intel_dp_link_down(intel_dp);
		return;
	}

	/* Try to read the source of the interrupt */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) {
		/* Clear interrupt source */
		intel_dp_aux_native_write_1(intel_dp,
					    DP_DEVICE_SERVICE_IRQ_VECTOR,
					    sink_irq_vector);

		if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST)
			intel_dp_handle_test_request(intel_dp);
		if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ))
			DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
	}

	if (!intel_channel_eq_ok(intel_dp, link_status)) {
		DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
			      drm_get_encoder_name(&intel_dp->base.base));
		intel_dp_start_link_train(intel_dp);
		intel_dp_complete_link_train(intel_dp);
	}
}
static enum drm_connector_status
intel_dp_detect_dpcd(struct intel_dp *intel_dp)
{
	if (intel_dp_get_dpcd(intel_dp))
		return connector_status_connected;

	return connector_status_disconnected;
}

static enum drm_connector_status
ironlake_dp_detect(struct intel_dp *intel_dp)
{
	enum drm_connector_status status;

	/* Can't disconnect eDP, but you can close the lid... */
	if (is_edp(intel_dp)) {
		status = intel_panel_detect(intel_dp->base.base.dev);
		if (status == connector_status_unknown)
			status = connector_status_connected;
		return status;
	}

	return intel_dp_detect_dpcd(intel_dp);
}
static enum drm_connector_status
g4x_dp_detect(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t temp, bit;

	switch (intel_dp->output_reg) {
	case DP_B:
		bit = DPB_HOTPLUG_INT_STATUS;
		break;
	case DP_C:
		bit = DPC_HOTPLUG_INT_STATUS;
		break;
	case DP_D:
		bit = DPD_HOTPLUG_INT_STATUS;
		break;
	default:
		return connector_status_unknown;
	}

	temp = I915_READ(PORT_HOTPLUG_STAT);

	if ((temp & bit) == 0)
		return connector_status_disconnected;

	return intel_dp_detect_dpcd(intel_dp);
}
static struct edid *
intel_dp_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct edid *edid;

	ironlake_edp_panel_vdd_on(intel_dp);
	edid = drm_get_edid(connector, adapter);
	ironlake_edp_panel_vdd_off(intel_dp, false);
	return edid;
}

static int
intel_dp_get_edid_modes(struct drm_connector *connector, struct i2c_adapter *adapter)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int ret;

	ironlake_edp_panel_vdd_on(intel_dp);
	ret = intel_ddc_get_modes(connector, adapter);
	ironlake_edp_panel_vdd_off(intel_dp, false);
	return ret;
}
/**
 * Uses CRT_HOTPLUG_EN and CRT_HOTPLUG_STAT to detect DP connection.
 *
 * \return true if DP port is connected.
 * \return false if DP port is disconnected.
 */
static enum drm_connector_status
intel_dp_detect(struct drm_connector *connector, bool force)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct drm_device *dev = intel_dp->base.base.dev;
	enum drm_connector_status status;
	struct edid *edid = NULL;

	intel_dp->has_audio = false;

	if (HAS_PCH_SPLIT(dev))
		status = ironlake_dp_detect(intel_dp);
	else
		status = g4x_dp_detect(intel_dp);

	DRM_DEBUG_KMS("DPCD: %02hx%02hx%02hx%02hx%02hx%02hx%02hx%02hx\n",
		      intel_dp->dpcd[0], intel_dp->dpcd[1], intel_dp->dpcd[2],
		      intel_dp->dpcd[3], intel_dp->dpcd[4], intel_dp->dpcd[5],
		      intel_dp->dpcd[6], intel_dp->dpcd[7]);

	if (status != connector_status_connected)
		return status;

	if (intel_dp->force_audio) {
		intel_dp->has_audio = intel_dp->force_audio > 0;
	} else {
		edid = intel_dp_get_edid(connector, &intel_dp->adapter);
		if (edid) {
			intel_dp->has_audio = drm_detect_monitor_audio(edid);
			connector->display_info.raw_edid = NULL;
			kfree(edid);
		}
	}

	return connector_status_connected;
}
static int intel_dp_get_modes(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int ret;

	/* We should parse the EDID data and find out if it has an audio sink
	 */

	ret = intel_dp_get_edid_modes(connector, &intel_dp->adapter);
	if (ret) {
		if (is_edp(intel_dp) && !intel_dp->panel_fixed_mode) {
			struct drm_display_mode *newmode;
			list_for_each_entry(newmode, &connector->probed_modes,
					    head) {
				if ((newmode->type & DRM_MODE_TYPE_PREFERRED)) {
					intel_dp->panel_fixed_mode =
						drm_mode_duplicate(dev, newmode);
					break;
				}
			}
		}
		return ret;
	}

	/* if eDP has no EDID, try to use fixed panel mode from VBT */
	if (is_edp(intel_dp)) {
		/* initialize panel mode from VBT if available for eDP */
		if (intel_dp->panel_fixed_mode == NULL && dev_priv->lfp_lvds_vbt_mode != NULL) {
			intel_dp->panel_fixed_mode =
				drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
			if (intel_dp->panel_fixed_mode) {
				intel_dp->panel_fixed_mode->type |=
					DRM_MODE_TYPE_PREFERRED;
			}
		}
		if (intel_dp->panel_fixed_mode) {
			struct drm_display_mode *mode;
			mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}
	return 0;
}
static bool
intel_dp_detect_audio(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct edid *edid;
	bool has_audio = false;

	edid = intel_dp_get_edid(connector, &intel_dp->adapter);
	if (edid) {
		has_audio = drm_detect_monitor_audio(edid);

		connector->display_info.raw_edid = NULL;
		kfree(edid);
	}

	return has_audio;
}
static int
intel_dp_set_property(struct drm_connector *connector,
		      struct drm_property *property,
		      uint64_t val)
{
	struct drm_i915_private *dev_priv = connector->dev->dev_private;
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int ret;

	ret = drm_connector_property_set_value(connector, property, val);
	if (ret)
		return ret;

	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		if (i == 0)
			has_audio = intel_dp_detect_audio(connector);
		else
			has_audio = i > 0;

		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		if (val == !!intel_dp->color_range)
			return 0;

		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
		goto done;
	}

	return -EINVAL;

done:
	if (intel_dp->base.base.crtc) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;
		drm_crtc_helper_set_mode(crtc, &crtc->mode,
					 crtc->x, crtc->y,
					 crtc->fb);
	}

	return 0;
}
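/* Connector teardown: release the eDP backlight (if any) and the connector. */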
static void
intel_dp_destroy(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;

	if (intel_dpd_is_edp(dev))
		intel_panel_destroy_backlight(dev);

	drm_sysfs_connector_remove(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}
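/*
 * Encoder teardown: the delayed VDD-off work must be cancelled (and any
 * pending VDD forced off) before the intel_dp structure that embeds it is
 * freed.
 */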
static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	i2c_del_adapter(&intel_dp->adapter);
	drm_encoder_cleanup(encoder);
	if (is_edp(intel_dp)) {
		cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
		ironlake_panel_vdd_off_sync(intel_dp);
	}
	kfree(intel_dp);
}
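/* DRM vtables hooking this DP implementation into the encoder and connector helpers. */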
static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
	.dpms = intel_dp_dpms,
	.mode_fixup = intel_dp_mode_fixup,
	.prepare = intel_dp_prepare,
	.mode_set = intel_dp_mode_set,
	.commit = intel_dp_commit,
};

static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.dpms = drm_helper_connector_dpms,
	.detect = intel_dp_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = intel_dp_set_property,
	.destroy = intel_dp_destroy,
};

static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
	.best_encoder = intel_best_encoder,
};

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.destroy = intel_dp_encoder_destroy,
};
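/*
 * Hot-plug handler: re-check link status on an HPD interrupt so a degraded
 * link can be retrained.
 */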
static void
intel_dp_hot_plug(struct intel_encoder *intel_encoder)
{
	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);

	intel_dp_check_link_status(intel_dp);
}
/* Return which DP Port should be selected for Transcoder DP control */
int
intel_trans_dp_port_sel(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct drm_mode_config *mode_config = &dev->mode_config;
	struct drm_encoder *encoder;

	list_for_each_entry(encoder, &mode_config->encoder_list, head) {
		struct intel_dp *intel_dp;

		if (encoder->crtc != crtc)
			continue;

		intel_dp = enc_to_intel_dp(encoder);
		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
		    intel_dp->base.type == INTEL_OUTPUT_EDP)
			return intel_dp->output_reg;
	}

	return -1;
}
/* check the VBT to see whether the eDP is on DP-D port */
bool intel_dpd_is_edp(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct child_device_config *p_child;
	int i;

	if (!dev_priv->child_dev_num)
		return false;

	for (i = 0; i < dev_priv->child_dev_num; i++) {
		p_child = dev_priv->child_dev + i;

		if (p_child->dvo_port == PORT_IDPD &&
		    p_child->device_type == DEVICE_TYPE_eDP)
			return true;
	}
	return false;
}
static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
{
	intel_attach_force_audio_property(connector);
	intel_attach_broadcast_rgb_property(connector);
}
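/*
 * Create and register the encoder and connector for a single DP (or eDP)
 * port identified by its output register, set up the AUX/DDC channel and
 * hotplug support and, for eDP, cache the panel power sequencing delays.
 */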
void
intel_dp_init(struct drm_device *dev, int output_reg)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_connector *connector;
	struct intel_dp *intel_dp;
	struct intel_encoder *intel_encoder;
	struct intel_connector *intel_connector;
	const char *name = NULL;
	int type;

	intel_dp = kzalloc(sizeof(struct intel_dp), GFP_KERNEL);
	if (!intel_dp)
		return;

	intel_dp->output_reg = output_reg;
	intel_dp->dpms_mode = -1;

	intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
	if (!intel_connector) {
		kfree(intel_dp);
		return;
	}
	intel_encoder = &intel_dp->base;

	if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
		if (intel_dpd_is_edp(dev))
			intel_dp->is_pch_edp = true;
	if (output_reg == DP_A || is_pch_edp(intel_dp)) {
		type = DRM_MODE_CONNECTOR_eDP;
		intel_encoder->type = INTEL_OUTPUT_EDP;
	} else {
		type = DRM_MODE_CONNECTOR_DisplayPort;
		intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	}

	connector = &intel_connector->base;
	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;

	if (output_reg == DP_B || output_reg == PCH_DP_B)
		intel_encoder->clone_mask = (1 << INTEL_DP_B_CLONE_BIT);
	else if (output_reg == DP_C || output_reg == PCH_DP_C)
		intel_encoder->clone_mask = (1 << INTEL_DP_C_CLONE_BIT);
	else if (output_reg == DP_D || output_reg == PCH_DP_D)
		intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);

	if (is_edp(intel_dp)) {
		intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
		INIT_DELAYED_WORK(&intel_dp->panel_vdd_work,
				  ironlake_panel_vdd_work);
	}

	intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);
	connector->interlace_allowed = true;
	connector->doublescan_allowed = 0;

	drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
			 DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs);

	intel_connector_attach_encoder(intel_connector, intel_encoder);
	drm_sysfs_connector_add(connector);
	/* Set up the DDC bus. */
	switch (output_reg) {
	case DP_A:
		name = "DPDDC-A";
		break;
	case DP_B:
	case PCH_DP_B:
		dev_priv->hotplug_supported_mask |=
			HDMIB_HOTPLUG_INT_STATUS;
		name = "DPDDC-B";
		break;
	case DP_C:
	case PCH_DP_C:
		dev_priv->hotplug_supported_mask |=
			HDMIC_HOTPLUG_INT_STATUS;
		name = "DPDDC-C";
		break;
	case DP_D:
	case PCH_DP_D:
		dev_priv->hotplug_supported_mask |=
			HDMID_HOTPLUG_INT_STATUS;
		name = "DPDDC-D";
		break;
	}
	/* Cache some DPCD data in the eDP case */
	if (is_edp(intel_dp)) {
		bool ret;
		struct edp_power_seq cur, vbt;
		u32 pp_on, pp_off, pp_div;

		pp_on = I915_READ(PCH_PP_ON_DELAYS);
		pp_off = I915_READ(PCH_PP_OFF_DELAYS);
		pp_div = I915_READ(PCH_PP_DIVISOR);

		/* Pull timing values out of registers */
		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
			PANEL_POWER_UP_DELAY_SHIFT;

		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
			PANEL_LIGHT_ON_DELAY_SHIFT;

		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
			PANEL_LIGHT_OFF_DELAY_SHIFT;

		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
			PANEL_POWER_DOWN_DELAY_SHIFT;

		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
			       PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;

		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

		vbt = dev_priv->edp.pps;

		DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);
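		/*
		 * The register/VBT fields above appear to be in 100us units,
		 * with the power-cycle field in 100ms units (hence the * 1000
		 * above). get_delay() takes the larger of the current and VBT
		 * values and rounds it up to whole milliseconds, e.g. a raw
		 * value of 125 (12.5 ms) becomes 13 ms.
		 */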
#define get_delay(field)	((max(cur.field, vbt.field) + 9) / 10)

		intel_dp->panel_power_up_delay = get_delay(t1_t3);
		intel_dp->backlight_on_delay = get_delay(t8);
		intel_dp->backlight_off_delay = get_delay(t9);
		intel_dp->panel_power_down_delay = get_delay(t10);
		intel_dp->panel_power_cycle_delay = get_delay(t11_t12);

		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
			      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
			      intel_dp->panel_power_cycle_delay);

		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
			      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
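		/*
		 * Reading the DPCD over the AUX channel requires the panel's
		 * VDD rail to be up, so force it on around the probe and
		 * release it again afterwards.
		 */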
		ironlake_edp_panel_vdd_on(intel_dp);
		ret = intel_dp_get_dpcd(intel_dp);
		ironlake_edp_panel_vdd_off(intel_dp, false);

		if (ret) {
			if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
				dev_priv->no_aux_handshake =
					intel_dp->dpcd[DP_MAX_DOWNSPREAD] &
					DP_NO_AUX_HANDSHAKE_LINK_TRAINING;
		} else {
			/* if this fails, presume the device is a ghost */
			DRM_INFO("failed to retrieve link info, disabling eDP\n");
			intel_dp_encoder_destroy(&intel_dp->base.base);
			intel_dp_destroy(&intel_connector->base);
			return;
		}
	}

	intel_dp_i2c_init(intel_dp, intel_connector, name);
	intel_encoder->hot_plug = intel_dp_hot_plug;

	if (is_edp(intel_dp)) {
		dev_priv->int_edp_connector = connector;
		intel_panel_setup_backlight(dev);
	}

	intel_dp_add_properties(intel_dp, connector);

	/* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written
	 * 0xd.  Failure to do so will result in spurious interrupts being
	 * generated on the port when a cable is not attached.
	 */
	if (IS_G4X(dev) && !IS_GM45(dev)) {
		u32 temp = I915_READ(PEG_BAND_GAP_DATA);
		I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
	}
}