/*
 * Copyright © 2008 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Keith Packard <keithp@keithp.com>
 */
#include <linux/i2c.h>
#include <linux/slab.h>
#include <linux/export.h>
#include "drmP.h"
#include "drm.h"
#include "drm_crtc.h"
#include "drm_crtc_helper.h"
#include "intel_drv.h"
#include "i915_drm.h"
#include "i915_drv.h"
#include "drm_dp_helper.h"

#define DP_RECEIVER_CAP_SIZE		0xf
#define DP_LINK_STATUS_SIZE		6
#define DP_LINK_CHECK_TIMEOUT		(10 * 1000)

#define DP_LINK_CONFIGURATION_SIZE	9
struct intel_dp {
        struct intel_encoder base;
        uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
        uint8_t dpcd[DP_RECEIVER_CAP_SIZE];
        struct i2c_adapter adapter;
        struct i2c_algo_dp_aux_data algo;
        int panel_power_up_delay;
        int panel_power_down_delay;
        int panel_power_cycle_delay;
        int backlight_on_delay;
        int backlight_off_delay;
        struct drm_display_mode *panel_fixed_mode;  /* for eDP */
        struct delayed_work panel_vdd_work;
};
/**
 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
 * @intel_dp: DP struct
 *
 * If a CPU or PCH DP output is attached to an eDP panel, this function
 * will return true, and false otherwise.
 */
static bool is_edp(struct intel_dp *intel_dp)
{
        return intel_dp->base.type == INTEL_OUTPUT_EDP;
}
/**
 * is_pch_edp - is the port on the PCH and attached to an eDP panel?
 * @intel_dp: DP struct
 *
 * Returns true if the given DP struct corresponds to a PCH DP port attached
 * to an eDP panel, false otherwise.  Helpful for determining whether we
 * may need FDI resources for a given DP output or not.
 */
static bool is_pch_edp(struct intel_dp *intel_dp)
{
        return intel_dp->is_pch_edp;
}
/**
 * is_cpu_edp - is the port on the CPU and attached to an eDP panel?
 * @intel_dp: DP struct
 *
 * Returns true if the given DP struct corresponds to a CPU eDP port.
 */
static bool is_cpu_edp(struct intel_dp *intel_dp)
{
        return is_edp(intel_dp) && !is_pch_edp(intel_dp);
}
static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
{
        return container_of(encoder, struct intel_dp, base.base);
}

static struct intel_dp *intel_attached_dp(struct drm_connector *connector)
{
        return container_of(intel_attached_encoder(connector),
                            struct intel_dp, base);
}
/**
 * intel_encoder_is_pch_edp - is the given encoder a PCH attached eDP?
 * @encoder: DRM encoder
 *
 * Return true if @encoder corresponds to a PCH attached eDP panel.  Needed
 * by intel_display.c.
 */
bool intel_encoder_is_pch_edp(struct drm_encoder *encoder)
{
        struct intel_dp *intel_dp;

        intel_dp = enc_to_intel_dp(encoder);

        return is_pch_edp(intel_dp);
}
static void intel_dp_start_link_train(struct intel_dp *intel_dp);
static void intel_dp_complete_link_train(struct intel_dp *intel_dp);
static void intel_dp_link_down(struct intel_dp *intel_dp);
void
intel_edp_link_config(struct intel_encoder *intel_encoder,
                      int *lane_num, int *link_bw)
{
        struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);

        *lane_num = intel_dp->lane_count;
        if (intel_dp->link_bw == DP_LINK_BW_1_62)
                *link_bw = 162000;
        else if (intel_dp->link_bw == DP_LINK_BW_2_7)
                *link_bw = 270000;
}
static int
intel_dp_max_lane_count(struct intel_dp *intel_dp)
{
        int max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;

        switch (max_lane_count) {
        case 1: case 2: case 4:
                break;
        default:
                max_lane_count = 4;
        }
        return max_lane_count;
}
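/*
 * Illustrative example (not part of the original source): a DPCD
 * DP_MAX_LANE_COUNT byte of 0x84 masks to 4 lanes here; the top bit of
 * that same byte is DP_ENHANCED_FRAME_CAP, which is checked separately
 * later when the link configuration is built.
 */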
static int
intel_dp_max_link_bw(struct intel_dp *intel_dp)
{
        int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];

        switch (max_link_bw) {
        case DP_LINK_BW_1_62:
        case DP_LINK_BW_2_7:
                break;
        default:
                max_link_bw = DP_LINK_BW_1_62;
                break;
        }
        return max_link_bw;
}
static int
intel_dp_link_clock(uint8_t link_bw)
{
        if (link_bw == DP_LINK_BW_2_7)
                return 270000;
        else
                return 162000;
}
/*
 * The units on the numbers in the next two are... bizarre.  Examples will
 * make it clearer; this one parallels an example in the eDP spec.
 *
 * intel_dp_max_data_rate for one lane of 2.7GHz evaluates as:
 *
 *     270000 * 1 * 8 / 10 == 216000
 *
 * The actual data capacity of that configuration is 2.16Gbit/s, so the
 * units are decakilobits.  ->clock in a drm_display_mode is in kilohertz -
 * or equivalently, kilopixels per second - so for 1680x1050R it'd be
 * 119000.  At 18bpp that's 2142000 kilobits per second.
 *
 * Thus the strange-looking division by 10 in intel_dp_link_required, to
 * get the result in decakilobits instead of kilobits.
 */

static int
intel_dp_link_required(int pixel_clock, int bpp)
{
        return (pixel_clock * bpp + 9) / 10;
}

static int
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
        return (max_link_clock * max_lanes * 8) / 10;
}
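/*
 * Worked example (illustrative, not from the original code): for a
 * 1920x1080@60 mode with a pixel clock of roughly 148500 kHz at 24bpp,
 *
 *     intel_dp_link_required(148500, 24) == (148500 * 24 + 9) / 10 == 356400
 *     intel_dp_max_data_rate(270000, 2)  == (270000 * 2 * 8) / 10   == 432000
 *
 * so two lanes at 2.7GHz have enough capacity for that mode; both values
 * are in the same "decakilobit" units described above.
 */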
static bool
intel_dp_adjust_dithering(struct intel_dp *intel_dp,
                          struct drm_display_mode *mode,
                          struct drm_display_mode *adjusted_mode)
{
        int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
        int max_lanes = intel_dp_max_lane_count(intel_dp);
        int max_rate, mode_rate;

        mode_rate = intel_dp_link_required(mode->clock, 24);
        max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);

        if (mode_rate > max_rate) {
                mode_rate = intel_dp_link_required(mode->clock, 18);
                if (mode_rate > max_rate)
                        return false;

                if (adjusted_mode)
                        adjusted_mode->private_flags
                                |= INTEL_MODE_DP_FORCE_6BPC;

                return true;
        }

        return true;
}
static int
intel_dp_mode_valid(struct drm_connector *connector,
                    struct drm_display_mode *mode)
{
        struct intel_dp *intel_dp = intel_attached_dp(connector);

        if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
                if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
                        return MODE_PANEL;

                if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
                        return MODE_PANEL;
        }

        if (!intel_dp_adjust_dithering(intel_dp, mode, NULL))
                return MODE_CLOCK_HIGH;

        if (mode->clock < 10000)
                return MODE_CLOCK_LOW;

        return MODE_OK;
}
static uint32_t
pack_aux(uint8_t *src, int src_bytes)
{
        int i;
        uint32_t v = 0;

        if (src_bytes > 4)
                src_bytes = 4;
        for (i = 0; i < src_bytes; i++)
                v |= ((uint32_t) src[i]) << ((3-i) * 8);
        return v;
}

static void
unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
{
        int i;

        if (dst_bytes > 4)
                dst_bytes = 4;
        for (i = 0; i < dst_bytes; i++)
                dst[i] = src >> ((3-i) * 8);
}
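/*
 * Quick sanity example (illustrative, not in the original source):
 * pack_aux() places the first message byte in the most significant byte
 * of the register word, so pack_aux((uint8_t[]){0x12, 0x34}, 2) yields
 * 0x12340000, and unpack_aux(0x12340000, dst, 2) restores {0x12, 0x34}.
 */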
/* hrawclock is 1/4 the FSB frequency */
static int
intel_hrawclk(struct drm_device *dev)
{
        struct drm_i915_private *dev_priv = dev->dev_private;
        uint32_t clkcfg;

        clkcfg = I915_READ(CLKCFG);
        switch (clkcfg & CLKCFG_FSB_MASK) {
        case CLKCFG_FSB_1067:
                return 266;
        case CLKCFG_FSB_1333:
                return 333;
        /* these two are just a guess; one of them might be right */
        case CLKCFG_FSB_1600:
        case CLKCFG_FSB_1600_ALT:
                return 400;
        default:
                return 133;
        }
}
static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
{
        struct drm_device *dev = intel_dp->base.base.dev;
        struct drm_i915_private *dev_priv = dev->dev_private;

        return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0;
}

static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
{
        struct drm_device *dev = intel_dp->base.base.dev;
        struct drm_i915_private *dev_priv = dev->dev_private;

        return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0;
}
static void
intel_dp_check_edp(struct intel_dp *intel_dp)
{
        struct drm_device *dev = intel_dp->base.base.dev;
        struct drm_i915_private *dev_priv = dev->dev_private;

        if (!is_edp(intel_dp))
                return;
        if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) {
                WARN(1, "eDP powered off while attempting aux channel communication.\n");
                DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n",
                              I915_READ(PCH_PP_STATUS),
                              I915_READ(PCH_PP_CONTROL));
        }
}
358 intel_dp_aux_ch(struct intel_dp
*intel_dp
,
359 uint8_t *send
, int send_bytes
,
360 uint8_t *recv
, int recv_size
)
362 uint32_t output_reg
= intel_dp
->output_reg
;
363 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
364 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
365 uint32_t ch_ctl
= output_reg
+ 0x10;
366 uint32_t ch_data
= ch_ctl
+ 4;
370 uint32_t aux_clock_divider
;
373 intel_dp_check_edp(intel_dp
);
374 /* The clock divider is based off the hrawclk,
375 * and would like to run at 2MHz. So, take the
376 * hrawclk value and divide by 2 and use that
378 * Note that PCH attached eDP panels should use a 125MHz input
381 if (is_cpu_edp(intel_dp
)) {
382 if (IS_GEN6(dev
) || IS_GEN7(dev
))
383 aux_clock_divider
= 200; /* SNB & IVB eDP input clock at 400Mhz */
385 aux_clock_divider
= 225; /* eDP input clock at 450Mhz */
386 } else if (HAS_PCH_SPLIT(dev
))
387 aux_clock_divider
= 62; /* IRL input clock fixed at 125Mhz */
389 aux_clock_divider
= intel_hrawclk(dev
) / 2;
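	/*
	 * Illustrative check (not part of the original source): the PCH
	 * reference clock is 125MHz, so the divider of 62 gives
	 * 125000 kHz / 62 ~= 2016 kHz, i.e. roughly the 2MHz the comment
	 * above asks for; likewise 450MHz / 225 and 400MHz / 200 both
	 * come out at 2MHz.
	 */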
396 /* Try to wait for any previous AUX channel activity */
397 for (try = 0; try < 3; try++) {
398 status
= I915_READ(ch_ctl
);
399 if ((status
& DP_AUX_CH_CTL_SEND_BUSY
) == 0)
405 WARN(1, "dp_aux_ch not started status 0x%08x\n",
410 /* Must try at least 3 times according to DP spec */
411 for (try = 0; try < 5; try++) {
412 /* Load the send data into the aux channel data registers */
413 for (i
= 0; i
< send_bytes
; i
+= 4)
414 I915_WRITE(ch_data
+ i
,
415 pack_aux(send
+ i
, send_bytes
- i
));
417 /* Send the command and wait for it to complete */
419 DP_AUX_CH_CTL_SEND_BUSY
|
420 DP_AUX_CH_CTL_TIME_OUT_400us
|
421 (send_bytes
<< DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT
) |
422 (precharge
<< DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT
) |
423 (aux_clock_divider
<< DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT
) |
425 DP_AUX_CH_CTL_TIME_OUT_ERROR
|
426 DP_AUX_CH_CTL_RECEIVE_ERROR
);
428 status
= I915_READ(ch_ctl
);
429 if ((status
& DP_AUX_CH_CTL_SEND_BUSY
) == 0)
434 /* Clear done status and any errors */
438 DP_AUX_CH_CTL_TIME_OUT_ERROR
|
439 DP_AUX_CH_CTL_RECEIVE_ERROR
);
440 if (status
& DP_AUX_CH_CTL_DONE
)
444 if ((status
& DP_AUX_CH_CTL_DONE
) == 0) {
445 DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status
);
449 /* Check for timeout or receive error.
450 * Timeouts occur when the sink is not connected
452 if (status
& DP_AUX_CH_CTL_RECEIVE_ERROR
) {
453 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status
);
457 /* Timeouts occur when the device isn't connected, so they're
458 * "normal" -- don't fill the kernel log with these */
459 if (status
& DP_AUX_CH_CTL_TIME_OUT_ERROR
) {
460 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status
);
464 /* Unload any bytes sent back from the other side */
465 recv_bytes
= ((status
& DP_AUX_CH_CTL_MESSAGE_SIZE_MASK
) >>
466 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT
);
467 if (recv_bytes
> recv_size
)
468 recv_bytes
= recv_size
;
470 for (i
= 0; i
< recv_bytes
; i
+= 4)
471 unpack_aux(I915_READ(ch_data
+ i
),
472 recv
+ i
, recv_bytes
- i
);
477 /* Write data to the aux channel in native mode */
479 intel_dp_aux_native_write(struct intel_dp
*intel_dp
,
480 uint16_t address
, uint8_t *send
, int send_bytes
)
487 intel_dp_check_edp(intel_dp
);
490 msg
[0] = AUX_NATIVE_WRITE
<< 4;
491 msg
[1] = address
>> 8;
492 msg
[2] = address
& 0xff;
493 msg
[3] = send_bytes
- 1;
494 memcpy(&msg
[4], send
, send_bytes
);
495 msg_bytes
= send_bytes
+ 4;
497 ret
= intel_dp_aux_ch(intel_dp
, msg
, msg_bytes
, &ack
, 1);
500 if ((ack
& AUX_NATIVE_REPLY_MASK
) == AUX_NATIVE_REPLY_ACK
)
502 else if ((ack
& AUX_NATIVE_REPLY_MASK
) == AUX_NATIVE_REPLY_DEFER
)
510 /* Write a single byte to the aux channel in native mode */
512 intel_dp_aux_native_write_1(struct intel_dp
*intel_dp
,
513 uint16_t address
, uint8_t byte
)
515 return intel_dp_aux_native_write(intel_dp
, address
, &byte
, 1);
518 /* read bytes from a native aux channel */
520 intel_dp_aux_native_read(struct intel_dp
*intel_dp
,
521 uint16_t address
, uint8_t *recv
, int recv_bytes
)
530 intel_dp_check_edp(intel_dp
);
531 msg
[0] = AUX_NATIVE_READ
<< 4;
532 msg
[1] = address
>> 8;
533 msg
[2] = address
& 0xff;
534 msg
[3] = recv_bytes
- 1;
537 reply_bytes
= recv_bytes
+ 1;
540 ret
= intel_dp_aux_ch(intel_dp
, msg
, msg_bytes
,
547 if ((ack
& AUX_NATIVE_REPLY_MASK
) == AUX_NATIVE_REPLY_ACK
) {
548 memcpy(recv
, reply
+ 1, ret
- 1);
551 else if ((ack
& AUX_NATIVE_REPLY_MASK
) == AUX_NATIVE_REPLY_DEFER
)
559 intel_dp_i2c_aux_ch(struct i2c_adapter
*adapter
, int mode
,
560 uint8_t write_byte
, uint8_t *read_byte
)
562 struct i2c_algo_dp_aux_data
*algo_data
= adapter
->algo_data
;
563 struct intel_dp
*intel_dp
= container_of(adapter
,
566 uint16_t address
= algo_data
->address
;
574 intel_dp_check_edp(intel_dp
);
575 /* Set up the command byte */
576 if (mode
& MODE_I2C_READ
)
577 msg
[0] = AUX_I2C_READ
<< 4;
579 msg
[0] = AUX_I2C_WRITE
<< 4;
581 if (!(mode
& MODE_I2C_STOP
))
582 msg
[0] |= AUX_I2C_MOT
<< 4;
584 msg
[1] = address
>> 8;
605 for (retry
= 0; retry
< 5; retry
++) {
606 ret
= intel_dp_aux_ch(intel_dp
,
610 DRM_DEBUG_KMS("aux_ch failed %d\n", ret
);
614 switch (reply
[0] & AUX_NATIVE_REPLY_MASK
) {
615 case AUX_NATIVE_REPLY_ACK
:
616 /* I2C-over-AUX Reply field is only valid
617 * when paired with AUX ACK.
620 case AUX_NATIVE_REPLY_NACK
:
621 DRM_DEBUG_KMS("aux_ch native nack\n");
623 case AUX_NATIVE_REPLY_DEFER
:
627 DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
632 switch (reply
[0] & AUX_I2C_REPLY_MASK
) {
633 case AUX_I2C_REPLY_ACK
:
634 if (mode
== MODE_I2C_READ
) {
635 *read_byte
= reply
[1];
637 return reply_bytes
- 1;
638 case AUX_I2C_REPLY_NACK
:
639 DRM_DEBUG_KMS("aux_i2c nack\n");
641 case AUX_I2C_REPLY_DEFER
:
642 DRM_DEBUG_KMS("aux_i2c defer\n");
646 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply
[0]);
651 DRM_ERROR("too many retries, giving up\n");
655 static void ironlake_edp_panel_vdd_on(struct intel_dp
*intel_dp
);
656 static void ironlake_edp_panel_vdd_off(struct intel_dp
*intel_dp
, bool sync
);
659 intel_dp_i2c_init(struct intel_dp
*intel_dp
,
660 struct intel_connector
*intel_connector
, const char *name
)
664 DRM_DEBUG_KMS("i2c_init %s\n", name
);
665 intel_dp
->algo
.running
= false;
666 intel_dp
->algo
.address
= 0;
667 intel_dp
->algo
.aux_ch
= intel_dp_i2c_aux_ch
;
669 memset(&intel_dp
->adapter
, '\0', sizeof(intel_dp
->adapter
));
670 intel_dp
->adapter
.owner
= THIS_MODULE
;
671 intel_dp
->adapter
.class = I2C_CLASS_DDC
;
672 strncpy(intel_dp
->adapter
.name
, name
, sizeof(intel_dp
->adapter
.name
) - 1);
673 intel_dp
->adapter
.name
[sizeof(intel_dp
->adapter
.name
) - 1] = '\0';
674 intel_dp
->adapter
.algo_data
= &intel_dp
->algo
;
675 intel_dp
->adapter
.dev
.parent
= &intel_connector
->base
.kdev
;
677 ironlake_edp_panel_vdd_on(intel_dp
);
678 ret
= i2c_dp_aux_add_bus(&intel_dp
->adapter
);
679 ironlake_edp_panel_vdd_off(intel_dp
, false);
684 intel_dp_mode_fixup(struct drm_encoder
*encoder
, struct drm_display_mode
*mode
,
685 struct drm_display_mode
*adjusted_mode
)
687 struct drm_device
*dev
= encoder
->dev
;
688 struct intel_dp
*intel_dp
= enc_to_intel_dp(encoder
);
689 int lane_count
, clock
;
690 int max_lane_count
= intel_dp_max_lane_count(intel_dp
);
691 int max_clock
= intel_dp_max_link_bw(intel_dp
) == DP_LINK_BW_2_7
? 1 : 0;
693 static int bws
[2] = { DP_LINK_BW_1_62
, DP_LINK_BW_2_7
};
695 if (is_edp(intel_dp
) && intel_dp
->panel_fixed_mode
) {
696 intel_fixed_panel_mode(intel_dp
->panel_fixed_mode
, adjusted_mode
);
697 intel_pch_panel_fitting(dev
, DRM_MODE_SCALE_FULLSCREEN
,
698 mode
, adjusted_mode
);
700 * the mode->clock is used to calculate the Data&Link M/N
701 * of the pipe. For the eDP the fixed clock should be used.
703 mode
->clock
= intel_dp
->panel_fixed_mode
->clock
;
706 if (!intel_dp_adjust_dithering(intel_dp
, mode
, adjusted_mode
))
709 bpp
= adjusted_mode
->private_flags
& INTEL_MODE_DP_FORCE_6BPC
? 18 : 24;
711 for (lane_count
= 1; lane_count
<= max_lane_count
; lane_count
<<= 1) {
712 for (clock
= 0; clock
<= max_clock
; clock
++) {
713 int link_avail
= intel_dp_max_data_rate(intel_dp_link_clock(bws
[clock
]), lane_count
);
715 if (intel_dp_link_required(mode
->clock
, bpp
)
717 intel_dp
->link_bw
= bws
[clock
];
718 intel_dp
->lane_count
= lane_count
;
719 adjusted_mode
->clock
= intel_dp_link_clock(intel_dp
->link_bw
);
720 DRM_DEBUG_KMS("Display port link bw %02x lane "
721 "count %d clock %d\n",
722 intel_dp
->link_bw
, intel_dp
->lane_count
,
723 adjusted_mode
->clock
);
struct intel_dp_m_n {
        uint32_t        tu;
        uint32_t        gmch_m;
        uint32_t        gmch_n;
        uint32_t        link_m;
        uint32_t        link_n;
};

static void
intel_reduce_ratio(uint32_t *num, uint32_t *den)
{
        while (*num > 0xffffff || *den > 0xffffff) {
                *num >>= 1;
                *den >>= 1;
        }
}

static void
intel_dp_compute_m_n(int bpp,
                     int nlanes,
                     int pixel_clock,
                     int link_clock,
                     struct intel_dp_m_n *m_n)
{
        m_n->tu = 64;
        m_n->gmch_m = (pixel_clock * bpp) >> 3;
        m_n->gmch_n = link_clock * nlanes;
        intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
        m_n->link_m = pixel_clock;
        m_n->link_n = link_clock;
        intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
}
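/*
 * Worked example (illustrative, not from the original code): for a
 * 119000 kHz pixel clock at 24bpp on two 2.7GHz lanes,
 *
 *     gmch_m = (119000 * 24) >> 3 = 357000
 *     gmch_n = 270000 * 2         = 540000
 *     link_m = 119000, link_n = 270000
 *
 * All four values are already below 0xffffff, so intel_reduce_ratio()
 * leaves them unchanged before they are written to the data/link M/N
 * registers below.
 */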
766 intel_dp_set_m_n(struct drm_crtc
*crtc
, struct drm_display_mode
*mode
,
767 struct drm_display_mode
*adjusted_mode
)
769 struct drm_device
*dev
= crtc
->dev
;
770 struct drm_mode_config
*mode_config
= &dev
->mode_config
;
771 struct drm_encoder
*encoder
;
772 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
773 struct intel_crtc
*intel_crtc
= to_intel_crtc(crtc
);
775 struct intel_dp_m_n m_n
;
776 int pipe
= intel_crtc
->pipe
;
779 * Find the lane count in the intel_encoder private
781 list_for_each_entry(encoder
, &mode_config
->encoder_list
, head
) {
782 struct intel_dp
*intel_dp
;
784 if (encoder
->crtc
!= crtc
)
787 intel_dp
= enc_to_intel_dp(encoder
);
788 if (intel_dp
->base
.type
== INTEL_OUTPUT_DISPLAYPORT
||
789 intel_dp
->base
.type
== INTEL_OUTPUT_EDP
)
791 lane_count
= intel_dp
->lane_count
;
797 * Compute the GMCH and Link ratios. The '3' here is
798 * the number of bytes_per_pixel post-LUT, which we always
799 * set up for 8-bits of R/G/B, or 3 bytes total.
801 intel_dp_compute_m_n(intel_crtc
->bpp
, lane_count
,
802 mode
->clock
, adjusted_mode
->clock
, &m_n
);
804 if (HAS_PCH_SPLIT(dev
)) {
805 I915_WRITE(TRANSDATA_M1(pipe
),
806 ((m_n
.tu
- 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT
) |
808 I915_WRITE(TRANSDATA_N1(pipe
), m_n
.gmch_n
);
809 I915_WRITE(TRANSDPLINK_M1(pipe
), m_n
.link_m
);
810 I915_WRITE(TRANSDPLINK_N1(pipe
), m_n
.link_n
);
812 I915_WRITE(PIPE_GMCH_DATA_M(pipe
),
813 ((m_n
.tu
- 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT
) |
815 I915_WRITE(PIPE_GMCH_DATA_N(pipe
), m_n
.gmch_n
);
816 I915_WRITE(PIPE_DP_LINK_M(pipe
), m_n
.link_m
);
817 I915_WRITE(PIPE_DP_LINK_N(pipe
), m_n
.link_n
);
821 static void ironlake_edp_pll_on(struct drm_encoder
*encoder
);
822 static void ironlake_edp_pll_off(struct drm_encoder
*encoder
);
825 intel_dp_mode_set(struct drm_encoder
*encoder
, struct drm_display_mode
*mode
,
826 struct drm_display_mode
*adjusted_mode
)
828 struct drm_device
*dev
= encoder
->dev
;
829 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
830 struct intel_dp
*intel_dp
= enc_to_intel_dp(encoder
);
831 struct drm_crtc
*crtc
= intel_dp
->base
.base
.crtc
;
832 struct intel_crtc
*intel_crtc
= to_intel_crtc(crtc
);
834 /* Turn on the eDP PLL if needed */
835 if (is_edp(intel_dp
)) {
836 if (!is_pch_edp(intel_dp
))
837 ironlake_edp_pll_on(encoder
);
839 ironlake_edp_pll_off(encoder
);
843 * There are four kinds of DP registers:
850 * IBX PCH and CPU are the same for almost everything,
851 * except that the CPU DP PLL is configured in this
854 * CPT PCH is quite different, having many bits moved
855 * to the TRANS_DP_CTL register instead. That
856 * configuration happens (oddly) in ironlake_pch_enable
859 /* Preserve the BIOS-computed detected bit. This is
860 * supposed to be read-only.
862 intel_dp
->DP
= I915_READ(intel_dp
->output_reg
) & DP_DETECTED
;
863 intel_dp
->DP
|= DP_VOLTAGE_0_4
| DP_PRE_EMPHASIS_0
;
865 /* Handle DP bits in common between all three register formats */
867 intel_dp
->DP
|= DP_VOLTAGE_0_4
| DP_PRE_EMPHASIS_0
;
869 switch (intel_dp
->lane_count
) {
871 intel_dp
->DP
|= DP_PORT_WIDTH_1
;
874 intel_dp
->DP
|= DP_PORT_WIDTH_2
;
877 intel_dp
->DP
|= DP_PORT_WIDTH_4
;
880 if (intel_dp
->has_audio
) {
881 DRM_DEBUG_DRIVER("Enabling DP audio on pipe %c\n",
882 pipe_name(intel_crtc
->pipe
));
883 intel_dp
->DP
|= DP_AUDIO_OUTPUT_ENABLE
;
884 intel_write_eld(encoder
, adjusted_mode
);
886 memset(intel_dp
->link_configuration
, 0, DP_LINK_CONFIGURATION_SIZE
);
887 intel_dp
->link_configuration
[0] = intel_dp
->link_bw
;
888 intel_dp
->link_configuration
[1] = intel_dp
->lane_count
;
889 intel_dp
->link_configuration
[8] = DP_SET_ANSI_8B10B
;
891 * Check for DPCD version > 1.1 and enhanced framing support
893 if (intel_dp
->dpcd
[DP_DPCD_REV
] >= 0x11 &&
894 (intel_dp
->dpcd
[DP_MAX_LANE_COUNT
] & DP_ENHANCED_FRAME_CAP
)) {
895 intel_dp
->link_configuration
[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN
;
898 /* Split out the IBX/CPU vs CPT settings */
900 if (is_cpu_edp(intel_dp
) && IS_GEN7(dev
)) {
901 if (adjusted_mode
->flags
& DRM_MODE_FLAG_PHSYNC
)
902 intel_dp
->DP
|= DP_SYNC_HS_HIGH
;
903 if (adjusted_mode
->flags
& DRM_MODE_FLAG_PVSYNC
)
904 intel_dp
->DP
|= DP_SYNC_VS_HIGH
;
905 intel_dp
->DP
|= DP_LINK_TRAIN_OFF_CPT
;
907 if (intel_dp
->link_configuration
[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN
)
908 intel_dp
->DP
|= DP_ENHANCED_FRAMING
;
910 intel_dp
->DP
|= intel_crtc
->pipe
<< 29;
912 /* don't miss out required setting for eDP */
913 intel_dp
->DP
|= DP_PLL_ENABLE
;
914 if (adjusted_mode
->clock
< 200000)
915 intel_dp
->DP
|= DP_PLL_FREQ_160MHZ
;
917 intel_dp
->DP
|= DP_PLL_FREQ_270MHZ
;
918 } else if (!HAS_PCH_CPT(dev
) || is_cpu_edp(intel_dp
)) {
919 intel_dp
->DP
|= intel_dp
->color_range
;
921 if (adjusted_mode
->flags
& DRM_MODE_FLAG_PHSYNC
)
922 intel_dp
->DP
|= DP_SYNC_HS_HIGH
;
923 if (adjusted_mode
->flags
& DRM_MODE_FLAG_PVSYNC
)
924 intel_dp
->DP
|= DP_SYNC_VS_HIGH
;
925 intel_dp
->DP
|= DP_LINK_TRAIN_OFF
;
927 if (intel_dp
->link_configuration
[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN
)
928 intel_dp
->DP
|= DP_ENHANCED_FRAMING
;
930 if (intel_crtc
->pipe
== 1)
931 intel_dp
->DP
|= DP_PIPEB_SELECT
;
933 if (is_cpu_edp(intel_dp
)) {
934 /* don't miss out required setting for eDP */
935 intel_dp
->DP
|= DP_PLL_ENABLE
;
936 if (adjusted_mode
->clock
< 200000)
937 intel_dp
->DP
|= DP_PLL_FREQ_160MHZ
;
939 intel_dp
->DP
|= DP_PLL_FREQ_270MHZ
;
942 intel_dp
->DP
|= DP_LINK_TRAIN_OFF_CPT
;
946 #define IDLE_ON_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK)
947 #define IDLE_ON_VALUE (PP_ON | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_ON_IDLE)
949 #define IDLE_OFF_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK)
950 #define IDLE_OFF_VALUE (0 | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE)
952 #define IDLE_CYCLE_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK)
953 #define IDLE_CYCLE_VALUE (0 | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE)
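/*
 * Usage note (illustrative, not part of the original source): the helpers
 * below poll PCH_PP_STATUS against one of these mask/value pairs, e.g.
 * ironlake_wait_panel_on() waits until
 *
 *     (I915_READ(PCH_PP_STATUS) & IDLE_ON_MASK) == IDLE_ON_VALUE
 *
 * meaning PP_ON is set and the panel power sequencer has gone idle.
 */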
955 static void ironlake_wait_panel_status(struct intel_dp
*intel_dp
,
959 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
960 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
962 DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
964 I915_READ(PCH_PP_STATUS
),
965 I915_READ(PCH_PP_CONTROL
));
967 if (_wait_for((I915_READ(PCH_PP_STATUS
) & mask
) == value
, 5000, 10)) {
968 DRM_ERROR("Panel status timeout: status %08x control %08x\n",
969 I915_READ(PCH_PP_STATUS
),
970 I915_READ(PCH_PP_CONTROL
));
974 static void ironlake_wait_panel_on(struct intel_dp
*intel_dp
)
976 DRM_DEBUG_KMS("Wait for panel power on\n");
977 ironlake_wait_panel_status(intel_dp
, IDLE_ON_MASK
, IDLE_ON_VALUE
);
980 static void ironlake_wait_panel_off(struct intel_dp
*intel_dp
)
982 DRM_DEBUG_KMS("Wait for panel power off time\n");
983 ironlake_wait_panel_status(intel_dp
, IDLE_OFF_MASK
, IDLE_OFF_VALUE
);
986 static void ironlake_wait_panel_power_cycle(struct intel_dp
*intel_dp
)
988 DRM_DEBUG_KMS("Wait for panel power cycle\n");
989 ironlake_wait_panel_status(intel_dp
, IDLE_CYCLE_MASK
, IDLE_CYCLE_VALUE
);
/* Read the current pp_control value, unlocking the register if it
 * is locked
 */
static u32 ironlake_get_pp_control(struct drm_i915_private *dev_priv)
{
        u32 control = I915_READ(PCH_PP_CONTROL);

        control &= ~PANEL_UNLOCK_MASK;
        control |= PANEL_UNLOCK_REGS;
        return control;
}
1006 static void ironlake_edp_panel_vdd_on(struct intel_dp
*intel_dp
)
1008 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
1009 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
1012 if (!is_edp(intel_dp
))
1014 DRM_DEBUG_KMS("Turn eDP VDD on\n");
1016 WARN(intel_dp
->want_panel_vdd
,
1017 "eDP VDD already requested on\n");
1019 intel_dp
->want_panel_vdd
= true;
1021 if (ironlake_edp_have_panel_vdd(intel_dp
)) {
1022 DRM_DEBUG_KMS("eDP VDD already on\n");
1026 if (!ironlake_edp_have_panel_power(intel_dp
))
1027 ironlake_wait_panel_power_cycle(intel_dp
);
1029 pp
= ironlake_get_pp_control(dev_priv
);
1030 pp
|= EDP_FORCE_VDD
;
1031 I915_WRITE(PCH_PP_CONTROL
, pp
);
1032 POSTING_READ(PCH_PP_CONTROL
);
1033 DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
1034 I915_READ(PCH_PP_STATUS
), I915_READ(PCH_PP_CONTROL
));
1037 * If the panel wasn't on, delay before accessing aux channel
1039 if (!ironlake_edp_have_panel_power(intel_dp
)) {
1040 DRM_DEBUG_KMS("eDP was not running\n");
1041 msleep(intel_dp
->panel_power_up_delay
);
1045 static void ironlake_panel_vdd_off_sync(struct intel_dp
*intel_dp
)
1047 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
1048 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
1051 if (!intel_dp
->want_panel_vdd
&& ironlake_edp_have_panel_vdd(intel_dp
)) {
1052 pp
= ironlake_get_pp_control(dev_priv
);
1053 pp
&= ~EDP_FORCE_VDD
;
1054 I915_WRITE(PCH_PP_CONTROL
, pp
);
1055 POSTING_READ(PCH_PP_CONTROL
);
1057 /* Make sure sequencer is idle before allowing subsequent activity */
1058 DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
1059 I915_READ(PCH_PP_STATUS
), I915_READ(PCH_PP_CONTROL
));
1061 msleep(intel_dp
->panel_power_down_delay
);
1065 static void ironlake_panel_vdd_work(struct work_struct
*__work
)
1067 struct intel_dp
*intel_dp
= container_of(to_delayed_work(__work
),
1068 struct intel_dp
, panel_vdd_work
);
1069 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
1071 mutex_lock(&dev
->mode_config
.mutex
);
1072 ironlake_panel_vdd_off_sync(intel_dp
);
1073 mutex_unlock(&dev
->mode_config
.mutex
);
1076 static void ironlake_edp_panel_vdd_off(struct intel_dp
*intel_dp
, bool sync
)
1078 if (!is_edp(intel_dp
))
1081 DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp
->want_panel_vdd
);
1082 WARN(!intel_dp
->want_panel_vdd
, "eDP VDD not forced on");
1084 intel_dp
->want_panel_vdd
= false;
1087 ironlake_panel_vdd_off_sync(intel_dp
);
1090 * Queue the timer to fire a long
1091 * time from now (relative to the power down delay)
1092 * to keep the panel power up across a sequence of operations
1094 schedule_delayed_work(&intel_dp
->panel_vdd_work
,
1095 msecs_to_jiffies(intel_dp
->panel_power_cycle_delay
* 5));
1099 static void ironlake_edp_panel_on(struct intel_dp
*intel_dp
)
1101 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
1102 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
1105 if (!is_edp(intel_dp
))
1108 DRM_DEBUG_KMS("Turn eDP power on\n");
1110 if (ironlake_edp_have_panel_power(intel_dp
)) {
1111 DRM_DEBUG_KMS("eDP power already on\n");
1115 ironlake_wait_panel_power_cycle(intel_dp
);
1117 pp
= ironlake_get_pp_control(dev_priv
);
1119 /* ILK workaround: disable reset around power sequence */
1120 pp
&= ~PANEL_POWER_RESET
;
1121 I915_WRITE(PCH_PP_CONTROL
, pp
);
1122 POSTING_READ(PCH_PP_CONTROL
);
1125 pp
|= POWER_TARGET_ON
;
1127 pp
|= PANEL_POWER_RESET
;
1129 I915_WRITE(PCH_PP_CONTROL
, pp
);
1130 POSTING_READ(PCH_PP_CONTROL
);
1132 ironlake_wait_panel_on(intel_dp
);
1135 pp
|= PANEL_POWER_RESET
; /* restore panel reset bit */
1136 I915_WRITE(PCH_PP_CONTROL
, pp
);
1137 POSTING_READ(PCH_PP_CONTROL
);
1141 static void ironlake_edp_panel_off(struct intel_dp
*intel_dp
)
1143 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
1144 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
1147 if (!is_edp(intel_dp
))
1150 DRM_DEBUG_KMS("Turn eDP power off\n");
1152 WARN(intel_dp
->want_panel_vdd
, "Cannot turn power off while VDD is on\n");
1154 pp
= ironlake_get_pp_control(dev_priv
);
1155 pp
&= ~(POWER_TARGET_ON
| EDP_FORCE_VDD
| PANEL_POWER_RESET
| EDP_BLC_ENABLE
);
1156 I915_WRITE(PCH_PP_CONTROL
, pp
);
1157 POSTING_READ(PCH_PP_CONTROL
);
1159 ironlake_wait_panel_off(intel_dp
);
1162 static void ironlake_edp_backlight_on(struct intel_dp
*intel_dp
)
1164 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
1165 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
1168 if (!is_edp(intel_dp
))
1171 DRM_DEBUG_KMS("\n");
1173 * If we enable the backlight right away following a panel power
1174 * on, we may see slight flicker as the panel syncs with the eDP
1175 * link. So delay a bit to make sure the image is solid before
1176 * allowing it to appear.
1178 msleep(intel_dp
->backlight_on_delay
);
1179 pp
= ironlake_get_pp_control(dev_priv
);
1180 pp
|= EDP_BLC_ENABLE
;
1181 I915_WRITE(PCH_PP_CONTROL
, pp
);
1182 POSTING_READ(PCH_PP_CONTROL
);
1185 static void ironlake_edp_backlight_off(struct intel_dp
*intel_dp
)
1187 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
1188 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
1191 if (!is_edp(intel_dp
))
1194 DRM_DEBUG_KMS("\n");
1195 pp
= ironlake_get_pp_control(dev_priv
);
1196 pp
&= ~EDP_BLC_ENABLE
;
1197 I915_WRITE(PCH_PP_CONTROL
, pp
);
1198 POSTING_READ(PCH_PP_CONTROL
);
1199 msleep(intel_dp
->backlight_off_delay
);
1202 static void ironlake_edp_pll_on(struct drm_encoder
*encoder
)
1204 struct drm_device
*dev
= encoder
->dev
;
1205 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
1208 DRM_DEBUG_KMS("\n");
1209 dpa_ctl
= I915_READ(DP_A
);
1210 dpa_ctl
|= DP_PLL_ENABLE
;
1211 I915_WRITE(DP_A
, dpa_ctl
);
1216 static void ironlake_edp_pll_off(struct drm_encoder
*encoder
)
1218 struct drm_device
*dev
= encoder
->dev
;
1219 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
1222 dpa_ctl
= I915_READ(DP_A
);
1223 dpa_ctl
&= ~DP_PLL_ENABLE
;
1224 I915_WRITE(DP_A
, dpa_ctl
);
1229 /* If the sink supports it, try to set the power state appropriately */
1230 static void intel_dp_sink_dpms(struct intel_dp
*intel_dp
, int mode
)
1234 /* Should have a valid DPCD by this point */
1235 if (intel_dp
->dpcd
[DP_DPCD_REV
] < 0x11)
1238 if (mode
!= DRM_MODE_DPMS_ON
) {
1239 ret
= intel_dp_aux_native_write_1(intel_dp
, DP_SET_POWER
,
1242 DRM_DEBUG_DRIVER("failed to write sink power state\n");
1245 * When turning on, we need to retry for 1ms to give the sink
1248 for (i
= 0; i
< 3; i
++) {
1249 ret
= intel_dp_aux_native_write_1(intel_dp
,
1259 static void intel_dp_prepare(struct drm_encoder
*encoder
)
1261 struct intel_dp
*intel_dp
= enc_to_intel_dp(encoder
);
1263 ironlake_edp_backlight_off(intel_dp
);
1264 ironlake_edp_panel_off(intel_dp
);
1266 /* Wake up the sink first */
1267 ironlake_edp_panel_vdd_on(intel_dp
);
1268 intel_dp_sink_dpms(intel_dp
, DRM_MODE_DPMS_ON
);
1269 intel_dp_link_down(intel_dp
);
1270 ironlake_edp_panel_vdd_off(intel_dp
, false);
1272 /* Make sure the panel is off before trying to
1277 static void intel_dp_commit(struct drm_encoder
*encoder
)
1279 struct intel_dp
*intel_dp
= enc_to_intel_dp(encoder
);
1280 struct drm_device
*dev
= encoder
->dev
;
1281 struct intel_crtc
*intel_crtc
= to_intel_crtc(intel_dp
->base
.base
.crtc
);
1283 ironlake_edp_panel_vdd_on(intel_dp
);
1284 intel_dp_sink_dpms(intel_dp
, DRM_MODE_DPMS_ON
);
1285 intel_dp_start_link_train(intel_dp
);
1286 ironlake_edp_panel_on(intel_dp
);
1287 ironlake_edp_panel_vdd_off(intel_dp
, true);
1288 intel_dp_complete_link_train(intel_dp
);
1289 ironlake_edp_backlight_on(intel_dp
);
1291 intel_dp
->dpms_mode
= DRM_MODE_DPMS_ON
;
1293 if (HAS_PCH_CPT(dev
))
1294 intel_cpt_verify_modeset(dev
, intel_crtc
->pipe
);
1298 intel_dp_dpms(struct drm_encoder
*encoder
, int mode
)
1300 struct intel_dp
*intel_dp
= enc_to_intel_dp(encoder
);
1301 struct drm_device
*dev
= encoder
->dev
;
1302 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
1303 uint32_t dp_reg
= I915_READ(intel_dp
->output_reg
);
1305 if (mode
!= DRM_MODE_DPMS_ON
) {
1306 ironlake_edp_backlight_off(intel_dp
);
1307 ironlake_edp_panel_off(intel_dp
);
1309 ironlake_edp_panel_vdd_on(intel_dp
);
1310 intel_dp_sink_dpms(intel_dp
, mode
);
1311 intel_dp_link_down(intel_dp
);
1312 ironlake_edp_panel_vdd_off(intel_dp
, false);
1314 if (is_cpu_edp(intel_dp
))
1315 ironlake_edp_pll_off(encoder
);
1317 if (is_cpu_edp(intel_dp
))
1318 ironlake_edp_pll_on(encoder
);
1320 ironlake_edp_panel_vdd_on(intel_dp
);
1321 intel_dp_sink_dpms(intel_dp
, mode
);
1322 if (!(dp_reg
& DP_PORT_EN
)) {
1323 intel_dp_start_link_train(intel_dp
);
1324 ironlake_edp_panel_on(intel_dp
);
1325 ironlake_edp_panel_vdd_off(intel_dp
, true);
1326 intel_dp_complete_link_train(intel_dp
);
1328 ironlake_edp_panel_vdd_off(intel_dp
, false);
1329 ironlake_edp_backlight_on(intel_dp
);
1331 intel_dp
->dpms_mode
= mode
;
1335 * Native read with retry for link status and receiver capability reads for
1336 * cases where the sink may still be asleep.
1339 intel_dp_aux_native_read_retry(struct intel_dp
*intel_dp
, uint16_t address
,
1340 uint8_t *recv
, int recv_bytes
)
1345 * Sinks are *supposed* to come up within 1ms from an off state,
1346 * but we're also supposed to retry 3 times per the spec.
1348 for (i
= 0; i
< 3; i
++) {
1349 ret
= intel_dp_aux_native_read(intel_dp
, address
, recv
,
1351 if (ret
== recv_bytes
)
1360 * Fetch AUX CH registers 0x202 - 0x207 which contain
1361 * link status information
1364 intel_dp_get_link_status(struct intel_dp
*intel_dp
, uint8_t link_status
[DP_LINK_STATUS_SIZE
])
1366 return intel_dp_aux_native_read_retry(intel_dp
,
1369 DP_LINK_STATUS_SIZE
);
1373 intel_dp_link_status(uint8_t link_status
[DP_LINK_STATUS_SIZE
],
1376 return link_status
[r
- DP_LANE0_1_STATUS
];
1380 intel_get_adjust_request_voltage(uint8_t adjust_request
[2],
1383 int s
= ((lane
& 1) ?
1384 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT
:
1385 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT
);
1386 uint8_t l
= adjust_request
[lane
>>1];
1388 return ((l
>> s
) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT
;
1392 intel_get_adjust_request_pre_emphasis(uint8_t adjust_request
[2],
1395 int s
= ((lane
& 1) ?
1396 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT
:
1397 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT
);
1398 uint8_t l
= adjust_request
[lane
>>1];
1400 return ((l
>> s
) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT
;
static char *voltage_names[] = {
        "0.4V", "0.6V", "0.8V", "1.2V"
};
static char *pre_emph_names[] = {
        "0dB", "3.5dB", "6dB", "9.5dB"
};
static char *link_train_names[] = {
        "pattern 1", "pattern 2", "idle", "off"
};
/*
 * These are source-specific values; current Intel hardware supports
 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
 */

static uint8_t
intel_dp_voltage_max(struct intel_dp *intel_dp)
{
        struct drm_device *dev = intel_dp->base.base.dev;

        if (IS_GEN7(dev) && is_cpu_edp(intel_dp))
                return DP_TRAIN_VOLTAGE_SWING_800;
        else if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
                return DP_TRAIN_VOLTAGE_SWING_1200;
        else
                return DP_TRAIN_VOLTAGE_SWING_800;
}
static uint8_t
intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
{
        struct drm_device *dev = intel_dp->base.base.dev;

        if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
                switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
                case DP_TRAIN_VOLTAGE_SWING_400:
                        return DP_TRAIN_PRE_EMPHASIS_6;
                case DP_TRAIN_VOLTAGE_SWING_600:
                case DP_TRAIN_VOLTAGE_SWING_800:
                        return DP_TRAIN_PRE_EMPHASIS_3_5;
                default:
                        return DP_TRAIN_PRE_EMPHASIS_0;
                }
        } else {
                switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
                case DP_TRAIN_VOLTAGE_SWING_400:
                        return DP_TRAIN_PRE_EMPHASIS_6;
                case DP_TRAIN_VOLTAGE_SWING_600:
                        return DP_TRAIN_PRE_EMPHASIS_6;
                case DP_TRAIN_VOLTAGE_SWING_800:
                        return DP_TRAIN_PRE_EMPHASIS_3_5;
                case DP_TRAIN_VOLTAGE_SWING_1200:
                default:
                        return DP_TRAIN_PRE_EMPHASIS_0;
                }
        }
}
1465 intel_get_adjust_train(struct intel_dp
*intel_dp
, uint8_t link_status
[DP_LINK_STATUS_SIZE
])
1470 uint8_t *adjust_request
= link_status
+ (DP_ADJUST_REQUEST_LANE0_1
- DP_LANE0_1_STATUS
);
1471 uint8_t voltage_max
;
1472 uint8_t preemph_max
;
1474 for (lane
= 0; lane
< intel_dp
->lane_count
; lane
++) {
1475 uint8_t this_v
= intel_get_adjust_request_voltage(adjust_request
, lane
);
1476 uint8_t this_p
= intel_get_adjust_request_pre_emphasis(adjust_request
, lane
);
1484 voltage_max
= intel_dp_voltage_max(intel_dp
);
1485 if (v
>= voltage_max
)
1486 v
= voltage_max
| DP_TRAIN_MAX_SWING_REACHED
;
1488 preemph_max
= intel_dp_pre_emphasis_max(intel_dp
, v
);
1489 if (p
>= preemph_max
)
1490 p
= preemph_max
| DP_TRAIN_MAX_PRE_EMPHASIS_REACHED
;
1492 for (lane
= 0; lane
< 4; lane
++)
1493 intel_dp
->train_set
[lane
] = v
| p
;
1497 intel_dp_signal_levels(uint8_t train_set
)
1499 uint32_t signal_levels
= 0;
1501 switch (train_set
& DP_TRAIN_VOLTAGE_SWING_MASK
) {
1502 case DP_TRAIN_VOLTAGE_SWING_400
:
1504 signal_levels
|= DP_VOLTAGE_0_4
;
1506 case DP_TRAIN_VOLTAGE_SWING_600
:
1507 signal_levels
|= DP_VOLTAGE_0_6
;
1509 case DP_TRAIN_VOLTAGE_SWING_800
:
1510 signal_levels
|= DP_VOLTAGE_0_8
;
1512 case DP_TRAIN_VOLTAGE_SWING_1200
:
1513 signal_levels
|= DP_VOLTAGE_1_2
;
1516 switch (train_set
& DP_TRAIN_PRE_EMPHASIS_MASK
) {
1517 case DP_TRAIN_PRE_EMPHASIS_0
:
1519 signal_levels
|= DP_PRE_EMPHASIS_0
;
1521 case DP_TRAIN_PRE_EMPHASIS_3_5
:
1522 signal_levels
|= DP_PRE_EMPHASIS_3_5
;
1524 case DP_TRAIN_PRE_EMPHASIS_6
:
1525 signal_levels
|= DP_PRE_EMPHASIS_6
;
1527 case DP_TRAIN_PRE_EMPHASIS_9_5
:
1528 signal_levels
|= DP_PRE_EMPHASIS_9_5
;
1531 return signal_levels
;
1534 /* Gen6's DP voltage swing and pre-emphasis control */
1536 intel_gen6_edp_signal_levels(uint8_t train_set
)
1538 int signal_levels
= train_set
& (DP_TRAIN_VOLTAGE_SWING_MASK
|
1539 DP_TRAIN_PRE_EMPHASIS_MASK
);
1540 switch (signal_levels
) {
1541 case DP_TRAIN_VOLTAGE_SWING_400
| DP_TRAIN_PRE_EMPHASIS_0
:
1542 case DP_TRAIN_VOLTAGE_SWING_600
| DP_TRAIN_PRE_EMPHASIS_0
:
1543 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B
;
1544 case DP_TRAIN_VOLTAGE_SWING_400
| DP_TRAIN_PRE_EMPHASIS_3_5
:
1545 return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B
;
1546 case DP_TRAIN_VOLTAGE_SWING_400
| DP_TRAIN_PRE_EMPHASIS_6
:
1547 case DP_TRAIN_VOLTAGE_SWING_600
| DP_TRAIN_PRE_EMPHASIS_6
:
1548 return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B
;
1549 case DP_TRAIN_VOLTAGE_SWING_600
| DP_TRAIN_PRE_EMPHASIS_3_5
:
1550 case DP_TRAIN_VOLTAGE_SWING_800
| DP_TRAIN_PRE_EMPHASIS_3_5
:
1551 return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B
;
1552 case DP_TRAIN_VOLTAGE_SWING_800
| DP_TRAIN_PRE_EMPHASIS_0
:
1553 case DP_TRAIN_VOLTAGE_SWING_1200
| DP_TRAIN_PRE_EMPHASIS_0
:
1554 return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B
;
1556 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
1557 "0x%x\n", signal_levels
);
1558 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B
;
1562 /* Gen7's DP voltage swing and pre-emphasis control */
1564 intel_gen7_edp_signal_levels(uint8_t train_set
)
1566 int signal_levels
= train_set
& (DP_TRAIN_VOLTAGE_SWING_MASK
|
1567 DP_TRAIN_PRE_EMPHASIS_MASK
);
1568 switch (signal_levels
) {
1569 case DP_TRAIN_VOLTAGE_SWING_400
| DP_TRAIN_PRE_EMPHASIS_0
:
1570 return EDP_LINK_TRAIN_400MV_0DB_IVB
;
1571 case DP_TRAIN_VOLTAGE_SWING_400
| DP_TRAIN_PRE_EMPHASIS_3_5
:
1572 return EDP_LINK_TRAIN_400MV_3_5DB_IVB
;
1573 case DP_TRAIN_VOLTAGE_SWING_400
| DP_TRAIN_PRE_EMPHASIS_6
:
1574 return EDP_LINK_TRAIN_400MV_6DB_IVB
;
1576 case DP_TRAIN_VOLTAGE_SWING_600
| DP_TRAIN_PRE_EMPHASIS_0
:
1577 return EDP_LINK_TRAIN_600MV_0DB_IVB
;
1578 case DP_TRAIN_VOLTAGE_SWING_600
| DP_TRAIN_PRE_EMPHASIS_3_5
:
1579 return EDP_LINK_TRAIN_600MV_3_5DB_IVB
;
1581 case DP_TRAIN_VOLTAGE_SWING_800
| DP_TRAIN_PRE_EMPHASIS_0
:
1582 return EDP_LINK_TRAIN_800MV_0DB_IVB
;
1583 case DP_TRAIN_VOLTAGE_SWING_800
| DP_TRAIN_PRE_EMPHASIS_3_5
:
1584 return EDP_LINK_TRAIN_800MV_3_5DB_IVB
;
1587 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
1588 "0x%x\n", signal_levels
);
1589 return EDP_LINK_TRAIN_500MV_0DB_IVB
;
1594 intel_get_lane_status(uint8_t link_status
[DP_LINK_STATUS_SIZE
],
1597 int s
= (lane
& 1) * 4;
1598 uint8_t l
= link_status
[lane
>>1];
1600 return (l
>> s
) & 0xf;
1603 /* Check for clock recovery is done on all channels */
1605 intel_clock_recovery_ok(uint8_t link_status
[DP_LINK_STATUS_SIZE
], int lane_count
)
1608 uint8_t lane_status
;
1610 for (lane
= 0; lane
< lane_count
; lane
++) {
1611 lane_status
= intel_get_lane_status(link_status
, lane
);
1612 if ((lane_status
& DP_LANE_CR_DONE
) == 0)
1618 /* Check to see if channel eq is done on all channels */
1619 #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
1620 DP_LANE_CHANNEL_EQ_DONE|\
1621 DP_LANE_SYMBOL_LOCKED)
1623 intel_channel_eq_ok(struct intel_dp
*intel_dp
, uint8_t link_status
[DP_LINK_STATUS_SIZE
])
1626 uint8_t lane_status
;
1629 lane_align
= intel_dp_link_status(link_status
,
1630 DP_LANE_ALIGN_STATUS_UPDATED
);
1631 if ((lane_align
& DP_INTERLANE_ALIGN_DONE
) == 0)
1633 for (lane
= 0; lane
< intel_dp
->lane_count
; lane
++) {
1634 lane_status
= intel_get_lane_status(link_status
, lane
);
1635 if ((lane_status
& CHANNEL_EQ_BITS
) != CHANNEL_EQ_BITS
)
1642 intel_dp_set_link_train(struct intel_dp
*intel_dp
,
1643 uint32_t dp_reg_value
,
1644 uint8_t dp_train_pat
)
1646 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
1647 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
1650 I915_WRITE(intel_dp
->output_reg
, dp_reg_value
);
1651 POSTING_READ(intel_dp
->output_reg
);
1653 intel_dp_aux_native_write_1(intel_dp
,
1654 DP_TRAINING_PATTERN_SET
,
1657 ret
= intel_dp_aux_native_write(intel_dp
,
1658 DP_TRAINING_LANE0_SET
,
1659 intel_dp
->train_set
,
1660 intel_dp
->lane_count
);
1661 if (ret
!= intel_dp
->lane_count
)
1667 /* Enable corresponding port and start training pattern 1 */
1669 intel_dp_start_link_train(struct intel_dp
*intel_dp
)
1671 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
1672 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
1673 struct intel_crtc
*intel_crtc
= to_intel_crtc(intel_dp
->base
.base
.crtc
);
1676 bool clock_recovery
= false;
1677 int voltage_tries
, loop_tries
;
1679 uint32_t DP
= intel_dp
->DP
;
1682 * On CPT we have to enable the port in training pattern 1, which
1683 * will happen below in intel_dp_set_link_train. Otherwise, enable
1684 * the port and wait for it to become active.
1686 if (!HAS_PCH_CPT(dev
)) {
1687 I915_WRITE(intel_dp
->output_reg
, intel_dp
->DP
);
1688 POSTING_READ(intel_dp
->output_reg
);
1689 intel_wait_for_vblank(dev
, intel_crtc
->pipe
);
1692 /* Write the link configuration data */
1693 intel_dp_aux_native_write(intel_dp
, DP_LINK_BW_SET
,
1694 intel_dp
->link_configuration
,
1695 DP_LINK_CONFIGURATION_SIZE
);
1699 if (HAS_PCH_CPT(dev
) && (IS_GEN7(dev
) || !is_cpu_edp(intel_dp
)))
1700 DP
&= ~DP_LINK_TRAIN_MASK_CPT
;
1702 DP
&= ~DP_LINK_TRAIN_MASK
;
1703 memset(intel_dp
->train_set
, 0, 4);
1707 clock_recovery
= false;
1709 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1710 uint8_t link_status
[DP_LINK_STATUS_SIZE
];
1711 uint32_t signal_levels
;
1714 if (IS_GEN7(dev
) && is_cpu_edp(intel_dp
)) {
1715 signal_levels
= intel_gen7_edp_signal_levels(intel_dp
->train_set
[0]);
1716 DP
= (DP
& ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB
) | signal_levels
;
1717 } else if (IS_GEN6(dev
) && is_cpu_edp(intel_dp
)) {
1718 signal_levels
= intel_gen6_edp_signal_levels(intel_dp
->train_set
[0]);
1719 DP
= (DP
& ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB
) | signal_levels
;
1721 signal_levels
= intel_dp_signal_levels(intel_dp
->train_set
[0]);
1722 DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", signal_levels
);
1723 DP
= (DP
& ~(DP_VOLTAGE_MASK
|DP_PRE_EMPHASIS_MASK
)) | signal_levels
;
1726 if (HAS_PCH_CPT(dev
) && (IS_GEN7(dev
) || !is_cpu_edp(intel_dp
)))
1727 reg
= DP
| DP_LINK_TRAIN_PAT_1_CPT
;
1729 reg
= DP
| DP_LINK_TRAIN_PAT_1
;
1731 if (!intel_dp_set_link_train(intel_dp
, reg
,
1732 DP_TRAINING_PATTERN_1
|
1733 DP_LINK_SCRAMBLING_DISABLE
))
1735 /* Set training pattern 1 */
1738 if (!intel_dp_get_link_status(intel_dp
, link_status
)) {
1739 DRM_ERROR("failed to get link status\n");
1743 if (intel_clock_recovery_ok(link_status
, intel_dp
->lane_count
)) {
1744 DRM_DEBUG_KMS("clock recovery OK\n");
1745 clock_recovery
= true;
1749 /* Check to see if we've tried the max voltage */
1750 for (i
= 0; i
< intel_dp
->lane_count
; i
++)
1751 if ((intel_dp
->train_set
[i
] & DP_TRAIN_MAX_SWING_REACHED
) == 0)
1753 if (i
== intel_dp
->lane_count
) {
1755 if (loop_tries
== 5) {
1756 DRM_DEBUG_KMS("too many full retries, give up\n");
1759 memset(intel_dp
->train_set
, 0, 4);
1764 /* Check to see if we've tried the same voltage 5 times */
1765 if ((intel_dp
->train_set
[0] & DP_TRAIN_VOLTAGE_SWING_MASK
) == voltage
) {
1767 if (voltage_tries
== 5) {
1768 DRM_DEBUG_KMS("too many voltage retries, give up\n");
1773 voltage
= intel_dp
->train_set
[0] & DP_TRAIN_VOLTAGE_SWING_MASK
;
1775 /* Compute new intel_dp->train_set as requested by target */
1776 intel_get_adjust_train(intel_dp
, link_status
);
1783 intel_dp_complete_link_train(struct intel_dp
*intel_dp
)
1785 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
1786 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
1787 bool channel_eq
= false;
1788 int tries
, cr_tries
;
1790 uint32_t DP
= intel_dp
->DP
;
1792 /* channel equalization */
1797 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1798 uint32_t signal_levels
;
1799 uint8_t link_status
[DP_LINK_STATUS_SIZE
];
1802 DRM_ERROR("failed to train DP, aborting\n");
1803 intel_dp_link_down(intel_dp
);
1807 if (IS_GEN7(dev
) && is_cpu_edp(intel_dp
)) {
1808 signal_levels
= intel_gen7_edp_signal_levels(intel_dp
->train_set
[0]);
1809 DP
= (DP
& ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB
) | signal_levels
;
1810 } else if (IS_GEN6(dev
) && is_cpu_edp(intel_dp
)) {
1811 signal_levels
= intel_gen6_edp_signal_levels(intel_dp
->train_set
[0]);
1812 DP
= (DP
& ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB
) | signal_levels
;
1814 signal_levels
= intel_dp_signal_levels(intel_dp
->train_set
[0]);
1815 DP
= (DP
& ~(DP_VOLTAGE_MASK
|DP_PRE_EMPHASIS_MASK
)) | signal_levels
;
1818 if (HAS_PCH_CPT(dev
) && (IS_GEN7(dev
) || !is_cpu_edp(intel_dp
)))
1819 reg
= DP
| DP_LINK_TRAIN_PAT_2_CPT
;
1821 reg
= DP
| DP_LINK_TRAIN_PAT_2
;
1823 /* channel eq pattern */
1824 if (!intel_dp_set_link_train(intel_dp
, reg
,
1825 DP_TRAINING_PATTERN_2
|
1826 DP_LINK_SCRAMBLING_DISABLE
))
1830 if (!intel_dp_get_link_status(intel_dp
, link_status
))
1833 /* Make sure clock is still ok */
1834 if (!intel_clock_recovery_ok(link_status
, intel_dp
->lane_count
)) {
1835 intel_dp_start_link_train(intel_dp
);
1840 if (intel_channel_eq_ok(intel_dp
, link_status
)) {
1845 /* Try 5 times, then try clock recovery if that fails */
1847 intel_dp_link_down(intel_dp
);
1848 intel_dp_start_link_train(intel_dp
);
1854 /* Compute new intel_dp->train_set as requested by target */
1855 intel_get_adjust_train(intel_dp
, link_status
);
1859 if (HAS_PCH_CPT(dev
) && (IS_GEN7(dev
) || !is_cpu_edp(intel_dp
)))
1860 reg
= DP
| DP_LINK_TRAIN_OFF_CPT
;
1862 reg
= DP
| DP_LINK_TRAIN_OFF
;
1864 I915_WRITE(intel_dp
->output_reg
, reg
);
1865 POSTING_READ(intel_dp
->output_reg
);
1866 intel_dp_aux_native_write_1(intel_dp
,
1867 DP_TRAINING_PATTERN_SET
, DP_TRAINING_PATTERN_DISABLE
);
1871 intel_dp_link_down(struct intel_dp
*intel_dp
)
1873 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
1874 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
1875 uint32_t DP
= intel_dp
->DP
;
1877 if ((I915_READ(intel_dp
->output_reg
) & DP_PORT_EN
) == 0)
1880 DRM_DEBUG_KMS("\n");
1882 if (is_edp(intel_dp
)) {
1883 DP
&= ~DP_PLL_ENABLE
;
1884 I915_WRITE(intel_dp
->output_reg
, DP
);
1885 POSTING_READ(intel_dp
->output_reg
);
1889 if (HAS_PCH_CPT(dev
) && (IS_GEN7(dev
) || !is_cpu_edp(intel_dp
))) {
1890 DP
&= ~DP_LINK_TRAIN_MASK_CPT
;
1891 I915_WRITE(intel_dp
->output_reg
, DP
| DP_LINK_TRAIN_PAT_IDLE_CPT
);
1893 DP
&= ~DP_LINK_TRAIN_MASK
;
1894 I915_WRITE(intel_dp
->output_reg
, DP
| DP_LINK_TRAIN_PAT_IDLE
);
1896 POSTING_READ(intel_dp
->output_reg
);
1900 if (is_edp(intel_dp
)) {
1901 if (HAS_PCH_CPT(dev
) && (IS_GEN7(dev
) || !is_cpu_edp(intel_dp
)))
1902 DP
|= DP_LINK_TRAIN_OFF_CPT
;
1904 DP
|= DP_LINK_TRAIN_OFF
;
1907 if (!HAS_PCH_CPT(dev
) &&
1908 I915_READ(intel_dp
->output_reg
) & DP_PIPEB_SELECT
) {
1909 struct drm_crtc
*crtc
= intel_dp
->base
.base
.crtc
;
1911 /* Hardware workaround: leaving our transcoder select
1912 * set to transcoder B while it's off will prevent the
1913 * corresponding HDMI output on transcoder A.
1915 * Combine this with another hardware workaround:
1916 * transcoder select bit can only be cleared while the
1919 DP
&= ~DP_PIPEB_SELECT
;
1920 I915_WRITE(intel_dp
->output_reg
, DP
);
1922 /* Changes to enable or select take place the vblank
1923 * after being written.
1926 /* We can arrive here never having been attached
1927 * to a CRTC, for instance, due to inheriting
1928 * random state from the BIOS.
1930 * If the pipe is not running, play safe and
1931 * wait for the clocks to stabilise before
1934 POSTING_READ(intel_dp
->output_reg
);
1937 intel_wait_for_vblank(dev
, to_intel_crtc(crtc
)->pipe
);
1940 DP
&= ~DP_AUDIO_OUTPUT_ENABLE
;
1941 I915_WRITE(intel_dp
->output_reg
, DP
& ~DP_PORT_EN
);
1942 POSTING_READ(intel_dp
->output_reg
);
1943 msleep(intel_dp
->panel_power_down_delay
);
1947 intel_dp_get_dpcd(struct intel_dp
*intel_dp
)
1949 if (intel_dp_aux_native_read_retry(intel_dp
, 0x000, intel_dp
->dpcd
,
1950 sizeof(intel_dp
->dpcd
)) &&
1951 (intel_dp
->dpcd
[DP_DPCD_REV
] != 0)) {
1959 intel_dp_get_sink_irq(struct intel_dp
*intel_dp
, u8
*sink_irq_vector
)
1963 ret
= intel_dp_aux_native_read_retry(intel_dp
,
1964 DP_DEVICE_SERVICE_IRQ_VECTOR
,
1965 sink_irq_vector
, 1);
1973 intel_dp_handle_test_request(struct intel_dp
*intel_dp
)
1975 /* NAK by default */
1976 intel_dp_aux_native_write_1(intel_dp
, DP_TEST_RESPONSE
, DP_TEST_ACK
);
1980 * According to DP spec
1983 * 2. Configure link according to Receiver Capabilities
1984 * 3. Use Link Training from 2.5.3.3 and 3.5.1.3
1985 * 4. Check link status on receipt of hot-plug interrupt
1989 intel_dp_check_link_status(struct intel_dp
*intel_dp
)
1992 u8 link_status
[DP_LINK_STATUS_SIZE
];
1994 if (intel_dp
->dpms_mode
!= DRM_MODE_DPMS_ON
)
1997 if (!intel_dp
->base
.base
.crtc
)
2000 /* Try to read receiver status if the link appears to be up */
2001 if (!intel_dp_get_link_status(intel_dp
, link_status
)) {
2002 intel_dp_link_down(intel_dp
);
2006 /* Now read the DPCD to see if it's actually running */
2007 if (!intel_dp_get_dpcd(intel_dp
)) {
2008 intel_dp_link_down(intel_dp
);
2012 /* Try to read the source of the interrupt */
2013 if (intel_dp
->dpcd
[DP_DPCD_REV
] >= 0x11 &&
2014 intel_dp_get_sink_irq(intel_dp
, &sink_irq_vector
)) {
2015 /* Clear interrupt source */
2016 intel_dp_aux_native_write_1(intel_dp
,
2017 DP_DEVICE_SERVICE_IRQ_VECTOR
,
2020 if (sink_irq_vector
& DP_AUTOMATED_TEST_REQUEST
)
2021 intel_dp_handle_test_request(intel_dp
);
2022 if (sink_irq_vector
& (DP_CP_IRQ
| DP_SINK_SPECIFIC_IRQ
))
2023 DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
2026 if (!intel_channel_eq_ok(intel_dp
, link_status
)) {
2027 DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
2028 drm_get_encoder_name(&intel_dp
->base
.base
));
2029 intel_dp_start_link_train(intel_dp
);
2030 intel_dp_complete_link_train(intel_dp
);
static enum drm_connector_status
intel_dp_detect_dpcd(struct intel_dp *intel_dp)
{
        if (intel_dp_get_dpcd(intel_dp))
                return connector_status_connected;
        return connector_status_disconnected;
}
2042 static enum drm_connector_status
2043 ironlake_dp_detect(struct intel_dp
*intel_dp
)
2045 enum drm_connector_status status
;
2047 /* Can't disconnect eDP, but you can close the lid... */
2048 if (is_edp(intel_dp
)) {
2049 status
= intel_panel_detect(intel_dp
->base
.base
.dev
);
2050 if (status
== connector_status_unknown
)
2051 status
= connector_status_connected
;
2055 return intel_dp_detect_dpcd(intel_dp
);
2058 static enum drm_connector_status
2059 g4x_dp_detect(struct intel_dp
*intel_dp
)
2061 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
2062 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
2065 switch (intel_dp
->output_reg
) {
2067 bit
= DPB_HOTPLUG_INT_STATUS
;
2070 bit
= DPC_HOTPLUG_INT_STATUS
;
2073 bit
= DPD_HOTPLUG_INT_STATUS
;
2076 return connector_status_unknown
;
2079 temp
= I915_READ(PORT_HOTPLUG_STAT
);
2081 if ((temp
& bit
) == 0)
2082 return connector_status_disconnected
;
2084 return intel_dp_detect_dpcd(intel_dp
);
2087 static struct edid
*
2088 intel_dp_get_edid(struct drm_connector
*connector
, struct i2c_adapter
*adapter
)
2090 struct intel_dp
*intel_dp
= intel_attached_dp(connector
);
2093 ironlake_edp_panel_vdd_on(intel_dp
);
2094 edid
= drm_get_edid(connector
, adapter
);
2095 ironlake_edp_panel_vdd_off(intel_dp
, false);
2100 intel_dp_get_edid_modes(struct drm_connector
*connector
, struct i2c_adapter
*adapter
)
2102 struct intel_dp
*intel_dp
= intel_attached_dp(connector
);
2105 ironlake_edp_panel_vdd_on(intel_dp
);
2106 ret
= intel_ddc_get_modes(connector
, adapter
);
2107 ironlake_edp_panel_vdd_off(intel_dp
, false);
2113 * Uses CRT_HOTPLUG_EN and CRT_HOTPLUG_STAT to detect DP connection.
2115 * \return true if DP port is connected.
2116 * \return false if DP port is disconnected.
2118 static enum drm_connector_status
2119 intel_dp_detect(struct drm_connector
*connector
, bool force
)
2121 struct intel_dp
*intel_dp
= intel_attached_dp(connector
);
2122 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
2123 enum drm_connector_status status
;
2124 struct edid
*edid
= NULL
;
2126 intel_dp
->has_audio
= false;
2128 if (HAS_PCH_SPLIT(dev
))
2129 status
= ironlake_dp_detect(intel_dp
);
2131 status
= g4x_dp_detect(intel_dp
);
2133 DRM_DEBUG_KMS("DPCD: %02hx%02hx%02hx%02hx%02hx%02hx%02hx%02hx\n",
2134 intel_dp
->dpcd
[0], intel_dp
->dpcd
[1], intel_dp
->dpcd
[2],
2135 intel_dp
->dpcd
[3], intel_dp
->dpcd
[4], intel_dp
->dpcd
[5],
2136 intel_dp
->dpcd
[6], intel_dp
->dpcd
[7]);
2138 if (status
!= connector_status_connected
)
2141 if (intel_dp
->force_audio
) {
2142 intel_dp
->has_audio
= intel_dp
->force_audio
> 0;
2144 edid
= intel_dp_get_edid(connector
, &intel_dp
->adapter
);
2146 intel_dp
->has_audio
= drm_detect_monitor_audio(edid
);
2147 connector
->display_info
.raw_edid
= NULL
;
2152 return connector_status_connected
;
2155 static int intel_dp_get_modes(struct drm_connector
*connector
)
2157 struct intel_dp
*intel_dp
= intel_attached_dp(connector
);
2158 struct drm_device
*dev
= intel_dp
->base
.base
.dev
;
2159 struct drm_i915_private
*dev_priv
= dev
->dev_private
;
2162 /* We should parse the EDID data and find out if it has an audio sink
2165 ret
= intel_dp_get_edid_modes(connector
, &intel_dp
->adapter
);
2167 if (is_edp(intel_dp
) && !intel_dp
->panel_fixed_mode
) {
2168 struct drm_display_mode
*newmode
;
2169 list_for_each_entry(newmode
, &connector
->probed_modes
,
2171 if ((newmode
->type
& DRM_MODE_TYPE_PREFERRED
)) {
2172 intel_dp
->panel_fixed_mode
=
2173 drm_mode_duplicate(dev
, newmode
);
2181 /* if eDP has no EDID, try to use fixed panel mode from VBT */
2182 if (is_edp(intel_dp
)) {
2183 /* initialize panel mode from VBT if available for eDP */
2184 if (intel_dp
->panel_fixed_mode
== NULL
&& dev_priv
->lfp_lvds_vbt_mode
!= NULL
) {
2185 intel_dp
->panel_fixed_mode
=
2186 drm_mode_duplicate(dev
, dev_priv
->lfp_lvds_vbt_mode
);
2187 if (intel_dp
->panel_fixed_mode
) {
2188 intel_dp
->panel_fixed_mode
->type
|=
2189 DRM_MODE_TYPE_PREFERRED
;
2192 if (intel_dp
->panel_fixed_mode
) {
2193 struct drm_display_mode
*mode
;
2194 mode
= drm_mode_duplicate(dev
, intel_dp
->panel_fixed_mode
);
2195 drm_mode_probed_add(connector
, mode
);
2203 intel_dp_detect_audio(struct drm_connector
*connector
)
2205 struct intel_dp
*intel_dp
= intel_attached_dp(connector
);
2207 bool has_audio
= false;
2209 edid
= intel_dp_get_edid(connector
, &intel_dp
->adapter
);
2211 has_audio
= drm_detect_monitor_audio(edid
);
2213 connector
->display_info
.raw_edid
= NULL
;
static int
intel_dp_set_property(struct drm_connector *connector,
		      struct drm_property *property,
		      uint64_t val)
{
	struct drm_i915_private *dev_priv = connector->dev->dev_private;
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int ret;

	ret = drm_connector_property_set_value(connector, property, val);
	if (ret)
		return ret;

	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		if (i == 0)
			has_audio = intel_dp_detect_audio(connector);
		else
			has_audio = i > 0;

		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		if (val == !!intel_dp->color_range)
			return 0;

		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
		goto done;
	}

	return -EINVAL;

done:
	if (intel_dp->base.base.crtc) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;
		drm_crtc_helper_set_mode(crtc, &crtc->mode,
					 crtc->x, crtc->y,
					 crtc->fb);
	}

	return 0;
}
static void
intel_dp_destroy(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;

	if (intel_dpd_is_edp(dev))
		intel_panel_destroy_backlight(dev);

	drm_sysfs_connector_remove(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}
static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	i2c_del_adapter(&intel_dp->adapter);
	drm_encoder_cleanup(encoder);
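	/*
	 * For eDP the deferred VDD-off work may still be pending; cancel it
	 * and drop VDD synchronously so nothing runs against a freed encoder.
	 */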
	if (is_edp(intel_dp)) {
		cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
		ironlake_panel_vdd_off_sync(intel_dp);
	}
	kfree(intel_dp);
}
static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
	.dpms = intel_dp_dpms,
	.mode_fixup = intel_dp_mode_fixup,
	.prepare = intel_dp_prepare,
	.mode_set = intel_dp_mode_set,
	.commit = intel_dp_commit,
};

static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.dpms = drm_helper_connector_dpms,
	.detect = intel_dp_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = intel_dp_set_property,
	.destroy = intel_dp_destroy,
};

static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
	.best_encoder = intel_best_encoder,
};

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.destroy = intel_dp_encoder_destroy,
};
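/*
 * Hot plug handler: on a hot plug interrupt the link status is re-checked
 * so the link can be retrained (or dropped) if the sink has changed.
 */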
static void
intel_dp_hot_plug(struct intel_encoder *intel_encoder)
{
	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);

	intel_dp_check_link_status(intel_dp);
}
/* Return which DP Port should be selected for Transcoder DP control */
int
intel_trans_dp_port_sel(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct drm_mode_config *mode_config = &dev->mode_config;
	struct drm_encoder *encoder;

	list_for_each_entry(encoder, &mode_config->encoder_list, head) {
		struct intel_dp *intel_dp;

		if (encoder->crtc != crtc)
			continue;

		intel_dp = enc_to_intel_dp(encoder);
		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
		    intel_dp->base.type == INTEL_OUTPUT_EDP)
			return intel_dp->output_reg;
	}

	return -1;
}
/* check the VBT to see whether the eDP is on DP-D port */
bool intel_dpd_is_edp(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct child_device_config *p_child;
	int i;

	if (!dev_priv->child_dev_num)
		return false;

	for (i = 0; i < dev_priv->child_dev_num; i++) {
		p_child = dev_priv->child_dev + i;

		if (p_child->dvo_port == PORT_IDPD &&
		    p_child->device_type == DEVICE_TYPE_eDP)
			return true;
	}

	return false;
}
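/*
 * Attach the standard connector properties: force-audio (on/off or detect
 * from EDID) and "Broadcast RGB" (full versus limited 16-235 range), both
 * handled in intel_dp_set_property() above.
 */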
static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
{
	intel_attach_force_audio_property(connector);
	intel_attach_broadcast_rgb_property(connector);
}
void
intel_dp_init(struct drm_device *dev, int output_reg)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_connector *connector;
	struct intel_dp *intel_dp;
	struct intel_encoder *intel_encoder;
	struct intel_connector *intel_connector;
	const char *name = NULL;
	int type;

	intel_dp = kzalloc(sizeof(struct intel_dp), GFP_KERNEL);
	if (!intel_dp)
		return;

	intel_dp->output_reg = output_reg;
	intel_dp->dpms_mode = -1;

	intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
	if (!intel_connector) {
		kfree(intel_dp);
		return;
	}
	intel_encoder = &intel_dp->base;
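
	/*
	 * DP_A is the CPU eDP port.  On PCH-split platforms port D may also
	 * drive an eDP panel, which only the VBT can tell us about.
	 */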
	if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
		if (intel_dpd_is_edp(dev))
			intel_dp->is_pch_edp = true;

	if (output_reg == DP_A || is_pch_edp(intel_dp)) {
		type = DRM_MODE_CONNECTOR_eDP;
		intel_encoder->type = INTEL_OUTPUT_EDP;
	} else {
		type = DRM_MODE_CONNECTOR_DisplayPort;
		intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	}
	connector = &intel_connector->base;
	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;
	if (output_reg == DP_B || output_reg == PCH_DP_B)
		intel_encoder->clone_mask = (1 << INTEL_DP_B_CLONE_BIT);
	else if (output_reg == DP_C || output_reg == PCH_DP_C)
		intel_encoder->clone_mask = (1 << INTEL_DP_C_CLONE_BIT);
	else if (output_reg == DP_D || output_reg == PCH_DP_D)
		intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);

	if (is_edp(intel_dp)) {
		intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
		INIT_DELAYED_WORK(&intel_dp->panel_vdd_work,
				  ironlake_panel_vdd_work);
	}
	intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);
	connector->interlace_allowed = true;
	connector->doublescan_allowed = 0;

	drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
			 DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs);

	intel_connector_attach_encoder(intel_connector, intel_encoder);
	drm_sysfs_connector_add(connector);
	/* Set up the DDC bus. */
	switch (output_reg) {
		case DP_A:
			name = "DPDDC-A";
			break;
		case DP_B:
		case PCH_DP_B:
			dev_priv->hotplug_supported_mask |=
				HDMIB_HOTPLUG_INT_STATUS;
			name = "DPDDC-B";
			break;
		case DP_C:
		case PCH_DP_C:
			dev_priv->hotplug_supported_mask |=
				HDMIC_HOTPLUG_INT_STATUS;
			name = "DPDDC-C";
			break;
		case DP_D:
		case PCH_DP_D:
			dev_priv->hotplug_supported_mask |=
				HDMID_HOTPLUG_INT_STATUS;
			name = "DPDDC-D";
			break;
	}
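
	/*
	 * Panel power sequencing: read both the delays already programmed
	 * into PP_ON/PP_OFF/PP_DIVISOR and the delays from the VBT, then use
	 * the larger (more conservative) of the two for each step.
	 */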
	/* Cache some DPCD data in the eDP case */
	if (is_edp(intel_dp)) {
		bool ret;
		struct edp_power_seq cur, vbt;
		u32 pp_on, pp_off, pp_div;

		pp_on = I915_READ(PCH_PP_ON_DELAYS);
		pp_off = I915_READ(PCH_PP_OFF_DELAYS);
		pp_div = I915_READ(PCH_PP_DIVISOR);

		/* Pull timing values out of registers */
		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
			PANEL_POWER_UP_DELAY_SHIFT;

		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
			PANEL_LIGHT_ON_DELAY_SHIFT;

		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
			PANEL_LIGHT_OFF_DELAY_SHIFT;

		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
			PANEL_POWER_DOWN_DELAY_SHIFT;

		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
			       PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;

		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

		vbt = dev_priv->edp.pps;

		DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);
#define get_delay(field) ((max(cur.field, vbt.field) + 9) / 10)
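		/*
		 * The raw t1_t3/t8/t9/t10/t11_t12 values appear to be in
		 * 100us units; dividing by 10 with round-up gives the
		 * millisecond delays stored below, e.g.
		 * get_delay(t1_t3) == (max(cur.t1_t3, vbt.t1_t3) + 9) / 10.
		 */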
		intel_dp->panel_power_up_delay = get_delay(t1_t3);
		intel_dp->backlight_on_delay = get_delay(t8);
		intel_dp->backlight_off_delay = get_delay(t9);
		intel_dp->panel_power_down_delay = get_delay(t10);
		intel_dp->panel_power_cycle_delay = get_delay(t11_t12);

		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
			      intel_dp->panel_power_up_delay,
			      intel_dp->panel_power_down_delay,
			      intel_dp->panel_power_cycle_delay);

		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
			      intel_dp->backlight_on_delay,
			      intel_dp->backlight_off_delay);
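
		/*
		 * Read the sink's DPCD with VDD forced on.  If this fails the
		 * port is presumed to be a "ghost" eDP (strapped as eDP with
		 * nothing attached) and the output is torn down again below.
		 */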
		ironlake_edp_panel_vdd_on(intel_dp);
		ret = intel_dp_get_dpcd(intel_dp);
		ironlake_edp_panel_vdd_off(intel_dp, false);

		if (ret) {
			if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
				dev_priv->no_aux_handshake =
					intel_dp->dpcd[DP_MAX_DOWNSPREAD] &
					DP_NO_AUX_HANDSHAKE_LINK_TRAINING;
		} else {
			/* if this fails, presume the device is a ghost */
			DRM_INFO("failed to retrieve link info, disabling eDP\n");
			intel_dp_encoder_destroy(&intel_dp->base.base);
			intel_dp_destroy(&intel_connector->base);
			return;
		}
	}
	intel_dp_i2c_init(intel_dp, intel_connector, name);

	intel_encoder->hot_plug = intel_dp_hot_plug;

	if (is_edp(intel_dp)) {
		dev_priv->int_edp_connector = connector;
		intel_panel_setup_backlight(dev);
	}

	intel_dp_add_properties(intel_dp, connector);
	/* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written
	 * 0xd.  Failure to do so will result in spurious interrupts being
	 * generated on the port when a cable is not attached.
	 */
	if (IS_G4X(dev) && !IS_GM45(dev)) {
		u32 temp = I915_READ(PEG_BAND_GAP_DATA);
		I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
	}
}