drivers/gpu/drm/gma500/cdv_intel_dp.c
1 /*
2 * Copyright © 2012 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
23 * Authors:
24 * Keith Packard <keithp@keithp.com>
28 #include <linux/i2c.h>
29 #include <linux/module.h>
30 #include <linux/slab.h>
32 #include <drm/drm_crtc.h>
33 #include <drm/drm_crtc_helper.h>
34 #include <drm/drm_dp_helper.h>
35 #include <drm/drm_simple_kms_helper.h>
37 #include "gma_display.h"
38 #include "psb_drv.h"
39 #include "psb_intel_drv.h"
40 #include "psb_intel_reg.h"
42 /**
43 * struct i2c_algo_dp_aux_data - driver interface structure for i2c over dp
44 * aux algorithm
45 * @running: set by the algo indicating whether an i2c transfer is ongoing
46 * or whether the i2c bus is quiescent
47 * @address: i2c target address for the currently ongoing transfer
48 * @aux_ch: driver callback to transfer a single byte of the i2c payload
50 struct i2c_algo_dp_aux_data {
51 bool running;
52 u16 address;
53 int (*aux_ch) (struct i2c_adapter *adapter,
54 int mode, uint8_t write_byte,
55 uint8_t *read_byte);
58 /* Run a single AUX_CH I2C transaction, writing/reading data as necessary */
59 static int
60 i2c_algo_dp_aux_transaction(struct i2c_adapter *adapter, int mode,
61 uint8_t write_byte, uint8_t *read_byte)
63 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
64 int ret;
66 ret = (*algo_data->aux_ch)(adapter, mode,
67 write_byte, read_byte);
68 return ret;
72 * I2C over AUX CH
76 * Send the address. If the I2C link is running, this 'restarts'
77 * the connection with the new address; this is used for doing
78 * a write followed by a read (as needed for DDC)
80 static int
81 i2c_algo_dp_aux_address(struct i2c_adapter *adapter, u16 address, bool reading)
83 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
84 int mode = MODE_I2C_START;
85 int ret;
87 if (reading)
88 mode |= MODE_I2C_READ;
89 else
90 mode |= MODE_I2C_WRITE;
91 algo_data->address = address;
92 algo_data->running = true;
93 ret = i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
94 return ret;
98 * Stop the I2C transaction. This closes out the link, sending
99 * a bare address packet with the MOT bit turned off
101 static void
102 i2c_algo_dp_aux_stop(struct i2c_adapter *adapter, bool reading)
104 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
105 int mode = MODE_I2C_STOP;
107 if (reading)
108 mode |= MODE_I2C_READ;
109 else
110 mode |= MODE_I2C_WRITE;
111 if (algo_data->running) {
112 (void) i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
113 algo_data->running = false;
118 * Write a single byte to the current I2C address. The
119 * I2C link must be running or this returns -EIO
121 static int
122 i2c_algo_dp_aux_put_byte(struct i2c_adapter *adapter, u8 byte)
124 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
125 int ret;
127 if (!algo_data->running)
128 return -EIO;
130 ret = i2c_algo_dp_aux_transaction(adapter, MODE_I2C_WRITE, byte, NULL);
131 return ret;
135 * Read a single byte from the current I2C address, the
136 * I2C link must be running or this returns -EIO
138 static int
139 i2c_algo_dp_aux_get_byte(struct i2c_adapter *adapter, u8 *byte_ret)
141 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
142 int ret;
144 if (!algo_data->running)
145 return -EIO;
147 ret = i2c_algo_dp_aux_transaction(adapter, MODE_I2C_READ, 0, byte_ret);
148 return ret;
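/*
 * Glue for the i2c core: each i2c_msg in a transfer is turned into an
 * address/START transaction followed by one AUX transfer per payload byte,
 * and a single STOP (an address packet with MOT cleared) is issued once the
 * whole chain is done.  On success the number of messages processed is
 * returned, as the i2c core expects.
 */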
151 static int
152 i2c_algo_dp_aux_xfer(struct i2c_adapter *adapter,
153 struct i2c_msg *msgs,
154 int num)
156 int ret = 0;
157 bool reading = false;
158 int m;
159 int b;
161 for (m = 0; m < num; m++) {
162 u16 len = msgs[m].len;
163 u8 *buf = msgs[m].buf;
164 reading = (msgs[m].flags & I2C_M_RD) != 0;
165 ret = i2c_algo_dp_aux_address(adapter, msgs[m].addr, reading);
166 if (ret < 0)
167 break;
168 if (reading) {
169 for (b = 0; b < len; b++) {
170 ret = i2c_algo_dp_aux_get_byte(adapter, &buf[b]);
171 if (ret < 0)
172 break;
174 } else {
175 for (b = 0; b < len; b++) {
176 ret = i2c_algo_dp_aux_put_byte(adapter, buf[b]);
177 if (ret < 0)
178 break;
181 if (ret < 0)
182 break;
184 if (ret >= 0)
185 ret = num;
186 i2c_algo_dp_aux_stop(adapter, reading);
187 DRM_DEBUG_KMS("dp_aux_xfer return %d\n", ret);
188 return ret;
191 static u32
192 i2c_algo_dp_aux_functionality(struct i2c_adapter *adapter)
194 return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL |
195 I2C_FUNC_SMBUS_READ_BLOCK_DATA |
196 I2C_FUNC_SMBUS_BLOCK_PROC_CALL |
197 I2C_FUNC_10BIT_ADDR;
200 static const struct i2c_algorithm i2c_dp_aux_algo = {
201 .master_xfer = i2c_algo_dp_aux_xfer,
202 .functionality = i2c_algo_dp_aux_functionality,
205 static void
206 i2c_dp_aux_reset_bus(struct i2c_adapter *adapter)
208 (void) i2c_algo_dp_aux_address(adapter, 0, false);
209 (void) i2c_algo_dp_aux_stop(adapter, false);
212 static int
213 i2c_dp_aux_prepare_bus(struct i2c_adapter *adapter)
215 adapter->algo = &i2c_dp_aux_algo;
216 adapter->retries = 3;
217 i2c_dp_aux_reset_bus(adapter);
218 return 0;
222 * FIXME: This is the old dp aux helper, gma500 is the last driver that needs to
223 * be ported over to the new helper code in drm_dp_helper.c like i915 or radeon.
225 static int
226 i2c_dp_aux_add_bus(struct i2c_adapter *adapter)
228 int error;
230 error = i2c_dp_aux_prepare_bus(adapter);
231 if (error)
232 return error;
233 error = i2c_add_adapter(adapter);
234 return error;
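/*
 * Poll COND roughly every W ms until it becomes true or MS ms have elapsed;
 * evaluates to 0 on success and -ETIMEDOUT on timeout.
 */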
237 #define _wait_for(COND, MS, W) ({ \
238 unsigned long timeout__ = jiffies + msecs_to_jiffies(MS); \
239 int ret__ = 0; \
240 while (! (COND)) { \
241 if (time_after(jiffies, timeout__)) { \
242 ret__ = -ETIMEDOUT; \
243 break; \
245 if (W && !in_dbg_master()) msleep(W); \
247 ret__; \
250 #define wait_for(COND, MS) _wait_for(COND, MS, 1)
252 #define DP_LINK_CHECK_TIMEOUT (10 * 1000)
254 #define DP_LINK_CONFIGURATION_SIZE 9
256 #define CDV_FAST_LINK_TRAIN 1
258 struct cdv_intel_dp {
259 uint32_t output_reg;
260 uint32_t DP;
261 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
262 bool has_audio;
263 int force_audio;
264 uint32_t color_range;
265 uint8_t link_bw;
266 uint8_t lane_count;
267 uint8_t dpcd[4];
268 struct gma_encoder *encoder;
269 struct i2c_adapter adapter;
270 struct i2c_algo_dp_aux_data algo;
271 uint8_t train_set[4];
272 uint8_t link_status[DP_LINK_STATUS_SIZE];
273 int panel_power_up_delay;
274 int panel_power_down_delay;
275 int panel_power_cycle_delay;
276 int backlight_on_delay;
277 int backlight_off_delay;
278 struct drm_display_mode *panel_fixed_mode; /* for eDP */
279 bool panel_on;
282 struct ddi_regoff {
283 uint32_t PreEmph1;
284 uint32_t PreEmph2;
285 uint32_t VSwing1;
286 uint32_t VSwing2;
287 uint32_t VSwing3;
288 uint32_t VSwing4;
289 uint32_t VSwing5;
292 static struct ddi_regoff ddi_DP_train_table[] = {
293 {.PreEmph1 = 0x812c, .PreEmph2 = 0x8124, .VSwing1 = 0x8154,
294 .VSwing2 = 0x8148, .VSwing3 = 0x814C, .VSwing4 = 0x8150,
295 .VSwing5 = 0x8158,},
296 {.PreEmph1 = 0x822c, .PreEmph2 = 0x8224, .VSwing1 = 0x8254,
297 .VSwing2 = 0x8248, .VSwing3 = 0x824C, .VSwing4 = 0x8250,
298 .VSwing5 = 0x8258,},
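/*
 * Sideband register values used during link training.  The table below is
 * laid out as {VSwing2 value, PreEmph2 value} pairs: the VSwing2 entry is
 * picked with index (vswing + premph) * 2 and the PreEmph2 entry with index
 * 2 * premph + 1 in cdv_intel_dp_set_vswing_premph().
 */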
301 static uint32_t dp_vswing_premph_table[] = {
302 0x55338954, 0x4000,
303 0x554d8954, 0x2000,
304 0x55668954, 0,
305 0x559ac0d4, 0x6000,
308 * is_edp - is the given encoder attached to an eDP panel (either CPU or PCH)
309 * @encoder: GMA encoder to check
311 * If a CPU or PCH DP output is attached to an eDP panel, this function
312 * will return true, and false otherwise.
314 static bool is_edp(struct gma_encoder *encoder)
316 return encoder->type == INTEL_OUTPUT_EDP;
320 static void cdv_intel_dp_start_link_train(struct gma_encoder *encoder);
321 static void cdv_intel_dp_complete_link_train(struct gma_encoder *encoder);
322 static void cdv_intel_dp_link_down(struct gma_encoder *encoder);
324 static int
325 cdv_intel_dp_max_lane_count(struct gma_encoder *encoder)
327 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
328 int max_lane_count = 4;
330 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
331 max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
332 switch (max_lane_count) {
333 case 1: case 2: case 4:
334 break;
335 default:
336 max_lane_count = 4;
339 return max_lane_count;
342 static int
343 cdv_intel_dp_max_link_bw(struct gma_encoder *encoder)
345 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
346 int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
348 switch (max_link_bw) {
349 case DP_LINK_BW_1_62:
350 case DP_LINK_BW_2_7:
351 break;
352 default:
353 max_link_bw = DP_LINK_BW_1_62;
354 break;
356 return max_link_bw;
359 static int
360 cdv_intel_dp_link_clock(uint8_t link_bw)
362 if (link_bw == DP_LINK_BW_2_7)
363 return 270000;
364 else
365 return 162000;
368 static int
369 cdv_intel_dp_link_required(int pixel_clock, int bpp)
371 return (pixel_clock * bpp + 7) / 8;
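/*
 * Usable link bandwidth: the raw link_clock * lane_count figure is derated
 * by 19/20 (95%) here; this is what mode_valid and mode_fixup compare the
 * required rate from cdv_intel_dp_link_required() against.
 */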
374 static int
375 cdv_intel_dp_max_data_rate(int max_link_clock, int max_lanes)
377 return (max_link_clock * max_lanes * 19) / 20;
380 static void cdv_intel_edp_panel_vdd_on(struct gma_encoder *intel_encoder)
382 struct drm_device *dev = intel_encoder->base.dev;
383 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
384 u32 pp;
386 if (intel_dp->panel_on) {
387 DRM_DEBUG_KMS("Skip VDD on because of panel on\n");
388 return;
390 DRM_DEBUG_KMS("\n");
392 pp = REG_READ(PP_CONTROL);
394 pp |= EDP_FORCE_VDD;
395 REG_WRITE(PP_CONTROL, pp);
396 REG_READ(PP_CONTROL);
397 msleep(intel_dp->panel_power_up_delay);
400 static void cdv_intel_edp_panel_vdd_off(struct gma_encoder *intel_encoder)
402 struct drm_device *dev = intel_encoder->base.dev;
403 u32 pp;
405 DRM_DEBUG_KMS("\n");
406 pp = REG_READ(PP_CONTROL);
408 pp &= ~EDP_FORCE_VDD;
409 REG_WRITE(PP_CONTROL, pp);
410 REG_READ(PP_CONTROL);
414 /* Returns true if the panel was already on when called */
415 static bool cdv_intel_edp_panel_on(struct gma_encoder *intel_encoder)
417 struct drm_device *dev = intel_encoder->base.dev;
418 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
419 u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_NONE;
421 if (intel_dp->panel_on)
422 return true;
424 DRM_DEBUG_KMS("\n");
425 pp = REG_READ(PP_CONTROL);
426 pp &= ~PANEL_UNLOCK_MASK;
428 pp |= (PANEL_UNLOCK_REGS | POWER_TARGET_ON);
429 REG_WRITE(PP_CONTROL, pp);
430 REG_READ(PP_CONTROL);
432 if (wait_for(((REG_READ(PP_STATUS) & idle_on_mask) == idle_on_mask), 1000)) {
433 DRM_DEBUG_KMS("Error in Powering up eDP panel, status %x\n", REG_READ(PP_STATUS));
434 intel_dp->panel_on = false;
435 } else
436 intel_dp->panel_on = true;
437 msleep(intel_dp->panel_power_up_delay);
439 return false;
442 static void cdv_intel_edp_panel_off (struct gma_encoder *intel_encoder)
444 struct drm_device *dev = intel_encoder->base.dev;
445 u32 pp, idle_off_mask = PP_ON ;
446 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
448 DRM_DEBUG_KMS("\n");
450 pp = REG_READ(PP_CONTROL);
452 if ((pp & POWER_TARGET_ON) == 0)
453 return;
455 intel_dp->panel_on = false;
456 pp &= ~PANEL_UNLOCK_MASK;
457 /* ILK workaround: disable reset around power sequence */
459 pp &= ~POWER_TARGET_ON;
460 pp &= ~EDP_FORCE_VDD;
461 pp &= ~EDP_BLC_ENABLE;
462 REG_WRITE(PP_CONTROL, pp);
463 REG_READ(PP_CONTROL);
464 DRM_DEBUG_KMS("PP_STATUS %x\n", REG_READ(PP_STATUS));
466 if (wait_for((REG_READ(PP_STATUS) & idle_off_mask) == 0, 1000)) {
467 DRM_DEBUG_KMS("Error in turning off Panel\n");
470 msleep(intel_dp->panel_power_cycle_delay);
471 DRM_DEBUG_KMS("Over\n");
474 static void cdv_intel_edp_backlight_on (struct gma_encoder *intel_encoder)
476 struct drm_device *dev = intel_encoder->base.dev;
477 u32 pp;
479 DRM_DEBUG_KMS("\n");
481 * If we enable the backlight right away following a panel power
482 * on, we may see slight flicker as the panel syncs with the eDP
483 * link. So delay a bit to make sure the image is solid before
484 * allowing it to appear.
486 msleep(300);
487 pp = REG_READ(PP_CONTROL);
489 pp |= EDP_BLC_ENABLE;
490 REG_WRITE(PP_CONTROL, pp);
491 gma_backlight_enable(dev);
494 static void cdv_intel_edp_backlight_off (struct gma_encoder *intel_encoder)
496 struct drm_device *dev = intel_encoder->base.dev;
497 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
498 u32 pp;
500 DRM_DEBUG_KMS("\n");
501 gma_backlight_disable(dev);
502 msleep(10);
503 pp = REG_READ(PP_CONTROL);
505 pp &= ~EDP_BLC_ENABLE;
506 REG_WRITE(PP_CONTROL, pp);
507 msleep(intel_dp->backlight_off_delay);
510 static enum drm_mode_status
511 cdv_intel_dp_mode_valid(struct drm_connector *connector,
512 struct drm_display_mode *mode)
514 struct gma_encoder *encoder = gma_attached_encoder(connector);
515 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
516 int max_link_clock = cdv_intel_dp_link_clock(cdv_intel_dp_max_link_bw(encoder));
517 int max_lanes = cdv_intel_dp_max_lane_count(encoder);
518 struct drm_psb_private *dev_priv = connector->dev->dev_private;
520 if (is_edp(encoder) && intel_dp->panel_fixed_mode) {
521 if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
522 return MODE_PANEL;
523 if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
524 return MODE_PANEL;
527 /* only refuse the mode on non-eDP since we have seen some weird eDP panels
528 which are outside spec tolerances but somehow work by magic */
529 if (!is_edp(encoder) &&
530 (cdv_intel_dp_link_required(mode->clock, dev_priv->edp.bpp)
531 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes)))
532 return MODE_CLOCK_HIGH;
534 if (is_edp(encoder)) {
535 if (cdv_intel_dp_link_required(mode->clock, 24)
536 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes))
537 return MODE_CLOCK_HIGH;
540 if (mode->clock < 10000)
541 return MODE_CLOCK_LOW;
543 return MODE_OK;
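/*
 * The AUX data registers hold up to four payload bytes in big-endian order,
 * e.g. pack_aux({0x12, 0x34}, 2) == 0x12340000; unpack_aux() is the inverse.
 */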
546 static uint32_t
547 pack_aux(uint8_t *src, int src_bytes)
549 int i;
550 uint32_t v = 0;
552 if (src_bytes > 4)
553 src_bytes = 4;
554 for (i = 0; i < src_bytes; i++)
555 v |= ((uint32_t) src[i]) << ((3-i) * 8);
556 return v;
559 static void
560 unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
562 int i;
563 if (dst_bytes > 4)
564 dst_bytes = 4;
565 for (i = 0; i < dst_bytes; i++)
566 dst[i] = src >> ((3-i) * 8);
569 static int
570 cdv_intel_dp_aux_ch(struct gma_encoder *encoder,
571 uint8_t *send, int send_bytes,
572 uint8_t *recv, int recv_size)
574 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
575 uint32_t output_reg = intel_dp->output_reg;
576 struct drm_device *dev = encoder->base.dev;
577 uint32_t ch_ctl = output_reg + 0x10;
578 uint32_t ch_data = ch_ctl + 4;
579 int i;
580 int recv_bytes;
581 uint32_t status;
582 uint32_t aux_clock_divider;
583 int try, precharge;
585 /* The clock divider is based off the hrawclk,
586 * and would like to run at 2MHz. So, take the
587 * hrawclk value and divide by 2 and use that.
588 * On the CDV platform hrawclk is 200MHz.
591 aux_clock_divider = 200 / 2;
593 precharge = 4;
594 if (is_edp(encoder))
595 precharge = 10;
597 if (REG_READ(ch_ctl) & DP_AUX_CH_CTL_SEND_BUSY) {
598 DRM_ERROR("dp_aux_ch not started status 0x%08x\n",
599 REG_READ(ch_ctl));
600 return -EBUSY;
603 /* Must try at least 3 times according to DP spec */
604 for (try = 0; try < 5; try++) {
605 /* Load the send data into the aux channel data registers */
606 for (i = 0; i < send_bytes; i += 4)
607 REG_WRITE(ch_data + i,
608 pack_aux(send + i, send_bytes - i));
610 /* Send the command and wait for it to complete */
611 REG_WRITE(ch_ctl,
612 DP_AUX_CH_CTL_SEND_BUSY |
613 DP_AUX_CH_CTL_TIME_OUT_400us |
614 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
615 (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
616 (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
617 DP_AUX_CH_CTL_DONE |
618 DP_AUX_CH_CTL_TIME_OUT_ERROR |
619 DP_AUX_CH_CTL_RECEIVE_ERROR);
620 for (;;) {
621 status = REG_READ(ch_ctl);
622 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
623 break;
624 udelay(100);
627 /* Clear done status and any errors */
628 REG_WRITE(ch_ctl,
629 status |
630 DP_AUX_CH_CTL_DONE |
631 DP_AUX_CH_CTL_TIME_OUT_ERROR |
632 DP_AUX_CH_CTL_RECEIVE_ERROR);
633 if (status & DP_AUX_CH_CTL_DONE)
634 break;
637 if ((status & DP_AUX_CH_CTL_DONE) == 0) {
638 DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
639 return -EBUSY;
642 /* Check for timeout or receive error.
643 * Timeouts occur when the sink is not connected
645 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
646 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
647 return -EIO;
650 /* Timeouts occur when the device isn't connected, so they're
651 * "normal" -- don't fill the kernel log with these */
652 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
653 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
654 return -ETIMEDOUT;
657 /* Unload any bytes sent back from the other side */
658 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
659 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
660 if (recv_bytes > recv_size)
661 recv_bytes = recv_size;
663 for (i = 0; i < recv_bytes; i += 4)
664 unpack_aux(REG_READ(ch_data + i),
665 recv + i, recv_bytes - i);
667 return recv_bytes;
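/*
 * The native AUX helpers below use the message layout expected by
 * cdv_intel_dp_aux_ch(): msg[0] carries the request type in its high nibble,
 * msg[1]/msg[2] the 16-bit DPCD address, msg[3] the transfer length minus
 * one, followed by the payload for writes.  The first reply byte holds the
 * ACK/NACK/DEFER code in its high nibble.
 */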
670 /* Write data to the aux channel in native mode */
671 static int
672 cdv_intel_dp_aux_native_write(struct gma_encoder *encoder,
673 uint16_t address, uint8_t *send, int send_bytes)
675 int ret;
676 uint8_t msg[20];
677 int msg_bytes;
678 uint8_t ack;
680 if (send_bytes > 16)
681 return -1;
682 msg[0] = DP_AUX_NATIVE_WRITE << 4;
683 msg[1] = address >> 8;
684 msg[2] = address & 0xff;
685 msg[3] = send_bytes - 1;
686 memcpy(&msg[4], send, send_bytes);
687 msg_bytes = send_bytes + 4;
688 for (;;) {
689 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes, &ack, 1);
690 if (ret < 0)
691 return ret;
692 ack >>= 4;
693 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK)
694 break;
695 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
696 udelay(100);
697 else
698 return -EIO;
700 return send_bytes;
703 /* Write a single byte to the aux channel in native mode */
704 static int
705 cdv_intel_dp_aux_native_write_1(struct gma_encoder *encoder,
706 uint16_t address, uint8_t byte)
708 return cdv_intel_dp_aux_native_write(encoder, address, &byte, 1);
711 /* read bytes from a native aux channel */
712 static int
713 cdv_intel_dp_aux_native_read(struct gma_encoder *encoder,
714 uint16_t address, uint8_t *recv, int recv_bytes)
716 uint8_t msg[4];
717 int msg_bytes;
718 uint8_t reply[20];
719 int reply_bytes;
720 uint8_t ack;
721 int ret;
723 msg[0] = DP_AUX_NATIVE_READ << 4;
724 msg[1] = address >> 8;
725 msg[2] = address & 0xff;
726 msg[3] = recv_bytes - 1;
728 msg_bytes = 4;
729 reply_bytes = recv_bytes + 1;
731 for (;;) {
732 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes,
733 reply, reply_bytes);
734 if (ret == 0)
735 return -EPROTO;
736 if (ret < 0)
737 return ret;
738 ack = reply[0] >> 4;
739 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK) {
740 memcpy(recv, reply + 1, ret - 1);
741 return ret - 1;
743 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
744 udelay(100);
745 else
746 return -EIO;
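/*
 * I2C-over-AUX transfer callback used by the byte-at-a-time i2c algorithm
 * above: the MOT bit stays set for every transaction except the final STOP,
 * and both the native and the I2C reply nibbles are checked, retrying up to
 * five times on DEFER.
 */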
750 static int
751 cdv_intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
752 uint8_t write_byte, uint8_t *read_byte)
754 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
755 struct cdv_intel_dp *intel_dp = container_of(adapter,
756 struct cdv_intel_dp,
757 adapter);
758 struct gma_encoder *encoder = intel_dp->encoder;
759 uint16_t address = algo_data->address;
760 uint8_t msg[5];
761 uint8_t reply[2];
762 unsigned retry;
763 int msg_bytes;
764 int reply_bytes;
765 int ret;
767 /* Set up the command byte */
768 if (mode & MODE_I2C_READ)
769 msg[0] = DP_AUX_I2C_READ << 4;
770 else
771 msg[0] = DP_AUX_I2C_WRITE << 4;
773 if (!(mode & MODE_I2C_STOP))
774 msg[0] |= DP_AUX_I2C_MOT << 4;
776 msg[1] = address >> 8;
777 msg[2] = address;
779 switch (mode) {
780 case MODE_I2C_WRITE:
781 msg[3] = 0;
782 msg[4] = write_byte;
783 msg_bytes = 5;
784 reply_bytes = 1;
785 break;
786 case MODE_I2C_READ:
787 msg[3] = 0;
788 msg_bytes = 4;
789 reply_bytes = 2;
790 break;
791 default:
792 msg_bytes = 3;
793 reply_bytes = 1;
794 break;
797 for (retry = 0; retry < 5; retry++) {
798 ret = cdv_intel_dp_aux_ch(encoder,
799 msg, msg_bytes,
800 reply, reply_bytes);
801 if (ret < 0) {
802 DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
803 return ret;
806 switch ((reply[0] >> 4) & DP_AUX_NATIVE_REPLY_MASK) {
807 case DP_AUX_NATIVE_REPLY_ACK:
808 /* I2C-over-AUX Reply field is only valid
809 * when paired with AUX ACK.
811 break;
812 case DP_AUX_NATIVE_REPLY_NACK:
813 DRM_DEBUG_KMS("aux_ch native nack\n");
814 return -EREMOTEIO;
815 case DP_AUX_NATIVE_REPLY_DEFER:
816 udelay(100);
817 continue;
818 default:
819 DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
820 reply[0]);
821 return -EREMOTEIO;
824 switch ((reply[0] >> 4) & DP_AUX_I2C_REPLY_MASK) {
825 case DP_AUX_I2C_REPLY_ACK:
826 if (mode == MODE_I2C_READ) {
827 *read_byte = reply[1];
829 return reply_bytes - 1;
830 case DP_AUX_I2C_REPLY_NACK:
831 DRM_DEBUG_KMS("aux_i2c nack\n");
832 return -EREMOTEIO;
833 case DP_AUX_I2C_REPLY_DEFER:
834 DRM_DEBUG_KMS("aux_i2c defer\n");
835 udelay(100);
836 break;
837 default:
838 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
839 return -EREMOTEIO;
843 DRM_ERROR("too many retries, giving up\n");
844 return -EREMOTEIO;
847 static int
848 cdv_intel_dp_i2c_init(struct gma_connector *connector,
849 struct gma_encoder *encoder, const char *name)
851 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
852 int ret;
854 DRM_DEBUG_KMS("i2c_init %s\n", name);
856 intel_dp->algo.running = false;
857 intel_dp->algo.address = 0;
858 intel_dp->algo.aux_ch = cdv_intel_dp_i2c_aux_ch;
860 memset(&intel_dp->adapter, '\0', sizeof (intel_dp->adapter));
861 intel_dp->adapter.owner = THIS_MODULE;
862 intel_dp->adapter.class = I2C_CLASS_DDC;
863 strncpy (intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
864 intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
865 intel_dp->adapter.algo_data = &intel_dp->algo;
866 intel_dp->adapter.dev.parent = connector->base.kdev;
868 if (is_edp(encoder))
869 cdv_intel_edp_panel_vdd_on(encoder);
870 ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
871 if (is_edp(encoder))
872 cdv_intel_edp_panel_vdd_off(encoder);
874 return ret;
877 static void cdv_intel_fixed_panel_mode(struct drm_display_mode *fixed_mode,
878 struct drm_display_mode *adjusted_mode)
880 adjusted_mode->hdisplay = fixed_mode->hdisplay;
881 adjusted_mode->hsync_start = fixed_mode->hsync_start;
882 adjusted_mode->hsync_end = fixed_mode->hsync_end;
883 adjusted_mode->htotal = fixed_mode->htotal;
885 adjusted_mode->vdisplay = fixed_mode->vdisplay;
886 adjusted_mode->vsync_start = fixed_mode->vsync_start;
887 adjusted_mode->vsync_end = fixed_mode->vsync_end;
888 adjusted_mode->vtotal = fixed_mode->vtotal;
890 adjusted_mode->clock = fixed_mode->clock;
892 drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
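/*
 * Pick a link configuration for the requested mode: lane counts are tried
 * in increasing order (1, 2, 4) and, for each, link rates from the highest
 * supported downwards, settling on the first combination whose data rate
 * covers the mode.  For eDP the maximum configuration is forced if nothing
 * fits.
 */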
895 static bool
896 cdv_intel_dp_mode_fixup(struct drm_encoder *encoder, const struct drm_display_mode *mode,
897 struct drm_display_mode *adjusted_mode)
899 struct drm_psb_private *dev_priv = encoder->dev->dev_private;
900 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
901 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
902 int lane_count, clock;
903 int max_lane_count = cdv_intel_dp_max_lane_count(intel_encoder);
904 int max_clock = cdv_intel_dp_max_link_bw(intel_encoder) == DP_LINK_BW_2_7 ? 1 : 0;
905 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
906 int refclock = mode->clock;
907 int bpp = 24;
909 if (is_edp(intel_encoder) && intel_dp->panel_fixed_mode) {
910 cdv_intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
911 refclock = intel_dp->panel_fixed_mode->clock;
912 bpp = dev_priv->edp.bpp;
915 for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
916 for (clock = max_clock; clock >= 0; clock--) {
917 int link_avail = cdv_intel_dp_max_data_rate(cdv_intel_dp_link_clock(bws[clock]), lane_count);
919 if (cdv_intel_dp_link_required(refclock, bpp) <= link_avail) {
920 intel_dp->link_bw = bws[clock];
921 intel_dp->lane_count = lane_count;
922 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
923 DRM_DEBUG_KMS("Display port link bw %02x lane "
924 "count %d clock %d\n",
925 intel_dp->link_bw, intel_dp->lane_count,
926 adjusted_mode->clock);
927 return true;
931 if (is_edp(intel_encoder)) {
932 /* okay we failed just pick the highest */
933 intel_dp->lane_count = max_lane_count;
934 intel_dp->link_bw = bws[max_clock];
935 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
936 DRM_DEBUG_KMS("Force picking display port link bw %02x lane "
937 "count %d clock %d\n",
938 intel_dp->link_bw, intel_dp->lane_count,
939 adjusted_mode->clock);
941 return true;
943 return false;
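/*
 * Link M/N programming: the GMCH M/N pair encodes pixel_clock * bytes_pp
 * versus link_clock * lane_count, and the link M/N pair pixel_clock versus
 * link_clock.  cdv_intel_reduce_ratio() rescales each ratio so the
 * denominator becomes 0x800000 (2^23) before the values are written to the
 * per-pipe M/N registers in cdv_intel_dp_set_m_n().
 */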
946 struct cdv_intel_dp_m_n {
947 uint32_t tu;
948 uint32_t gmch_m;
949 uint32_t gmch_n;
950 uint32_t link_m;
951 uint32_t link_n;
954 static void
955 cdv_intel_reduce_ratio(uint32_t *num, uint32_t *den)
958 while (*num > 0xffffff || *den > 0xffffff) {
959 *num >>= 1;
960 *den >>= 1;
962 uint64_t value, m;
963 m = *num;
964 value = m * (0x800000);
965 m = do_div(value, *den);
966 *num = value;
967 *den = 0x800000;
970 static void
971 cdv_intel_dp_compute_m_n(int bpp,
972 int nlanes,
973 int pixel_clock,
974 int link_clock,
975 struct cdv_intel_dp_m_n *m_n)
977 m_n->tu = 64;
978 m_n->gmch_m = (pixel_clock * bpp + 7) >> 3;
979 m_n->gmch_n = link_clock * nlanes;
980 cdv_intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
981 m_n->link_m = pixel_clock;
982 m_n->link_n = link_clock;
983 cdv_intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
986 void
987 cdv_intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
988 struct drm_display_mode *adjusted_mode)
990 struct drm_device *dev = crtc->dev;
991 struct drm_psb_private *dev_priv = dev->dev_private;
992 struct drm_mode_config *mode_config = &dev->mode_config;
993 struct drm_encoder *encoder;
994 struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
995 int lane_count = 4, bpp = 24;
996 struct cdv_intel_dp_m_n m_n;
997 int pipe = gma_crtc->pipe;
1000 * Find the lane count in the intel_encoder private
1002 list_for_each_entry(encoder, &mode_config->encoder_list, head) {
1003 struct gma_encoder *intel_encoder;
1004 struct cdv_intel_dp *intel_dp;
1006 if (encoder->crtc != crtc)
1007 continue;
1009 intel_encoder = to_gma_encoder(encoder);
1010 intel_dp = intel_encoder->dev_priv;
1011 if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT) {
1012 lane_count = intel_dp->lane_count;
1013 break;
1014 } else if (is_edp(intel_encoder)) {
1015 lane_count = intel_dp->lane_count;
1016 bpp = dev_priv->edp.bpp;
1017 break;
1022 * Compute the GMCH and Link ratios. The '3' here is
1023 * the number of bytes_per_pixel post-LUT, which we always
1024 * set up for 8-bits of R/G/B, or 3 bytes total.
1026 cdv_intel_dp_compute_m_n(bpp, lane_count,
1027 mode->clock, adjusted_mode->clock, &m_n);
1030 REG_WRITE(PIPE_GMCH_DATA_M(pipe),
1031 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
1032 m_n.gmch_m);
1033 REG_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
1034 REG_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
1035 REG_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
1039 static void
1040 cdv_intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1041 struct drm_display_mode *adjusted_mode)
1043 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1044 struct drm_crtc *crtc = encoder->crtc;
1045 struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
1046 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1047 struct drm_device *dev = encoder->dev;
1049 intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
1050 intel_dp->DP |= intel_dp->color_range;
1052 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
1053 intel_dp->DP |= DP_SYNC_HS_HIGH;
1054 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
1055 intel_dp->DP |= DP_SYNC_VS_HIGH;
1057 intel_dp->DP |= DP_LINK_TRAIN_OFF;
1059 switch (intel_dp->lane_count) {
1060 case 1:
1061 intel_dp->DP |= DP_PORT_WIDTH_1;
1062 break;
1063 case 2:
1064 intel_dp->DP |= DP_PORT_WIDTH_2;
1065 break;
1066 case 4:
1067 intel_dp->DP |= DP_PORT_WIDTH_4;
1068 break;
1070 if (intel_dp->has_audio)
1071 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
1073 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
1074 intel_dp->link_configuration[0] = intel_dp->link_bw;
1075 intel_dp->link_configuration[1] = intel_dp->lane_count;
1078 * Check for DPCD version >= 1.1 and enhanced framing support
1080 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
1081 (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
1082 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
1083 intel_dp->DP |= DP_ENHANCED_FRAMING;
1086 /* CPT DP's pipe select is decided in TRANS_DP_CTL */
1087 if (gma_crtc->pipe == 1)
1088 intel_dp->DP |= DP_PIPEB_SELECT;
1090 REG_WRITE(intel_dp->output_reg, (intel_dp->DP | DP_PORT_EN));
1091 DRM_DEBUG_KMS("DP expected reg is %x\n", intel_dp->DP);
1092 if (is_edp(intel_encoder)) {
1093 uint32_t pfit_control;
1094 cdv_intel_edp_panel_on(intel_encoder);
1096 if (mode->hdisplay != adjusted_mode->hdisplay ||
1097 mode->vdisplay != adjusted_mode->vdisplay)
1098 pfit_control = PFIT_ENABLE;
1099 else
1100 pfit_control = 0;
1102 pfit_control |= gma_crtc->pipe << PFIT_PIPE_SHIFT;
1104 REG_WRITE(PFIT_CONTROL, pfit_control);
1109 /* If the sink supports it, try to set the power state appropriately */
1110 static void cdv_intel_dp_sink_dpms(struct gma_encoder *encoder, int mode)
1112 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1113 int ret, i;
1115 /* Should have a valid DPCD by this point */
1116 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
1117 return;
1119 if (mode != DRM_MODE_DPMS_ON) {
1120 ret = cdv_intel_dp_aux_native_write_1(encoder, DP_SET_POWER,
1121 DP_SET_POWER_D3);
1122 if (ret != 1)
1123 DRM_DEBUG_DRIVER("failed to write sink power state\n");
1124 } else {
1126 * When turning on, we need to retry for 1ms to give the sink
1127 * time to wake up.
1129 for (i = 0; i < 3; i++) {
1130 ret = cdv_intel_dp_aux_native_write_1(encoder,
1131 DP_SET_POWER,
1132 DP_SET_POWER_D0);
1133 if (ret == 1)
1134 break;
1135 udelay(1000);
1140 static void cdv_intel_dp_prepare(struct drm_encoder *encoder)
1142 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1143 int edp = is_edp(intel_encoder);
1145 if (edp) {
1146 cdv_intel_edp_backlight_off(intel_encoder);
1147 cdv_intel_edp_panel_off(intel_encoder);
1148 cdv_intel_edp_panel_vdd_on(intel_encoder);
1150 /* Wake up the sink first */
1151 cdv_intel_dp_sink_dpms(intel_encoder, DRM_MODE_DPMS_ON);
1152 cdv_intel_dp_link_down(intel_encoder);
1153 if (edp)
1154 cdv_intel_edp_panel_vdd_off(intel_encoder);
1157 static void cdv_intel_dp_commit(struct drm_encoder *encoder)
1159 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1160 int edp = is_edp(intel_encoder);
1162 if (edp)
1163 cdv_intel_edp_panel_on(intel_encoder);
1164 cdv_intel_dp_start_link_train(intel_encoder);
1165 cdv_intel_dp_complete_link_train(intel_encoder);
1166 if (edp)
1167 cdv_intel_edp_backlight_on(intel_encoder);
1170 static void
1171 cdv_intel_dp_dpms(struct drm_encoder *encoder, int mode)
1173 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1174 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1175 struct drm_device *dev = encoder->dev;
1176 uint32_t dp_reg = REG_READ(intel_dp->output_reg);
1177 int edp = is_edp(intel_encoder);
1179 if (mode != DRM_MODE_DPMS_ON) {
1180 if (edp) {
1181 cdv_intel_edp_backlight_off(intel_encoder);
1182 cdv_intel_edp_panel_vdd_on(intel_encoder);
1184 cdv_intel_dp_sink_dpms(intel_encoder, mode);
1185 cdv_intel_dp_link_down(intel_encoder);
1186 if (edp) {
1187 cdv_intel_edp_panel_vdd_off(intel_encoder);
1188 cdv_intel_edp_panel_off(intel_encoder);
1190 } else {
1191 if (edp)
1192 cdv_intel_edp_panel_on(intel_encoder);
1193 cdv_intel_dp_sink_dpms(intel_encoder, mode);
1194 if (!(dp_reg & DP_PORT_EN)) {
1195 cdv_intel_dp_start_link_train(intel_encoder);
1196 cdv_intel_dp_complete_link_train(intel_encoder);
1198 if (edp)
1199 cdv_intel_edp_backlight_on(intel_encoder);
1204 * Native read with retry for link status and receiver capability reads for
1205 * cases where the sink may still be asleep.
1207 static bool
1208 cdv_intel_dp_aux_native_read_retry(struct gma_encoder *encoder, uint16_t address,
1209 uint8_t *recv, int recv_bytes)
1211 int ret, i;
1214 * Sinks are *supposed* to come up within 1ms from an off state,
1215 * but we're also supposed to retry 3 times per the spec.
1217 for (i = 0; i < 3; i++) {
1218 ret = cdv_intel_dp_aux_native_read(encoder, address, recv,
1219 recv_bytes);
1220 if (ret == recv_bytes)
1221 return true;
1222 udelay(1000);
1225 return false;
1229 * Fetch AUX CH registers 0x202 - 0x207 which contain
1230 * link status information
1232 static bool
1233 cdv_intel_dp_get_link_status(struct gma_encoder *encoder)
1235 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1236 return cdv_intel_dp_aux_native_read_retry(encoder,
1237 DP_LANE0_1_STATUS,
1238 intel_dp->link_status,
1239 DP_LINK_STATUS_SIZE);
1242 static uint8_t
1243 cdv_intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1244 int r)
1246 return link_status[r - DP_LANE0_1_STATUS];
1249 static uint8_t
1250 cdv_intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
1251 int lane)
1253 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
1254 int s = ((lane & 1) ?
1255 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
1256 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
1257 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1259 return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
1262 static uint8_t
1263 cdv_intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
1264 int lane)
1266 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
1267 int s = ((lane & 1) ?
1268 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
1269 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
1270 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1272 return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
1275 #define CDV_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_LEVEL_3
1277 static void
1278 cdv_intel_get_adjust_train(struct gma_encoder *encoder)
1280 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1281 uint8_t v = 0;
1282 uint8_t p = 0;
1283 int lane;
1285 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1286 uint8_t this_v = cdv_intel_get_adjust_request_voltage(intel_dp->link_status, lane);
1287 uint8_t this_p = cdv_intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane);
1289 if (this_v > v)
1290 v = this_v;
1291 if (this_p > p)
1292 p = this_p;
1295 if (v >= CDV_DP_VOLTAGE_MAX)
1296 v = CDV_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;
1298 if (p == DP_TRAIN_PRE_EMPHASIS_MASK)
1299 p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
1301 for (lane = 0; lane < 4; lane++)
1302 intel_dp->train_set[lane] = v | p;
1306 static uint8_t
1307 cdv_intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1308 int lane)
1310 int i = DP_LANE0_1_STATUS + (lane >> 1);
1311 int s = (lane & 1) * 4;
1312 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1314 return (l >> s) & 0xf;
1317 /* Check for clock recovery is done on all channels */
1318 static bool
1319 cdv_intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
1321 int lane;
1322 uint8_t lane_status;
1324 for (lane = 0; lane < lane_count; lane++) {
1325 lane_status = cdv_intel_get_lane_status(link_status, lane);
1326 if ((lane_status & DP_LANE_CR_DONE) == 0)
1327 return false;
1329 return true;
1332 /* Check to see if channel eq is done on all channels */
1333 #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
1334 DP_LANE_CHANNEL_EQ_DONE|\
1335 DP_LANE_SYMBOL_LOCKED)
1336 static bool
1337 cdv_intel_channel_eq_ok(struct gma_encoder *encoder)
1339 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1340 uint8_t lane_align;
1341 uint8_t lane_status;
1342 int lane;
1344 lane_align = cdv_intel_dp_link_status(intel_dp->link_status,
1345 DP_LANE_ALIGN_STATUS_UPDATED);
1346 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
1347 return false;
1348 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1349 lane_status = cdv_intel_get_lane_status(intel_dp->link_status, lane);
1350 if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
1351 return false;
1353 return true;
1356 static bool
1357 cdv_intel_dp_set_link_train(struct gma_encoder *encoder,
1358 uint32_t dp_reg_value,
1359 uint8_t dp_train_pat)
1362 struct drm_device *dev = encoder->base.dev;
1363 int ret;
1364 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1366 REG_WRITE(intel_dp->output_reg, dp_reg_value);
1367 REG_READ(intel_dp->output_reg);
1369 ret = cdv_intel_dp_aux_native_write_1(encoder,
1370 DP_TRAINING_PATTERN_SET,
1371 dp_train_pat);
1373 if (ret != 1) {
1374 DRM_DEBUG_KMS("Failure in setting link pattern %x\n",
1375 dp_train_pat);
1376 return false;
1379 return true;
1383 static bool
1384 cdv_intel_dplink_set_level(struct gma_encoder *encoder,
1385 uint8_t dp_train_pat)
1388 int ret;
1389 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1391 ret = cdv_intel_dp_aux_native_write(encoder,
1392 DP_TRAINING_LANE0_SET,
1393 intel_dp->train_set,
1394 intel_dp->lane_count);
1396 if (ret != intel_dp->lane_count) {
1397 DRM_DEBUG_KMS("Failure in setting level %d, lane_cnt= %d\n",
1398 intel_dp->train_set[0], intel_dp->lane_count);
1399 return false;
1401 return true;
1404 static void
1405 cdv_intel_dp_set_vswing_premph(struct gma_encoder *encoder, uint8_t signal_level)
1407 struct drm_device *dev = encoder->base.dev;
1408 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1409 struct ddi_regoff *ddi_reg;
1410 int vswing, premph, index;
1412 if (intel_dp->output_reg == DP_B)
1413 ddi_reg = &ddi_DP_train_table[0];
1414 else
1415 ddi_reg = &ddi_DP_train_table[1];
1417 vswing = (signal_level & DP_TRAIN_VOLTAGE_SWING_MASK);
1418 premph = ((signal_level & DP_TRAIN_PRE_EMPHASIS_MASK)) >>
1419 DP_TRAIN_PRE_EMPHASIS_SHIFT;
1421 if (vswing + premph > 3)
1422 return;
1423 #ifdef CDV_FAST_LINK_TRAIN
1424 return;
1425 #endif
1426 DRM_DEBUG_KMS("Test2\n");
1427 //return ;
1428 cdv_sb_reset(dev);
1429 /* ;Swing voltage programming
1430 ;gfx_dpio_set_reg(0xc058, 0x0505313A) */
1431 cdv_sb_write(dev, ddi_reg->VSwing5, 0x0505313A);
1433 /* ;gfx_dpio_set_reg(0x8154, 0x43406055) */
1434 cdv_sb_write(dev, ddi_reg->VSwing1, 0x43406055);
1436 /* ;gfx_dpio_set_reg(0x8148, 0x55338954)
1437 * The VSwing_PreEmph table is also considered based on the vswing/premph values
1439 index = (vswing + premph) * 2;
1440 if (premph == 1 && vswing == 1) {
1441 cdv_sb_write(dev, ddi_reg->VSwing2, 0x055738954);
1442 } else
1443 cdv_sb_write(dev, ddi_reg->VSwing2, dp_vswing_premph_table[index]);
1445 /* ;gfx_dpio_set_reg(0x814c, 0x40802040) */
1446 if ((vswing + premph) == DP_TRAIN_VOLTAGE_SWING_LEVEL_3)
1447 cdv_sb_write(dev, ddi_reg->VSwing3, 0x70802040);
1448 else
1449 cdv_sb_write(dev, ddi_reg->VSwing3, 0x40802040);
1451 /* ;gfx_dpio_set_reg(0x8150, 0x2b405555) */
1452 /* cdv_sb_write(dev, ddi_reg->VSwing4, 0x2b405555); */
1454 /* ;gfx_dpio_set_reg(0x8154, 0xc3406055) */
1455 cdv_sb_write(dev, ddi_reg->VSwing1, 0xc3406055);
1457 /* ;Pre emphasis programming
1458 * ;gfx_dpio_set_reg(0xc02c, 0x1f030040)
1460 cdv_sb_write(dev, ddi_reg->PreEmph1, 0x1f030040);
1462 /* ;gfx_dpio_set_reg(0x8124, 0x00004000) */
1463 index = 2 * premph + 1;
1464 cdv_sb_write(dev, ddi_reg->PreEmph2, dp_vswing_premph_table[index]);
1465 return;
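/*
 * Link training is done in the usual two phases: training pattern 1 for
 * clock recovery (cdv_intel_dp_start_link_train) and training pattern 2 for
 * channel equalization (cdv_intel_dp_complete_link_train), adjusting
 * vswing/pre-emphasis from the sink's DP_ADJUST_REQUEST feedback between
 * iterations.
 */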
1469 /* Enable corresponding port and start training pattern 1 */
1470 static void
1471 cdv_intel_dp_start_link_train(struct gma_encoder *encoder)
1473 struct drm_device *dev = encoder->base.dev;
1474 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1475 int i;
1476 uint8_t voltage;
1477 bool clock_recovery = false;
1478 int tries;
1479 u32 reg;
1480 uint32_t DP = intel_dp->DP;
1482 DP |= DP_PORT_EN;
1483 DP &= ~DP_LINK_TRAIN_MASK;
1485 reg = DP;
1486 reg |= DP_LINK_TRAIN_PAT_1;
1487 /* Enable output, wait for it to become active */
1488 REG_WRITE(intel_dp->output_reg, reg);
1489 REG_READ(intel_dp->output_reg);
1490 gma_wait_for_vblank(dev);
1492 DRM_DEBUG_KMS("Link config\n");
1493 /* Write the link configuration data */
1494 cdv_intel_dp_aux_native_write(encoder, DP_LINK_BW_SET,
1495 intel_dp->link_configuration,
1498 memset(intel_dp->train_set, 0, 4);
1499 voltage = 0;
1500 tries = 0;
1501 clock_recovery = false;
1503 DRM_DEBUG_KMS("Start train\n");
1504 reg = DP | DP_LINK_TRAIN_PAT_1;
1506 for (;;) {
1507 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1508 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
1509 intel_dp->train_set[0],
1510 intel_dp->link_configuration[0],
1511 intel_dp->link_configuration[1]);
1513 if (!cdv_intel_dp_set_link_train(encoder, reg, DP_TRAINING_PATTERN_1)) {
1514 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 1\n");
1516 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
1517 /* Set training pattern 1 */
1519 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_1);
1521 udelay(200);
1522 if (!cdv_intel_dp_get_link_status(encoder))
1523 break;
1525 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
1526 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
1527 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);
1529 if (cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
1530 DRM_DEBUG_KMS("PT1 train is done\n");
1531 clock_recovery = true;
1532 break;
1535 /* Check to see if we've tried the max voltage */
1536 for (i = 0; i < intel_dp->lane_count; i++)
1537 if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
1538 break;
1539 if (i == intel_dp->lane_count)
1540 break;
1542 /* Check to see if we've tried the same voltage 5 times */
1543 if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
1544 ++tries;
1545 if (tries == 5)
1546 break;
1547 } else
1548 tries = 0;
1549 voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
1551 /* Compute new intel_dp->train_set as requested by target */
1552 cdv_intel_get_adjust_train(encoder);
1556 if (!clock_recovery) {
1557 DRM_DEBUG_KMS("failure in DP patter 1 training, train set %x\n", intel_dp->train_set[0]);
1560 intel_dp->DP = DP;
1563 static void
1564 cdv_intel_dp_complete_link_train(struct gma_encoder *encoder)
1566 struct drm_device *dev = encoder->base.dev;
1567 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1568 int tries, cr_tries;
1569 u32 reg;
1570 uint32_t DP = intel_dp->DP;
1572 /* channel equalization */
1573 tries = 0;
1574 cr_tries = 0;
1576 DRM_DEBUG_KMS("\n");
1577 reg = DP | DP_LINK_TRAIN_PAT_2;
1579 for (;;) {
1581 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
1582 intel_dp->train_set[0],
1583 intel_dp->link_configuration[0],
1584 intel_dp->link_configuration[1]);
1585 /* channel eq pattern */
1587 if (!cdv_intel_dp_set_link_train(encoder, reg,
1588 DP_TRAINING_PATTERN_2)) {
1589 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 2\n");
1591 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1593 if (cr_tries > 5) {
1594 DRM_ERROR("failed to train DP, aborting\n");
1595 cdv_intel_dp_link_down(encoder);
1596 break;
1599 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
1601 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_2);
1603 udelay(1000);
1604 if (!cdv_intel_dp_get_link_status(encoder))
1605 break;
1607 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
1608 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
1609 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);
1611 /* Make sure clock is still ok */
1612 if (!cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
1613 cdv_intel_dp_start_link_train(encoder);
1614 cr_tries++;
1615 continue;
1618 if (cdv_intel_channel_eq_ok(encoder)) {
1619 DRM_DEBUG_KMS("PT2 train is done\n");
1620 break;
1623 /* Try 5 times, then try clock recovery if that fails */
1624 if (tries > 5) {
1625 cdv_intel_dp_link_down(encoder);
1626 cdv_intel_dp_start_link_train(encoder);
1627 tries = 0;
1628 cr_tries++;
1629 continue;
1632 /* Compute new intel_dp->train_set as requested by target */
1633 cdv_intel_get_adjust_train(encoder);
1634 ++tries;
1638 reg = DP | DP_LINK_TRAIN_OFF;
1640 REG_WRITE(intel_dp->output_reg, reg);
1641 REG_READ(intel_dp->output_reg);
1642 cdv_intel_dp_aux_native_write_1(encoder,
1643 DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
1646 static void
1647 cdv_intel_dp_link_down(struct gma_encoder *encoder)
1649 struct drm_device *dev = encoder->base.dev;
1650 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1651 uint32_t DP = intel_dp->DP;
1653 if ((REG_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
1654 return;
1656 DRM_DEBUG_KMS("\n");
1660 DP &= ~DP_LINK_TRAIN_MASK;
1661 REG_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
1663 REG_READ(intel_dp->output_reg);
1665 msleep(17);
1667 REG_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
1668 REG_READ(intel_dp->output_reg);
1671 static enum drm_connector_status cdv_dp_detect(struct gma_encoder *encoder)
1673 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1674 enum drm_connector_status status;
1676 status = connector_status_disconnected;
1677 if (cdv_intel_dp_aux_native_read(encoder, 0x000, intel_dp->dpcd,
1678 sizeof (intel_dp->dpcd)) == sizeof (intel_dp->dpcd))
1680 if (intel_dp->dpcd[DP_DPCD_REV] != 0)
1681 status = connector_status_connected;
1683 if (status == connector_status_connected)
1684 DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
1685 intel_dp->dpcd[0], intel_dp->dpcd[1],
1686 intel_dp->dpcd[2], intel_dp->dpcd[3]);
1687 return status;
1691 * Detect DP connection by reading the sink's DPCD over the AUX channel.
1693 * \return connector_status_connected if a valid DPCD was read.
1694 * \return connector_status_disconnected otherwise.
1696 static enum drm_connector_status
1697 cdv_intel_dp_detect(struct drm_connector *connector, bool force)
1699 struct gma_encoder *encoder = gma_attached_encoder(connector);
1700 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1701 enum drm_connector_status status;
1702 struct edid *edid = NULL;
1703 int edp = is_edp(encoder);
1705 intel_dp->has_audio = false;
1707 if (edp)
1708 cdv_intel_edp_panel_vdd_on(encoder);
1709 status = cdv_dp_detect(encoder);
1710 if (status != connector_status_connected) {
1711 if (edp)
1712 cdv_intel_edp_panel_vdd_off(encoder);
1713 return status;
1716 if (intel_dp->force_audio) {
1717 intel_dp->has_audio = intel_dp->force_audio > 0;
1718 } else {
1719 edid = drm_get_edid(connector, &intel_dp->adapter);
1720 if (edid) {
1721 intel_dp->has_audio = drm_detect_monitor_audio(edid);
1722 kfree(edid);
1725 if (edp)
1726 cdv_intel_edp_panel_vdd_off(encoder);
1728 return connector_status_connected;
1731 static int cdv_intel_dp_get_modes(struct drm_connector *connector)
1733 struct gma_encoder *intel_encoder = gma_attached_encoder(connector);
1734 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1735 struct edid *edid = NULL;
1736 int ret = 0;
1737 int edp = is_edp(intel_encoder);
1740 edid = drm_get_edid(connector, &intel_dp->adapter);
1741 if (edid) {
1742 drm_connector_update_edid_property(connector, edid);
1743 ret = drm_add_edid_modes(connector, edid);
1744 kfree(edid);
1747 if (is_edp(intel_encoder)) {
1748 struct drm_device *dev = connector->dev;
1749 struct drm_psb_private *dev_priv = dev->dev_private;
1751 cdv_intel_edp_panel_vdd_off(intel_encoder);
1752 if (ret) {
1753 if (edp && !intel_dp->panel_fixed_mode) {
1754 struct drm_display_mode *newmode;
1755 list_for_each_entry(newmode, &connector->probed_modes,
1756 head) {
1757 if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
1758 intel_dp->panel_fixed_mode =
1759 drm_mode_duplicate(dev, newmode);
1760 break;
1765 return ret;
1767 if (!intel_dp->panel_fixed_mode && dev_priv->lfp_lvds_vbt_mode) {
1768 intel_dp->panel_fixed_mode =
1769 drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
1770 if (intel_dp->panel_fixed_mode) {
1771 intel_dp->panel_fixed_mode->type |=
1772 DRM_MODE_TYPE_PREFERRED;
1775 if (intel_dp->panel_fixed_mode != NULL) {
1776 struct drm_display_mode *mode;
1777 mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
1778 drm_mode_probed_add(connector, mode);
1779 return 1;
1783 return ret;
1786 static bool
1787 cdv_intel_dp_detect_audio(struct drm_connector *connector)
1789 struct gma_encoder *encoder = gma_attached_encoder(connector);
1790 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1791 struct edid *edid;
1792 bool has_audio = false;
1793 int edp = is_edp(encoder);
1795 if (edp)
1796 cdv_intel_edp_panel_vdd_on(encoder);
1798 edid = drm_get_edid(connector, &intel_dp->adapter);
1799 if (edid) {
1800 has_audio = drm_detect_monitor_audio(edid);
1801 kfree(edid);
1803 if (edp)
1804 cdv_intel_edp_panel_vdd_off(encoder);
1806 return has_audio;
1809 static int
1810 cdv_intel_dp_set_property(struct drm_connector *connector,
1811 struct drm_property *property,
1812 uint64_t val)
1814 struct drm_psb_private *dev_priv = connector->dev->dev_private;
1815 struct gma_encoder *encoder = gma_attached_encoder(connector);
1816 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1817 int ret;
1819 ret = drm_object_property_set_value(&connector->base, property, val);
1820 if (ret)
1821 return ret;
1823 if (property == dev_priv->force_audio_property) {
1824 int i = val;
1825 bool has_audio;
1827 if (i == intel_dp->force_audio)
1828 return 0;
1830 intel_dp->force_audio = i;
1832 if (i == 0)
1833 has_audio = cdv_intel_dp_detect_audio(connector);
1834 else
1835 has_audio = i > 0;
1837 if (has_audio == intel_dp->has_audio)
1838 return 0;
1840 intel_dp->has_audio = has_audio;
1841 goto done;
1844 if (property == dev_priv->broadcast_rgb_property) {
1845 if (val == !!intel_dp->color_range)
1846 return 0;
1848 intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
1849 goto done;
1852 return -EINVAL;
1854 done:
1855 if (encoder->base.crtc) {
1856 struct drm_crtc *crtc = encoder->base.crtc;
1857 drm_crtc_helper_set_mode(crtc, &crtc->mode,
1858 crtc->x, crtc->y,
1859 crtc->primary->fb);
1862 return 0;
1865 static void
1866 cdv_intel_dp_destroy(struct drm_connector *connector)
1868 struct gma_encoder *gma_encoder = gma_attached_encoder(connector);
1869 struct cdv_intel_dp *intel_dp = gma_encoder->dev_priv;
1871 if (is_edp(gma_encoder)) {
1872 /* cdv_intel_panel_destroy_backlight(connector->dev); */
1873 kfree(intel_dp->panel_fixed_mode);
1874 intel_dp->panel_fixed_mode = NULL;
1876 i2c_del_adapter(&intel_dp->adapter);
1877 drm_connector_unregister(connector);
1878 drm_connector_cleanup(connector);
1879 kfree(connector);
1882 static const struct drm_encoder_helper_funcs cdv_intel_dp_helper_funcs = {
1883 .dpms = cdv_intel_dp_dpms,
1884 .mode_fixup = cdv_intel_dp_mode_fixup,
1885 .prepare = cdv_intel_dp_prepare,
1886 .mode_set = cdv_intel_dp_mode_set,
1887 .commit = cdv_intel_dp_commit,
1890 static const struct drm_connector_funcs cdv_intel_dp_connector_funcs = {
1891 .dpms = drm_helper_connector_dpms,
1892 .detect = cdv_intel_dp_detect,
1893 .fill_modes = drm_helper_probe_single_connector_modes,
1894 .set_property = cdv_intel_dp_set_property,
1895 .destroy = cdv_intel_dp_destroy,
1898 static const struct drm_connector_helper_funcs cdv_intel_dp_connector_helper_funcs = {
1899 .get_modes = cdv_intel_dp_get_modes,
1900 .mode_valid = cdv_intel_dp_mode_valid,
1901 .best_encoder = gma_best_encoder,
1904 static void cdv_intel_dp_add_properties(struct drm_connector *connector)
1906 cdv_intel_attach_force_audio_property(connector);
1907 cdv_intel_attach_broadcast_rgb_property(connector);
1910 /* check the VBT to see whether the eDP is on DP-D port */
1911 static bool cdv_intel_dpc_is_edp(struct drm_device *dev)
1913 struct drm_psb_private *dev_priv = dev->dev_private;
1914 struct child_device_config *p_child;
1915 int i;
1917 if (!dev_priv->child_dev_num)
1918 return false;
1920 for (i = 0; i < dev_priv->child_dev_num; i++) {
1921 p_child = dev_priv->child_dev + i;
1923 if (p_child->dvo_port == PORT_IDPC &&
1924 p_child->device_type == DEVICE_TYPE_eDP)
1925 return true;
1927 return false;
1930 /* Cedarview display clock gating
1932 We need to disable this to get correct behaviour while enabling
1933 DP/eDP. TODO - investigate if we can turn it back to normality
1934 after enabling */
1935 static void cdv_disable_intel_clock_gating(struct drm_device *dev)
1937 u32 reg_value;
1938 reg_value = REG_READ(DSPCLK_GATE_D);
1940 reg_value |= (DPUNIT_PIPEB_GATE_DISABLE |
1941 DPUNIT_PIPEA_GATE_DISABLE |
1942 DPCUNIT_CLOCK_GATE_DISABLE |
1943 DPLSUNIT_CLOCK_GATE_DISABLE |
1944 DPOUNIT_CLOCK_GATE_DISABLE |
1945 DPIOUNIT_CLOCK_GATE_DISABLE);
1947 REG_WRITE(DSPCLK_GATE_D, reg_value);
1949 udelay(500);
1952 void
1953 cdv_intel_dp_init(struct drm_device *dev, struct psb_intel_mode_device *mode_dev, int output_reg)
1955 struct gma_encoder *gma_encoder;
1956 struct gma_connector *gma_connector;
1957 struct drm_connector *connector;
1958 struct drm_encoder *encoder;
1959 struct cdv_intel_dp *intel_dp;
1960 const char *name = NULL;
1961 int type = DRM_MODE_CONNECTOR_DisplayPort;
1963 gma_encoder = kzalloc(sizeof(struct gma_encoder), GFP_KERNEL);
1964 if (!gma_encoder)
1965 return;
1966 gma_connector = kzalloc(sizeof(struct gma_connector), GFP_KERNEL);
1967 if (!gma_connector)
1968 goto err_connector;
1969 intel_dp = kzalloc(sizeof(struct cdv_intel_dp), GFP_KERNEL);
1970 if (!intel_dp)
1971 goto err_priv;
1973 if ((output_reg == DP_C) && cdv_intel_dpc_is_edp(dev))
1974 type = DRM_MODE_CONNECTOR_eDP;
1976 connector = &gma_connector->base;
1977 encoder = &gma_encoder->base;
1979 drm_connector_init(dev, connector, &cdv_intel_dp_connector_funcs, type);
1980 drm_simple_encoder_init(dev, encoder, DRM_MODE_ENCODER_TMDS);
1982 gma_connector_attach_encoder(gma_connector, gma_encoder);
1984 if (type == DRM_MODE_CONNECTOR_DisplayPort)
1985 gma_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
1986 else
1987 gma_encoder->type = INTEL_OUTPUT_EDP;
1990 gma_encoder->dev_priv=intel_dp;
1991 intel_dp->encoder = gma_encoder;
1992 intel_dp->output_reg = output_reg;
1994 drm_encoder_helper_add(encoder, &cdv_intel_dp_helper_funcs);
1995 drm_connector_helper_add(connector, &cdv_intel_dp_connector_helper_funcs);
1997 connector->polled = DRM_CONNECTOR_POLL_HPD;
1998 connector->interlace_allowed = false;
1999 connector->doublescan_allowed = false;
2001 drm_connector_register(connector);
2003 /* Set up the DDC bus. */
2004 switch (output_reg) {
2005 case DP_B:
2006 name = "DPDDC-B";
2007 gma_encoder->ddi_select = (DP_MASK | DDI0_SELECT);
2008 break;
2009 case DP_C:
2010 name = "DPDDC-C";
2011 gma_encoder->ddi_select = (DP_MASK | DDI1_SELECT);
2012 break;
2015 cdv_disable_intel_clock_gating(dev);
2017 cdv_intel_dp_i2c_init(gma_connector, gma_encoder, name);
2018 /* FIXME:fail check */
2019 cdv_intel_dp_add_properties(connector);
2021 if (is_edp(gma_encoder)) {
2022 int ret;
2023 struct edp_power_seq cur;
2024 u32 pp_on, pp_off, pp_div;
2025 u32 pwm_ctrl;
2027 pp_on = REG_READ(PP_CONTROL);
2028 pp_on &= ~PANEL_UNLOCK_MASK;
2029 pp_on |= PANEL_UNLOCK_REGS;
2031 REG_WRITE(PP_CONTROL, pp_on);
2033 pwm_ctrl = REG_READ(BLC_PWM_CTL2);
2034 pwm_ctrl |= PWM_PIPE_B;
2035 REG_WRITE(BLC_PWM_CTL2, pwm_ctrl);
2037 pp_on = REG_READ(PP_ON_DELAYS);
2038 pp_off = REG_READ(PP_OFF_DELAYS);
2039 pp_div = REG_READ(PP_DIVISOR);
2041 /* Pull timing values out of registers */
2042 cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
2043 PANEL_POWER_UP_DELAY_SHIFT;
2045 cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
2046 PANEL_LIGHT_ON_DELAY_SHIFT;
2048 cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
2049 PANEL_LIGHT_OFF_DELAY_SHIFT;
2051 cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
2052 PANEL_POWER_DOWN_DELAY_SHIFT;
2054 cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
2055 PANEL_POWER_CYCLE_DELAY_SHIFT);
2057 DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
2058 cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);
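/* The raw timing fields appear to be in 100us units (100ms for the power
 * cycle field); the divisions below convert them to the millisecond values
 * used for the msleep()-based waits. */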
2061 intel_dp->panel_power_up_delay = cur.t1_t3 / 10;
2062 intel_dp->backlight_on_delay = cur.t8 / 10;
2063 intel_dp->backlight_off_delay = cur.t9 / 10;
2064 intel_dp->panel_power_down_delay = cur.t10 / 10;
2065 intel_dp->panel_power_cycle_delay = (cur.t11_t12 - 1) * 100;
2067 DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
2068 intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
2069 intel_dp->panel_power_cycle_delay);
2071 DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
2072 intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
2075 cdv_intel_edp_panel_vdd_on(gma_encoder);
2076 ret = cdv_intel_dp_aux_native_read(gma_encoder, DP_DPCD_REV,
2077 intel_dp->dpcd,
2078 sizeof(intel_dp->dpcd));
2079 cdv_intel_edp_panel_vdd_off(gma_encoder);
2080 if (ret <= 0) {
2081 /* if this fails, presume the device is a ghost */
2082 DRM_INFO("failed to retrieve link info, disabling eDP\n");
2083 drm_encoder_cleanup(encoder);
2084 cdv_intel_dp_destroy(connector);
2085 goto err_connector;
2086 } else {
2087 DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
2088 intel_dp->dpcd[0], intel_dp->dpcd[1],
2089 intel_dp->dpcd[2], intel_dp->dpcd[3]);
2092 /* The CDV reference driver moves panel backlight setup into the displays that
2093 have a backlight: this is a good idea and one we should probably adopt, however
2094 we need to migrate all the drivers before we can do that */
2095 /*cdv_intel_panel_setup_backlight(dev); */
2097 return;
2099 err_priv:
2100 kfree(gma_connector);
2101 err_connector:
2102 kfree(gma_encoder);