drivers/gpu/drm/mediatek/mtk_dsi.c
/*
 * Copyright (c) 2015 MediaTek Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */
#include <drm/drmP.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_mipi_dsi.h>
#include <drm/drm_panel.h>
#include <drm/drm_of.h>
#include <linux/clk.h>
#include <linux/component.h>
#include <linux/iopoll.h>
#include <linux/irq.h>
#include <linux/of.h>
#include <linux/of_platform.h>
#include <linux/phy/phy.h>
#include <linux/platform_device.h>
#include <video/mipi_display.h>
#include <video/videomode.h>

#include "mtk_drm_ddp_comp.h"
#define DSI_START		0x00

#define DSI_INTEN		0x08

#define DSI_INTSTA		0x0c
#define LPRX_RD_RDY_INT_FLAG	BIT(0)
#define CMD_DONE_INT_FLAG	BIT(1)
#define TE_RDY_INT_FLAG		BIT(2)
#define VM_DONE_INT_FLAG	BIT(3)
#define EXT_TE_RDY_INT_FLAG	BIT(4)
#define DSI_BUSY		BIT(31)

#define DSI_CON_CTRL		0x10
#define DSI_RESET		BIT(0)
#define DSI_EN			BIT(1)

#define DSI_MODE_CTRL		0x14
#define MODE			(3)
#define CMD_MODE		0
#define SYNC_PULSE_MODE		1
#define SYNC_EVENT_MODE		2
#define BURST_MODE		3
#define FRM_MODE		BIT(16)
#define MIX_MODE		BIT(17)

#define DSI_TXRX_CTRL		0x18
#define VC_NUM			BIT(1)
#define LANE_NUM		(0xf << 2)
#define DIS_EOT			BIT(6)
#define NULL_EN			BIT(7)
#define TE_FREERUN		BIT(8)
#define EXT_TE_EN		BIT(9)
#define EXT_TE_EDGE		BIT(10)
#define MAX_RTN_SIZE		(0xf << 12)
#define HSTX_CKLP_EN		BIT(16)

#define DSI_PSCTRL		0x1c
#define DSI_PS_WC		0x3fff
#define DSI_PS_SEL		(3 << 16)
#define PACKED_PS_16BIT_RGB565	(0 << 16)
#define LOOSELY_PS_18BIT_RGB666	(1 << 16)
#define PACKED_PS_18BIT_RGB666	(2 << 16)
#define PACKED_PS_24BIT_RGB888	(3 << 16)

#define DSI_VSA_NL		0x20
#define DSI_VBP_NL		0x24
#define DSI_VFP_NL		0x28
#define DSI_VACT_NL		0x2C
#define DSI_HSA_WC		0x50
#define DSI_HBP_WC		0x54
#define DSI_HFP_WC		0x58

#define DSI_CMDQ_SIZE		0x60
#define CMDQ_SIZE		0x3f

#define DSI_HSTX_CKL_WC		0x64

#define DSI_RX_DATA0		0x74
#define DSI_RX_DATA1		0x78
#define DSI_RX_DATA2		0x7c
#define DSI_RX_DATA3		0x80

#define DSI_RACK		0x84
#define RACK			BIT(0)

#define DSI_PHY_LCCON		0x104
#define LC_HS_TX_EN		BIT(0)
#define LC_ULPM_EN		BIT(1)
#define LC_WAKEUP_EN		BIT(2)

#define DSI_PHY_LD0CON		0x108
#define LD0_HS_TX_EN		BIT(0)
#define LD0_ULPM_EN		BIT(1)
#define LD0_WAKEUP_EN		BIT(2)

#define DSI_PHY_TIMECON0	0x110
#define LPX			(0xff << 0)
#define HS_PREP			(0xff << 8)
#define HS_ZERO			(0xff << 16)
#define HS_TRAIL		(0xff << 24)

#define DSI_PHY_TIMECON1	0x114
#define TA_GO			(0xff << 0)
#define TA_SURE			(0xff << 8)
#define TA_GET			(0xff << 16)
#define DA_HS_EXIT		(0xff << 24)

#define DSI_PHY_TIMECON2	0x118
#define CONT_DET		(0xff << 0)
#define CLK_ZERO		(0xff << 16)
#define CLK_TRAIL		(0xff << 24)

#define DSI_PHY_TIMECON3	0x11c
#define CLK_HS_PREP		(0xff << 0)
#define CLK_HS_POST		(0xff << 8)
#define CLK_HS_EXIT		(0xff << 16)

#define DSI_VM_CMD_CON		0x130
#define VM_CMD_EN		BIT(0)
#define TS_VFP_EN		BIT(5)

#define DSI_CMDQ0		0x180
#define CONFIG			(0xff << 0)
#define SHORT_PACKET		0
#define LONG_PACKET		2
#define BTA			BIT(2)
#define DATA_ID			(0xff << 8)
#define DATA_0			(0xff << 16)
#define DATA_1			(0xff << 24)

#define T_LPX			5
#define T_HS_PREP		6
#define T_HS_TRAIL		8
#define T_HS_EXIT		7
#define T_HS_ZERO		10
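/*
 * NS_TO_CYCLE() rounds up: the number of whole cycles of length 'c' needed
 * to cover a duration 'n', e.g. NS_TO_CYCLE(90, 8) = 12.
 */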
#define NS_TO_CYCLE(n, c)	((n) / (c) + (((n) % (c)) ? 1 : 0))
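/*
 * The message types below are the ones that require a bus turnaround (BTA):
 * mtk_dsi_cmdq() sets the BTA bit for them and mtk_dsi_host_transfer() then
 * waits for the LPRX read-ready interrupt before collecting the reply.
 */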
#define MTK_DSI_HOST_IS_READ(type) \
	((type == MIPI_DSI_GENERIC_READ_REQUEST_0_PARAM) || \
	(type == MIPI_DSI_GENERIC_READ_REQUEST_1_PARAM) || \
	(type == MIPI_DSI_GENERIC_READ_REQUEST_2_PARAM) || \
	(type == MIPI_DSI_DCS_READ))
struct phy;

struct mtk_dsi {
	struct mtk_ddp_comp ddp_comp;
	struct device *dev;
	struct mipi_dsi_host host;
	struct drm_encoder encoder;
	struct drm_connector conn;
	struct drm_panel *panel;
	struct drm_bridge *bridge;
	struct phy *phy;

	void __iomem *regs;

	struct clk *engine_clk;
	struct clk *digital_clk;
	struct clk *hs_clk;

	u32 data_rate;

	unsigned long mode_flags;
	enum mipi_dsi_pixel_format format;
	unsigned int lanes;
	struct videomode vm;
	int refcount;
	bool enabled;
	u32 irq_data;
	wait_queue_head_t irq_wait_queue;
};
static inline struct mtk_dsi *encoder_to_dsi(struct drm_encoder *e)
{
	return container_of(e, struct mtk_dsi, encoder);
}

static inline struct mtk_dsi *connector_to_dsi(struct drm_connector *c)
{
	return container_of(c, struct mtk_dsi, conn);
}

static inline struct mtk_dsi *host_to_dsi(struct mipi_dsi_host *h)
{
	return container_of(h, struct mtk_dsi, host);
}

static void mtk_dsi_mask(struct mtk_dsi *dsi, u32 offset, u32 mask, u32 data)
{
	u32 temp = readl(dsi->regs + offset);

	writel((temp & ~mask) | (data & mask), dsi->regs + offset);
}
static void mtk_dsi_phy_timconfig(struct mtk_dsi *dsi)
{
	u32 timcon0, timcon1, timcon2, timcon3;
	u32 ui, cycle_time;

	ui = 1000 / dsi->data_rate + 0x01;
	cycle_time = 8000 / dsi->data_rate + 0x01;

	timcon0 = T_LPX | T_HS_PREP << 8 | T_HS_ZERO << 16 | T_HS_TRAIL << 24;
	timcon1 = 4 * T_LPX | (3 * T_LPX / 2) << 8 | 5 * T_LPX << 16 |
		  T_HS_EXIT << 24;
	timcon2 = ((NS_TO_CYCLE(0x64, cycle_time) + 0xa) << 24) |
		  (NS_TO_CYCLE(0x150, cycle_time) << 16);
	timcon3 = NS_TO_CYCLE(0x40, cycle_time) | (2 * T_LPX) << 16 |
		  NS_TO_CYCLE(80 + 52 * ui, cycle_time) << 8;
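	/*
	 * With the fixed T_* values above, timcon0 packs LPX = 5, HS_PREP = 6,
	 * HS_ZERO = 10 and HS_TRAIL = 8 into the value 0x080a0605.
	 */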
	writel(timcon0, dsi->regs + DSI_PHY_TIMECON0);
	writel(timcon1, dsi->regs + DSI_PHY_TIMECON1);
	writel(timcon2, dsi->regs + DSI_PHY_TIMECON2);
	writel(timcon3, dsi->regs + DSI_PHY_TIMECON3);
}
static void mtk_dsi_enable(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_EN, DSI_EN);
}

static void mtk_dsi_disable(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_EN, 0);
}

static void mtk_dsi_reset_engine(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_RESET, DSI_RESET);
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_RESET, 0);
}

static void mtk_dsi_clk_ulp_mode_enter(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_ULPM_EN, 0);
}

static void mtk_dsi_clk_ulp_mode_leave(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_ULPM_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_WAKEUP_EN, LC_WAKEUP_EN);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_WAKEUP_EN, 0);
}

static void mtk_dsi_lane0_ulp_mode_enter(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_HS_TX_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_ULPM_EN, 0);
}

static void mtk_dsi_lane0_ulp_mode_leave(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_ULPM_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_WAKEUP_EN, LD0_WAKEUP_EN);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_WAKEUP_EN, 0);
}
static bool mtk_dsi_clk_hs_state(struct mtk_dsi *dsi)
{
	u32 tmp_reg1;

	tmp_reg1 = readl(dsi->regs + DSI_PHY_LCCON);
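	/* LC_HS_TX_EN is BIT(0), so comparing the masked value with 1 works */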
	return ((tmp_reg1 & LC_HS_TX_EN) == 1) ? true : false;
}

static void mtk_dsi_clk_hs_mode(struct mtk_dsi *dsi, bool enter)
{
	if (enter && !mtk_dsi_clk_hs_state(dsi))
		mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, LC_HS_TX_EN);
	else if (!enter && mtk_dsi_clk_hs_state(dsi))
		mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, 0);
}
static void mtk_dsi_set_mode(struct mtk_dsi *dsi)
{
	u32 vid_mode = CMD_MODE;

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO) {
		if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_BURST)
			vid_mode = BURST_MODE;
		else if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
			vid_mode = SYNC_PULSE_MODE;
		else
			vid_mode = SYNC_EVENT_MODE;
	}

	writel(vid_mode, dsi->regs + DSI_MODE_CTRL);
}

static void mtk_dsi_set_vm_cmd(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_VM_CMD_CON, VM_CMD_EN, VM_CMD_EN);
	mtk_dsi_mask(dsi, DSI_VM_CMD_CON, TS_VFP_EN, TS_VFP_EN);
}
static void mtk_dsi_ps_control_vact(struct mtk_dsi *dsi)
{
	struct videomode *vm = &dsi->vm;
	u32 dsi_buf_bpp, ps_wc;
	u32 ps_bpp_mode;

	if (dsi->format == MIPI_DSI_FMT_RGB565)
		dsi_buf_bpp = 2;
	else
		dsi_buf_bpp = 3;

	ps_wc = vm->hactive * dsi_buf_bpp;
	ps_bpp_mode = ps_wc;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB888:
		ps_bpp_mode |= PACKED_PS_24BIT_RGB888;
		break;
	case MIPI_DSI_FMT_RGB666:
		ps_bpp_mode |= PACKED_PS_18BIT_RGB666;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		ps_bpp_mode |= LOOSELY_PS_18BIT_RGB666;
		break;
	case MIPI_DSI_FMT_RGB565:
		ps_bpp_mode |= PACKED_PS_16BIT_RGB565;
		break;
	}

	writel(vm->vactive, dsi->regs + DSI_VACT_NL);
	writel(ps_bpp_mode, dsi->regs + DSI_PSCTRL);
	writel(ps_wc, dsi->regs + DSI_HSTX_CKL_WC);
}
static void mtk_dsi_rxtx_control(struct mtk_dsi *dsi)
{
	u32 tmp_reg;

	switch (dsi->lanes) {
	case 1:
		tmp_reg = 1 << 2;
		break;
	case 2:
		tmp_reg = 3 << 2;
		break;
	case 3:
		tmp_reg = 7 << 2;
		break;
	case 4:
		tmp_reg = 0xf << 2;
		break;
	default:
		tmp_reg = 0xf << 2;
		break;
	}
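	/*
	 * The shifts below are meant to line the DSI device flags up with the
	 * DSI_TXRX_CTRL bits: MIPI_DSI_CLOCK_NON_CONTINUOUS with HSTX_CKLP_EN
	 * and MIPI_DSI_MODE_EOT_PACKET with DIS_EOT (assuming the standard
	 * drm_mipi_dsi.h flag bit positions).
	 */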
	tmp_reg |= (dsi->mode_flags & MIPI_DSI_CLOCK_NON_CONTINUOUS) << 6;
	tmp_reg |= (dsi->mode_flags & MIPI_DSI_MODE_EOT_PACKET) >> 3;

	writel(tmp_reg, dsi->regs + DSI_TXRX_CTRL);
}
static void mtk_dsi_ps_control(struct mtk_dsi *dsi)
{
	u32 dsi_tmp_buf_bpp;
	u32 tmp_reg;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB888:
		tmp_reg = PACKED_PS_24BIT_RGB888;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB666:
		tmp_reg = LOOSELY_PS_18BIT_RGB666;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		tmp_reg = PACKED_PS_18BIT_RGB666;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB565:
		tmp_reg = PACKED_PS_16BIT_RGB565;
		dsi_tmp_buf_bpp = 2;
		break;
	default:
		tmp_reg = PACKED_PS_24BIT_RGB888;
		dsi_tmp_buf_bpp = 3;
		break;
	}

	tmp_reg += dsi->vm.hactive * dsi_tmp_buf_bpp & DSI_PS_WC;
	writel(tmp_reg, dsi->regs + DSI_PSCTRL);
}
static void mtk_dsi_config_vdo_timing(struct mtk_dsi *dsi)
{
	u32 horizontal_sync_active_byte;
	u32 horizontal_backporch_byte;
	u32 horizontal_frontporch_byte;
	u32 dsi_tmp_buf_bpp;

	struct videomode *vm = &dsi->vm;

	if (dsi->format == MIPI_DSI_FMT_RGB565)
		dsi_tmp_buf_bpp = 2;
	else
		dsi_tmp_buf_bpp = 3;

	writel(vm->vsync_len, dsi->regs + DSI_VSA_NL);
	writel(vm->vback_porch, dsi->regs + DSI_VBP_NL);
	writel(vm->vfront_porch, dsi->regs + DSI_VFP_NL);
	writel(vm->vactive, dsi->regs + DSI_VACT_NL);
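	/*
	 * The horizontal timings are programmed as byte counts. The constants
	 * subtracted below (10 and 12) presumably cover the DSI packet header
	 * and checksum overhead the controller inserts in each blanking
	 * period; that interpretation is an assumption, not something this
	 * file documents.
	 */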
	horizontal_sync_active_byte = (vm->hsync_len * dsi_tmp_buf_bpp - 10);

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
		horizontal_backporch_byte =
			(vm->hback_porch * dsi_tmp_buf_bpp - 10);
	else
		horizontal_backporch_byte = ((vm->hback_porch + vm->hsync_len) *
			dsi_tmp_buf_bpp - 10);

	horizontal_frontporch_byte = (vm->hfront_porch * dsi_tmp_buf_bpp - 12);

	writel(horizontal_sync_active_byte, dsi->regs + DSI_HSA_WC);
	writel(horizontal_backporch_byte, dsi->regs + DSI_HBP_WC);
	writel(horizontal_frontporch_byte, dsi->regs + DSI_HFP_WC);

	mtk_dsi_ps_control(dsi);
}
static void mtk_dsi_start(struct mtk_dsi *dsi)
{
	writel(0, dsi->regs + DSI_START);
	writel(1, dsi->regs + DSI_START);
}

static void mtk_dsi_stop(struct mtk_dsi *dsi)
{
	writel(0, dsi->regs + DSI_START);
}

static void mtk_dsi_set_cmd_mode(struct mtk_dsi *dsi)
{
	writel(CMD_MODE, dsi->regs + DSI_MODE_CTRL);
}

static void mtk_dsi_set_interrupt_enable(struct mtk_dsi *dsi)
{
	u32 inten = LPRX_RD_RDY_INT_FLAG | CMD_DONE_INT_FLAG | VM_DONE_INT_FLAG;

	writel(inten, dsi->regs + DSI_INTEN);
}
static void mtk_dsi_irq_data_set(struct mtk_dsi *dsi, u32 irq_bit)
{
	dsi->irq_data |= irq_bit;
}

static void mtk_dsi_irq_data_clear(struct mtk_dsi *dsi, u32 irq_bit)
{
	dsi->irq_data &= ~irq_bit;
}

static s32 mtk_dsi_wait_for_irq_done(struct mtk_dsi *dsi, u32 irq_flag,
				     unsigned int timeout)
{
	s32 ret = 0;
	unsigned long jiffies = msecs_to_jiffies(timeout);

	ret = wait_event_interruptible_timeout(dsi->irq_wait_queue,
					       dsi->irq_data & irq_flag,
					       jiffies);
	if (ret == 0) {
		DRM_WARN("Wait DSI IRQ(0x%08x) Timeout\n", irq_flag);

		mtk_dsi_enable(dsi);
		mtk_dsi_reset_engine(dsi);
	}

	return ret;
}
static irqreturn_t mtk_dsi_irq(int irq, void *dev_id)
{
	struct mtk_dsi *dsi = dev_id;
	u32 status, tmp;
	u32 flag = LPRX_RD_RDY_INT_FLAG | CMD_DONE_INT_FLAG | VM_DONE_INT_FLAG;

	status = readl(dsi->regs + DSI_INTSTA) & flag;

	if (status) {
		do {
			mtk_dsi_mask(dsi, DSI_RACK, RACK, RACK);
			tmp = readl(dsi->regs + DSI_INTSTA);
		} while (tmp & DSI_BUSY);

		mtk_dsi_mask(dsi, DSI_INTSTA, status, 0);
		mtk_dsi_irq_data_set(dsi, status);
		wake_up_interruptible(&dsi->irq_wait_queue);
	}

	return IRQ_HANDLED;
}
static s32 mtk_dsi_switch_to_cmd_mode(struct mtk_dsi *dsi, u8 irq_flag, u32 t)
{
	mtk_dsi_irq_data_clear(dsi, irq_flag);
	mtk_dsi_set_cmd_mode(dsi);

	if (!mtk_dsi_wait_for_irq_done(dsi, irq_flag, t)) {
		DRM_ERROR("failed to switch cmd mode\n");
		return -ETIME;
	} else {
		return 0;
	}
}
static int mtk_dsi_poweron(struct mtk_dsi *dsi)
{
	struct device *dev = dsi->dev;
	int ret;
	u64 pixel_clock, total_bits;
	u32 htotal, htotal_bits, bit_per_pixel, overhead_cycles, overhead_bits;

	if (++dsi->refcount != 1)
		return 0;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB565:
		bit_per_pixel = 16;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		bit_per_pixel = 18;
		break;
	case MIPI_DSI_FMT_RGB666:
	case MIPI_DSI_FMT_RGB888:
	default:
		bit_per_pixel = 24;
		break;
	}

	/*
	 * htotal_time = htotal * byte_per_pixel / num_lanes
	 * overhead_time = lpx + hs_prepare + hs_zero + hs_trail + hs_exit
	 * mipi_ratio = (htotal_time + overhead_time) / htotal_time
	 * data_rate = pixel_clock * bit_per_pixel * mipi_ratio / num_lanes;
	 */
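	/*
	 * Worked example with hypothetical numbers (not taken from any real
	 * panel): 4 lanes, RGB888 (24 bpp) and htotal = 2000 give
	 * htotal_bits = 48000, overhead_bits = 36 * 4 * 8 = 1152 and
	 * total_bits = 49152, so data_rate = pixel_clock * 49152 / 8000,
	 * i.e. about 6.14 times the pixel clock per lane.
	 */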
	pixel_clock = dsi->vm.pixelclock;
	htotal = dsi->vm.hactive + dsi->vm.hback_porch + dsi->vm.hfront_porch +
			dsi->vm.hsync_len;
	htotal_bits = htotal * bit_per_pixel;

	overhead_cycles = T_LPX + T_HS_PREP + T_HS_ZERO + T_HS_TRAIL +
			T_HS_EXIT;
	overhead_bits = overhead_cycles * dsi->lanes * 8;
	total_bits = htotal_bits + overhead_bits;

	dsi->data_rate = DIV_ROUND_UP_ULL(pixel_clock * total_bits,
					  htotal * dsi->lanes);
	ret = clk_set_rate(dsi->hs_clk, dsi->data_rate);
	if (ret < 0) {
		dev_err(dev, "Failed to set data rate: %d\n", ret);
		goto err_refcount;
	}

	phy_power_on(dsi->phy);

	ret = clk_prepare_enable(dsi->engine_clk);
	if (ret < 0) {
		dev_err(dev, "Failed to enable engine clock: %d\n", ret);
		goto err_phy_power_off;
	}

	ret = clk_prepare_enable(dsi->digital_clk);
	if (ret < 0) {
		dev_err(dev, "Failed to enable digital clock: %d\n", ret);
		goto err_disable_engine_clk;
	}

	mtk_dsi_enable(dsi);
	mtk_dsi_reset_engine(dsi);
	mtk_dsi_phy_timconfig(dsi);

	mtk_dsi_rxtx_control(dsi);
	mtk_dsi_ps_control_vact(dsi);
	mtk_dsi_set_vm_cmd(dsi);
	mtk_dsi_config_vdo_timing(dsi);
	mtk_dsi_set_interrupt_enable(dsi);

	mtk_dsi_clk_ulp_mode_leave(dsi);
	mtk_dsi_lane0_ulp_mode_leave(dsi);
	mtk_dsi_clk_hs_mode(dsi, 0);

	if (dsi->panel) {
		if (drm_panel_prepare(dsi->panel)) {
			DRM_ERROR("failed to prepare the panel\n");
			goto err_disable_digital_clk;
		}
	}

	return 0;
err_disable_digital_clk:
	clk_disable_unprepare(dsi->digital_clk);
err_disable_engine_clk:
	clk_disable_unprepare(dsi->engine_clk);
err_phy_power_off:
	phy_power_off(dsi->phy);
err_refcount:
	dsi->refcount--;
	return ret;
}
static void mtk_dsi_poweroff(struct mtk_dsi *dsi)
{
	if (WARN_ON(dsi->refcount == 0))
		return;

	if (--dsi->refcount != 0)
		return;

	if (!mtk_dsi_switch_to_cmd_mode(dsi, VM_DONE_INT_FLAG, 500)) {
		if (dsi->panel) {
			if (drm_panel_unprepare(dsi->panel)) {
				DRM_ERROR("failed to unprepare the panel\n");
				return;
			}
		}
	}

	mtk_dsi_reset_engine(dsi);
	mtk_dsi_lane0_ulp_mode_enter(dsi);
	mtk_dsi_clk_ulp_mode_enter(dsi);

	mtk_dsi_disable(dsi);

	clk_disable_unprepare(dsi->engine_clk);
	clk_disable_unprepare(dsi->digital_clk);

	phy_power_off(dsi->phy);
}
static void mtk_output_dsi_enable(struct mtk_dsi *dsi)
{
	int ret;

	if (dsi->enabled)
		return;

	ret = mtk_dsi_poweron(dsi);
	if (ret < 0) {
		DRM_ERROR("failed to power on dsi\n");
		return;
	}

	mtk_dsi_set_mode(dsi);
	mtk_dsi_clk_hs_mode(dsi, 1);

	mtk_dsi_start(dsi);

	if (dsi->panel) {
		if (drm_panel_enable(dsi->panel)) {
			DRM_ERROR("failed to enable the panel\n");
			goto err_dsi_power_off;
		}
	}

	dsi->enabled = true;

	return;
err_dsi_power_off:
	mtk_dsi_stop(dsi);
	mtk_dsi_poweroff(dsi);
}
static void mtk_output_dsi_disable(struct mtk_dsi *dsi)
{
	if (!dsi->enabled)
		return;

	if (dsi->panel) {
		if (drm_panel_disable(dsi->panel)) {
			DRM_ERROR("failed to disable the panel\n");
			return;
		}
	}

	mtk_dsi_stop(dsi);
	mtk_dsi_poweroff(dsi);

	dsi->enabled = false;
}
static void mtk_dsi_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
}

static const struct drm_encoder_funcs mtk_dsi_encoder_funcs = {
	.destroy = mtk_dsi_encoder_destroy,
};

static bool mtk_dsi_encoder_mode_fixup(struct drm_encoder *encoder,
				       const struct drm_display_mode *mode,
				       struct drm_display_mode *adjusted_mode)
{
	return true;
}

static void mtk_dsi_encoder_mode_set(struct drm_encoder *encoder,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	drm_display_mode_to_videomode(adjusted, &dsi->vm);
}

static void mtk_dsi_encoder_disable(struct drm_encoder *encoder)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	mtk_output_dsi_disable(dsi);
}

static void mtk_dsi_encoder_enable(struct drm_encoder *encoder)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	mtk_output_dsi_enable(dsi);
}

static int mtk_dsi_connector_get_modes(struct drm_connector *connector)
{
	struct mtk_dsi *dsi = connector_to_dsi(connector);

	return drm_panel_get_modes(dsi->panel);
}
static const struct drm_encoder_helper_funcs mtk_dsi_encoder_helper_funcs = {
	.mode_fixup = mtk_dsi_encoder_mode_fixup,
	.mode_set = mtk_dsi_encoder_mode_set,
	.disable = mtk_dsi_encoder_disable,
	.enable = mtk_dsi_encoder_enable,
};

static const struct drm_connector_funcs mtk_dsi_connector_funcs = {
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = drm_connector_cleanup,
	.reset = drm_atomic_helper_connector_reset,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};

static const struct drm_connector_helper_funcs
	mtk_dsi_connector_helper_funcs = {
	.get_modes = mtk_dsi_connector_get_modes,
};
static int mtk_dsi_create_connector(struct drm_device *drm, struct mtk_dsi *dsi)
{
	int ret;

	ret = drm_connector_init(drm, &dsi->conn, &mtk_dsi_connector_funcs,
				 DRM_MODE_CONNECTOR_DSI);
	if (ret) {
		DRM_ERROR("Failed to initialize connector\n");
		return ret;
	}

	drm_connector_helper_add(&dsi->conn, &mtk_dsi_connector_helper_funcs);

	dsi->conn.dpms = DRM_MODE_DPMS_OFF;
	drm_connector_attach_encoder(&dsi->conn, &dsi->encoder);

	if (dsi->panel) {
		ret = drm_panel_attach(dsi->panel, &dsi->conn);
		if (ret) {
			DRM_ERROR("Failed to attach panel to drm\n");
			goto err_connector_cleanup;
		}
	}

	return 0;

err_connector_cleanup:
	drm_connector_cleanup(&dsi->conn);
	return ret;
}
static int mtk_dsi_create_conn_enc(struct drm_device *drm, struct mtk_dsi *dsi)
{
	int ret;

	ret = drm_encoder_init(drm, &dsi->encoder, &mtk_dsi_encoder_funcs,
			       DRM_MODE_ENCODER_DSI, NULL);
	if (ret) {
		DRM_ERROR("Failed to initialize encoder\n");
		return ret;
	}
	drm_encoder_helper_add(&dsi->encoder, &mtk_dsi_encoder_helper_funcs);

	/*
	 * Currently display data paths are statically assigned to a crtc each.
	 * crtc 0 is OVL0 -> COLOR0 -> AAL -> OD -> RDMA0 -> UFOE -> DSI0
	 */
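	/* possible_crtcs is a bitmask, so the value 1 selects CRTC index 0 only */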
	dsi->encoder.possible_crtcs = 1;

	/* If there's a bridge, attach to it and let it create the connector */
	ret = drm_bridge_attach(&dsi->encoder, dsi->bridge, NULL);
	if (ret) {
		DRM_ERROR("Failed to attach bridge to drm\n");

		/* Otherwise create our own connector and attach to a panel */
		ret = mtk_dsi_create_connector(drm, dsi);
		if (ret)
			goto err_encoder_cleanup;
	}

	return 0;

err_encoder_cleanup:
	drm_encoder_cleanup(&dsi->encoder);
	return ret;
}
static void mtk_dsi_destroy_conn_enc(struct mtk_dsi *dsi)
{
	drm_encoder_cleanup(&dsi->encoder);
	/* Skip connector cleanup if creation was delegated to the bridge */
	if (dsi->conn.dev)
		drm_connector_cleanup(&dsi->conn);
}
static void mtk_dsi_ddp_start(struct mtk_ddp_comp *comp)
{
	struct mtk_dsi *dsi = container_of(comp, struct mtk_dsi, ddp_comp);

	mtk_dsi_poweron(dsi);
}

static void mtk_dsi_ddp_stop(struct mtk_ddp_comp *comp)
{
	struct mtk_dsi *dsi = container_of(comp, struct mtk_dsi, ddp_comp);

	mtk_dsi_poweroff(dsi);
}

static const struct mtk_ddp_comp_funcs mtk_dsi_funcs = {
	.start = mtk_dsi_ddp_start,
	.stop = mtk_dsi_ddp_stop,
};
static int mtk_dsi_host_attach(struct mipi_dsi_host *host,
			       struct mipi_dsi_device *device)
{
	struct mtk_dsi *dsi = host_to_dsi(host);

	dsi->lanes = device->lanes;
	dsi->format = device->format;
	dsi->mode_flags = device->mode_flags;

	if (dsi->conn.dev)
		drm_helper_hpd_irq_event(dsi->conn.dev);

	return 0;
}

static int mtk_dsi_host_detach(struct mipi_dsi_host *host,
			       struct mipi_dsi_device *device)
{
	struct mtk_dsi *dsi = host_to_dsi(host);

	if (dsi->conn.dev)
		drm_helper_hpd_irq_event(dsi->conn.dev);

	return 0;
}
static void mtk_dsi_wait_for_idle(struct mtk_dsi *dsi)
{
	int ret;
	u32 val;

	ret = readl_poll_timeout(dsi->regs + DSI_INTSTA, val, !(val & DSI_BUSY),
				 4, 2000000);
	if (ret) {
		DRM_WARN("timed out waiting for dsi to become idle\n");

		mtk_dsi_enable(dsi);
		mtk_dsi_reset_engine(dsi);
	}
}
static u32 mtk_dsi_recv_cnt(u8 type, u8 *read_data)
{
	switch (type) {
	case MIPI_DSI_RX_GENERIC_SHORT_READ_RESPONSE_1BYTE:
	case MIPI_DSI_RX_DCS_SHORT_READ_RESPONSE_1BYTE:
		return 1;
	case MIPI_DSI_RX_GENERIC_SHORT_READ_RESPONSE_2BYTE:
	case MIPI_DSI_RX_DCS_SHORT_READ_RESPONSE_2BYTE:
		return 2;
	case MIPI_DSI_RX_GENERIC_LONG_READ_RESPONSE:
	case MIPI_DSI_RX_DCS_LONG_READ_RESPONSE:
		return read_data[1] + read_data[2] * 16;
	case MIPI_DSI_RX_ACKNOWLEDGE_AND_ERROR_REPORT:
		DRM_INFO("type is 0x02, try again\n");
		break;
	default:
		DRM_INFO("type(0x%x) not recognized\n", type);
		break;
	}

	return 0;
}
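/*
 * mtk_dsi_cmdq() packs one MIPI DSI message into the command queue registers.
 * As a hypothetical example (not taken from any real panel sequence), a two
 * byte DCS short write such as {0x53, 0x24} with msg->type = 0x15
 * (MIPI_DSI_DCS_SHORT_WRITE_PARAM) takes the short-packet path below: the two
 * payload bytes are written at DSI_CMDQ0 + 2 and + 3, the masked write then
 * stores (0x15 << 8) | SHORT_PACKET in the CONFIG/DATA_ID fields, and
 * CMDQ_SIZE is set to 1.
 */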
static void mtk_dsi_cmdq(struct mtk_dsi *dsi, const struct mipi_dsi_msg *msg)
{
	const char *tx_buf = msg->tx_buf;
	u8 config, cmdq_size, cmdq_off, type = msg->type;
	u32 reg_val, cmdq_mask, i;

	if (MTK_DSI_HOST_IS_READ(type))
		config = BTA;
	else
		config = (msg->tx_len > 2) ? LONG_PACKET : SHORT_PACKET;

	if (msg->tx_len > 2) {
		cmdq_size = 1 + (msg->tx_len + 3) / 4;
		cmdq_off = 4;
		cmdq_mask = CONFIG | DATA_ID | DATA_0 | DATA_1;
		reg_val = (msg->tx_len << 16) | (type << 8) | config;
	} else {
		cmdq_size = 1;
		cmdq_off = 2;
		cmdq_mask = CONFIG | DATA_ID;
		reg_val = (type << 8) | config;
	}

	for (i = 0; i < msg->tx_len; i++)
		writeb(tx_buf[i], dsi->regs + DSI_CMDQ0 + cmdq_off + i);

	mtk_dsi_mask(dsi, DSI_CMDQ0, cmdq_mask, reg_val);
	mtk_dsi_mask(dsi, DSI_CMDQ_SIZE, CMDQ_SIZE, cmdq_size);
}
static ssize_t mtk_dsi_host_send_cmd(struct mtk_dsi *dsi,
				     const struct mipi_dsi_msg *msg, u8 flag)
{
	mtk_dsi_wait_for_idle(dsi);
	mtk_dsi_irq_data_clear(dsi, flag);
	mtk_dsi_cmdq(dsi, msg);
	mtk_dsi_start(dsi);

	if (!mtk_dsi_wait_for_irq_done(dsi, flag, 2000))
		return -ETIME;
	else
		return 0;
}
static ssize_t mtk_dsi_host_transfer(struct mipi_dsi_host *host,
				     const struct mipi_dsi_msg *msg)
{
	struct mtk_dsi *dsi = host_to_dsi(host);
	u32 recv_cnt, i;
	u8 read_data[16];
	void *src_addr;
	u8 irq_flag = CMD_DONE_INT_FLAG;

	if (readl(dsi->regs + DSI_MODE_CTRL) & MODE) {
		DRM_ERROR("dsi engine is not in command mode\n");
		return -EINVAL;
	}

	if (MTK_DSI_HOST_IS_READ(msg->type))
		irq_flag |= LPRX_RD_RDY_INT_FLAG;

	if (mtk_dsi_host_send_cmd(dsi, msg, irq_flag) < 0)
		return -ETIME;

	if (!MTK_DSI_HOST_IS_READ(msg->type))
		return 0;

	if (!msg->rx_buf) {
		DRM_ERROR("dsi receive buffer is NULL\n");
		return -EINVAL;
	}

	for (i = 0; i < 16; i++)
		*(read_data + i) = readb(dsi->regs + DSI_RX_DATA0 + i);

	recv_cnt = mtk_dsi_recv_cnt(read_data[0], read_data);

	if (recv_cnt > 2)
		src_addr = &read_data[4];
	else
		src_addr = &read_data[1];

	if (recv_cnt > 10)
		recv_cnt = 10;

	if (recv_cnt > msg->rx_len)
		recv_cnt = msg->rx_len;

	if (recv_cnt)
		memcpy(msg->rx_buf, src_addr, recv_cnt);

	DRM_INFO("dsi got %d bytes of data from the panel (cmd 0x%x)\n",
		 recv_cnt, *((u8 *)(msg->tx_buf)));

	return recv_cnt;
}
static const struct mipi_dsi_host_ops mtk_dsi_ops = {
	.attach = mtk_dsi_host_attach,
	.detach = mtk_dsi_host_detach,
	.transfer = mtk_dsi_host_transfer,
};
static int mtk_dsi_bind(struct device *dev, struct device *master, void *data)
{
	int ret;
	struct drm_device *drm = data;
	struct mtk_dsi *dsi = dev_get_drvdata(dev);

	ret = mtk_ddp_comp_register(drm, &dsi->ddp_comp);
	if (ret < 0) {
		dev_err(dev, "Failed to register component %pOF: %d\n",
			dev->of_node, ret);
		return ret;
	}

	ret = mipi_dsi_host_register(&dsi->host);
	if (ret < 0) {
		dev_err(dev, "failed to register DSI host: %d\n", ret);
		goto err_ddp_comp_unregister;
	}

	ret = mtk_dsi_create_conn_enc(drm, dsi);
	if (ret) {
		DRM_ERROR("Encoder create failed with %d\n", ret);
		goto err_unregister;
	}

	return 0;

err_unregister:
	mipi_dsi_host_unregister(&dsi->host);
err_ddp_comp_unregister:
	mtk_ddp_comp_unregister(drm, &dsi->ddp_comp);
	return ret;
}
static void mtk_dsi_unbind(struct device *dev, struct device *master,
			   void *data)
{
	struct drm_device *drm = data;
	struct mtk_dsi *dsi = dev_get_drvdata(dev);

	mtk_dsi_destroy_conn_enc(dsi);
	mipi_dsi_host_unregister(&dsi->host);
	mtk_ddp_comp_unregister(drm, &dsi->ddp_comp);
}

static const struct component_ops mtk_dsi_component_ops = {
	.bind = mtk_dsi_bind,
	.unbind = mtk_dsi_unbind,
};
static int mtk_dsi_probe(struct platform_device *pdev)
{
	struct mtk_dsi *dsi;
	struct device *dev = &pdev->dev;
	struct resource *regs;
	int irq_num;
	int comp_id;
	int ret;

	dsi = devm_kzalloc(dev, sizeof(*dsi), GFP_KERNEL);
	if (!dsi)
		return -ENOMEM;

	dsi->host.ops = &mtk_dsi_ops;
	dsi->host.dev = dev;

	ret = drm_of_find_panel_or_bridge(dev->of_node, 0, 0,
					  &dsi->panel, &dsi->bridge);
	if (ret)
		return ret;

	dsi->engine_clk = devm_clk_get(dev, "engine");
	if (IS_ERR(dsi->engine_clk)) {
		ret = PTR_ERR(dsi->engine_clk);
		dev_err(dev, "Failed to get engine clock: %d\n", ret);
		return ret;
	}

	dsi->digital_clk = devm_clk_get(dev, "digital");
	if (IS_ERR(dsi->digital_clk)) {
		ret = PTR_ERR(dsi->digital_clk);
		dev_err(dev, "Failed to get digital clock: %d\n", ret);
		return ret;
	}

	dsi->hs_clk = devm_clk_get(dev, "hs");
	if (IS_ERR(dsi->hs_clk)) {
		ret = PTR_ERR(dsi->hs_clk);
		dev_err(dev, "Failed to get hs clock: %d\n", ret);
		return ret;
	}

	regs = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dsi->regs = devm_ioremap_resource(dev, regs);
	if (IS_ERR(dsi->regs)) {
		ret = PTR_ERR(dsi->regs);
		dev_err(dev, "Failed to ioremap memory: %d\n", ret);
		return ret;
	}

	dsi->phy = devm_phy_get(dev, "dphy");
	if (IS_ERR(dsi->phy)) {
		ret = PTR_ERR(dsi->phy);
		dev_err(dev, "Failed to get MIPI-DPHY: %d\n", ret);
		return ret;
	}

	comp_id = mtk_ddp_comp_get_id(dev->of_node, MTK_DSI);
	if (comp_id < 0) {
		dev_err(dev, "Failed to identify by alias: %d\n", comp_id);
		return comp_id;
	}

	ret = mtk_ddp_comp_init(dev, dev->of_node, &dsi->ddp_comp, comp_id,
				&mtk_dsi_funcs);
	if (ret) {
		dev_err(dev, "Failed to initialize component: %d\n", ret);
		return ret;
	}

	irq_num = platform_get_irq(pdev, 0);
	if (irq_num < 0) {
		dev_err(&pdev->dev, "failed to request dsi irq resource\n");
		return -EPROBE_DEFER;
	}

	irq_set_status_flags(irq_num, IRQ_TYPE_LEVEL_LOW);
	ret = devm_request_irq(&pdev->dev, irq_num, mtk_dsi_irq,
			       IRQF_TRIGGER_LOW, dev_name(&pdev->dev), dsi);
	if (ret) {
		dev_err(&pdev->dev, "failed to request mediatek dsi irq\n");
		return -EPROBE_DEFER;
	}

	init_waitqueue_head(&dsi->irq_wait_queue);

	platform_set_drvdata(pdev, dsi);

	return component_add(&pdev->dev, &mtk_dsi_component_ops);
}
static int mtk_dsi_remove(struct platform_device *pdev)
{
	struct mtk_dsi *dsi = platform_get_drvdata(pdev);

	mtk_output_dsi_disable(dsi);
	component_del(&pdev->dev, &mtk_dsi_component_ops);

	return 0;
}
static const struct of_device_id mtk_dsi_of_match[] = {
	{ .compatible = "mediatek,mt2701-dsi" },
	{ .compatible = "mediatek,mt8173-dsi" },
	{ },
};
struct platform_driver mtk_dsi_driver = {
	.probe = mtk_dsi_probe,
	.remove = mtk_dsi_remove,
	.driver = {
		.name = "mtk-dsi",
		.of_match_table = mtk_dsi_of_match,
	},
};