// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015 MediaTek Inc.
 */

#include <linux/clk.h>
#include <linux/component.h>
#include <linux/iopoll.h>
#include <linux/irq.h>
#include <linux/of.h>
#include <linux/of_platform.h>
#include <linux/phy/phy.h>
#include <linux/platform_device.h>

#include <video/mipi_display.h>
#include <video/videomode.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_bridge.h>
#include <drm/drm_mipi_dsi.h>
#include <drm/drm_of.h>
#include <drm/drm_panel.h>
#include <drm/drm_print.h>
#include <drm/drm_probe_helper.h>

#include "mtk_drm_ddp_comp.h"
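
/* DSI controller register offsets and bit fields used by the code below. */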
#define DSI_START		0x00

#define DSI_INTEN		0x08

#define DSI_INTSTA		0x0c
#define LPRX_RD_RDY_INT_FLAG		BIT(0)
#define CMD_DONE_INT_FLAG		BIT(1)
#define TE_RDY_INT_FLAG			BIT(2)
#define VM_DONE_INT_FLAG		BIT(3)
#define EXT_TE_RDY_INT_FLAG		BIT(4)
#define DSI_BUSY			BIT(31)

#define DSI_CON_CTRL		0x10
#define DSI_RESET			BIT(0)
#define DSI_EN				BIT(1)
#define DPHY_RESET			BIT(2)

#define DSI_MODE_CTRL		0x14
#define MODE				(3 << 0)
#define CMD_MODE			0
#define SYNC_PULSE_MODE			1
#define SYNC_EVENT_MODE			2
#define BURST_MODE			3
#define FRM_MODE			BIT(16)
#define MIX_MODE			BIT(17)
#define DSI_TXRX_CTRL		0x18
#define LANE_NUM			(0xf << 2)
#define DIS_EOT				BIT(6)
#define NULL_EN				BIT(7)
#define TE_FREERUN			BIT(8)
#define EXT_TE_EN			BIT(9)
#define EXT_TE_EDGE			BIT(10)
#define MAX_RTN_SIZE			(0xf << 12)
#define HSTX_CKLP_EN			BIT(16)

#define DSI_PSCTRL		0x1c
#define DSI_PS_WC			0x3fff
#define DSI_PS_SEL			(3 << 16)
#define PACKED_PS_16BIT_RGB565		(0 << 16)
#define LOOSELY_PS_18BIT_RGB666		(1 << 16)
#define PACKED_PS_18BIT_RGB666		(2 << 16)
#define PACKED_PS_24BIT_RGB888		(3 << 16)

#define DSI_VSA_NL		0x20
#define DSI_VBP_NL		0x24
#define DSI_VFP_NL		0x28
#define DSI_VACT_NL		0x2C
#define DSI_SIZE_CON		0x38
#define DSI_HSA_WC		0x50
#define DSI_HBP_WC		0x54
#define DSI_HFP_WC		0x58

#define DSI_CMDQ_SIZE		0x60
#define CMDQ_SIZE			0x3f

#define DSI_HSTX_CKL_WC		0x64

#define DSI_RX_DATA0		0x74
#define DSI_RX_DATA1		0x78
#define DSI_RX_DATA2		0x7c
#define DSI_RX_DATA3		0x80

#define DSI_RACK		0x84
#define RACK				BIT(0)
#define DSI_PHY_LCCON		0x104
#define LC_HS_TX_EN			BIT(0)
#define LC_ULPM_EN			BIT(1)
#define LC_WAKEUP_EN			BIT(2)

#define DSI_PHY_LD0CON		0x108
#define LD0_HS_TX_EN			BIT(0)
#define LD0_ULPM_EN			BIT(1)
#define LD0_WAKEUP_EN			BIT(2)

#define DSI_PHY_TIMECON0	0x110
#define LPX				(0xff << 0)
#define HS_PREP				(0xff << 8)
#define HS_ZERO				(0xff << 16)
#define HS_TRAIL			(0xff << 24)

#define DSI_PHY_TIMECON1	0x114
#define TA_GO				(0xff << 0)
#define TA_SURE				(0xff << 8)
#define TA_GET				(0xff << 16)
#define DA_HS_EXIT			(0xff << 24)

#define DSI_PHY_TIMECON2	0x118
#define CONT_DET			(0xff << 0)
#define CLK_ZERO			(0xff << 16)
#define CLK_TRAIL			(0xff << 24)

#define DSI_PHY_TIMECON3	0x11c
#define CLK_HS_PREP			(0xff << 0)
#define CLK_HS_POST			(0xff << 8)
#define CLK_HS_EXIT			(0xff << 16)

#define DSI_VM_CMD_CON		0x130
#define VM_CMD_EN			BIT(0)
#define TS_VFP_EN			BIT(5)

#define DSI_SHADOW_DEBUG	0x190U
#define FORCE_COMMIT			BIT(0)
#define BYPASS_SHADOW			BIT(1)

#define CONFIG				(0xff << 0)
#define SHORT_PACKET			0
#define LONG_PACKET			2
#define BTA				BIT(2)
#define DATA_ID				(0xff << 8)
#define DATA_0				(0xff << 16)
#define DATA_1				(0xff << 24)
#define NS_TO_CYCLE(n, c)    ((n) / (c) + (((n) % (c)) ? 1 : 0))

#define MTK_DSI_HOST_IS_READ(type) \
	((type == MIPI_DSI_GENERIC_READ_REQUEST_0_PARAM) || \
	 (type == MIPI_DSI_GENERIC_READ_REQUEST_1_PARAM) || \
	 (type == MIPI_DSI_GENERIC_READ_REQUEST_2_PARAM) || \
	 (type == MIPI_DSI_DCS_READ))
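
/*
 * D-PHY timing parameters, expressed (roughly) in lane byte-clock cycles and
 * programmed into DSI_PHY_TIMECON0..3 by mtk_dsi_phy_timconfig().
 */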
struct mtk_phy_timing {
	u32 lpx;
	u32 da_hs_prepare;
	u32 da_hs_zero;
	u32 da_hs_trail;

	u32 ta_go;
	u32 ta_sure;
	u32 ta_get;
	u32 da_hs_exit;

	u32 clk_hs_zero;
	u32 clk_hs_trail;

	u32 clk_hs_prepare;
	u32 clk_hs_post;
	u32 clk_hs_exit;
};
struct mtk_dsi_driver_data {
	const u32 reg_cmdq_off;
	bool has_shadow_ctl;
	bool has_size_ctl;
};
struct mtk_dsi {
	struct mtk_ddp_comp ddp_comp;
	struct mipi_dsi_host host;
	struct drm_encoder encoder;
	struct drm_connector conn;
	struct drm_panel *panel;
	struct drm_bridge *bridge;
	struct phy *phy;

	void __iomem *regs;

	struct clk *engine_clk;
	struct clk *digital_clk;
	struct clk *hs_clk;

	u32 data_rate;

	unsigned long mode_flags;
	enum mipi_dsi_pixel_format format;
	unsigned int lanes;
	struct videomode vm;
	struct mtk_phy_timing phy_timing;
	int refcount;
	bool enabled;
	u32 irq_data;
	wait_queue_head_t irq_wait_queue;
	const struct mtk_dsi_driver_data *driver_data;
};
static inline struct mtk_dsi *encoder_to_dsi(struct drm_encoder *e)
{
	return container_of(e, struct mtk_dsi, encoder);
}

static inline struct mtk_dsi *connector_to_dsi(struct drm_connector *c)
{
	return container_of(c, struct mtk_dsi, conn);
}

static inline struct mtk_dsi *host_to_dsi(struct mipi_dsi_host *h)
{
	return container_of(h, struct mtk_dsi, host);
}
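
/* Read-modify-write helper: update only the bits selected by @mask. */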
static void mtk_dsi_mask(struct mtk_dsi *dsi, u32 offset, u32 mask, u32 data)
{
	u32 temp = readl(dsi->regs + offset);

	writel((temp & ~mask) | (data & mask), dsi->regs + offset);
}
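
/*
 * Derive the HS/LP transition timings from the per-lane data rate and
 * program them into DSI_PHY_TIMECON0..3.  The constants are the driver's
 * chosen margins on top of the D-PHY minimum timings.
 */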
static void mtk_dsi_phy_timconfig(struct mtk_dsi *dsi)
{
	u32 timcon0, timcon1, timcon2, timcon3;
	u32 data_rate_mhz = DIV_ROUND_UP(dsi->data_rate, 1000000);
	struct mtk_phy_timing *timing = &dsi->phy_timing;

	timing->lpx = (60 * data_rate_mhz / (8 * 1000)) + 1;
	timing->da_hs_prepare = (80 * data_rate_mhz + 4 * 1000) / 8000;
	timing->da_hs_zero = (170 * data_rate_mhz + 10 * 1000) / 8000 + 1 -
			     timing->da_hs_prepare;
	timing->da_hs_trail = timing->da_hs_prepare + 1;

	timing->ta_go = 4 * timing->lpx - 2;
	timing->ta_sure = timing->lpx + 2;
	timing->ta_get = 4 * timing->lpx;
	timing->da_hs_exit = 2 * timing->lpx + 1;

	timing->clk_hs_prepare = 70 * data_rate_mhz / (8 * 1000);
	timing->clk_hs_post = timing->clk_hs_prepare + 8;
	timing->clk_hs_trail = timing->clk_hs_prepare;
	timing->clk_hs_zero = timing->clk_hs_trail * 4;
	timing->clk_hs_exit = 2 * timing->clk_hs_trail;

	timcon0 = timing->lpx | timing->da_hs_prepare << 8 |
		  timing->da_hs_zero << 16 | timing->da_hs_trail << 24;
	timcon1 = timing->ta_go | timing->ta_sure << 8 |
		  timing->ta_get << 16 | timing->da_hs_exit << 24;
	timcon2 = 1 << 8 | timing->clk_hs_zero << 16 |
		  timing->clk_hs_trail << 24;
	timcon3 = timing->clk_hs_prepare | timing->clk_hs_post << 8 |
		  timing->clk_hs_exit << 16;

	writel(timcon0, dsi->regs + DSI_PHY_TIMECON0);
	writel(timcon1, dsi->regs + DSI_PHY_TIMECON1);
	writel(timcon2, dsi->regs + DSI_PHY_TIMECON2);
	writel(timcon3, dsi->regs + DSI_PHY_TIMECON3);
}
static void mtk_dsi_enable(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_EN, DSI_EN);
}

static void mtk_dsi_disable(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_EN, 0);
}

static void mtk_dsi_reset_engine(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_RESET, DSI_RESET);
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_RESET, 0);
}

static void mtk_dsi_reset_dphy(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DPHY_RESET, DPHY_RESET);
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DPHY_RESET, 0);
}

static void mtk_dsi_clk_ulp_mode_enter(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_ULPM_EN, 0);
}

static void mtk_dsi_clk_ulp_mode_leave(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_ULPM_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_WAKEUP_EN, LC_WAKEUP_EN);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_WAKEUP_EN, 0);
}

static void mtk_dsi_lane0_ulp_mode_enter(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_HS_TX_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_ULPM_EN, 0);
}

static void mtk_dsi_lane0_ulp_mode_leave(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_ULPM_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_WAKEUP_EN, LD0_WAKEUP_EN);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_WAKEUP_EN, 0);
}

static bool mtk_dsi_clk_hs_state(struct mtk_dsi *dsi)
{
	u32 tmp_reg1;

	tmp_reg1 = readl(dsi->regs + DSI_PHY_LCCON);
	return ((tmp_reg1 & LC_HS_TX_EN) == 1) ? true : false;
}

static void mtk_dsi_clk_hs_mode(struct mtk_dsi *dsi, bool enter)
{
	if (enter && !mtk_dsi_clk_hs_state(dsi))
		mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, LC_HS_TX_EN);
	else if (!enter && mtk_dsi_clk_hs_state(dsi))
		mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, 0);
}
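
/* Pick command mode or one of the three video modes from the device's mode flags. */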
static void mtk_dsi_set_mode(struct mtk_dsi *dsi)
{
	u32 vid_mode = CMD_MODE;

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO) {
		if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_BURST)
			vid_mode = BURST_MODE;
		else if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
			vid_mode = SYNC_PULSE_MODE;
		else
			vid_mode = SYNC_EVENT_MODE;
	}

	writel(vid_mode, dsi->regs + DSI_MODE_CTRL);
}

static void mtk_dsi_set_vm_cmd(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_VM_CMD_CON, VM_CMD_EN, VM_CMD_EN);
	mtk_dsi_mask(dsi, DSI_VM_CMD_CON, TS_VFP_EN, TS_VFP_EN);
}
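
/*
 * Program the vertical active line count, the pixel stream packing format,
 * and the per-line byte count (written to both DSI_PSCTRL and
 * DSI_HSTX_CKL_WC).
 */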
static void mtk_dsi_ps_control_vact(struct mtk_dsi *dsi)
{
	struct videomode *vm = &dsi->vm;
	u32 dsi_buf_bpp, ps_wc;
	u32 ps_bpp_mode;

	if (dsi->format == MIPI_DSI_FMT_RGB565)
		dsi_buf_bpp = 2;
	else
		dsi_buf_bpp = 3;

	ps_wc = vm->hactive * dsi_buf_bpp;
	ps_bpp_mode = ps_wc;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB888:
		ps_bpp_mode |= PACKED_PS_24BIT_RGB888;
		break;
	case MIPI_DSI_FMT_RGB666:
		ps_bpp_mode |= PACKED_PS_18BIT_RGB666;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		ps_bpp_mode |= LOOSELY_PS_18BIT_RGB666;
		break;
	case MIPI_DSI_FMT_RGB565:
		ps_bpp_mode |= PACKED_PS_16BIT_RGB565;
		break;
	}

	writel(vm->vactive, dsi->regs + DSI_VACT_NL);
	writel(ps_bpp_mode, dsi->regs + DSI_PSCTRL);
	writel(ps_wc, dsi->regs + DSI_HSTX_CKL_WC);
}
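
/* Configure lane count plus EOT and non-continuous-clock behaviour in DSI_TXRX_CTRL. */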
static void mtk_dsi_rxtx_control(struct mtk_dsi *dsi)
{
	u32 tmp_reg;

	switch (dsi->lanes) {
	case 1:
		tmp_reg = 1 << 2;
		break;
	case 2:
		tmp_reg = 3 << 2;
		break;
	case 3:
		tmp_reg = 7 << 2;
		break;
	case 4:
		tmp_reg = 0xf << 2;
		break;
	default:
		tmp_reg = 0xf << 2;
		break;
	}

	tmp_reg |= (dsi->mode_flags & MIPI_DSI_CLOCK_NON_CONTINUOUS) << 6;
	tmp_reg |= (dsi->mode_flags & MIPI_DSI_MODE_EOT_PACKET) >> 3;

	writel(tmp_reg, dsi->regs + DSI_TXRX_CTRL);
}
static void mtk_dsi_ps_control(struct mtk_dsi *dsi)
{
	u32 dsi_tmp_buf_bpp;
	u32 tmp_reg;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB888:
		tmp_reg = PACKED_PS_24BIT_RGB888;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB666:
		tmp_reg = LOOSELY_PS_18BIT_RGB666;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		tmp_reg = PACKED_PS_18BIT_RGB666;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB565:
		tmp_reg = PACKED_PS_16BIT_RGB565;
		dsi_tmp_buf_bpp = 2;
		break;
	default:
		tmp_reg = PACKED_PS_24BIT_RGB888;
		dsi_tmp_buf_bpp = 3;
		break;
	}

	tmp_reg += dsi->vm.hactive * dsi_tmp_buf_bpp & DSI_PS_WC;
	writel(tmp_reg, dsi->regs + DSI_PSCTRL);
}
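
/*
 * Translate the videomode into horizontal timing expressed in bytes and
 * trim the porches to leave room for the D-PHY LP<->HS transition overhead
 * on each line.
 */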
static void mtk_dsi_config_vdo_timing(struct mtk_dsi *dsi)
{
	u32 horizontal_sync_active_byte;
	u32 horizontal_backporch_byte;
	u32 horizontal_frontporch_byte;
	u32 dsi_tmp_buf_bpp, data_phy_cycles;
	struct mtk_phy_timing *timing = &dsi->phy_timing;

	struct videomode *vm = &dsi->vm;

	if (dsi->format == MIPI_DSI_FMT_RGB565)
		dsi_tmp_buf_bpp = 2;
	else
		dsi_tmp_buf_bpp = 3;

	writel(vm->vsync_len, dsi->regs + DSI_VSA_NL);
	writel(vm->vback_porch, dsi->regs + DSI_VBP_NL);
	writel(vm->vfront_porch, dsi->regs + DSI_VFP_NL);
	writel(vm->vactive, dsi->regs + DSI_VACT_NL);

	if (dsi->driver_data->has_size_ctl)
		writel(vm->vactive << 16 | vm->hactive,
		       dsi->regs + DSI_SIZE_CON);

	horizontal_sync_active_byte = (vm->hsync_len * dsi_tmp_buf_bpp - 10);

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
		horizontal_backporch_byte =
			(vm->hback_porch * dsi_tmp_buf_bpp - 10);
	else
		horizontal_backporch_byte = ((vm->hback_porch + vm->hsync_len) *
			dsi_tmp_buf_bpp - 10);

	data_phy_cycles = timing->lpx + timing->da_hs_prepare +
			  timing->da_hs_zero + timing->da_hs_exit + 3;

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_BURST) {
		if ((vm->hfront_porch + vm->hback_porch) * dsi_tmp_buf_bpp >
		    data_phy_cycles * dsi->lanes + 18) {
			horizontal_frontporch_byte =
				vm->hfront_porch * dsi_tmp_buf_bpp -
				(data_phy_cycles * dsi->lanes + 18) *
				vm->hfront_porch /
				(vm->hfront_porch + vm->hback_porch);

			horizontal_backporch_byte =
				horizontal_backporch_byte -
				(data_phy_cycles * dsi->lanes + 18) *
				vm->hback_porch /
				(vm->hfront_porch + vm->hback_porch);
		} else {
			DRM_WARN("HFP less than d-phy, FPS will be under 60Hz\n");
			horizontal_frontporch_byte = vm->hfront_porch *
						     dsi_tmp_buf_bpp;
		}
	} else {
		if ((vm->hfront_porch + vm->hback_porch) * dsi_tmp_buf_bpp >
		    data_phy_cycles * dsi->lanes + 12) {
			horizontal_frontporch_byte =
				vm->hfront_porch * dsi_tmp_buf_bpp -
				(data_phy_cycles * dsi->lanes + 12) *
				vm->hfront_porch /
				(vm->hfront_porch + vm->hback_porch);

			horizontal_backporch_byte = horizontal_backporch_byte -
				(data_phy_cycles * dsi->lanes + 12) *
				vm->hback_porch /
				(vm->hfront_porch + vm->hback_porch);
		} else {
			DRM_WARN("HFP less than d-phy, FPS will be under 60Hz\n");
			horizontal_frontporch_byte = vm->hfront_porch *
						     dsi_tmp_buf_bpp;
		}
	}

	writel(horizontal_sync_active_byte, dsi->regs + DSI_HSA_WC);
	writel(horizontal_backporch_byte, dsi->regs + DSI_HBP_WC);
	writel(horizontal_frontporch_byte, dsi->regs + DSI_HFP_WC);

	mtk_dsi_ps_control(dsi);
}
static void mtk_dsi_start(struct mtk_dsi *dsi)
{
	writel(0, dsi->regs + DSI_START);
	writel(1, dsi->regs + DSI_START);
}

static void mtk_dsi_stop(struct mtk_dsi *dsi)
{
	writel(0, dsi->regs + DSI_START);
}

static void mtk_dsi_set_cmd_mode(struct mtk_dsi *dsi)
{
	writel(CMD_MODE, dsi->regs + DSI_MODE_CTRL);
}

static void mtk_dsi_set_interrupt_enable(struct mtk_dsi *dsi)
{
	u32 inten = LPRX_RD_RDY_INT_FLAG | CMD_DONE_INT_FLAG | VM_DONE_INT_FLAG;

	writel(inten, dsi->regs + DSI_INTEN);
}

static void mtk_dsi_irq_data_set(struct mtk_dsi *dsi, u32 irq_bit)
{
	dsi->irq_data |= irq_bit;
}

static void mtk_dsi_irq_data_clear(struct mtk_dsi *dsi, u32 irq_bit)
{
	dsi->irq_data &= ~irq_bit;
}
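
/*
 * Sleep until the interrupt handler has latched @irq_flag in dsi->irq_data,
 * or until @timeout milliseconds have elapsed; the engine is reset on timeout.
 */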
static s32 mtk_dsi_wait_for_irq_done(struct mtk_dsi *dsi, u32 irq_flag,
				     unsigned int timeout)
{
	s32 ret = 0;
	unsigned long jiffies = msecs_to_jiffies(timeout);

	ret = wait_event_interruptible_timeout(dsi->irq_wait_queue,
					       dsi->irq_data & irq_flag,
					       jiffies);
	if (ret == 0) {
		DRM_WARN("Wait DSI IRQ(0x%08x) Timeout\n", irq_flag);

		mtk_dsi_reset_engine(dsi);
	}

	return ret;
}
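
/*
 * Interrupt handler: acknowledge the engine while it is busy, clear the
 * asserted status bits, record them in dsi->irq_data and wake up any thread
 * sleeping in mtk_dsi_wait_for_irq_done().
 */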
static irqreturn_t mtk_dsi_irq(int irq, void *dev_id)
{
	struct mtk_dsi *dsi = dev_id;
	u32 status, tmp;
	u32 flag = LPRX_RD_RDY_INT_FLAG | CMD_DONE_INT_FLAG | VM_DONE_INT_FLAG;

	status = readl(dsi->regs + DSI_INTSTA) & flag;

	if (status) {
		do {
			mtk_dsi_mask(dsi, DSI_RACK, RACK, RACK);
			tmp = readl(dsi->regs + DSI_INTSTA);
		} while (tmp & DSI_BUSY);

		mtk_dsi_mask(dsi, DSI_INTSTA, status, 0);
		mtk_dsi_irq_data_set(dsi, status);
		wake_up_interruptible(&dsi->irq_wait_queue);
	}

	return IRQ_HANDLED;
}
static s32 mtk_dsi_switch_to_cmd_mode(struct mtk_dsi *dsi, u8 irq_flag, u32 t)
{
	mtk_dsi_irq_data_clear(dsi, irq_flag);
	mtk_dsi_set_cmd_mode(dsi);

	if (!mtk_dsi_wait_for_irq_done(dsi, irq_flag, t)) {
		DRM_ERROR("failed to switch cmd mode\n");
		return -ETIME;
	} else {
		return 0;
	}
}
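
/*
 * Refcounted power-up.  The per-lane data rate is the pixel clock times the
 * bits per pixel divided by the lane count; for example, a 148.5 MHz pixel
 * clock at 24 bpp over 4 lanes asks the HS clock for 891 Mbit/s per lane.
 */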
static int mtk_dsi_poweron(struct mtk_dsi *dsi)
{
	struct device *dev = dsi->host.dev;
	int ret;
	u32 bit_per_pixel;

	if (++dsi->refcount != 1)
		return 0;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB565:
		bit_per_pixel = 16;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		bit_per_pixel = 18;
		break;
	case MIPI_DSI_FMT_RGB666:
	case MIPI_DSI_FMT_RGB888:
	default:
		bit_per_pixel = 24;
		break;
	}

	dsi->data_rate = DIV_ROUND_UP_ULL(dsi->vm.pixelclock * bit_per_pixel,
					  dsi->lanes);

	ret = clk_set_rate(dsi->hs_clk, dsi->data_rate);
	if (ret < 0) {
		dev_err(dev, "Failed to set data rate: %d\n", ret);
		goto err_refcount;
	}

	phy_power_on(dsi->phy);

	ret = clk_prepare_enable(dsi->engine_clk);
	if (ret < 0) {
		dev_err(dev, "Failed to enable engine clock: %d\n", ret);
		goto err_phy_power_off;
	}

	ret = clk_prepare_enable(dsi->digital_clk);
	if (ret < 0) {
		dev_err(dev, "Failed to enable digital clock: %d\n", ret);
		goto err_disable_engine_clk;
	}

	mtk_dsi_enable(dsi);

	if (dsi->driver_data->has_shadow_ctl)
		writel(FORCE_COMMIT | BYPASS_SHADOW,
		       dsi->regs + DSI_SHADOW_DEBUG);

	mtk_dsi_reset_engine(dsi);
	mtk_dsi_phy_timconfig(dsi);

	mtk_dsi_rxtx_control(dsi);
	usleep_range(30, 100);
	mtk_dsi_reset_dphy(dsi);
	mtk_dsi_ps_control_vact(dsi);
	mtk_dsi_set_vm_cmd(dsi);
	mtk_dsi_config_vdo_timing(dsi);
	mtk_dsi_set_interrupt_enable(dsi);

	mtk_dsi_clk_ulp_mode_leave(dsi);
	mtk_dsi_lane0_ulp_mode_leave(dsi);
	mtk_dsi_clk_hs_mode(dsi, 0);

	if (dsi->panel) {
		if (drm_panel_prepare(dsi->panel)) {
			DRM_ERROR("failed to prepare the panel\n");
			goto err_disable_digital_clk;
		}
	}

	return 0;
err_disable_digital_clk:
	clk_disable_unprepare(dsi->digital_clk);
err_disable_engine_clk:
	clk_disable_unprepare(dsi->engine_clk);
err_phy_power_off:
	phy_power_off(dsi->phy);
err_refcount:
	dsi->refcount--;
	return ret;
}
static void mtk_dsi_poweroff(struct mtk_dsi *dsi)
{
	if (WARN_ON(dsi->refcount == 0))
		return;

	if (--dsi->refcount != 0)
		return;

	/*
	 * mtk_dsi_stop() and mtk_dsi_start() are asymmetric, since
	 * mtk_dsi_stop() should be called after mtk_drm_crtc_atomic_disable(),
	 * which needs irq for vblank, and mtk_dsi_stop() will disable irq.
	 * mtk_dsi_start() needs to be called in mtk_output_dsi_enable(),
	 * after dsi is fully set.
	 */
	mtk_dsi_stop(dsi);

	if (!mtk_dsi_switch_to_cmd_mode(dsi, VM_DONE_INT_FLAG, 500)) {
		if (dsi->panel) {
			if (drm_panel_unprepare(dsi->panel)) {
				DRM_ERROR("failed to unprepare the panel\n");
				return;
			}
		}
	}

	mtk_dsi_reset_engine(dsi);
	mtk_dsi_lane0_ulp_mode_enter(dsi);
	mtk_dsi_clk_ulp_mode_enter(dsi);

	mtk_dsi_disable(dsi);

	clk_disable_unprepare(dsi->engine_clk);
	clk_disable_unprepare(dsi->digital_clk);

	phy_power_off(dsi->phy);
}
static void mtk_output_dsi_enable(struct mtk_dsi *dsi)
{
	int ret;

	if (dsi->enabled)
		return;

	ret = mtk_dsi_poweron(dsi);
	if (ret < 0) {
		DRM_ERROR("failed to power on dsi\n");
		return;
	}

	mtk_dsi_set_mode(dsi);
	mtk_dsi_clk_hs_mode(dsi, 1);

	mtk_dsi_start(dsi);

	if (dsi->panel) {
		if (drm_panel_enable(dsi->panel)) {
			DRM_ERROR("failed to enable the panel\n");
			goto err_dsi_power_off;
		}
	}

	dsi->enabled = true;

	return;
err_dsi_power_off:
	mtk_dsi_stop(dsi);
	mtk_dsi_poweroff(dsi);
}
static void mtk_output_dsi_disable(struct mtk_dsi *dsi)
{
	if (!dsi->enabled)
		return;

	if (dsi->panel) {
		if (drm_panel_disable(dsi->panel)) {
			DRM_ERROR("failed to disable the panel\n");
			return;
		}
	}

	mtk_dsi_poweroff(dsi);

	dsi->enabled = false;
}
static void mtk_dsi_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
}

static const struct drm_encoder_funcs mtk_dsi_encoder_funcs = {
	.destroy = mtk_dsi_encoder_destroy,
};
static bool mtk_dsi_encoder_mode_fixup(struct drm_encoder *encoder,
				       const struct drm_display_mode *mode,
				       struct drm_display_mode *adjusted_mode)
{
	return true;
}
static void mtk_dsi_encoder_mode_set(struct drm_encoder *encoder,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	drm_display_mode_to_videomode(adjusted, &dsi->vm);
}

static void mtk_dsi_encoder_disable(struct drm_encoder *encoder)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	mtk_output_dsi_disable(dsi);
}

static void mtk_dsi_encoder_enable(struct drm_encoder *encoder)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	mtk_output_dsi_enable(dsi);
}

static int mtk_dsi_connector_get_modes(struct drm_connector *connector)
{
	struct mtk_dsi *dsi = connector_to_dsi(connector);

	return drm_panel_get_modes(dsi->panel, connector);
}
static const struct drm_encoder_helper_funcs mtk_dsi_encoder_helper_funcs = {
	.mode_fixup = mtk_dsi_encoder_mode_fixup,
	.mode_set = mtk_dsi_encoder_mode_set,
	.disable = mtk_dsi_encoder_disable,
	.enable = mtk_dsi_encoder_enable,
};

static const struct drm_connector_funcs mtk_dsi_connector_funcs = {
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = drm_connector_cleanup,
	.reset = drm_atomic_helper_connector_reset,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};

static const struct drm_connector_helper_funcs
	mtk_dsi_connector_helper_funcs = {
	.get_modes = mtk_dsi_connector_get_modes,
};
static int mtk_dsi_create_connector(struct drm_device *drm, struct mtk_dsi *dsi)
{
	int ret;

	ret = drm_connector_init(drm, &dsi->conn, &mtk_dsi_connector_funcs,
				 DRM_MODE_CONNECTOR_DSI);
	if (ret) {
		DRM_ERROR("Failed to initialize connector to drm\n");
		return ret;
	}

	drm_connector_helper_add(&dsi->conn, &mtk_dsi_connector_helper_funcs);

	dsi->conn.dpms = DRM_MODE_DPMS_OFF;
	drm_connector_attach_encoder(&dsi->conn, &dsi->encoder);

	if (dsi->panel) {
		ret = drm_panel_attach(dsi->panel, &dsi->conn);
		if (ret) {
			DRM_ERROR("Failed to attach panel to drm\n");
			goto err_connector_cleanup;
		}
	}

	return 0;

err_connector_cleanup:
	drm_connector_cleanup(&dsi->conn);
	return ret;
}
static int mtk_dsi_create_conn_enc(struct drm_device *drm, struct mtk_dsi *dsi)
{
	int ret;

	ret = drm_encoder_init(drm, &dsi->encoder, &mtk_dsi_encoder_funcs,
			       DRM_MODE_ENCODER_DSI, NULL);
	if (ret) {
		DRM_ERROR("Failed to initialize encoder to drm\n");
		return ret;
	}

	drm_encoder_helper_add(&dsi->encoder, &mtk_dsi_encoder_helper_funcs);

	/*
	 * Currently display data paths are statically assigned to a crtc each.
	 * crtc 0 is OVL0 -> COLOR0 -> AAL -> OD -> RDMA0 -> UFOE -> DSI0
	 */
	dsi->encoder.possible_crtcs = 1;

	/* If there's a bridge, attach to it and let it create the connector */
	if (dsi->bridge) {
		ret = drm_bridge_attach(&dsi->encoder, dsi->bridge, NULL);
		if (ret) {
			DRM_ERROR("Failed to attach bridge to drm\n");
			goto err_encoder_cleanup;
		}
	} else {
		/* Otherwise create our own connector and attach to a panel */
		ret = mtk_dsi_create_connector(drm, dsi);
		if (ret)
			goto err_encoder_cleanup;
	}

	return 0;

err_encoder_cleanup:
	drm_encoder_cleanup(&dsi->encoder);
	return ret;
}
static void mtk_dsi_destroy_conn_enc(struct mtk_dsi *dsi)
{
	drm_encoder_cleanup(&dsi->encoder);
	/* Skip connector cleanup if creation was delegated to the bridge */
	if (dsi->conn.dev)
		drm_connector_cleanup(&dsi->conn);
	if (dsi->panel)
		drm_panel_detach(dsi->panel);
}
static void mtk_dsi_ddp_start(struct mtk_ddp_comp *comp)
{
	struct mtk_dsi *dsi = container_of(comp, struct mtk_dsi, ddp_comp);

	mtk_dsi_poweron(dsi);
}

static void mtk_dsi_ddp_stop(struct mtk_ddp_comp *comp)
{
	struct mtk_dsi *dsi = container_of(comp, struct mtk_dsi, ddp_comp);

	mtk_dsi_poweroff(dsi);
}

static const struct mtk_ddp_comp_funcs mtk_dsi_funcs = {
	.start = mtk_dsi_ddp_start,
	.stop = mtk_dsi_ddp_stop,
};
static int mtk_dsi_host_attach(struct mipi_dsi_host *host,
			       struct mipi_dsi_device *device)
{
	struct mtk_dsi *dsi = host_to_dsi(host);

	dsi->lanes = device->lanes;
	dsi->format = device->format;
	dsi->mode_flags = device->mode_flags;

	if (dsi->conn.dev)
		drm_helper_hpd_irq_event(dsi->conn.dev);

	return 0;
}
static int mtk_dsi_host_detach(struct mipi_dsi_host *host,
			       struct mipi_dsi_device *device)
{
	struct mtk_dsi *dsi = host_to_dsi(host);

	if (dsi->conn.dev)
		drm_helper_hpd_irq_event(dsi->conn.dev);

	return 0;
}
static void mtk_dsi_wait_for_idle(struct mtk_dsi *dsi)
{
	int ret;
	u32 val;

	ret = readl_poll_timeout(dsi->regs + DSI_INTSTA, val, !(val & DSI_BUSY),
				 4, 2000000);
	if (ret) {
		DRM_WARN("polling dsi wait not busy timeout!\n");

		mtk_dsi_reset_engine(dsi);
	}
}
static u32 mtk_dsi_recv_cnt(u8 type, u8 *read_data)
{
	switch (type) {
	case MIPI_DSI_RX_GENERIC_SHORT_READ_RESPONSE_1BYTE:
	case MIPI_DSI_RX_DCS_SHORT_READ_RESPONSE_1BYTE:
		return 1;
	case MIPI_DSI_RX_GENERIC_SHORT_READ_RESPONSE_2BYTE:
	case MIPI_DSI_RX_DCS_SHORT_READ_RESPONSE_2BYTE:
		return 2;
	case MIPI_DSI_RX_GENERIC_LONG_READ_RESPONSE:
	case MIPI_DSI_RX_DCS_LONG_READ_RESPONSE:
		return read_data[1] + read_data[2] * 16;
	case MIPI_DSI_RX_ACKNOWLEDGE_AND_ERROR_REPORT:
		DRM_INFO("type is 0x02, try again\n");
		break;
	default:
		DRM_INFO("type(0x%x) not recognized\n", type);
		break;
	}

	return 0;
}
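
/*
 * Fill the command queue registers with the packet header (config, data
 * type, word count) and the payload bytes, then program the queue depth.
 */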
static void mtk_dsi_cmdq(struct mtk_dsi *dsi, const struct mipi_dsi_msg *msg)
{
	const char *tx_buf = msg->tx_buf;
	u8 config, cmdq_size, cmdq_off, type = msg->type;
	u32 reg_val, cmdq_mask, i;
	u32 reg_cmdq_off = dsi->driver_data->reg_cmdq_off;

	if (MTK_DSI_HOST_IS_READ(type))
		config = BTA;
	else
		config = (msg->tx_len > 2) ? LONG_PACKET : SHORT_PACKET;

	if (msg->tx_len > 2) {
		cmdq_size = 1 + (msg->tx_len + 3) / 4;
		cmdq_off = 4;
		cmdq_mask = CONFIG | DATA_ID | DATA_0 | DATA_1;
		reg_val = (msg->tx_len << 16) | (type << 8) | config;
	} else {
		cmdq_size = 1;
		cmdq_off = 2;
		cmdq_mask = CONFIG | DATA_ID;
		reg_val = (type << 8) | config;
	}

	for (i = 0; i < msg->tx_len; i++)
		mtk_dsi_mask(dsi, (reg_cmdq_off + cmdq_off + i) & (~0x3U),
			     (0xffUL << (((i + cmdq_off) & 3U) * 8U)),
			     tx_buf[i] << (((i + cmdq_off) & 3U) * 8U));

	mtk_dsi_mask(dsi, reg_cmdq_off, cmdq_mask, reg_val);
	mtk_dsi_mask(dsi, DSI_CMDQ_SIZE, CMDQ_SIZE, cmdq_size);
}
static ssize_t mtk_dsi_host_send_cmd(struct mtk_dsi *dsi,
				     const struct mipi_dsi_msg *msg, u8 flag)
{
	mtk_dsi_wait_for_idle(dsi);
	mtk_dsi_irq_data_clear(dsi, flag);
	mtk_dsi_cmdq(dsi, msg);
	mtk_dsi_start(dsi);

	if (!mtk_dsi_wait_for_irq_done(dsi, flag, 2000))
		return -ETIME;
	else
		return 0;
}
static ssize_t mtk_dsi_host_transfer(struct mipi_dsi_host *host,
				     const struct mipi_dsi_msg *msg)
{
	struct mtk_dsi *dsi = host_to_dsi(host);
	u32 recv_cnt, i;
	u8 read_data[16];
	void *src_addr;
	u8 irq_flag = CMD_DONE_INT_FLAG;

	if (readl(dsi->regs + DSI_MODE_CTRL) & MODE) {
		DRM_ERROR("dsi engine is not in command mode\n");
		return -EINVAL;
	}

	if (MTK_DSI_HOST_IS_READ(msg->type))
		irq_flag |= LPRX_RD_RDY_INT_FLAG;

	if (mtk_dsi_host_send_cmd(dsi, msg, irq_flag) < 0)
		return -ETIME;

	if (!MTK_DSI_HOST_IS_READ(msg->type))
		return 0;

	if (!msg->rx_buf) {
		DRM_ERROR("dsi receive buffer may be NULL\n");
		return -EINVAL;
	}

	for (i = 0; i < 16; i++)
		*(read_data + i) = readb(dsi->regs + DSI_RX_DATA0 + i);

	recv_cnt = mtk_dsi_recv_cnt(read_data[0], read_data);

	if (recv_cnt > 2)
		src_addr = &read_data[4];
	else
		src_addr = &read_data[1];

	if (recv_cnt > 10)
		recv_cnt = 10;

	if (recv_cnt > msg->rx_len)
		recv_cnt = msg->rx_len;

	if (recv_cnt)
		memcpy(msg->rx_buf, src_addr, recv_cnt);

	DRM_INFO("dsi get %d byte data from the panel address(0x%x)\n",
		 recv_cnt, *((u8 *)(msg->tx_buf)));

	return recv_cnt;
}
static const struct mipi_dsi_host_ops mtk_dsi_ops = {
	.attach = mtk_dsi_host_attach,
	.detach = mtk_dsi_host_detach,
	.transfer = mtk_dsi_host_transfer,
};
static int mtk_dsi_bind(struct device *dev, struct device *master, void *data)
{
	int ret;
	struct drm_device *drm = data;
	struct mtk_dsi *dsi = dev_get_drvdata(dev);

	ret = mtk_ddp_comp_register(drm, &dsi->ddp_comp);
	if (ret < 0) {
		dev_err(dev, "Failed to register component %pOF: %d\n",
			dev->of_node, ret);
		return ret;
	}

	ret = mtk_dsi_create_conn_enc(drm, dsi);
	if (ret) {
		DRM_ERROR("Encoder create failed with %d\n", ret);
		goto err_unregister;
	}

	return 0;

err_unregister:
	mtk_ddp_comp_unregister(drm, &dsi->ddp_comp);
	return ret;
}
static void mtk_dsi_unbind(struct device *dev, struct device *master,
			   void *data)
{
	struct drm_device *drm = data;
	struct mtk_dsi *dsi = dev_get_drvdata(dev);

	mtk_dsi_destroy_conn_enc(dsi);
	mtk_ddp_comp_unregister(drm, &dsi->ddp_comp);
}
static const struct component_ops mtk_dsi_component_ops = {
	.bind = mtk_dsi_bind,
	.unbind = mtk_dsi_unbind,
};
static int mtk_dsi_probe(struct platform_device *pdev)
{
	struct mtk_dsi *dsi;
	struct device *dev = &pdev->dev;
	struct resource *regs;
	int irq_num;
	int comp_id;
	int ret;

	dsi = devm_kzalloc(dev, sizeof(*dsi), GFP_KERNEL);
	if (!dsi)
		return -ENOMEM;

	dsi->host.ops = &mtk_dsi_ops;
	dsi->host.dev = dev;
	ret = mipi_dsi_host_register(&dsi->host);
	if (ret < 0) {
		dev_err(dev, "failed to register DSI host: %d\n", ret);
		return ret;
	}

	ret = drm_of_find_panel_or_bridge(dev->of_node, 0, 0,
					  &dsi->panel, &dsi->bridge);
	if (ret)
		goto err_unregister_host;

	dsi->driver_data = of_device_get_match_data(dev);

	dsi->engine_clk = devm_clk_get(dev, "engine");
	if (IS_ERR(dsi->engine_clk)) {
		ret = PTR_ERR(dsi->engine_clk);
		dev_err(dev, "Failed to get engine clock: %d\n", ret);
		goto err_unregister_host;
	}

	dsi->digital_clk = devm_clk_get(dev, "digital");
	if (IS_ERR(dsi->digital_clk)) {
		ret = PTR_ERR(dsi->digital_clk);
		dev_err(dev, "Failed to get digital clock: %d\n", ret);
		goto err_unregister_host;
	}

	dsi->hs_clk = devm_clk_get(dev, "hs");
	if (IS_ERR(dsi->hs_clk)) {
		ret = PTR_ERR(dsi->hs_clk);
		dev_err(dev, "Failed to get hs clock: %d\n", ret);
		goto err_unregister_host;
	}

	regs = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dsi->regs = devm_ioremap_resource(dev, regs);
	if (IS_ERR(dsi->regs)) {
		ret = PTR_ERR(dsi->regs);
		dev_err(dev, "Failed to ioremap memory: %d\n", ret);
		goto err_unregister_host;
	}

	dsi->phy = devm_phy_get(dev, "dphy");
	if (IS_ERR(dsi->phy)) {
		ret = PTR_ERR(dsi->phy);
		dev_err(dev, "Failed to get MIPI-DPHY: %d\n", ret);
		goto err_unregister_host;
	}

	comp_id = mtk_ddp_comp_get_id(dev->of_node, MTK_DSI);
	if (comp_id < 0) {
		dev_err(dev, "Failed to identify by alias: %d\n", comp_id);
		ret = comp_id;
		goto err_unregister_host;
	}

	ret = mtk_ddp_comp_init(dev, dev->of_node, &dsi->ddp_comp, comp_id,
				&mtk_dsi_funcs);
	if (ret) {
		dev_err(dev, "Failed to initialize component: %d\n", ret);
		goto err_unregister_host;
	}

	irq_num = platform_get_irq(pdev, 0);
	if (irq_num < 0) {
		dev_err(&pdev->dev, "failed to get dsi irq_num: %d\n", irq_num);
		ret = irq_num;
		goto err_unregister_host;
	}

	irq_set_status_flags(irq_num, IRQ_TYPE_LEVEL_LOW);
	ret = devm_request_irq(&pdev->dev, irq_num, mtk_dsi_irq,
			       IRQF_TRIGGER_LOW, dev_name(&pdev->dev), dsi);
	if (ret) {
		dev_err(&pdev->dev, "failed to request mediatek dsi irq\n");
		goto err_unregister_host;
	}

	init_waitqueue_head(&dsi->irq_wait_queue);

	platform_set_drvdata(pdev, dsi);

	ret = component_add(&pdev->dev, &mtk_dsi_component_ops);
	if (ret) {
		dev_err(&pdev->dev, "failed to add component: %d\n", ret);
		goto err_unregister_host;
	}

	return 0;

err_unregister_host:
	mipi_dsi_host_unregister(&dsi->host);
	return ret;
}
static int mtk_dsi_remove(struct platform_device *pdev)
{
	struct mtk_dsi *dsi = platform_get_drvdata(pdev);

	mtk_output_dsi_disable(dsi);
	component_del(&pdev->dev, &mtk_dsi_component_ops);
	mipi_dsi_host_unregister(&dsi->host);

	return 0;
}
static const struct mtk_dsi_driver_data mt8173_dsi_driver_data = {
	.reg_cmdq_off = 0x200,
};

static const struct mtk_dsi_driver_data mt2701_dsi_driver_data = {
	.reg_cmdq_off = 0x180,
};

static const struct mtk_dsi_driver_data mt8183_dsi_driver_data = {
	.reg_cmdq_off = 0x200,
	.has_shadow_ctl = true,
	.has_size_ctl = true,
};
static const struct of_device_id mtk_dsi_of_match[] = {
	{ .compatible = "mediatek,mt2701-dsi",
	  .data = &mt2701_dsi_driver_data },
	{ .compatible = "mediatek,mt8173-dsi",
	  .data = &mt8173_dsi_driver_data },
	{ .compatible = "mediatek,mt8183-dsi",
	  .data = &mt8183_dsi_driver_data },
	{ },
};
struct platform_driver mtk_dsi_driver = {
	.probe = mtk_dsi_probe,
	.remove = mtk_dsi_remove,
	.driver = {
		.name = "mtk-dsi",
		.of_match_table = mtk_dsi_of_match,
	},
};