// SPDX-License-Identifier: GPL-2.0-only
/*
 * DesignWare MIPI DSI Host Controller v1.02 driver
 *
 * Copyright (c) 2016 Linaro Limited.
 * Copyright (c) 2014-2016 Hisilicon Limited.
 *
 * Author:
 *	Xinliang Liu <z.liuxinliang@hisilicon.com>
 *	Xinliang Liu <xinliang.liu@linaro.org>
 *	Xinwei Kong <kong.kongxinwei@hisilicon.com>
 */

#include <linux/clk.h>
#include <linux/component.h>
#include <linux/delay.h>
#include <linux/module.h>
#include <linux/platform_device.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_bridge.h>
#include <drm/drm_device.h>
#include <drm/drm_mipi_dsi.h>
#include <drm/drm_of.h>
#include <drm/drm_print.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_simple_kms_helper.h>

#include "dw_dsi_reg.h"

#define MAX_TX_ESC_CLK		10
#define ROUND(x, y)		((x) / (y) + \
				((x) % (y) * 10 / (y) >= 5 ? 1 : 0))
#define PHY_REF_CLK_RATE	19200000
#define PHY_REF_CLK_PERIOD_PS	(1000000000 / (PHY_REF_CLK_RATE / 1000))
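
/*
 * Illustrative arithmetic (our own numbers, not from the original source):
 * ROUND() is a divide that rounds to the nearest integer, so
 * ROUND(50, 16) = 3 because the remainder 2/16 is below one half, while
 * ROUND(56, 16) = 4. With the 19.2 MHz reference clock,
 * PHY_REF_CLK_PERIOD_PS evaluates to 1000000000 / 19200 = 52083 ps.
 */
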
#define encoder_to_dsi(encoder) \
	container_of(encoder, struct dw_dsi, encoder)
#define host_to_dsi(host) \
	container_of(host, struct dw_dsi, host)

struct mipi_phy_params {
	u32 clk_t_lpx;
	u32 clk_t_hs_prepare;
	u32 clk_t_hs_zero;
	u32 clk_t_hs_trial;
	u32 clk_t_wakeup;
	u32 data_t_lpx;
	u32 data_t_hs_prepare;
	u32 data_t_hs_zero;
	u32 data_t_hs_trial;
	u32 data_t_ta_go;
	u32 data_t_ta_get;
	u32 data_t_wakeup;
	u32 hstx_ckg_sel;
	u32 pll_fbd_div5f;
	u32 pll_fbd_div1f;
	u32 pll_fbd_2p;
	u32 pll_enbwt;
	u32 pll_fbd_p;
	u32 pll_fbd_s;
	u32 pll_pre_div1p;
	u32 pll_pre_p;
	u32 pll_vco_750M;
	u32 pll_lpf_rs;
	u32 pll_lpf_cs;
	u32 clklp2hs_time;
	u32 clkhs2lp_time;
	u32 lp2hs_time;
	u32 hs2lp_time;
	u32 clk_to_data_delay;
	u32 data_to_clk_delay;
	u32 lane_byte_clk_kHz;
	u32 clk_division;
};

struct dsi_hw_ctx {
	void __iomem *base;
	struct clk *pclk;
};

struct dw_dsi {
	struct drm_encoder encoder;
	struct drm_bridge *bridge;
	struct mipi_dsi_host host;
	struct drm_display_mode cur_mode;
	struct dsi_hw_ctx *ctx;
	struct mipi_phy_params phy;

	u32 lanes;
	enum mipi_dsi_pixel_format format;
	unsigned long mode_flags;
};

struct dsi_data {
	struct dw_dsi dsi;
	struct dsi_hw_ctx ctx;
};

struct dsi_phy_range {
	u32 min_range_kHz;
	u32 max_range_kHz;
	u32 pll_vco_750M;
	u32 hstx_ckg_sel;
};

static const struct dsi_phy_range dphy_range_info[] = {
	{   46875,    62500,   1,   7 },
	{   62500,    93750,   0,   7 },
	{   93750,   125000,   1,   6 },
	{  125000,   187500,   0,   6 },
	{  187500,   250000,   1,   5 },
	{  250000,   375000,   0,   5 },
	{  375000,   500000,   1,   4 },
	{  500000,   750000,   0,   4 },
	{  750000,  1000000,   1,   0 },
	{ 1000000,  1500000,   0,   0 }
};

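/*
 * Example (illustrative only): a requested PHY rate of 891000 kHz lands in
 * the { 750000, 1000000, 1, 0 } row above, so that band is programmed with
 * pll_vco_750M = 1 and hstx_ckg_sel = 0.
 */
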
static u32 dsi_calc_phy_rate(u32 req_kHz, struct mipi_phy_params *phy)
{
	u32 ref_clk_ps = PHY_REF_CLK_PERIOD_PS;
	u32 tmp_kHz = req_kHz;
	u32 i = 0;
	u32 q_pll = 1;
	u32 m_pll = 0;
	u32 n_pll = 0;
	u32 r_pll = 1;
	u32 m_n = 0;
	u32 m_n_int = 0;
	u32 f_kHz = 0;
	u64 temp;

	/*
	 * Find a rate >= req_kHz.
	 */
	do {
		f_kHz = tmp_kHz;

		for (i = 0; i < ARRAY_SIZE(dphy_range_info); i++)
			if (f_kHz >= dphy_range_info[i].min_range_kHz &&
			    f_kHz <= dphy_range_info[i].max_range_kHz)
				break;

		if (i == ARRAY_SIZE(dphy_range_info)) {
			DRM_ERROR("%dkHz out of range\n", f_kHz);
			return 0;
		}

		phy->pll_vco_750M = dphy_range_info[i].pll_vco_750M;
		phy->hstx_ckg_sel = dphy_range_info[i].hstx_ckg_sel;

		if (phy->hstx_ckg_sel <= 7 &&
		    phy->hstx_ckg_sel >= 4)
			q_pll = 0x10 >> (7 - phy->hstx_ckg_sel);

		temp = f_kHz * (u64)q_pll * (u64)ref_clk_ps;
		m_n_int = temp / (u64)1000000000;
		m_n = (temp % (u64)1000000000) / (u64)100000000;
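
		/*
		 * Worked example with assumed numbers (not from the
		 * original source): f_kHz = 480000, q_pll = 1 and
		 * ref_clk_ps = 52083 give temp = 24999840000, so
		 * m_n_int = 24 and m_n = 9 (the first fractional digit).
		 */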

		if (m_n_int % 2 == 0) {
			if (m_n * 6 >= 50) {
				n_pll = 2;
				m_pll = (m_n_int + 1) * n_pll;
			} else if (m_n * 6 >= 30) {
				n_pll = 3;
				m_pll = m_n_int * n_pll + 2;
			} else {
				n_pll = 1;
				m_pll = m_n_int * n_pll;
			}
		} else {
			if (m_n * 6 >= 50) {
				n_pll = 1;
				m_pll = (m_n_int + 1) * n_pll;
			} else if (m_n * 6 >= 30) {
				n_pll = 1;
				m_pll = (m_n_int + 1) * n_pll;
			} else if (m_n * 6 >= 10) {
				n_pll = 3;
				m_pll = m_n_int * n_pll + 1;
			} else {
				n_pll = 2;
				m_pll = m_n_int * n_pll;
			}
		}

		if (n_pll == 1) {
			phy->pll_fbd_p = 0;
			phy->pll_pre_div1p = 1;
		} else {
			phy->pll_fbd_p = n_pll;
			phy->pll_pre_div1p = 0;
		}

		if (phy->pll_fbd_2p <= 7 && phy->pll_fbd_2p >= 4)
			r_pll = 0x10 >> (7 - phy->pll_fbd_2p);

		if (m_pll == 2) {
			phy->pll_pre_p = 0;
			phy->pll_fbd_s = 0;
			phy->pll_fbd_div1f = 0;
			phy->pll_fbd_div5f = 1;
		} else if (m_pll >= 2 * 2 * r_pll && m_pll <= 2 * 4 * r_pll) {
			phy->pll_pre_p = m_pll / (2 * r_pll);
			phy->pll_fbd_s = 0;
			phy->pll_fbd_div1f = 1;
			phy->pll_fbd_div5f = 0;
		} else if (m_pll >= 2 * 5 * r_pll && m_pll <= 2 * 150 * r_pll) {
			if (((m_pll / (2 * r_pll)) % 2) == 0) {
				phy->pll_pre_p =
					(m_pll / (2 * r_pll)) / 2 - 1;
				phy->pll_fbd_s =
					(m_pll / (2 * r_pll)) % 2 + 2;
			} else {
				phy->pll_pre_p =
					(m_pll / (2 * r_pll)) / 2;
				phy->pll_fbd_s =
					(m_pll / (2 * r_pll)) % 2;
			}
			phy->pll_fbd_div1f = 0;
			phy->pll_fbd_div5f = 0;
		} else {
			phy->pll_pre_p = 0;
			phy->pll_fbd_s = 0;
			phy->pll_fbd_div1f = 0;
			phy->pll_fbd_div5f = 1;
		}

		f_kHz = (u64)1000000000 * (u64)m_pll /
			((u64)ref_clk_ps * (u64)n_pll * (u64)q_pll);
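
		/*
		 * With hypothetical divider values m_pll = 50, n_pll = 2 and
		 * q_pll = 1 (not taken from the original source), this gives
		 * f_kHz = 1000000000 * 50 / (52083 * 2 * 1) = 480003 kHz,
		 * which meets a 480000 kHz request and ends the search below.
		 */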

		if (f_kHz >= req_kHz)
			break;

		tmp_kHz += 10;

	} while (true);

	return f_kHz;
}

static void dsi_get_phy_params(u32 phy_req_kHz,
			       struct mipi_phy_params *phy)
{
	u32 ref_clk_ps = PHY_REF_CLK_PERIOD_PS;
	u32 phy_rate_kHz;
	u32 ui;

	memset(phy, 0, sizeof(*phy));

	phy_rate_kHz = dsi_calc_phy_rate(phy_req_kHz, phy);
	if (!phy_rate_kHz)
		return;

	ui = 1000000 / phy_rate_kHz;

	phy->clk_t_lpx = ROUND(50, 8 * ui);
	phy->clk_t_hs_prepare = ROUND(133, 16 * ui) - 1;

	phy->clk_t_hs_zero = ROUND(262, 8 * ui);
	phy->clk_t_hs_trial = 2 * (ROUND(60, 8 * ui) - 1);
	phy->clk_t_wakeup = ROUND(1000000, (ref_clk_ps / 1000) - 1);
	if (phy->clk_t_wakeup > 0xff)
		phy->clk_t_wakeup = 0xff;
	phy->data_t_wakeup = phy->clk_t_wakeup;
	phy->data_t_lpx = phy->clk_t_lpx;
	phy->data_t_hs_prepare = ROUND(125 + 10 * ui, 16 * ui) - 1;
	phy->data_t_hs_zero = ROUND(105 + 6 * ui, 8 * ui);
	phy->data_t_hs_trial = 2 * (ROUND(60 + 4 * ui, 8 * ui) - 1);
	phy->data_t_ta_go = 3;
	phy->data_t_ta_get = 4;

	phy->pll_enbwt = 1;
	phy->clklp2hs_time = ROUND(407, 8 * ui) + 12;
	phy->clkhs2lp_time = ROUND(105 + 12 * ui, 8 * ui);
	phy->lp2hs_time = ROUND(240 + 12 * ui, 8 * ui) + 1;
	phy->hs2lp_time = phy->clkhs2lp_time;
	phy->clk_to_data_delay = 1 + phy->clklp2hs_time;
	phy->data_to_clk_delay = ROUND(60 + 52 * ui, 8 * ui) +
				 phy->clkhs2lp_time;

	phy->lane_byte_clk_kHz = phy_rate_kHz / 8;
	phy->clk_division =
		DIV_ROUND_UP(phy->lane_byte_clk_kHz, MAX_TX_ESC_CLK);
}

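/*
 * Rough orientation with assumed numbers (not from the original source):
 * a PHY rate of 480000 kHz gives ui = 1000000 / 480000 = 2 (roughly one
 * unit interval in nanoseconds, truncated) and a lane byte clock of
 * 480000 / 8 = 60000 kHz, from which the escape-clock divider above is
 * derived against the MAX_TX_ESC_CLK limit.
 */
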
static u32 dsi_get_dpi_color_coding(enum mipi_dsi_pixel_format format)
{
	/*
	 * TODO: only support RGB888 now, to support more
	 */
	switch (format) {
	case MIPI_DSI_FMT_RGB888:
	default:
		return DSI_24BITS_1;
	}
}

/*
 * dsi phy reg write function
 */
static void dsi_phy_tst_set(void __iomem *base, u32 reg, u32 val)
{
	u32 reg_write = 0x10000 + reg;

	writel(reg_write, base + PHY_TST_CTRL1);
	writel(0x02, base + PHY_TST_CTRL0);
	writel(0x00, base + PHY_TST_CTRL0);

	writel(val, base + PHY_TST_CTRL1);
	writel(0x02, base + PHY_TST_CTRL0);
	writel(0x00, base + PHY_TST_CTRL0);
}

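/*
 * Our reading of the sequence above (a description, not text from the
 * original source): each dsi_phy_tst_set() call first presents the register
 * address (0x10000 + reg) on PHY_TST_CTRL1 and pulses PHY_TST_CTRL0, then
 * presents the value the same way, so every PHY test register write costs
 * two groups of three writel() calls.
 */
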
static void dsi_set_phy_timer(void __iomem *base,
			      struct mipi_phy_params *phy,
			      u32 lanes)
{
	u32 val;

	/*
	 * Set lane value and phy stop wait time.
	 */
	val = (lanes - 1) | (PHY_STOP_WAIT_TIME << 8);
	writel(val, base + PHY_IF_CFG);

	/*
	 * Set phy clk division.
	 */
	val = readl(base + CLKMGR_CFG) | phy->clk_division;
	writel(val, base + CLKMGR_CFG);

	/*
	 * Set lp and hs switching params.
	 */
	dw_update_bits(base + PHY_TMR_CFG, 24, MASK(8), phy->hs2lp_time);
	dw_update_bits(base + PHY_TMR_CFG, 16, MASK(8), phy->lp2hs_time);
	dw_update_bits(base + PHY_TMR_LPCLK_CFG, 16, MASK(10),
		       phy->clkhs2lp_time);
	dw_update_bits(base + PHY_TMR_LPCLK_CFG, 0, MASK(10),
		       phy->clklp2hs_time);
	dw_update_bits(base + CLK_DATA_TMR_CFG, 8, MASK(8),
		       phy->data_to_clk_delay);
	dw_update_bits(base + CLK_DATA_TMR_CFG, 0, MASK(8),
		       phy->clk_to_data_delay);
}

static void dsi_set_mipi_phy(void __iomem *base,
			     struct mipi_phy_params *phy,
			     u32 lanes)
{
	u32 delay_count;
	u32 val;
	u32 i;

	/* phy timer setting */
	dsi_set_phy_timer(base, phy, lanes);

	/*
	 * Reset to clean up phy tst params.
	 */
	writel(0, base + PHY_RSTZ);
	writel(0, base + PHY_TST_CTRL0);
	writel(1, base + PHY_TST_CTRL0);
	writel(0, base + PHY_TST_CTRL0);

	/*
	 * Clock lane timing control setting: TLPX, THS-PREPARE,
	 * THS-ZERO, THS-TRAIL, TWAKEUP.
	 */
	dsi_phy_tst_set(base, CLK_TLPX, phy->clk_t_lpx);
	dsi_phy_tst_set(base, CLK_THS_PREPARE, phy->clk_t_hs_prepare);
	dsi_phy_tst_set(base, CLK_THS_ZERO, phy->clk_t_hs_zero);
	dsi_phy_tst_set(base, CLK_THS_TRAIL, phy->clk_t_hs_trial);
	dsi_phy_tst_set(base, CLK_TWAKEUP, phy->clk_t_wakeup);

	/*
	 * Data lane timing control setting: TLPX, THS-PREPARE,
	 * THS-ZERO, THS-TRAIL, TTA-GO, TTA-GET, TWAKEUP.
	 */
	for (i = 0; i < lanes; i++) {
		dsi_phy_tst_set(base, DATA_TLPX(i), phy->data_t_lpx);
		dsi_phy_tst_set(base, DATA_THS_PREPARE(i),
				phy->data_t_hs_prepare);
		dsi_phy_tst_set(base, DATA_THS_ZERO(i), phy->data_t_hs_zero);
		dsi_phy_tst_set(base, DATA_THS_TRAIL(i), phy->data_t_hs_trial);
		dsi_phy_tst_set(base, DATA_TTA_GO(i), phy->data_t_ta_go);
		dsi_phy_tst_set(base, DATA_TTA_GET(i), phy->data_t_ta_get);
		dsi_phy_tst_set(base, DATA_TWAKEUP(i), phy->data_t_wakeup);
	}

	/*
	 * physical configuration: I, pll I, pll II, pll III,
	 * pll IV, pll V.
	 */
	dsi_phy_tst_set(base, PHY_CFG_I, phy->hstx_ckg_sel);
	val = (phy->pll_fbd_div5f << 5) + (phy->pll_fbd_div1f << 4) +
	      (phy->pll_fbd_2p << 1) + phy->pll_enbwt;
	dsi_phy_tst_set(base, PHY_CFG_PLL_I, val);
	dsi_phy_tst_set(base, PHY_CFG_PLL_II, phy->pll_fbd_p);
	dsi_phy_tst_set(base, PHY_CFG_PLL_III, phy->pll_fbd_s);
	val = (phy->pll_pre_div1p << 7) + phy->pll_pre_p;
	dsi_phy_tst_set(base, PHY_CFG_PLL_IV, val);
	val = (5 << 5) + (phy->pll_vco_750M << 4) + (phy->pll_lpf_rs << 2) +
	      phy->pll_lpf_cs;
	dsi_phy_tst_set(base, PHY_CFG_PLL_V, val);

	writel(PHY_ENABLECLK, base + PHY_RSTZ);
	udelay(1);

	writel(PHY_ENABLECLK | PHY_UNSHUTDOWNZ, base + PHY_RSTZ);
	udelay(1);

	writel(PHY_ENABLECLK | PHY_UNRSTZ | PHY_UNSHUTDOWNZ, base + PHY_RSTZ);
	usleep_range(1000, 1500);

	/*
	 * wait for phy's clock ready
	 */
	delay_count = 100;
	while (delay_count) {
		val = readl(base + PHY_STATUS);
		if ((BIT(0) | BIT(2)) & val)
			break;

		udelay(1);
		delay_count--;
	}

	if (!delay_count)
		DRM_INFO("phylock and phystopstateclklane is not ready.\n");
}

static void dsi_set_mode_timing(void __iomem *base,
				u32 lane_byte_clk_kHz,
				struct drm_display_mode *mode,
				enum mipi_dsi_pixel_format format)
{
	u32 hfp, hbp, hsw, vfp, vbp, vsw;
	u32 hsa_time, hbp_time, hline_time;
	u32 pixel_clk_kHz;
	u32 htot, vtot;
	u32 val;
	u64 tmp;

	val = dsi_get_dpi_color_coding(format);
	writel(val, base + DPI_COLOR_CODING);

	val = (mode->flags & DRM_MODE_FLAG_NHSYNC ? 1 : 0) << 2;
	val |= (mode->flags & DRM_MODE_FLAG_NVSYNC ? 1 : 0) << 1;
	writel(val, base + DPI_CFG_POL);

	/*
	 * The DSI IP accepts vertical timing using lines as normal,
	 * but horizontal timing is a mixture of pixel-clocks for the
	 * active region and byte-lane clocks for the blanking-related
	 * timings. hfp is specified as the total hline_time in byte-
	 * lane clocks minus hsa, hbp and active.
	 */
	pixel_clk_kHz = mode->clock;
	htot = mode->htotal;
	vtot = mode->vtotal;
	hfp = mode->hsync_start - mode->hdisplay;
	hbp = mode->htotal - mode->hsync_end;
	hsw = mode->hsync_end - mode->hsync_start;
	vfp = mode->vsync_start - mode->vdisplay;
	vbp = mode->vtotal - mode->vsync_end;
	vsw = mode->vsync_end - mode->vsync_start;
	if (vsw > 15) {
		DRM_DEBUG_DRIVER("vsw exceeded 15\n");
		vsw = 15;
	}

	hsa_time = (hsw * lane_byte_clk_kHz) / pixel_clk_kHz;
	hbp_time = (hbp * lane_byte_clk_kHz) / pixel_clk_kHz;
	tmp = (u64)htot * (u64)lane_byte_clk_kHz;
	hline_time = DIV_ROUND_UP(tmp, pixel_clk_kHz);
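
	/*
	 * Worked example with assumed numbers (not from the original
	 * source): a 1080p mode with htotal = 2200 and
	 * pixel_clk_kHz = 148500, driven at a PHY rate of exactly
	 * 891000 kHz (lane_byte_clk_kHz = 111375), yields
	 * hline_time = DIV_ROUND_UP(2200 * 111375, 148500) = 1650
	 * byte-lane clocks per line.
	 */
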
	/* all specified in byte-lane clocks */
	writel(hsa_time, base + VID_HSA_TIME);
	writel(hbp_time, base + VID_HBP_TIME);
	writel(hline_time, base + VID_HLINE_TIME);

	writel(vsw, base + VID_VSA_LINES);
	writel(vbp, base + VID_VBP_LINES);
	writel(vfp, base + VID_VFP_LINES);
	writel(mode->vdisplay, base + VID_VACTIVE_LINES);
	writel(mode->hdisplay, base + VID_PKT_SIZE);

	DRM_DEBUG_DRIVER("htot=%d, hfp=%d, hbp=%d, hsw=%d\n",
			 htot, hfp, hbp, hsw);
	DRM_DEBUG_DRIVER("vtot=%d, vfp=%d, vbp=%d, vsw=%d\n",
			 vtot, vfp, vbp, vsw);
	DRM_DEBUG_DRIVER("hsa_time=%d, hbp_time=%d, hline_time=%d\n",
			 hsa_time, hbp_time, hline_time);
}

static void dsi_set_video_mode(void __iomem *base, unsigned long flags)
{
	u32 val;
	u32 mode_mask = MIPI_DSI_MODE_VIDEO | MIPI_DSI_MODE_VIDEO_BURST |
			MIPI_DSI_MODE_VIDEO_SYNC_PULSE;
	u32 non_burst_sync_pulse = MIPI_DSI_MODE_VIDEO |
				   MIPI_DSI_MODE_VIDEO_SYNC_PULSE;
	u32 non_burst_sync_event = MIPI_DSI_MODE_VIDEO;

	/*
	 * choose video mode type
	 */
	if ((flags & mode_mask) == non_burst_sync_pulse)
		val = DSI_NON_BURST_SYNC_PULSES;
	else if ((flags & mode_mask) == non_burst_sync_event)
		val = DSI_NON_BURST_SYNC_EVENTS;
	else
		val = DSI_BURST_SYNC_PULSES_1;
	writel(val, base + VID_MODE_CFG);

	writel(PHY_TXREQUESTCLKHS, base + LPCLK_CTRL);
	writel(DSI_VIDEO_MODE, base + MODE_CFG);
}

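/*
 * For orientation (our reading, not text from the original source): a panel
 * declaring MIPI_DSI_MODE_VIDEO | MIPI_DSI_MODE_VIDEO_SYNC_PULSE without
 * MIPI_DSI_MODE_VIDEO_BURST matches non_burst_sync_pulse above, while any
 * combination that includes MIPI_DSI_MODE_VIDEO_BURST falls through to the
 * default burst configuration.
 */
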
static void dsi_mipi_init(struct dw_dsi *dsi)
{
	struct dsi_hw_ctx *ctx = dsi->ctx;
	struct mipi_phy_params *phy = &dsi->phy;
	struct drm_display_mode *mode = &dsi->cur_mode;
	u32 bpp = mipi_dsi_pixel_format_to_bpp(dsi->format);
	void __iomem *base = ctx->base;
	u32 dphy_req_kHz;

	dphy_req_kHz = mode->clock * bpp / dsi->lanes;
	dsi_get_phy_params(dphy_req_kHz, phy);

	writel(RESET, base + PWR_UP);

	/* set dsi phy params */
	dsi_set_mipi_phy(base, phy, dsi->lanes);

	/* set dsi mode timing */
	dsi_set_mode_timing(base, phy->lane_byte_clk_kHz, mode, dsi->format);

	/* set dsi video mode */
	dsi_set_video_mode(base, dsi->mode_flags);

	writel(POWERUP, base + PWR_UP);

	DRM_DEBUG_DRIVER("lanes=%d, pixel_clk=%d kHz, bytes_freq=%d kHz\n",
			 dsi->lanes, mode->clock, phy->lane_byte_clk_kHz);
}

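/*
 * Assumed example values (not from the original source): a 148500 kHz
 * pixel clock at RGB888 (bpp = 24) over four lanes requests
 * dphy_req_kHz = 148500 * 24 / 4 = 891000 kHz, the figure used in the
 * hline_time example above.
 */
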
static void dsi_encoder_disable(struct drm_encoder *encoder)
{
	struct dw_dsi *dsi = encoder_to_dsi(encoder);
	struct dsi_hw_ctx *ctx = dsi->ctx;
	void __iomem *base = ctx->base;

	writel(0, base + PWR_UP);
	writel(0, base + LPCLK_CTRL);
	writel(0, base + PHY_RSTZ);
	clk_disable_unprepare(ctx->pclk);
}

static void dsi_encoder_enable(struct drm_encoder *encoder)
{
	struct dw_dsi *dsi = encoder_to_dsi(encoder);
	struct dsi_hw_ctx *ctx = dsi->ctx;
	int ret;

	ret = clk_prepare_enable(ctx->pclk);
	if (ret) {
		DRM_ERROR("fail to enable pclk: %d\n", ret);
		return;
	}

	dsi_mipi_init(dsi);
}

static enum drm_mode_status dsi_encoder_phy_mode_valid(
					struct drm_encoder *encoder,
					const struct drm_display_mode *mode)
{
	struct dw_dsi *dsi = encoder_to_dsi(encoder);
	struct mipi_phy_params phy;
	u32 bpp = mipi_dsi_pixel_format_to_bpp(dsi->format);
	u32 req_kHz, act_kHz, lane_byte_clk_kHz;

	/* Calculate the lane byte clk using the adjusted mode clk */
	memset(&phy, 0, sizeof(phy));
	req_kHz = mode->clock * bpp / dsi->lanes;
	act_kHz = dsi_calc_phy_rate(req_kHz, &phy);
	lane_byte_clk_kHz = act_kHz / 8;

	DRM_DEBUG_DRIVER("Checking mode %ix%i-%i@%i clock: %i...",
			 mode->hdisplay, mode->vdisplay, bpp,
			 drm_mode_vrefresh(mode), mode->clock);

	/*
	 * Make sure the adjusted mode clock and the lane byte clk
	 * have a common denominator base frequency.
	 */
	if (mode->clock / dsi->lanes == lane_byte_clk_kHz / 3) {
		DRM_DEBUG_DRIVER("OK!\n");
		return MODE_OK;
	}

	DRM_DEBUG_DRIVER("BAD!\n");
	return MODE_BAD;
}

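/*
 * Why the factor of three works (our explanation, not text from the
 * original source): for RGB888 the requested PHY rate is
 * mode->clock * 24 / lanes, so when the PLL hits it exactly the lane byte
 * clock is mode->clock * 3 / lanes and lane_byte_clk_kHz / 3 equals
 * mode->clock / dsi->lanes; any rounding in the PLL search breaks the
 * equality and the mode is rejected.
 */
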
static enum drm_mode_status dsi_encoder_mode_valid(struct drm_encoder *encoder,
					const struct drm_display_mode *mode)
{
	const struct drm_crtc_helper_funcs *crtc_funcs = NULL;
	struct drm_crtc *crtc = NULL;
	struct drm_display_mode adj_mode;
	enum drm_mode_status ret;

	/*
	 * The crtc might adjust the mode, so go through the
	 * possible crtcs (technically just one) and call
	 * mode_fixup to figure out the adjusted mode before we
	 * validate it.
	 */
	drm_for_each_crtc(crtc, encoder->dev) {
		/*
		 * reset adj_mode to the mode value each time,
		 * so we don't adjust the mode twice
		 */
		drm_mode_copy(&adj_mode, mode);

		crtc_funcs = crtc->helper_private;
		if (crtc_funcs && crtc_funcs->mode_fixup)
			if (!crtc_funcs->mode_fixup(crtc, mode, &adj_mode))
				return MODE_BAD;

		ret = dsi_encoder_phy_mode_valid(encoder, &adj_mode);
		if (ret != MODE_OK)
			return ret;
	}

	return MODE_OK;
}

static void dsi_encoder_mode_set(struct drm_encoder *encoder,
				 struct drm_display_mode *mode,
				 struct drm_display_mode *adj_mode)
{
	struct dw_dsi *dsi = encoder_to_dsi(encoder);

	drm_mode_copy(&dsi->cur_mode, adj_mode);
}

static int dsi_encoder_atomic_check(struct drm_encoder *encoder,
				    struct drm_crtc_state *crtc_state,
				    struct drm_connector_state *conn_state)
{
	return 0;
}

static const struct drm_encoder_helper_funcs dw_encoder_helper_funcs = {
	.atomic_check	= dsi_encoder_atomic_check,
	.mode_valid	= dsi_encoder_mode_valid,
	.mode_set	= dsi_encoder_mode_set,
	.enable		= dsi_encoder_enable,
	.disable	= dsi_encoder_disable
};

static int dw_drm_encoder_init(struct device *dev,
			       struct drm_device *drm_dev,
			       struct drm_encoder *encoder)
{
	int ret;
	u32 crtc_mask = drm_of_find_possible_crtcs(drm_dev, dev->of_node);

	if (!crtc_mask) {
		DRM_ERROR("failed to find crtc mask\n");
		return -EINVAL;
	}

	encoder->possible_crtcs = crtc_mask;
	ret = drm_simple_encoder_init(drm_dev, encoder, DRM_MODE_ENCODER_DSI);
	if (ret) {
		DRM_ERROR("failed to init dsi encoder\n");
		return ret;
	}

	drm_encoder_helper_add(encoder, &dw_encoder_helper_funcs);

	return 0;
}

static int dsi_host_attach(struct mipi_dsi_host *host,
			   struct mipi_dsi_device *mdsi)
{
	struct dw_dsi *dsi = host_to_dsi(host);

	if (mdsi->lanes < 1 || mdsi->lanes > 4) {
		DRM_ERROR("dsi device params invalid\n");
		return -EINVAL;
	}

	dsi->lanes = mdsi->lanes;
	dsi->format = mdsi->format;
	dsi->mode_flags = mdsi->mode_flags;

	return 0;
}

static int dsi_host_detach(struct mipi_dsi_host *host,
			   struct mipi_dsi_device *mdsi)
{
	return 0;
}

static const struct mipi_dsi_host_ops dsi_host_ops = {
	.attach = dsi_host_attach,
	.detach = dsi_host_detach,
};

static int dsi_host_init(struct device *dev, struct dw_dsi *dsi)
{
	struct mipi_dsi_host *host = &dsi->host;
	int ret;

	host->dev = dev;
	host->ops = &dsi_host_ops;
	ret = mipi_dsi_host_register(host);
	if (ret) {
		DRM_ERROR("failed to register dsi host\n");
		return ret;
	}

	return 0;
}

static int dsi_bridge_init(struct drm_device *dev, struct dw_dsi *dsi)
{
	struct drm_encoder *encoder = &dsi->encoder;
	struct drm_bridge *bridge = dsi->bridge;
	int ret;

	/* associate the bridge to dsi encoder */
	ret = drm_bridge_attach(encoder, bridge, NULL, 0);
	if (ret) {
		DRM_ERROR("failed to attach external bridge\n");
		return ret;
	}

	return 0;
}

static int dsi_bind(struct device *dev, struct device *master, void *data)
{
	struct dsi_data *ddata = dev_get_drvdata(dev);
	struct dw_dsi *dsi = &ddata->dsi;
	struct drm_device *drm_dev = data;
	int ret;

	ret = dw_drm_encoder_init(dev, drm_dev, &dsi->encoder);
	if (ret)
		return ret;

	ret = dsi_host_init(dev, dsi);
	if (ret)
		return ret;

	ret = dsi_bridge_init(drm_dev, dsi);
	if (ret)
		return ret;

	return 0;
}

static void dsi_unbind(struct device *dev, struct device *master, void *data)
{
}

static const struct component_ops dsi_ops = {
	.bind	= dsi_bind,
	.unbind	= dsi_unbind,
};

static int dsi_parse_dt(struct platform_device *pdev, struct dw_dsi *dsi)
{
	struct dsi_hw_ctx *ctx = dsi->ctx;
	struct device_node *np = pdev->dev.of_node;
	struct resource *res;
	int ret;

	/*
	 * Get the endpoint node. In our case, dsi has one output port1
	 * to which the external HDMI bridge is connected.
	 */
	ret = drm_of_find_panel_or_bridge(np, 1, 0, NULL, &dsi->bridge);
	if (ret)
		return ret;

	ctx->pclk = devm_clk_get(&pdev->dev, "pclk");
	if (IS_ERR(ctx->pclk)) {
		DRM_ERROR("failed to get pclk clock\n");
		return PTR_ERR(ctx->pclk);
	}

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	ctx->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(ctx->base)) {
		DRM_ERROR("failed to remap dsi io region\n");
		return PTR_ERR(ctx->base);
	}

	return 0;
}

static int dsi_probe(struct platform_device *pdev)
{
	struct dsi_data *data;
	struct dw_dsi *dsi;
	struct dsi_hw_ctx *ctx;
	int ret;

	data = devm_kzalloc(&pdev->dev, sizeof(*data), GFP_KERNEL);
	if (!data) {
		DRM_ERROR("failed to allocate dsi data.\n");
		return -ENOMEM;
	}
	dsi = &data->dsi;
	ctx = &data->ctx;
	dsi->ctx = ctx;

	ret = dsi_parse_dt(pdev, dsi);
	if (ret)
		return ret;

	platform_set_drvdata(pdev, data);

	return component_add(&pdev->dev, &dsi_ops);
}

static int dsi_remove(struct platform_device *pdev)
{
	component_del(&pdev->dev, &dsi_ops);

	return 0;
}

static const struct of_device_id dsi_of_match[] = {
	{.compatible = "hisilicon,hi6220-dsi"},
	{ }
};
MODULE_DEVICE_TABLE(of, dsi_of_match);

static struct platform_driver dsi_driver = {
	.probe = dsi_probe,
	.remove = dsi_remove,
	.driver = {
		.name = "dw-dsi",
		.of_match_table = dsi_of_match,
	},
};

module_platform_driver(dsi_driver);

MODULE_AUTHOR("Xinliang Liu <xinliang.liu@linaro.org>");
MODULE_AUTHOR("Xinliang Liu <z.liuxinliang@hisilicon.com>");
MODULE_AUTHOR("Xinwei Kong <kong.kongxinwei@hisilicon.com>");
MODULE_DESCRIPTION("DesignWare MIPI DSI Host Controller v1.02 driver");
MODULE_LICENSE("GPL v2");