// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2020 Unisoc Inc.
 */

#include <linux/component.h>
#include <linux/module.h>
#include <linux/platform_device.h>

#include <video/mipi_display.h>
#include <video/videomode.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_bridge.h>
#include <drm/drm_of.h>
#include <drm/drm_probe_helper.h>

#include "sprd_drm.h"
#include "sprd_dpu.h"
#include "sprd_dsi.h"

#define SOFT_RESET 0x04
#define MASK_PROTOCOL_INT 0x0C
#define MASK_INTERNAL_INT 0x14
#define DSI_MODE_CFG 0x18

#define VIRTUAL_CHANNEL_ID 0x1C
#define GEN_RX_VCID GENMASK(1, 0)
#define VIDEO_PKT_VCID GENMASK(3, 2)

#define DPI_VIDEO_FORMAT 0x20
#define DPI_VIDEO_MODE_FORMAT GENMASK(5, 0)
#define LOOSELY18_EN BIT(6)

#define VIDEO_PKT_CONFIG 0x24
#define VIDEO_PKT_SIZE GENMASK(15, 0)
#define VIDEO_LINE_CHUNK_NUM GENMASK(31, 16)

#define VIDEO_LINE_HBLK_TIME 0x28
#define VIDEO_LINE_HBP_TIME GENMASK(15, 0)
#define VIDEO_LINE_HSA_TIME GENMASK(31, 16)

#define VIDEO_LINE_TIME 0x2C

#define VIDEO_VBLK_LINES 0x30
#define VFP_LINES GENMASK(9, 0)
#define VBP_LINES GENMASK(19, 10)
#define VSA_LINES GENMASK(29, 20)

#define VIDEO_VACTIVE_LINES 0x34

#define VID_MODE_CFG 0x38
#define VID_MODE_TYPE GENMASK(1, 0)
#define LP_VSA_EN BIT(8)
#define LP_VBP_EN BIT(9)
#define LP_VFP_EN BIT(10)
#define LP_VACT_EN BIT(11)
#define LP_HBP_EN BIT(12)
#define LP_HFP_EN BIT(13)
#define FRAME_BTA_ACK_EN BIT(14)

#define TIMEOUT_CNT_CLK_CONFIG 0x40
#define HTX_TO_CONFIG 0x44
#define LRX_H_TO_CONFIG 0x48

#define TX_ESC_CLK_CONFIG 0x5C

#define CMD_MODE_CFG 0x68
#define TEAR_FX_EN BIT(0)

#define GEN_HDR 0x6C
#define GEN_DT GENMASK(5, 0)
#define GEN_VC GENMASK(7, 6)

#define GEN_PLD_DATA 0x70

#define PHY_CLK_LANE_LP_CTRL 0x74
#define PHY_CLKLANE_TX_REQ_HS BIT(0)
#define AUTO_CLKLANE_CTRL_EN BIT(1)

#define PHY_INTERFACE_CTRL 0x78
#define RF_PHY_SHUTDOWN BIT(0)
#define RF_PHY_RESET_N BIT(1)
#define RF_PHY_CLK_EN BIT(2)

#define CMD_MODE_STATUS 0x98
#define GEN_CMD_RDATA_FIFO_EMPTY BIT(1)
#define GEN_CMD_WDATA_FIFO_EMPTY BIT(3)
#define GEN_CMD_CMD_FIFO_EMPTY BIT(5)
#define GEN_CMD_RDCMD_DONE BIT(7)

#define PHY_STATUS 0x9C
#define PHY_LOCK BIT(1)

#define PHY_MIN_STOP_TIME 0xA0
#define PHY_LANE_NUM_CONFIG 0xA4

#define PHY_CLKLANE_TIME_CONFIG 0xA8
#define PHY_CLKLANE_LP_TO_HS_TIME GENMASK(15, 0)
#define PHY_CLKLANE_HS_TO_LP_TIME GENMASK(31, 16)

#define PHY_DATALANE_TIME_CONFIG 0xAC
#define PHY_DATALANE_LP_TO_HS_TIME GENMASK(15, 0)
#define PHY_DATALANE_HS_TO_LP_TIME GENMASK(31, 16)

#define MAX_READ_TIME 0xB0

#define RX_PKT_CHECK_CONFIG 0xB4
#define RX_PKT_ECC_EN BIT(0)
#define RX_PKT_CRC_EN BIT(1)

#define TA_EN 0xB8

#define EOTP_EN 0xBC
#define TX_EOTP_EN BIT(0)
#define RX_EOTP_EN BIT(1)

#define VIDEO_NULLPKT_SIZE 0xC0
#define DCS_WM_PKT_SIZE 0xC4

#define VIDEO_SIG_DELAY_CONFIG 0xD0
#define VIDEO_SIG_DELAY GENMASK(23, 0)

#define PHY_TST_CTRL0 0xF0
#define PHY_TESTCLR BIT(0)
#define PHY_TESTCLK BIT(1)

#define PHY_TST_CTRL1 0xF4
#define PHY_TESTDIN GENMASK(7, 0)
#define PHY_TESTDOUT GENMASK(15, 8)
#define PHY_TESTEN BIT(16)

#define host_to_dsi(host) \
	container_of(host, struct sprd_dsi, host)

static inline u32
dsi_reg_rd(struct dsi_context *ctx, u32 offset, u32 mask,
	   u32 shift)
{
	return (readl(ctx->base + offset) & mask) >> shift;
}

static inline void
dsi_reg_wr(struct dsi_context *ctx, u32 offset, u32 mask,
	   u32 shift, u32 val)
{
	u32 ret;

	ret = readl(ctx->base + offset);
	ret &= ~mask;
	ret |= (val << shift) & mask;
	writel(ret, ctx->base + offset);
}

static inline void
dsi_reg_up(struct dsi_context *ctx, u32 offset, u32 mask,
	   u32 val)
{
	u32 ret = readl(ctx->base + offset);

	writel((ret & ~mask) | (val & mask), ctx->base + offset);
}
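
/*
 * The three helpers above implement masked read-modify-write access to the
 * DSI host registers: dsi_reg_wr() clears the field selected by @mask and
 * ORs in @val shifted into position, while dsi_reg_up() expects a value that
 * is already aligned to the mask (typically the mask itself, or 0).  As an
 * illustrative call (not taken from the code below),
 *
 *	dsi_reg_wr(ctx, VIRTUAL_CHANNEL_ID, VIDEO_PKT_VCID, 2, 2);
 *
 * would read the register, clear GENMASK(3, 2) and write back (2 << 2),
 * setting the video packet virtual channel ID field to 2.
 */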

static int regmap_tst_io_write(void *context, u32 reg, u32 val)
{
	struct sprd_dsi *dsi = context;
	struct dsi_context *ctx = &dsi->ctx;

	if (val > 0xff || reg > 0xff)
		return -EINVAL;

	drm_dbg(dsi->drm, "reg = 0x%02x, val = 0x%02x\n", reg, val);

	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, PHY_TESTEN);
	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, reg);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);
	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, 0);
	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, val);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);

	return 0;
}

static int regmap_tst_io_read(void *context, u32 reg, u32 *val)
{
	struct sprd_dsi *dsi = context;
	struct dsi_context *ctx = &dsi->ctx;
	int ret;

	if (reg > 0xff)
		return -EINVAL;

	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, PHY_TESTEN);
	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, reg);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);
	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, 0);

	udelay(1);

	ret = dsi_reg_rd(ctx, PHY_TST_CTRL1, PHY_TESTDOUT, 8);
	if (ret < 0)
		return ret;

	*val = ret;

	drm_dbg(dsi->drm, "reg = 0x%02x, val = 0x%02x\n", reg, *val);

	return 0;
}
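
/*
 * The two accessors above drive the D-PHY test interface exposed through
 * PHY_TST_CTRL0/1: with PHY_TESTEN high, a PHY_TESTCLK pulse latches the
 * register address placed on PHY_TESTDIN; with PHY_TESTEN low, the next
 * pulse latches the data, and read-back data appears on PHY_TESTDOUT.
 * They are wrapped in the regmap below, presumably so the PHY setup code
 * (dphy_pll_config()/dphy_timing_config()) can use ordinary regmap
 * accessors on these 8-bit test registers.
 */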

static const struct regmap_bus regmap_tst_io = {
	.reg_write = regmap_tst_io_write,
	.reg_read = regmap_tst_io_read,
};

static const struct regmap_config byte_config = {
	.reg_bits = 8,
	.val_bits = 8,
};

static int dphy_wait_pll_locked(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	int i;

	for (i = 0; i < 50000; i++) {
		if (dsi_reg_rd(ctx, PHY_STATUS, PHY_LOCK, 1))
			return 0;
		udelay(3);
	}

	drm_err(dsi->drm, "dphy pll can not be locked\n");
	return -ETIMEDOUT;
}

static int dsi_wait_tx_payload_fifo_empty(struct dsi_context *ctx)
{
	int i;

	for (i = 0; i < 5000; i++) {
		if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_WDATA_FIFO_EMPTY, 3))
			return 0;
		udelay(1);
	}

	return -ETIMEDOUT;
}

static int dsi_wait_tx_cmd_fifo_empty(struct dsi_context *ctx)
{
	int i;

	for (i = 0; i < 5000; i++) {
		if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_CMD_FIFO_EMPTY, 5))
			return 0;
		udelay(1);
	}

	return -ETIMEDOUT;
}

static int dsi_wait_rd_resp_completed(struct dsi_context *ctx)
{
	int i;

	for (i = 0; i < 10000; i++) {
		if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDCMD_DONE, 7))
			return 0;
		udelay(10);
	}

	return -ETIMEDOUT;
}

static u16 calc_bytes_per_pixel_x100(int coding)
{
	u16 bpp_x100;

	switch (coding) {
	case COLOR_CODE_16BIT_CONFIG1:
	case COLOR_CODE_16BIT_CONFIG2:
	case COLOR_CODE_16BIT_CONFIG3:
		bpp_x100 = 200;
		break;
	case COLOR_CODE_18BIT_CONFIG1:
	case COLOR_CODE_18BIT_CONFIG2:
		bpp_x100 = 225;
		break;
	case COLOR_CODE_24BIT:
		bpp_x100 = 300;
		break;
	case COLOR_CODE_COMPRESSTION:
		bpp_x100 = 100;
		break;
	case COLOR_CODE_20BIT_YCC422_LOOSELY:
		bpp_x100 = 250;
		break;
	case COLOR_CODE_24BIT_YCC422:
		bpp_x100 = 300;
		break;
	case COLOR_CODE_16BIT_YCC422:
		bpp_x100 = 200;
		break;
	case COLOR_CODE_30BIT:
		bpp_x100 = 375;
		break;
	case COLOR_CODE_36BIT:
		bpp_x100 = 450;
		break;
	case COLOR_CODE_12BIT_YCC420:
		bpp_x100 = 150;
		break;
	default:
		DRM_ERROR("invalid color coding");
		bpp_x100 = 0;
		break;
	}

	return bpp_x100;
}
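
/*
 * The x100 scaling keeps fractional bytes-per-pixel values in integer math:
 * 18bpp RGB666, for example, is 2.25 bytes per pixel and is returned as 225,
 * and callers multiply by a pixel count before dividing by 100 (see
 * bytes_per_chunk in sprd_dsi_dpi_video()).
 */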

static u8 calc_video_size_step(int coding)
{
	u8 video_size_step;

	switch (coding) {
	case COLOR_CODE_16BIT_CONFIG1:
	case COLOR_CODE_16BIT_CONFIG2:
	case COLOR_CODE_16BIT_CONFIG3:
	case COLOR_CODE_18BIT_CONFIG1:
	case COLOR_CODE_18BIT_CONFIG2:
	case COLOR_CODE_24BIT:
	case COLOR_CODE_COMPRESSTION:
		return video_size_step = 1;
	case COLOR_CODE_20BIT_YCC422_LOOSELY:
	case COLOR_CODE_24BIT_YCC422:
	case COLOR_CODE_16BIT_YCC422:
	case COLOR_CODE_30BIT:
	case COLOR_CODE_36BIT:
	case COLOR_CODE_12BIT_YCC420:
		return video_size_step = 2;
	default:
		DRM_ERROR("invalid color coding");
		return 0;
	}
}

static u16 round_video_size(int coding, u16 video_size)
{
	switch (coding) {
	case COLOR_CODE_16BIT_YCC422:
	case COLOR_CODE_24BIT_YCC422:
	case COLOR_CODE_20BIT_YCC422_LOOSELY:
	case COLOR_CODE_12BIT_YCC420:
		/* round up active H pixels to a multiple of 2 */
		if ((video_size % 2) != 0)
			video_size += 1;
		break;
	default:
		break;
	}

	return video_size;
}

#define SPRD_MIPI_DSI_FMT_DSC 0xff
static u32 fmt_to_coding(u32 fmt)
{
	switch (fmt) {
	case MIPI_DSI_FMT_RGB565:
		return COLOR_CODE_16BIT_CONFIG1;
	case MIPI_DSI_FMT_RGB666:
	case MIPI_DSI_FMT_RGB666_PACKED:
		return COLOR_CODE_18BIT_CONFIG1;
	case MIPI_DSI_FMT_RGB888:
		return COLOR_CODE_24BIT;
	case SPRD_MIPI_DSI_FMT_DSC:
		return COLOR_CODE_COMPRESSTION;
	default:
		DRM_ERROR("Unsupported format (%d)\n", fmt);
		return COLOR_CODE_24BIT;
	}
}

#define ns_to_cycle(ns, byte_clk) \
	DIV_ROUND_UP((ns) * (byte_clk), 1000000)
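
/*
 * ns_to_cycle() converts a time in nanoseconds into byte-clock cycles.
 * byte_clk is derived below as dsi->slave->hs_rate / 8 and appears to be
 * expressed in kHz, so cycles = ns * f[kHz] / 1000000.  For example, a
 * 500 ns LP-to-HS time at a 62500 kHz byte clock (a 500 Mbps lane rate)
 * rounds up to DIV_ROUND_UP(500 * 62500, 1000000) = 32 cycles.
 */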

static void sprd_dsi_init(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	u32 byte_clk = dsi->slave->hs_rate / 8;
	u16 data_hs2lp, data_lp2hs, clk_hs2lp, clk_lp2hs;
	u16 max_rd_time;
	int div;

	writel(0, ctx->base + SOFT_RESET);
	writel(0xffffffff, ctx->base + MASK_PROTOCOL_INT);
	writel(0xffffffff, ctx->base + MASK_INTERNAL_INT);
	writel(1, ctx->base + DSI_MODE_CFG);
	dsi_reg_up(ctx, EOTP_EN, RX_EOTP_EN, 0);
	dsi_reg_up(ctx, EOTP_EN, TX_EOTP_EN, 0);
	dsi_reg_up(ctx, RX_PKT_CHECK_CONFIG, RX_PKT_ECC_EN, RX_PKT_ECC_EN);
	dsi_reg_up(ctx, RX_PKT_CHECK_CONFIG, RX_PKT_CRC_EN, RX_PKT_CRC_EN);
	writel(1, ctx->base + TA_EN);
	dsi_reg_up(ctx, VIRTUAL_CHANNEL_ID, VIDEO_PKT_VCID, 0);
	dsi_reg_up(ctx, VIRTUAL_CHANNEL_ID, GEN_RX_VCID, 0);

	div = DIV_ROUND_UP(byte_clk, dsi->slave->lp_rate);
	writel(div, ctx->base + TX_ESC_CLK_CONFIG);

	max_rd_time = ns_to_cycle(ctx->max_rd_time, byte_clk);
	writel(max_rd_time, ctx->base + MAX_READ_TIME);

	data_hs2lp = ns_to_cycle(ctx->data_hs2lp, byte_clk);
	data_lp2hs = ns_to_cycle(ctx->data_lp2hs, byte_clk);
	clk_hs2lp = ns_to_cycle(ctx->clk_hs2lp, byte_clk);
	clk_lp2hs = ns_to_cycle(ctx->clk_lp2hs, byte_clk);
	dsi_reg_wr(ctx, PHY_DATALANE_TIME_CONFIG,
		   PHY_DATALANE_HS_TO_LP_TIME, 16, data_hs2lp);
	dsi_reg_wr(ctx, PHY_DATALANE_TIME_CONFIG,
		   PHY_DATALANE_LP_TO_HS_TIME, 0, data_lp2hs);
	dsi_reg_wr(ctx, PHY_CLKLANE_TIME_CONFIG,
		   PHY_CLKLANE_HS_TO_LP_TIME, 16, clk_hs2lp);
	dsi_reg_wr(ctx, PHY_CLKLANE_TIME_CONFIG,
		   PHY_CLKLANE_LP_TO_HS_TIME, 0, clk_lp2hs);

	writel(1, ctx->base + SOFT_RESET);
}
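
/*
 * Note that SOFT_RESET is written with 0 at the top of sprd_dsi_init() and
 * with 1 at the end: the controller appears to be held in reset while the
 * protocol, timeout and lane-timing registers are programmed, and is only
 * released once configuration is complete.  sprd_dsi_dpi_video() and
 * sprd_dsi_edpi_video() below follow the same pattern.
 */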

/*
 * Free up resources and shut down the host controller and PHY.
 */
static void sprd_dsi_fini(struct dsi_context *ctx)
{
	writel(0xffffffff, ctx->base + MASK_PROTOCOL_INT);
	writel(0xffffffff, ctx->base + MASK_INTERNAL_INT);
	writel(0, ctx->base + SOFT_RESET);
}

/*
 * If not in burst mode, compute the video and null packet sizes as needed.
 * Configure timers for the data lanes and/or the clock lane to return to LP
 * when the bandwidth is not filled by data.
 */
static int sprd_dsi_dpi_video(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	struct videomode *vm = &ctx->vm;
	u32 byte_clk = dsi->slave->hs_rate / 8;
	u16 bpp_x100;
	u16 video_size;
	u32 ratio_x1000;
	u16 null_pkt_size = 0;
	u8 video_size_step;
	u32 hs_to;
	u32 total_bytes;
	u32 bytes_per_chunk;
	u32 chunks = 0;
	u32 bytes_left = 0;
	u32 chunk_overhead;
	const u8 pkt_header = 6;
	u8 coding;
	int div;
	u16 hline;
	u16 byte_cycle;

	coding = fmt_to_coding(dsi->slave->format);
	video_size = round_video_size(coding, vm->hactive);
	bpp_x100 = calc_bytes_per_pixel_x100(coding);
	video_size_step = calc_video_size_step(coding);
	ratio_x1000 = byte_clk * 1000 / (vm->pixelclock / 1000);
	hline = vm->hactive + vm->hsync_len + vm->hfront_porch +
		vm->hback_porch;

	writel(0, ctx->base + SOFT_RESET);
	dsi_reg_wr(ctx, VID_MODE_CFG, FRAME_BTA_ACK_EN, 15, ctx->frame_ack_en);
	dsi_reg_wr(ctx, DPI_VIDEO_FORMAT, DPI_VIDEO_MODE_FORMAT, 0, coding);
	dsi_reg_wr(ctx, VID_MODE_CFG, VID_MODE_TYPE, 0, ctx->burst_mode);
	byte_cycle = 95 * hline * ratio_x1000 / 100000;
	dsi_reg_wr(ctx, VIDEO_SIG_DELAY_CONFIG, VIDEO_SIG_DELAY, 0, byte_cycle);
	byte_cycle = hline * ratio_x1000 / 1000;
	writel(byte_cycle, ctx->base + VIDEO_LINE_TIME);
	byte_cycle = vm->hsync_len * ratio_x1000 / 1000;
	dsi_reg_wr(ctx, VIDEO_LINE_HBLK_TIME, VIDEO_LINE_HSA_TIME, 16, byte_cycle);
	byte_cycle = vm->hback_porch * ratio_x1000 / 1000;
	dsi_reg_wr(ctx, VIDEO_LINE_HBLK_TIME, VIDEO_LINE_HBP_TIME, 0, byte_cycle);
	writel(vm->vactive, ctx->base + VIDEO_VACTIVE_LINES);
	dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VFP_LINES, 0, vm->vfront_porch);
	dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VBP_LINES, 10, vm->vback_porch);
	dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VSA_LINES, 20, vm->vsync_len);
	dsi_reg_up(ctx, VID_MODE_CFG, LP_HBP_EN | LP_HFP_EN | LP_VACT_EN |
			LP_VFP_EN | LP_VBP_EN | LP_VSA_EN, LP_HBP_EN | LP_HFP_EN |
			LP_VACT_EN | LP_VFP_EN | LP_VBP_EN | LP_VSA_EN);
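
	/*
	 * ratio_x1000 is byte-clock cycles per pixel-clock cycle, scaled by
	 * 1000 to stay in integer math, so "pixels * ratio_x1000 / 1000"
	 * converts DPI pixel counts into byte-clock cycles.  With assumed
	 * numbers (not from the driver): byte_clk = 62500 kHz and a
	 * 148500 kHz pixel clock give ratio_x1000 = 62500 * 1000 / 148500
	 * = 420, so a 2200-pixel hline lasts about 2200 * 420 / 1000 = 924
	 * byte cycles, and VIDEO_SIG_DELAY is programmed with 95% of that.
	 */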

	hs_to = (hline * vm->vactive) + (2 * bpp_x100) / 100;
	for (div = 0x80; (div < hs_to) && (div > 2); div--) {
		if ((hs_to % div) == 0) {
			writel(div, ctx->base + TIMEOUT_CNT_CLK_CONFIG);
			writel(hs_to / div, ctx->base + LRX_H_TO_CONFIG);
			writel(hs_to / div, ctx->base + HTX_TO_CONFIG);
			break;
		}
	}
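
	/*
	 * The loop above searches downwards from 0x80 for an exact divisor of
	 * hs_to so the timeout can be expressed as a (clock divider, count)
	 * pair without rounding: the divider goes to TIMEOUT_CNT_CLK_CONFIG
	 * and the quotient to both the HS TX and LP RX timeout counters.
	 */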

	if (ctx->burst_mode == VIDEO_BURST_WITH_SYNC_PULSES) {
		dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_PKT_SIZE, 0, video_size);
		writel(0, ctx->base + VIDEO_NULLPKT_SIZE);
		dsi_reg_up(ctx, VIDEO_PKT_CONFIG, VIDEO_LINE_CHUNK_NUM, 0);
	} else {
		/* non burst transmission */
		null_pkt_size = 0;

		/* bytes to be sent - first as one chunk */
		bytes_per_chunk = vm->hactive * bpp_x100 / 100 + pkt_header;

		/* hline total bytes from the DPI interface */
		total_bytes = (vm->hactive + vm->hfront_porch) *
				ratio_x1000 / dsi->slave->lanes / 1000;

		/* check if the pixels actually fit on the DSI link */
		if (total_bytes < bytes_per_chunk) {
			drm_err(dsi->drm, "current resolution can not be set\n");
			return -EINVAL;
		}

		chunk_overhead = total_bytes - bytes_per_chunk;

		/* overhead higher than 1 -> enable multi packets */
		if (chunk_overhead > 1) {
			/* multi packets */
			for (video_size = video_size_step;
			     video_size < vm->hactive;
			     video_size += video_size_step) {
				if (vm->hactive * 1000 / video_size % 1000)
					continue;

				chunks = vm->hactive / video_size;
				bytes_per_chunk = bpp_x100 * video_size / 100
						  + pkt_header;
				if (total_bytes >= (bytes_per_chunk * chunks)) {
					bytes_left = total_bytes -
						     bytes_per_chunk * chunks;
					break;
				}
			}

			/* prevent overflow (unsigned - unsigned) */
			if (bytes_left > (pkt_header * chunks)) {
				null_pkt_size = (bytes_left -
						pkt_header * chunks) / chunks;
				/* avoid register overflow */
				if (null_pkt_size > 1023)
					null_pkt_size = 1023;
			}
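
			/*
			 * At this point the active line has been split into
			 * "chunks" packets of video_size pixels each; the
			 * byte time left on the line beyond the payloads and
			 * their 6-byte packet headers is filled with a null
			 * packet of null_pkt_size bytes per chunk, clamped to
			 * fit the register field.
			 */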
		} else {
			/* single packet */
			chunks = 1;

			/* must be a multiple of 4 except 18 loosely */
			for (video_size = vm->hactive;
			    (video_size % video_size_step) != 0;
			     video_size++)
				;
		}

		dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_PKT_SIZE, 0, video_size);
		writel(null_pkt_size, ctx->base + VIDEO_NULLPKT_SIZE);
		dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_LINE_CHUNK_NUM, 16, chunks);
	}

	writel(ctx->int0_mask, ctx->base + MASK_PROTOCOL_INT);
	writel(ctx->int1_mask, ctx->base + MASK_INTERNAL_INT);
	writel(1, ctx->base + SOFT_RESET);

	return 0;
}

static void sprd_dsi_edpi_video(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	const u32 fifo_depth = 1096;
	const u32 word_length = 4;
	u32 hactive = ctx->vm.hactive;
	u32 bpp_x100;
	u32 max_fifo_len;
	u8 coding;

	coding = fmt_to_coding(dsi->slave->format);
	bpp_x100 = calc_bytes_per_pixel_x100(coding);
	max_fifo_len = word_length * fifo_depth * 100 / bpp_x100;

	writel(0, ctx->base + SOFT_RESET);
	dsi_reg_wr(ctx, DPI_VIDEO_FORMAT, DPI_VIDEO_MODE_FORMAT, 0, coding);
	dsi_reg_wr(ctx, CMD_MODE_CFG, TEAR_FX_EN, 0, ctx->te_ack_en);

	if (max_fifo_len > hactive)
		writel(hactive, ctx->base + DCS_WM_PKT_SIZE);
	else
		writel(max_fifo_len, ctx->base + DCS_WM_PKT_SIZE);

	writel(ctx->int0_mask, ctx->base + MASK_PROTOCOL_INT);
	writel(ctx->int1_mask, ctx->base + MASK_INTERNAL_INT);
	writel(1, ctx->base + SOFT_RESET);
}

/*
 * Send a packet on the generic interface.
 *
 * This function busy-waits for the transmit buffers to clear; the delay is
 * bounded by (param_length / 4) x DSIH_FIFO_ACTIVE_WAIT x (register access
 * time).
 *
 * Note: because the controller restricts what may be sent this way, this
 * function cannot be used to send Null and Blanking packets.
 */
*ctx
, u8 vc
, u8 type
,
620 const u8
*param
, u16 len
)
622 struct sprd_dsi
*dsi
= container_of(ctx
, struct sprd_dsi
, ctx
);
623 u8 wc_lsbyte
, wc_msbyte
;
630 /* 1st: for long packet, must config payload first */
631 ret
= dsi_wait_tx_payload_fifo_empty(ctx
);
633 drm_err(dsi
->drm
, "tx payload fifo is not empty\n");
638 for (i
= 0, j
= 0; i
< len
; i
+= j
) {
640 for (j
= 0; (j
< 4) && ((j
+ i
) < (len
)); j
++)
641 payload
|= param
[i
+ j
] << (j
* 8);
643 writel(payload
, ctx
->base
+ GEN_PLD_DATA
);
645 wc_lsbyte
= len
& 0xff;
646 wc_msbyte
= len
>> 8;
648 wc_lsbyte
= (len
> 0) ? param
[0] : 0;
649 wc_msbyte
= (len
> 1) ? param
[1] : 0;
652 /* 2nd: then set packet header */
653 ret
= dsi_wait_tx_cmd_fifo_empty(ctx
);
655 drm_err(dsi
->drm
, "tx cmd fifo is not empty\n");
659 writel(type
| (vc
<< 6) | (wc_lsbyte
<< 8) | (wc_msbyte
<< 16),
660 ctx
->base
+ GEN_HDR
);
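
/*
 * The header written to GEN_HDR above follows the generic packet layout:
 * bits [5:0] carry the DSI data type, bits [7:6] the virtual channel and
 * bits [23:8] the word count (or, for short packets with len <= 2, the two
 * parameter bytes).  For instance, a 5-byte DCS long write on virtual
 * channel 0 would be encoded as 0x39 | (0 << 6) | (5 << 8).
 */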

/*
 * Send a READ packet to the peripheral using the generic interface; this
 * forces command mode and stops video mode (because of the BTA).
 *
 * This function busy-waits for the buffers to clear; the delay is bounded
 * by 2 x DSIH_FIFO_ACTIVE_WAIT (waiting for the command buffer, then
 * waiting for the response).
 *
 * @note this function will enable BTA
 */
static int sprd_dsi_rd_pkt(struct dsi_context *ctx, u8 vc, u8 type,
			   u8 msb_byte, u8 lsb_byte,
			   u8 *buffer, u8 bytes_to_read)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	int i, ret;
	int count = 0;
	u32 temp;

	if (vc > 3)
		return -EINVAL;

	/* 1st: send read command to peripheral */
	ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_CMD_FIFO_EMPTY, 5);
	if (!ret)
		return -EIO;

	writel(type | (vc << 6) | (lsb_byte << 8) | (msb_byte << 16),
	       ctx->base + GEN_HDR);

	/* 2nd: wait peripheral response completed */
	ret = dsi_wait_rd_resp_completed(ctx);
	if (ret) {
		drm_err(dsi->drm, "wait read response time out\n");
		return ret;
	}

	/* 3rd: get data from rx payload fifo */
	ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDATA_FIFO_EMPTY, 1);
	if (ret) {
		drm_err(dsi->drm, "rx payload fifo empty\n");
		return -EIO;
	}

	for (i = 0; i < 100; i++) {
		temp = readl(ctx->base + GEN_PLD_DATA);

		if (count < bytes_to_read)
			buffer[count++] = temp & 0xff;
		if (count < bytes_to_read)
			buffer[count++] = (temp >> 8) & 0xff;
		if (count < bytes_to_read)
			buffer[count++] = (temp >> 16) & 0xff;
		if (count < bytes_to_read)
			buffer[count++] = (temp >> 24) & 0xff;

		ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDATA_FIFO_EMPTY, 1);
		if (ret)
			return count;
	}

	return 0;
}
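
/*
 * Read data is drained from GEN_PLD_DATA as 32-bit words and unpacked
 * least-significant byte first, apparently matching the order in which the
 * controller stores the returned payload; the loop stops once the read-data
 * FIFO reports empty and returns the number of bytes copied.
 */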

static void sprd_dsi_set_work_mode(struct dsi_context *ctx, u8 mode)
{
	if (mode == DSI_MODE_CMD)
		writel(1, ctx->base + DSI_MODE_CFG);
	else
		writel(0, ctx->base + DSI_MODE_CFG);
}

static void sprd_dsi_state_reset(struct dsi_context *ctx)
{
	writel(0, ctx->base + SOFT_RESET);
	udelay(100);
	writel(1, ctx->base + SOFT_RESET);
}

static int sprd_dphy_init(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	int ret;

	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_CLK_EN, 0);

	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, 0);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, PHY_TESTCLR);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, 0);

	dphy_pll_config(ctx);
	dphy_timing_config(ctx);

	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, RF_PHY_SHUTDOWN);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, RF_PHY_RESET_N);
	writel(0x1C, ctx->base + PHY_MIN_STOP_TIME);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_CLK_EN, RF_PHY_CLK_EN);
	writel(dsi->slave->lanes - 1, ctx->base + PHY_LANE_NUM_CONFIG);

	ret = dphy_wait_pll_locked(ctx);
	if (ret) {
		drm_err(dsi->drm, "dphy initialization failed\n");
		return ret;
	}

	return 0;
}

static void sprd_dphy_fini(struct dsi_context *ctx)
{
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, RF_PHY_RESET_N);
}

static void sprd_dsi_encoder_mode_set(struct drm_encoder *encoder,
				      struct drm_display_mode *mode,
				      struct drm_display_mode *adj_mode)
{
	struct sprd_dsi *dsi = encoder_to_dsi(encoder);

	drm_display_mode_to_videomode(adj_mode, &dsi->ctx.vm);
}

static void sprd_dsi_encoder_enable(struct drm_encoder *encoder)
{
	struct sprd_dsi *dsi = encoder_to_dsi(encoder);
	struct sprd_dpu *dpu = to_sprd_crtc(encoder->crtc);
	struct dsi_context *ctx = &dsi->ctx;

	if (ctx->enabled) {
		drm_warn(dsi->drm, "dsi is already initialized\n");
		return;
	}

	sprd_dsi_init(ctx);

	if (ctx->work_mode == DSI_MODE_VIDEO)
		sprd_dsi_dpi_video(ctx);
	else
		sprd_dsi_edpi_video(ctx);

	sprd_dphy_init(ctx);

	sprd_dsi_set_work_mode(ctx, ctx->work_mode);
	sprd_dsi_state_reset(ctx);

	if (dsi->slave->mode_flags & MIPI_DSI_CLOCK_NON_CONTINUOUS) {
		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, AUTO_CLKLANE_CTRL_EN,
			   AUTO_CLKLANE_CTRL_EN);
	} else {
		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, RF_PHY_CLK_EN, RF_PHY_CLK_EN);
		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, PHY_CLKLANE_TX_REQ_HS,
			   PHY_CLKLANE_TX_REQ_HS);
		dphy_wait_pll_locked(ctx);
	}

	sprd_dpu_run(dpu);

	ctx->enabled = true;
}

static void sprd_dsi_encoder_disable(struct drm_encoder *encoder)
{
	struct sprd_dsi *dsi = encoder_to_dsi(encoder);
	struct sprd_dpu *dpu = to_sprd_crtc(encoder->crtc);
	struct dsi_context *ctx = &dsi->ctx;

	if (!ctx->enabled) {
		drm_warn(dsi->drm, "dsi isn't initialized\n");
		return;
	}

	sprd_dpu_stop(dpu);
	sprd_dphy_fini(ctx);
	sprd_dsi_fini(ctx);

	ctx->enabled = false;
}

static const struct drm_encoder_helper_funcs sprd_encoder_helper_funcs = {
	.mode_set = sprd_dsi_encoder_mode_set,
	.enable = sprd_dsi_encoder_enable,
	.disable = sprd_dsi_encoder_disable
};

static const struct drm_encoder_funcs sprd_encoder_funcs = {
	.destroy = drm_encoder_cleanup,
};

static int sprd_dsi_encoder_init(struct sprd_dsi *dsi,
				 struct device *dev)
{
	struct drm_encoder *encoder = &dsi->encoder;
	u32 crtc_mask;
	int ret;

	crtc_mask = drm_of_find_possible_crtcs(dsi->drm, dev->of_node);
	if (!crtc_mask) {
		drm_err(dsi->drm, "failed to find crtc mask\n");
		return -EINVAL;
	}

	drm_dbg(dsi->drm, "find possible crtcs: 0x%08x\n", crtc_mask);

	encoder->possible_crtcs = crtc_mask;
	ret = drm_encoder_init(dsi->drm, encoder, &sprd_encoder_funcs,
			       DRM_MODE_ENCODER_DSI, NULL);
	if (ret) {
		drm_err(dsi->drm, "failed to init dsi encoder\n");
		return ret;
	}

	drm_encoder_helper_add(encoder, &sprd_encoder_helper_funcs);

	return 0;
}

static int sprd_dsi_bridge_init(struct sprd_dsi *dsi,
				struct device *dev)
{
	int ret;

	dsi->panel_bridge = devm_drm_of_get_bridge(dev, dev->of_node, 1, 0);
	if (IS_ERR(dsi->panel_bridge))
		return PTR_ERR(dsi->panel_bridge);

	ret = drm_bridge_attach(&dsi->encoder, dsi->panel_bridge, NULL, 0);
	if (ret)
		return ret;

	return 0;
}

static int sprd_dsi_context_init(struct sprd_dsi *dsi,
				 struct device *dev)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct dsi_context *ctx = &dsi->ctx;
	struct resource *res;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res) {
		dev_err(dev, "failed to get I/O resource\n");
		return -EINVAL;
	}

	ctx->base = devm_ioremap(dev, res->start, resource_size(res));
	if (!ctx->base) {
		drm_err(dsi->drm, "failed to map dsi host registers\n");
		return -ENXIO;
	}

	ctx->regmap = devm_regmap_init(dev, &regmap_tst_io, dsi, &byte_config);
	if (IS_ERR(ctx->regmap)) {
		drm_err(dsi->drm, "dphy regmap init failed\n");
		return PTR_ERR(ctx->regmap);
	}

	ctx->data_hs2lp = 120;
	ctx->data_lp2hs = 500;
	ctx->clk_hs2lp = 4;
	ctx->clk_lp2hs = 15;
	ctx->max_rd_time = 6000;
	ctx->int0_mask = 0xffffffff;
	ctx->int1_mask = 0xffffffff;
	ctx->enabled = true;

	return 0;
}

static int sprd_dsi_bind(struct device *dev, struct device *master, void *data)
{
	struct drm_device *drm = data;
	struct sprd_dsi *dsi = dev_get_drvdata(dev);
	int ret;

	dsi->drm = drm;

	ret = sprd_dsi_encoder_init(dsi, dev);
	if (ret)
		return ret;

	ret = sprd_dsi_bridge_init(dsi, dev);
	if (ret)
		return ret;

	ret = sprd_dsi_context_init(dsi, dev);
	if (ret)
		return ret;

	return 0;
}

static void sprd_dsi_unbind(struct device *dev,
			    struct device *master, void *data)
{
	struct sprd_dsi *dsi = dev_get_drvdata(dev);

	drm_of_panel_bridge_remove(dev->of_node, 1, 0);

	drm_encoder_cleanup(&dsi->encoder);
}

static const struct component_ops dsi_component_ops = {
	.bind = sprd_dsi_bind,
	.unbind = sprd_dsi_unbind,
};

static int sprd_dsi_host_attach(struct mipi_dsi_host *host,
				struct mipi_dsi_device *slave)
{
	struct sprd_dsi *dsi = host_to_dsi(host);
	struct dsi_context *ctx = &dsi->ctx;

	dsi->slave = slave;

	if (slave->mode_flags & MIPI_DSI_MODE_VIDEO)
		ctx->work_mode = DSI_MODE_VIDEO;
	else
		ctx->work_mode = DSI_MODE_CMD;

	if (slave->mode_flags & MIPI_DSI_MODE_VIDEO_BURST)
		ctx->burst_mode = VIDEO_BURST_WITH_SYNC_PULSES;
	else if (slave->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
		ctx->burst_mode = VIDEO_NON_BURST_WITH_SYNC_PULSES;
	else
		ctx->burst_mode = VIDEO_NON_BURST_WITH_SYNC_EVENTS;

	return component_add(host->dev, &dsi_component_ops);
}

static int sprd_dsi_host_detach(struct mipi_dsi_host *host,
				struct mipi_dsi_device *slave)
{
	component_del(host->dev, &dsi_component_ops);

	return 0;
}

static ssize_t sprd_dsi_host_transfer(struct mipi_dsi_host *host,
				      const struct mipi_dsi_msg *msg)
{
	struct sprd_dsi *dsi = host_to_dsi(host);
	const u8 *tx_buf = msg->tx_buf;

	if (msg->rx_buf && msg->rx_len) {
		u8 lsb = (msg->tx_len > 0) ? tx_buf[0] : 0;
		u8 msb = (msg->tx_len > 1) ? tx_buf[1] : 0;

		return sprd_dsi_rd_pkt(&dsi->ctx, msg->channel, msg->type,
				       msb, lsb, msg->rx_buf, msg->rx_len);
	}

	if (msg->tx_buf && msg->tx_len)
		return sprd_dsi_wr_pkt(&dsi->ctx, msg->channel, msg->type,
				       tx_buf, msg->tx_len);

	return 0;
}
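
/*
 * A transfer with a non-empty rx_buf is treated as a read: the first two tx
 * bytes (if any) become the parameters of the read request and the reply is
 * collected through sprd_dsi_rd_pkt(); anything else with a tx payload is
 * sent as a write via sprd_dsi_wr_pkt().
 */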

static const struct mipi_dsi_host_ops sprd_dsi_host_ops = {
	.attach = sprd_dsi_host_attach,
	.detach = sprd_dsi_host_detach,
	.transfer = sprd_dsi_host_transfer,
};

static const struct of_device_id dsi_match_table[] = {
	{ .compatible = "sprd,sharkl3-dsi-host" },
	{ /* sentinel */ },
};

static int sprd_dsi_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct sprd_dsi *dsi;

	dsi = devm_kzalloc(dev, sizeof(*dsi), GFP_KERNEL);
	if (!dsi)
		return -ENOMEM;

	dev_set_drvdata(dev, dsi);

	dsi->host.ops = &sprd_dsi_host_ops;
	dsi->host.dev = dev;

	return mipi_dsi_host_register(&dsi->host);
}

static void sprd_dsi_remove(struct platform_device *pdev)
{
	struct sprd_dsi *dsi = dev_get_drvdata(&pdev->dev);

	mipi_dsi_host_unregister(&dsi->host);
}

struct platform_driver sprd_dsi_driver = {
	.probe = sprd_dsi_probe,
	.remove = sprd_dsi_remove,
	.driver = {
		.name = "sprd-dsi-drv",
		.of_match_table = dsi_match_table,
	},
};

MODULE_AUTHOR("Leon He <leon.he@unisoc.com>");
MODULE_AUTHOR("Kevin Tang <kevin.tang@unisoc.com>");
MODULE_DESCRIPTION("Unisoc MIPI DSI HOST Controller Driver");
MODULE_LICENSE("GPL v2");