/*
 * Copyright (c) 2015 MediaTek Inc.
 * Author: Leilk Liu <leilk.liu@mediatek.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <linux/clk.h>
#include <linux/device.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/ioport.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_gpio.h>
#include <linux/platform_device.h>
#include <linux/platform_data/spi-mt65xx.h>
#include <linux/pm_runtime.h>
#include <linux/spi/spi.h>

#define SPI_CFG0_REG                      0x0000
#define SPI_CFG1_REG                      0x0004
#define SPI_TX_SRC_REG                    0x0008
#define SPI_RX_DST_REG                    0x000c
#define SPI_TX_DATA_REG                   0x0010
#define SPI_RX_DATA_REG                   0x0014
#define SPI_CMD_REG                       0x0018
#define SPI_STATUS0_REG                   0x001c
#define SPI_PAD_SEL_REG                   0x0024
#define SPI_CFG2_REG                      0x0028

#define SPI_CFG0_SCK_HIGH_OFFSET          0
#define SPI_CFG0_SCK_LOW_OFFSET           8
#define SPI_CFG0_CS_HOLD_OFFSET           16
#define SPI_CFG0_CS_SETUP_OFFSET          24
#define SPI_ADJUST_CFG0_SCK_LOW_OFFSET    16
#define SPI_ADJUST_CFG0_CS_HOLD_OFFSET    0
#define SPI_ADJUST_CFG0_CS_SETUP_OFFSET   16

#define SPI_CFG1_CS_IDLE_OFFSET           0
#define SPI_CFG1_PACKET_LOOP_OFFSET       8
#define SPI_CFG1_PACKET_LENGTH_OFFSET     16
#define SPI_CFG1_GET_TICK_DLY_OFFSET      30

#define SPI_CFG1_CS_IDLE_MASK             0xff
#define SPI_CFG1_PACKET_LOOP_MASK         0xff00
#define SPI_CFG1_PACKET_LENGTH_MASK       0x3ff0000

#define SPI_CMD_ACT                       BIT(0)
#define SPI_CMD_RESUME                    BIT(1)
#define SPI_CMD_RST                       BIT(2)
#define SPI_CMD_PAUSE_EN                  BIT(4)
#define SPI_CMD_DEASSERT                  BIT(5)
#define SPI_CMD_SAMPLE_SEL                BIT(6)
#define SPI_CMD_CS_POL                    BIT(7)
#define SPI_CMD_CPHA                      BIT(8)
#define SPI_CMD_CPOL                      BIT(9)
#define SPI_CMD_RX_DMA                    BIT(10)
#define SPI_CMD_TX_DMA                    BIT(11)
#define SPI_CMD_TXMSBF                    BIT(12)
#define SPI_CMD_RXMSBF                    BIT(13)
#define SPI_CMD_RX_ENDIAN                 BIT(14)
#define SPI_CMD_TX_ENDIAN                 BIT(15)
#define SPI_CMD_FINISH_IE                 BIT(16)
#define SPI_CMD_PAUSE_IE                  BIT(17)

#define MT8173_SPI_MAX_PAD_SEL            3

#define MTK_SPI_PAUSE_INT_STATUS          0x2

#define MTK_SPI_IDLE                      0
#define MTK_SPI_PAUSED                    1

#define MTK_SPI_MAX_FIFO_SIZE             32U
#define MTK_SPI_PACKET_SIZE               1024

struct mtk_spi_compatible {
        bool need_pad_sel;
        /* Must explicitly send dummy Tx bytes to do Rx only transfer */
        bool must_tx;
        /* some IC design adjust cfg register to enhance time accuracy */
        bool enhance_timing;
};

struct mtk_spi {
        void __iomem *base;
        u32 state;
        int pad_num;
        u32 *pad_sel;
        struct clk *parent_clk, *sel_clk, *spi_clk;
        struct spi_transfer *cur_transfer;
        u32 xfer_len;
        struct scatterlist *tx_sgl, *rx_sgl;
        u32 tx_sgl_len, rx_sgl_len;
        const struct mtk_spi_compatible *dev_comp;
};

static const struct mtk_spi_compatible mtk_common_compat;

static const struct mtk_spi_compatible mt2712_compat = {
        .must_tx = true,
};

static const struct mtk_spi_compatible mt7622_compat = {
        .must_tx = true,
        .enhance_timing = true,
};

static const struct mtk_spi_compatible mt8173_compat = {
        .need_pad_sel = true,
        .must_tx = true,
};

/*
 * A piece of default chip info unless the platform
 * supplies it.
 */
static const struct mtk_chip_config mtk_default_chip_info = {
        .rx_mlsb = 1,
        .tx_mlsb = 1,
        .cs_pol = 0,
        .sample_sel = 0,
};

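/*
 * Illustration: instead of falling back to mtk_default_chip_info above, a
 * board can pass its own mtk_chip_config through controller_data. The
 * device and values below are hypothetical:
 *
 *	static struct mtk_chip_config example_chip_config = {
 *		.tx_mlsb = 1,
 *		.rx_mlsb = 1,
 *	};
 *
 *	static struct spi_board_info example_board_info = {
 *		.modalias = "spidev",
 *		.max_speed_hz = 1000000,
 *		.bus_num = 0,
 *		.chip_select = 0,
 *		.controller_data = &example_chip_config,
 *	};
 */
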
static const struct of_device_id mtk_spi_of_match[] = {
        { .compatible = "mediatek,mt2701-spi",
                .data = (void *)&mtk_common_compat,
        },
        { .compatible = "mediatek,mt2712-spi",
                .data = (void *)&mt2712_compat,
        },
        { .compatible = "mediatek,mt6589-spi",
                .data = (void *)&mtk_common_compat,
        },
        { .compatible = "mediatek,mt7622-spi",
                .data = (void *)&mt7622_compat,
        },
        { .compatible = "mediatek,mt8135-spi",
                .data = (void *)&mtk_common_compat,
        },
        { .compatible = "mediatek,mt8173-spi",
                .data = (void *)&mt8173_compat,
        },
        {}
};
MODULE_DEVICE_TABLE(of, mtk_spi_of_match);

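/*
 * Illustrative device-tree node matched by the table above; the unit
 * address, interrupt and clock phandles are hypothetical, only the
 * property names match what this driver requests:
 *
 *	spi@1100a000 {
 *		compatible = "mediatek,mt8173-spi";
 *		reg = <0 0x1100a000 0 0x1000>;
 *		interrupts = <GIC_SPI 110 IRQ_TYPE_LEVEL_LOW>;
 *		clocks = <&clk_parent>, <&clk_sel>, <&clk_spi>;
 *		clock-names = "parent-clk", "sel-clk", "spi-clk";
 *		mediatek,pad-select = <0>;
 *	};
 */
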
static void mtk_spi_reset(struct mtk_spi *mdata)
{
        u32 reg_val;

        /* set the software reset bit in SPI_CMD_REG. */
        reg_val = readl(mdata->base + SPI_CMD_REG);
        reg_val |= SPI_CMD_RST;
        writel(reg_val, mdata->base + SPI_CMD_REG);

        reg_val = readl(mdata->base + SPI_CMD_REG);
        reg_val &= ~SPI_CMD_RST;
        writel(reg_val, mdata->base + SPI_CMD_REG);
}

static int mtk_spi_prepare_message(struct spi_master *master,
                                   struct spi_message *msg)
{
        u16 cpha, cpol;
        u32 reg_val;
        struct spi_device *spi = msg->spi;
        struct mtk_chip_config *chip_config = spi->controller_data;
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        cpha = spi->mode & SPI_CPHA ? 1 : 0;
        cpol = spi->mode & SPI_CPOL ? 1 : 0;

        reg_val = readl(mdata->base + SPI_CMD_REG);
        if (cpha)
                reg_val |= SPI_CMD_CPHA;
        else
                reg_val &= ~SPI_CMD_CPHA;
        if (cpol)
                reg_val |= SPI_CMD_CPOL;
        else
                reg_val &= ~SPI_CMD_CPOL;

        /* set the mlsbx and mlsbtx */
        if (chip_config->tx_mlsb)
                reg_val |= SPI_CMD_TXMSBF;
        else
                reg_val &= ~SPI_CMD_TXMSBF;
        if (chip_config->rx_mlsb)
                reg_val |= SPI_CMD_RXMSBF;
        else
                reg_val &= ~SPI_CMD_RXMSBF;

        /* set the tx/rx endian */
#ifdef __LITTLE_ENDIAN
        reg_val &= ~SPI_CMD_TX_ENDIAN;
        reg_val &= ~SPI_CMD_RX_ENDIAN;
#else
        reg_val |= SPI_CMD_TX_ENDIAN;
        reg_val |= SPI_CMD_RX_ENDIAN;
#endif

        if (mdata->dev_comp->enhance_timing) {
                if (chip_config->cs_pol)
                        reg_val |= SPI_CMD_CS_POL;
                else
                        reg_val &= ~SPI_CMD_CS_POL;
                if (chip_config->sample_sel)
                        reg_val |= SPI_CMD_SAMPLE_SEL;
                else
                        reg_val &= ~SPI_CMD_SAMPLE_SEL;
        }

        /* set finish and pause interrupt always enable */
        reg_val |= SPI_CMD_FINISH_IE | SPI_CMD_PAUSE_IE;

        /* disable dma mode */
        reg_val &= ~(SPI_CMD_TX_DMA | SPI_CMD_RX_DMA);

        /* disable deassert mode */
        reg_val &= ~SPI_CMD_DEASSERT;

        writel(reg_val, mdata->base + SPI_CMD_REG);

        /* pad select */
        if (mdata->dev_comp->need_pad_sel)
                writel(mdata->pad_sel[spi->chip_select],
                       mdata->base + SPI_PAD_SEL_REG);

        return 0;
}

static void mtk_spi_set_cs(struct spi_device *spi, bool enable)
{
        u32 reg_val;
        struct mtk_spi *mdata = spi_master_get_devdata(spi->master);

        reg_val = readl(mdata->base + SPI_CMD_REG);
        if (!enable) {
                reg_val |= SPI_CMD_PAUSE_EN;
                writel(reg_val, mdata->base + SPI_CMD_REG);
        } else {
                reg_val &= ~SPI_CMD_PAUSE_EN;
                writel(reg_val, mdata->base + SPI_CMD_REG);
                mdata->state = MTK_SPI_IDLE;
                mtk_spi_reset(mdata);
        }
}

static void mtk_spi_prepare_transfer(struct spi_master *master,
                                     struct spi_transfer *xfer)
{
        u32 spi_clk_hz, div, sck_time, cs_time, reg_val = 0;
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        spi_clk_hz = clk_get_rate(mdata->spi_clk);
        if (xfer->speed_hz < spi_clk_hz / 2)
                div = DIV_ROUND_UP(spi_clk_hz, xfer->speed_hz);
        else
                div = 1;

        sck_time = (div + 1) / 2;
        cs_time = sck_time * 2;

        if (mdata->dev_comp->enhance_timing) {
                reg_val |= (((sck_time - 1) & 0xffff)
                            << SPI_CFG0_SCK_HIGH_OFFSET);
                reg_val |= (((sck_time - 1) & 0xffff)
                            << SPI_ADJUST_CFG0_SCK_LOW_OFFSET);
                writel(reg_val, mdata->base + SPI_CFG2_REG);
                reg_val |= (((cs_time - 1) & 0xffff)
                            << SPI_ADJUST_CFG0_CS_HOLD_OFFSET);
                reg_val |= (((cs_time - 1) & 0xffff)
                            << SPI_ADJUST_CFG0_CS_SETUP_OFFSET);
                writel(reg_val, mdata->base + SPI_CFG0_REG);
        } else {
                reg_val |= (((sck_time - 1) & 0xff)
                            << SPI_CFG0_SCK_HIGH_OFFSET);
                reg_val |= (((sck_time - 1) & 0xff) << SPI_CFG0_SCK_LOW_OFFSET);
                reg_val |= (((cs_time - 1) & 0xff) << SPI_CFG0_CS_HOLD_OFFSET);
                reg_val |= (((cs_time - 1) & 0xff) << SPI_CFG0_CS_SETUP_OFFSET);
                writel(reg_val, mdata->base + SPI_CFG0_REG);
        }

        reg_val = readl(mdata->base + SPI_CFG1_REG);
        reg_val &= ~SPI_CFG1_CS_IDLE_MASK;
        reg_val |= (((cs_time - 1) & 0xff) << SPI_CFG1_CS_IDLE_OFFSET);
        writel(reg_val, mdata->base + SPI_CFG1_REG);
}

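/*
 * Worked example of the timing math above, assuming a hypothetical
 * 100 MHz spi_clk and a 10 MHz transfer:
 *	div      = DIV_ROUND_UP(100000000, 10000000) = 10
 *	sck_time = (10 + 1) / 2 = 5   -> (sck_time - 1) ticks high/low
 *	cs_time  = 5 * 2 = 10         -> (cs_time - 1) ticks hold/setup/idle
 * Requests at or above spi_clk / 2 take the div = 1 branch instead.
 */
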
static void mtk_spi_setup_packet(struct spi_master *master)
{
        u32 packet_size, packet_loop, reg_val;
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        packet_size = min_t(u32, mdata->xfer_len, MTK_SPI_PACKET_SIZE);
        packet_loop = mdata->xfer_len / packet_size;

        reg_val = readl(mdata->base + SPI_CFG1_REG);
        reg_val &= ~(SPI_CFG1_PACKET_LENGTH_MASK | SPI_CFG1_PACKET_LOOP_MASK);
        reg_val |= (packet_size - 1) << SPI_CFG1_PACKET_LENGTH_OFFSET;
        reg_val |= (packet_loop - 1) << SPI_CFG1_PACKET_LOOP_OFFSET;
        writel(reg_val, mdata->base + SPI_CFG1_REG);
}

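/*
 * Example: with mdata->xfer_len = 3072, packet_size is capped at
 * MTK_SPI_PACKET_SIZE (1024) and packet_loop = 3072 / 1024 = 3, i.e. the
 * controller runs three 1024-byte packets back to back.
 */
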
static void mtk_spi_enable_transfer(struct spi_master *master)
{
        u32 cmd;
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        cmd = readl(mdata->base + SPI_CMD_REG);
        if (mdata->state == MTK_SPI_IDLE)
                cmd |= SPI_CMD_ACT;
        else
                cmd |= SPI_CMD_RESUME;
        writel(cmd, mdata->base + SPI_CMD_REG);
}

static int mtk_spi_get_mult_delta(u32 xfer_len)
{
        u32 mult_delta;

        if (xfer_len > MTK_SPI_PACKET_SIZE)
                mult_delta = xfer_len % MTK_SPI_PACKET_SIZE;
        else
                mult_delta = 0;

        return mult_delta;
}

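/*
 * Example: for a 1536-byte scatterlist entry, mult_delta = 1536 % 1024 =
 * 512, so the caller programs a 1024-byte leg now and carries the
 * remaining 512 bytes over to the next round.
 */
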
static void mtk_spi_update_mdata_len(struct spi_master *master)
{
        int mult_delta;
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        if (mdata->tx_sgl_len && mdata->rx_sgl_len) {
                if (mdata->tx_sgl_len > mdata->rx_sgl_len) {
                        mult_delta = mtk_spi_get_mult_delta(mdata->rx_sgl_len);
                        mdata->xfer_len = mdata->rx_sgl_len - mult_delta;
                        mdata->rx_sgl_len = mult_delta;
                        mdata->tx_sgl_len -= mdata->xfer_len;
                } else {
                        mult_delta = mtk_spi_get_mult_delta(mdata->tx_sgl_len);
                        mdata->xfer_len = mdata->tx_sgl_len - mult_delta;
                        mdata->tx_sgl_len = mult_delta;
                        mdata->rx_sgl_len -= mdata->xfer_len;
                }
        } else if (mdata->tx_sgl_len) {
                mult_delta = mtk_spi_get_mult_delta(mdata->tx_sgl_len);
                mdata->xfer_len = mdata->tx_sgl_len - mult_delta;
                mdata->tx_sgl_len = mult_delta;
        } else if (mdata->rx_sgl_len) {
                mult_delta = mtk_spi_get_mult_delta(mdata->rx_sgl_len);
                mdata->xfer_len = mdata->rx_sgl_len - mult_delta;
                mdata->rx_sgl_len = mult_delta;
        }
}

static void mtk_spi_setup_dma_addr(struct spi_master *master,
                                   struct spi_transfer *xfer)
{
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        if (mdata->tx_sgl)
                writel(xfer->tx_dma, mdata->base + SPI_TX_SRC_REG);
        if (mdata->rx_sgl)
                writel(xfer->rx_dma, mdata->base + SPI_RX_DST_REG);
}

static int mtk_spi_fifo_transfer(struct spi_master *master,
                                 struct spi_device *spi,
                                 struct spi_transfer *xfer)
{
        int cnt, remainder;
        u32 reg_val;
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        mdata->cur_transfer = xfer;
        mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, xfer->len);
        mtk_spi_prepare_transfer(master, xfer);
        mtk_spi_setup_packet(master);

        /* only the current FIFO chunk (at most MTK_SPI_MAX_FIFO_SIZE bytes) */
        cnt = mdata->xfer_len / 4;
        iowrite32_rep(mdata->base + SPI_TX_DATA_REG, xfer->tx_buf, cnt);

        remainder = mdata->xfer_len % 4;
        if (remainder > 0) {
                reg_val = 0;
                memcpy(&reg_val, xfer->tx_buf + (cnt * 4), remainder);
                writel(reg_val, mdata->base + SPI_TX_DATA_REG);
        }

        mtk_spi_enable_transfer(master);

        return 1;
}

static int mtk_spi_dma_transfer(struct spi_master *master,
                                struct spi_device *spi,
                                struct spi_transfer *xfer)
{
        int cmd;
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        mdata->tx_sgl = NULL;
        mdata->rx_sgl = NULL;
        mdata->tx_sgl_len = 0;
        mdata->rx_sgl_len = 0;
        mdata->cur_transfer = xfer;

        mtk_spi_prepare_transfer(master, xfer);

        cmd = readl(mdata->base + SPI_CMD_REG);
        if (xfer->tx_buf)
                cmd |= SPI_CMD_TX_DMA;
        if (xfer->rx_buf)
                cmd |= SPI_CMD_RX_DMA;
        writel(cmd, mdata->base + SPI_CMD_REG);

        if (xfer->tx_buf)
                mdata->tx_sgl = xfer->tx_sg.sgl;
        if (xfer->rx_buf)
                mdata->rx_sgl = xfer->rx_sg.sgl;

        if (mdata->tx_sgl) {
                xfer->tx_dma = sg_dma_address(mdata->tx_sgl);
                mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl);
        }
        if (mdata->rx_sgl) {
                xfer->rx_dma = sg_dma_address(mdata->rx_sgl);
                mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl);
        }

        mtk_spi_update_mdata_len(master);
        mtk_spi_setup_packet(master);
        mtk_spi_setup_dma_addr(master, xfer);
        mtk_spi_enable_transfer(master);

        return 1;
}

static int mtk_spi_transfer_one(struct spi_master *master,
                                struct spi_device *spi,
                                struct spi_transfer *xfer)
{
        if (master->can_dma(master, spi, xfer))
                return mtk_spi_dma_transfer(master, spi, xfer);

        return mtk_spi_fifo_transfer(master, spi, xfer);
}

static bool mtk_spi_can_dma(struct spi_master *master,
                            struct spi_device *spi,
                            struct spi_transfer *xfer)
{
        /* Buffers for DMA transactions must be 4-byte aligned */
        return (xfer->len > MTK_SPI_MAX_FIFO_SIZE &&
                (unsigned long)xfer->tx_buf % 4 == 0 &&
                (unsigned long)xfer->rx_buf % 4 == 0);
}

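/*
 * Example: a 16-byte transfer always takes the FIFO path, while a
 * 256-byte transfer with 4-byte aligned buffers satisfies the check above
 * and is serviced by mtk_spi_dma_transfer(). A hypothetical client read:
 *
 *	u8 rx[256] __aligned(4);
 *	struct spi_transfer t = {
 *		.rx_buf = rx,
 *		.len = sizeof(rx),
 *	};
 *	int ret = spi_sync_transfer(spi, &t, 1);
 */
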
static int mtk_spi_setup(struct spi_device *spi)
{
        struct mtk_spi *mdata = spi_master_get_devdata(spi->master);

        if (!spi->controller_data)
                spi->controller_data = (void *)&mtk_default_chip_info;

        if (mdata->dev_comp->need_pad_sel && gpio_is_valid(spi->cs_gpio))
                gpio_direction_output(spi->cs_gpio, !(spi->mode & SPI_CS_HIGH));

        return 0;
}

static irqreturn_t mtk_spi_interrupt(int irq, void *dev_id)
{
        u32 cmd, reg_val, cnt, remainder;
        struct spi_master *master = dev_id;
        struct mtk_spi *mdata = spi_master_get_devdata(master);
        struct spi_transfer *trans = mdata->cur_transfer;

        reg_val = readl(mdata->base + SPI_STATUS0_REG);
        if (reg_val & MTK_SPI_PAUSE_INT_STATUS)
                mdata->state = MTK_SPI_PAUSED;
        else
                mdata->state = MTK_SPI_IDLE;

        if (!master->can_dma(master, master->cur_msg->spi, trans)) {
                if (trans->rx_buf) {
                        cnt = mdata->xfer_len / 4;
                        ioread32_rep(mdata->base + SPI_RX_DATA_REG,
                                     trans->rx_buf, cnt);
                        remainder = mdata->xfer_len % 4;
                        if (remainder > 0) {
                                reg_val = readl(mdata->base + SPI_RX_DATA_REG);
                                memcpy(trans->rx_buf + (cnt * 4),
                                       &reg_val, remainder);
                        }
                }

                trans->len -= mdata->xfer_len;
                if (!trans->len) {
                        spi_finalize_current_transfer(master);
                        return IRQ_HANDLED;
                }

                if (trans->tx_buf)
                        trans->tx_buf += mdata->xfer_len;
                if (trans->rx_buf)
                        trans->rx_buf += mdata->xfer_len;

                mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, trans->len);
                mtk_spi_setup_packet(master);

                /* refill at most one FIFO chunk (mdata->xfer_len bytes) */
                cnt = mdata->xfer_len / 4;
                iowrite32_rep(mdata->base + SPI_TX_DATA_REG,
                              trans->tx_buf, cnt);

                remainder = mdata->xfer_len % 4;
                if (remainder > 0) {
                        reg_val = 0;
                        memcpy(&reg_val, trans->tx_buf + (cnt * 4), remainder);
                        writel(reg_val, mdata->base + SPI_TX_DATA_REG);
                }

                mtk_spi_enable_transfer(master);

                return IRQ_HANDLED;
        }

        if (mdata->tx_sgl)
                trans->tx_dma += mdata->xfer_len;
        if (mdata->rx_sgl)
                trans->rx_dma += mdata->xfer_len;

        if (mdata->tx_sgl && (mdata->tx_sgl_len == 0)) {
                mdata->tx_sgl = sg_next(mdata->tx_sgl);
                if (mdata->tx_sgl) {
                        trans->tx_dma = sg_dma_address(mdata->tx_sgl);
                        mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl);
                }
        }
        if (mdata->rx_sgl && (mdata->rx_sgl_len == 0)) {
                mdata->rx_sgl = sg_next(mdata->rx_sgl);
                if (mdata->rx_sgl) {
                        trans->rx_dma = sg_dma_address(mdata->rx_sgl);
                        mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl);
                }
        }

        if (!mdata->tx_sgl && !mdata->rx_sgl) {
                /* spi disable dma */
                cmd = readl(mdata->base + SPI_CMD_REG);
                cmd &= ~SPI_CMD_TX_DMA;
                cmd &= ~SPI_CMD_RX_DMA;
                writel(cmd, mdata->base + SPI_CMD_REG);

                spi_finalize_current_transfer(master);
                return IRQ_HANDLED;
        }

        mtk_spi_update_mdata_len(master);
        mtk_spi_setup_packet(master);
        mtk_spi_setup_dma_addr(master, trans);
        mtk_spi_enable_transfer(master);

        return IRQ_HANDLED;
}

static int mtk_spi_probe(struct platform_device *pdev)
{
        struct spi_master *master;
        struct mtk_spi *mdata;
        const struct of_device_id *of_id;
        struct resource *res;
        int i, irq, ret;

        master = spi_alloc_master(&pdev->dev, sizeof(*mdata));
        if (!master) {
                dev_err(&pdev->dev, "failed to alloc spi master\n");
                return -ENOMEM;
        }

        master->auto_runtime_pm = true;
        master->dev.of_node = pdev->dev.of_node;
        master->mode_bits = SPI_CPOL | SPI_CPHA;

        master->set_cs = mtk_spi_set_cs;
        master->prepare_message = mtk_spi_prepare_message;
        master->transfer_one = mtk_spi_transfer_one;
        master->can_dma = mtk_spi_can_dma;
        master->setup = mtk_spi_setup;

        of_id = of_match_node(mtk_spi_of_match, pdev->dev.of_node);
        if (!of_id) {
                dev_err(&pdev->dev, "failed to probe of_node\n");
                ret = -EINVAL;
                goto err_put_master;
        }

        mdata = spi_master_get_devdata(master);
        mdata->dev_comp = of_id->data;
        if (mdata->dev_comp->must_tx)
                master->flags = SPI_MASTER_MUST_TX;

        if (mdata->dev_comp->need_pad_sel) {
                mdata->pad_num = of_property_count_u32_elems(
                        pdev->dev.of_node,
                        "mediatek,pad-select");
                if (mdata->pad_num < 0) {
                        dev_err(&pdev->dev,
                                "No 'mediatek,pad-select' property\n");
                        ret = -EINVAL;
                        goto err_put_master;
                }

                mdata->pad_sel = devm_kmalloc_array(&pdev->dev, mdata->pad_num,
                                                    sizeof(u32), GFP_KERNEL);
                if (!mdata->pad_sel) {
                        ret = -ENOMEM;
                        goto err_put_master;
                }

                for (i = 0; i < mdata->pad_num; i++) {
                        of_property_read_u32_index(pdev->dev.of_node,
                                                   "mediatek,pad-select",
                                                   i, &mdata->pad_sel[i]);
                        if (mdata->pad_sel[i] > MT8173_SPI_MAX_PAD_SEL) {
                                dev_err(&pdev->dev, "wrong pad-sel[%d]: %u\n",
                                        i, mdata->pad_sel[i]);
                                ret = -EINVAL;
                                goto err_put_master;
                        }
                }
        }

        platform_set_drvdata(pdev, master);

        res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        if (!res) {
                ret = -ENODEV;
                dev_err(&pdev->dev, "failed to determine base address\n");
                goto err_put_master;
        }

        mdata->base = devm_ioremap_resource(&pdev->dev, res);
        if (IS_ERR(mdata->base)) {
                ret = PTR_ERR(mdata->base);
                goto err_put_master;
        }

        irq = platform_get_irq(pdev, 0);
        if (irq < 0) {
                dev_err(&pdev->dev, "failed to get irq (%d)\n", irq);
                ret = irq;
                goto err_put_master;
        }

        if (!pdev->dev.dma_mask)
                pdev->dev.dma_mask = &pdev->dev.coherent_dma_mask;

        ret = devm_request_irq(&pdev->dev, irq, mtk_spi_interrupt,
                               IRQF_TRIGGER_NONE, dev_name(&pdev->dev), master);
        if (ret) {
                dev_err(&pdev->dev, "failed to register irq (%d)\n", ret);
                goto err_put_master;
        }

        mdata->parent_clk = devm_clk_get(&pdev->dev, "parent-clk");
        if (IS_ERR(mdata->parent_clk)) {
                ret = PTR_ERR(mdata->parent_clk);
                dev_err(&pdev->dev, "failed to get parent-clk: %d\n", ret);
                goto err_put_master;
        }

        mdata->sel_clk = devm_clk_get(&pdev->dev, "sel-clk");
        if (IS_ERR(mdata->sel_clk)) {
                ret = PTR_ERR(mdata->sel_clk);
                dev_err(&pdev->dev, "failed to get sel-clk: %d\n", ret);
                goto err_put_master;
        }

        mdata->spi_clk = devm_clk_get(&pdev->dev, "spi-clk");
        if (IS_ERR(mdata->spi_clk)) {
                ret = PTR_ERR(mdata->spi_clk);
                dev_err(&pdev->dev, "failed to get spi-clk: %d\n", ret);
                goto err_put_master;
        }

        ret = clk_prepare_enable(mdata->spi_clk);
        if (ret < 0) {
                dev_err(&pdev->dev, "failed to enable spi_clk (%d)\n", ret);
                goto err_put_master;
        }

        ret = clk_set_parent(mdata->sel_clk, mdata->parent_clk);
        if (ret < 0) {
                dev_err(&pdev->dev, "failed to clk_set_parent (%d)\n", ret);
                clk_disable_unprepare(mdata->spi_clk);
                goto err_put_master;
        }

        clk_disable_unprepare(mdata->spi_clk);

        pm_runtime_enable(&pdev->dev);

        ret = devm_spi_register_master(&pdev->dev, master);
        if (ret) {
                dev_err(&pdev->dev, "failed to register master (%d)\n", ret);
                goto err_disable_runtime_pm;
        }

        if (mdata->dev_comp->need_pad_sel) {
                if (mdata->pad_num != master->num_chipselect) {
                        dev_err(&pdev->dev,
                                "pad_num does not match num_chipselect(%d != %d)\n",
                                mdata->pad_num, master->num_chipselect);
                        ret = -EINVAL;
                        goto err_disable_runtime_pm;
                }

                if (!master->cs_gpios && master->num_chipselect > 1) {
                        dev_err(&pdev->dev,
                                "cs_gpios not specified and num_chipselect > 1\n");
                        ret = -EINVAL;
                        goto err_disable_runtime_pm;
                }

                if (master->cs_gpios) {
                        for (i = 0; i < master->num_chipselect; i++) {
                                ret = devm_gpio_request(&pdev->dev,
                                                        master->cs_gpios[i],
                                                        dev_name(&pdev->dev));
                                if (ret) {
                                        dev_err(&pdev->dev,
                                                "can't get CS GPIO %i\n", i);
                                        goto err_disable_runtime_pm;
                                }
                        }
                }
        }

        return 0;

err_disable_runtime_pm:
        pm_runtime_disable(&pdev->dev);
err_put_master:
        spi_master_put(master);

        return ret;
}

static int mtk_spi_remove(struct platform_device *pdev)
{
        struct spi_master *master = platform_get_drvdata(pdev);
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        pm_runtime_disable(&pdev->dev);

        mtk_spi_reset(mdata);

        return 0;
}

#ifdef CONFIG_PM_SLEEP
static int mtk_spi_suspend(struct device *dev)
{
        int ret;
        struct spi_master *master = dev_get_drvdata(dev);
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        ret = spi_master_suspend(master);
        if (ret)
                return ret;

        if (!pm_runtime_suspended(dev))
                clk_disable_unprepare(mdata->spi_clk);

        return ret;
}

static int mtk_spi_resume(struct device *dev)
{
        int ret;
        struct spi_master *master = dev_get_drvdata(dev);
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        if (!pm_runtime_suspended(dev)) {
                ret = clk_prepare_enable(mdata->spi_clk);
                if (ret < 0) {
                        dev_err(dev, "failed to enable spi_clk (%d)\n", ret);
                        return ret;
                }
        }

        ret = spi_master_resume(master);
        if (ret < 0)
                clk_disable_unprepare(mdata->spi_clk);

        return ret;
}
#endif /* CONFIG_PM_SLEEP */

#ifdef CONFIG_PM
static int mtk_spi_runtime_suspend(struct device *dev)
{
        struct spi_master *master = dev_get_drvdata(dev);
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        clk_disable_unprepare(mdata->spi_clk);

        return 0;
}

static int mtk_spi_runtime_resume(struct device *dev)
{
        int ret;
        struct spi_master *master = dev_get_drvdata(dev);
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        ret = clk_prepare_enable(mdata->spi_clk);
        if (ret < 0) {
                dev_err(dev, "failed to enable spi_clk (%d)\n", ret);
                return ret;
        }

        return 0;
}
#endif /* CONFIG_PM */

static const struct dev_pm_ops mtk_spi_pm = {
        SET_SYSTEM_SLEEP_PM_OPS(mtk_spi_suspend, mtk_spi_resume)
        SET_RUNTIME_PM_OPS(mtk_spi_runtime_suspend,
                           mtk_spi_runtime_resume, NULL)
};

static struct platform_driver mtk_spi_driver = {
        .driver = {
                .name = "mtk-spi",
                .pm = &mtk_spi_pm,
                .of_match_table = mtk_spi_of_match,
        },
        .probe = mtk_spi_probe,
        .remove = mtk_spi_remove,
};

module_platform_driver(mtk_spi_driver);

MODULE_DESCRIPTION("MTK SPI Controller driver");
MODULE_AUTHOR("Leilk Liu <leilk.liu@mediatek.com>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:mtk-spi");