gpio: rcar: Fix runtime PM imbalance on error
[linux/fpc-iii.git] / drivers / spi / spi-sprd.c
blob6678f1cbc566078a7681a4aa047bc01a80cc4456
1 // SPDX-License-Identifier: GPL-2.0
2 // Copyright (C) 2018 Spreadtrum Communications Inc.
4 #include <linux/clk.h>
5 #include <linux/dmaengine.h>
6 #include <linux/dma-mapping.h>
7 #include <linux/dma/sprd-dma.h>
8 #include <linux/interrupt.h>
9 #include <linux/io.h>
10 #include <linux/iopoll.h>
11 #include <linux/kernel.h>
12 #include <linux/module.h>
13 #include <linux/of.h>
14 #include <linux/of_device.h>
15 #include <linux/of_dma.h>
16 #include <linux/platform_device.h>
17 #include <linux/pm_runtime.h>
18 #include <linux/spi/spi.h>
/* Register offsets */
#define SPRD_SPI_TXD			0x0
#define SPRD_SPI_CLKD			0x4
#define SPRD_SPI_CTL0			0x8
#define SPRD_SPI_CTL1			0xc
#define SPRD_SPI_CTL2			0x10
#define SPRD_SPI_CTL3			0x14
#define SPRD_SPI_CTL4			0x18
#define SPRD_SPI_CTL5			0x1c
#define SPRD_SPI_INT_EN			0x20
#define SPRD_SPI_INT_CLR		0x24
#define SPRD_SPI_INT_RAW_STS		0x28
#define SPRD_SPI_INT_MASK_STS		0x2c
#define SPRD_SPI_STS1			0x30
#define SPRD_SPI_STS2			0x34
#define SPRD_SPI_DSP_WAIT		0x38
#define SPRD_SPI_STS3			0x3c
#define SPRD_SPI_CTL6			0x40
#define SPRD_SPI_STS4			0x44
#define SPRD_SPI_FIFO_RST		0x48
#define SPRD_SPI_CTL7			0x4c
#define SPRD_SPI_STS5			0x50
#define SPRD_SPI_CTL8			0x54
#define SPRD_SPI_CTL9			0x58
#define SPRD_SPI_CTL10			0x5c
#define SPRD_SPI_CTL11			0x60
#define SPRD_SPI_CTL12			0x64
#define SPRD_SPI_STS6			0x68
#define SPRD_SPI_STS7			0x6c
#define SPRD_SPI_STS8			0x70
#define SPRD_SPI_STS9			0x74

/* Bits & mask definition for register CTL0 */
#define SPRD_SPI_SCK_REV		BIT(13)
#define SPRD_SPI_NG_TX			BIT(1)
#define SPRD_SPI_NG_RX			BIT(0)
#define SPRD_SPI_CHNL_LEN_MASK		GENMASK(4, 0)
#define SPRD_SPI_CSN_MASK		GENMASK(11, 8)
#define SPRD_SPI_CS0_VALID		BIT(8)

/* Bits & mask definition for register SPI_INT_EN */
#define SPRD_SPI_TX_END_INT_EN		BIT(8)
#define SPRD_SPI_RX_END_INT_EN		BIT(9)

/* Bits & mask definition for register SPI_INT_RAW_STS */
#define SPRD_SPI_TX_END_RAW		BIT(8)
#define SPRD_SPI_RX_END_RAW		BIT(9)

/* Bits & mask definition for register SPI_INT_CLR */
#define SPRD_SPI_TX_END_CLR		BIT(8)
#define SPRD_SPI_RX_END_CLR		BIT(9)

/* Bits & mask definition for register INT_MASK_STS */
#define SPRD_SPI_MASK_RX_END		BIT(9)
#define SPRD_SPI_MASK_TX_END		BIT(8)

/* Bits & mask definition for register STS2 */
#define SPRD_SPI_TX_BUSY		BIT(8)

/* Bits & mask definition for register CTL1 */
#define SPRD_SPI_RX_MODE		BIT(12)
#define SPRD_SPI_TX_MODE		BIT(13)
#define SPRD_SPI_RTX_MD_MASK		GENMASK(13, 12)

/* Bits & mask definition for register CTL2 */
#define SPRD_SPI_DMA_EN			BIT(6)

/* Bits & mask definition for register CTL4 */
#define SPRD_SPI_START_RX		BIT(9)
#define SPRD_SPI_ONLY_RECV_MASK		GENMASK(8, 0)

/* Bits & mask definition for register SPI_INT_CLR */
#define SPRD_SPI_RX_END_INT_CLR		BIT(9)
#define SPRD_SPI_TX_END_INT_CLR		BIT(8)

/* Bits & mask definition for register SPI_INT_RAW */
#define SPRD_SPI_RX_END_IRQ		BIT(9)
#define SPRD_SPI_TX_END_IRQ		BIT(8)

/* Bits & mask definition for register CTL12 */
#define SPRD_SPI_SW_RX_REQ		BIT(0)
#define SPRD_SPI_SW_TX_REQ		BIT(1)

/* Bits & mask definition for register CTL7 */
#define SPRD_SPI_DATA_LINE2_EN		BIT(15)
#define SPRD_SPI_MODE_MASK		GENMASK(5, 3)
#define SPRD_SPI_MODE_OFFSET		3
#define SPRD_SPI_3WIRE_MODE		4
#define SPRD_SPI_4WIRE_MODE		0

/* Bits & mask definition for register CTL8 */
#define SPRD_SPI_TX_MAX_LEN_MASK	GENMASK(19, 0)
#define SPRD_SPI_TX_LEN_H_MASK		GENMASK(3, 0)
#define SPRD_SPI_TX_LEN_H_OFFSET	16

/* Bits & mask definition for register CTL9 */
#define SPRD_SPI_TX_LEN_L_MASK		GENMASK(15, 0)

/* Bits & mask definition for register CTL10 */
#define SPRD_SPI_RX_MAX_LEN_MASK	GENMASK(19, 0)
#define SPRD_SPI_RX_LEN_H_MASK		GENMASK(3, 0)
#define SPRD_SPI_RX_LEN_H_OFFSET	16

/* Bits & mask definition for register CTL11 */
#define SPRD_SPI_RX_LEN_L_MASK		GENMASK(15, 0)

/* Default & maximum word delay cycles */
#define SPRD_SPI_MIN_DELAY_CYCLE	14
#define SPRD_SPI_MAX_DELAY_CYCLE	130

#define SPRD_SPI_FIFO_SIZE		32
#define SPRD_SPI_CHIP_CS_NUM		0x4
#define SPRD_SPI_CHNL_LEN		2
#define SPRD_SPI_DEFAULT_SOURCE		26000000
#define SPRD_SPI_MAX_SPEED_HZ		48000000
#define SPRD_SPI_AUTOSUSPEND_DELAY	100
#define SPRD_SPI_DMA_STEP		8
/* Indices into the per-direction DMA channel array. */
enum sprd_spi_dma_channel {
	SPRD_SPI_RX,		/* receive channel */
	SPRD_SPI_TX,		/* transmit channel */
	SPRD_SPI_MAX,		/* number of channels */
};
143 struct sprd_spi_dma {
144 bool enable;
145 struct dma_chan *dma_chan[SPRD_SPI_MAX];
146 enum dma_slave_buswidth width;
147 u32 fragmens_len;
148 u32 rx_len;
151 struct sprd_spi {
152 void __iomem *base;
153 phys_addr_t phy_base;
154 struct device *dev;
155 struct clk *clk;
156 int irq;
157 u32 src_clk;
158 u32 hw_mode;
159 u32 trans_len;
160 u32 trans_mode;
161 u32 word_delay;
162 u32 hw_speed_hz;
163 u32 len;
164 int status;
165 struct sprd_spi_dma dma;
166 struct completion xfer_completion;
167 const void *tx_buf;
168 void *rx_buf;
169 int (*read_bufs)(struct sprd_spi *ss, u32 len);
170 int (*write_bufs)(struct sprd_spi *ss, u32 len);
173 static u32 sprd_spi_transfer_max_timeout(struct sprd_spi *ss,
174 struct spi_transfer *t)
177 * The time spent on transmission of the full FIFO data is the maximum
178 * SPI transmission time.
180 u32 size = t->bits_per_word * SPRD_SPI_FIFO_SIZE;
181 u32 bit_time_us = DIV_ROUND_UP(USEC_PER_SEC, ss->hw_speed_hz);
182 u32 total_time_us = size * bit_time_us;
184 * There is an interval between data and the data in our SPI hardware,
185 * so the total transmission time need add the interval time.
187 u32 interval_cycle = SPRD_SPI_FIFO_SIZE * ss->word_delay;
188 u32 interval_time_us = DIV_ROUND_UP(interval_cycle * USEC_PER_SEC,
189 ss->src_clk);
191 return total_time_us + interval_time_us;
194 static int sprd_spi_wait_for_tx_end(struct sprd_spi *ss, struct spi_transfer *t)
196 u32 val, us;
197 int ret;
199 us = sprd_spi_transfer_max_timeout(ss, t);
200 ret = readl_relaxed_poll_timeout(ss->base + SPRD_SPI_INT_RAW_STS, val,
201 val & SPRD_SPI_TX_END_IRQ, 0, us);
202 if (ret) {
203 dev_err(ss->dev, "SPI error, spi send timeout!\n");
204 return ret;
207 ret = readl_relaxed_poll_timeout(ss->base + SPRD_SPI_STS2, val,
208 !(val & SPRD_SPI_TX_BUSY), 0, us);
209 if (ret) {
210 dev_err(ss->dev, "SPI error, spi busy timeout!\n");
211 return ret;
214 writel_relaxed(SPRD_SPI_TX_END_INT_CLR, ss->base + SPRD_SPI_INT_CLR);
216 return 0;
219 static int sprd_spi_wait_for_rx_end(struct sprd_spi *ss, struct spi_transfer *t)
221 u32 val, us;
222 int ret;
224 us = sprd_spi_transfer_max_timeout(ss, t);
225 ret = readl_relaxed_poll_timeout(ss->base + SPRD_SPI_INT_RAW_STS, val,
226 val & SPRD_SPI_RX_END_IRQ, 0, us);
227 if (ret) {
228 dev_err(ss->dev, "SPI error, spi rx timeout!\n");
229 return ret;
232 writel_relaxed(SPRD_SPI_RX_END_INT_CLR, ss->base + SPRD_SPI_INT_CLR);
234 return 0;
237 static void sprd_spi_tx_req(struct sprd_spi *ss)
239 writel_relaxed(SPRD_SPI_SW_TX_REQ, ss->base + SPRD_SPI_CTL12);
242 static void sprd_spi_rx_req(struct sprd_spi *ss)
244 writel_relaxed(SPRD_SPI_SW_RX_REQ, ss->base + SPRD_SPI_CTL12);
247 static void sprd_spi_enter_idle(struct sprd_spi *ss)
249 u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL1);
251 val &= ~SPRD_SPI_RTX_MD_MASK;
252 writel_relaxed(val, ss->base + SPRD_SPI_CTL1);
255 static void sprd_spi_set_transfer_bits(struct sprd_spi *ss, u32 bits)
257 u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL0);
259 /* Set the valid bits for every transaction */
260 val &= ~(SPRD_SPI_CHNL_LEN_MASK << SPRD_SPI_CHNL_LEN);
261 val |= bits << SPRD_SPI_CHNL_LEN;
262 writel_relaxed(val, ss->base + SPRD_SPI_CTL0);
265 static void sprd_spi_set_tx_length(struct sprd_spi *ss, u32 length)
267 u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL8);
269 length &= SPRD_SPI_TX_MAX_LEN_MASK;
270 val &= ~SPRD_SPI_TX_LEN_H_MASK;
271 val |= length >> SPRD_SPI_TX_LEN_H_OFFSET;
272 writel_relaxed(val, ss->base + SPRD_SPI_CTL8);
274 val = length & SPRD_SPI_TX_LEN_L_MASK;
275 writel_relaxed(val, ss->base + SPRD_SPI_CTL9);
278 static void sprd_spi_set_rx_length(struct sprd_spi *ss, u32 length)
280 u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL10);
282 length &= SPRD_SPI_RX_MAX_LEN_MASK;
283 val &= ~SPRD_SPI_RX_LEN_H_MASK;
284 val |= length >> SPRD_SPI_RX_LEN_H_OFFSET;
285 writel_relaxed(val, ss->base + SPRD_SPI_CTL10);
287 val = length & SPRD_SPI_RX_LEN_L_MASK;
288 writel_relaxed(val, ss->base + SPRD_SPI_CTL11);
291 static void sprd_spi_chipselect(struct spi_device *sdev, bool cs)
293 struct spi_controller *sctlr = sdev->controller;
294 struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
295 u32 val;
297 val = readl_relaxed(ss->base + SPRD_SPI_CTL0);
298 /* The SPI controller will pull down CS pin if cs is 0 */
299 if (!cs) {
300 val &= ~SPRD_SPI_CS0_VALID;
301 writel_relaxed(val, ss->base + SPRD_SPI_CTL0);
302 } else {
303 val |= SPRD_SPI_CSN_MASK;
304 writel_relaxed(val, ss->base + SPRD_SPI_CTL0);
308 static int sprd_spi_write_only_receive(struct sprd_spi *ss, u32 len)
310 u32 val;
312 /* Clear the start receive bit and reset receive data number */
313 val = readl_relaxed(ss->base + SPRD_SPI_CTL4);
314 val &= ~(SPRD_SPI_START_RX | SPRD_SPI_ONLY_RECV_MASK);
315 writel_relaxed(val, ss->base + SPRD_SPI_CTL4);
317 /* Set the receive data length */
318 val = readl_relaxed(ss->base + SPRD_SPI_CTL4);
319 val |= len & SPRD_SPI_ONLY_RECV_MASK;
320 writel_relaxed(val, ss->base + SPRD_SPI_CTL4);
322 /* Trigger to receive data */
323 val = readl_relaxed(ss->base + SPRD_SPI_CTL4);
324 val |= SPRD_SPI_START_RX;
325 writel_relaxed(val, ss->base + SPRD_SPI_CTL4);
327 return len;
330 static int sprd_spi_write_bufs_u8(struct sprd_spi *ss, u32 len)
332 u8 *tx_p = (u8 *)ss->tx_buf;
333 int i;
335 for (i = 0; i < len; i++)
336 writeb_relaxed(tx_p[i], ss->base + SPRD_SPI_TXD);
338 ss->tx_buf += i;
339 return i;
342 static int sprd_spi_write_bufs_u16(struct sprd_spi *ss, u32 len)
344 u16 *tx_p = (u16 *)ss->tx_buf;
345 int i;
347 for (i = 0; i < len; i++)
348 writew_relaxed(tx_p[i], ss->base + SPRD_SPI_TXD);
350 ss->tx_buf += i << 1;
351 return i << 1;
354 static int sprd_spi_write_bufs_u32(struct sprd_spi *ss, u32 len)
356 u32 *tx_p = (u32 *)ss->tx_buf;
357 int i;
359 for (i = 0; i < len; i++)
360 writel_relaxed(tx_p[i], ss->base + SPRD_SPI_TXD);
362 ss->tx_buf += i << 2;
363 return i << 2;
366 static int sprd_spi_read_bufs_u8(struct sprd_spi *ss, u32 len)
368 u8 *rx_p = (u8 *)ss->rx_buf;
369 int i;
371 for (i = 0; i < len; i++)
372 rx_p[i] = readb_relaxed(ss->base + SPRD_SPI_TXD);
374 ss->rx_buf += i;
375 return i;
378 static int sprd_spi_read_bufs_u16(struct sprd_spi *ss, u32 len)
380 u16 *rx_p = (u16 *)ss->rx_buf;
381 int i;
383 for (i = 0; i < len; i++)
384 rx_p[i] = readw_relaxed(ss->base + SPRD_SPI_TXD);
386 ss->rx_buf += i << 1;
387 return i << 1;
390 static int sprd_spi_read_bufs_u32(struct sprd_spi *ss, u32 len)
392 u32 *rx_p = (u32 *)ss->rx_buf;
393 int i;
395 for (i = 0; i < len; i++)
396 rx_p[i] = readl_relaxed(ss->base + SPRD_SPI_TXD);
398 ss->rx_buf += i << 2;
399 return i << 2;
402 static int sprd_spi_txrx_bufs(struct spi_device *sdev, struct spi_transfer *t)
404 struct sprd_spi *ss = spi_controller_get_devdata(sdev->controller);
405 u32 trans_len = ss->trans_len, len;
406 int ret, write_size = 0, read_size = 0;
408 while (trans_len) {
409 len = trans_len > SPRD_SPI_FIFO_SIZE ? SPRD_SPI_FIFO_SIZE :
410 trans_len;
411 if (ss->trans_mode & SPRD_SPI_TX_MODE) {
412 sprd_spi_set_tx_length(ss, len);
413 write_size += ss->write_bufs(ss, len);
416 * For our 3 wires mode or dual TX line mode, we need
417 * to request the controller to transfer.
419 if (ss->hw_mode & SPI_3WIRE || ss->hw_mode & SPI_TX_DUAL)
420 sprd_spi_tx_req(ss);
422 ret = sprd_spi_wait_for_tx_end(ss, t);
423 } else {
424 sprd_spi_set_rx_length(ss, len);
427 * For our 3 wires mode or dual TX line mode, we need
428 * to request the controller to read.
430 if (ss->hw_mode & SPI_3WIRE || ss->hw_mode & SPI_TX_DUAL)
431 sprd_spi_rx_req(ss);
432 else
433 write_size += ss->write_bufs(ss, len);
435 ret = sprd_spi_wait_for_rx_end(ss, t);
438 if (ret)
439 goto complete;
441 if (ss->trans_mode & SPRD_SPI_RX_MODE)
442 read_size += ss->read_bufs(ss, len);
444 trans_len -= len;
447 if (ss->trans_mode & SPRD_SPI_TX_MODE)
448 ret = write_size;
449 else
450 ret = read_size;
451 complete:
452 sprd_spi_enter_idle(ss);
454 return ret;
457 static void sprd_spi_irq_enable(struct sprd_spi *ss)
459 u32 val;
461 /* Clear interrupt status before enabling interrupt. */
462 writel_relaxed(SPRD_SPI_TX_END_CLR | SPRD_SPI_RX_END_CLR,
463 ss->base + SPRD_SPI_INT_CLR);
464 /* Enable SPI interrupt only in DMA mode. */
465 val = readl_relaxed(ss->base + SPRD_SPI_INT_EN);
466 writel_relaxed(val | SPRD_SPI_TX_END_INT_EN |
467 SPRD_SPI_RX_END_INT_EN,
468 ss->base + SPRD_SPI_INT_EN);
471 static void sprd_spi_irq_disable(struct sprd_spi *ss)
473 writel_relaxed(0, ss->base + SPRD_SPI_INT_EN);
476 static void sprd_spi_dma_enable(struct sprd_spi *ss, bool enable)
478 u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL2);
480 if (enable)
481 val |= SPRD_SPI_DMA_EN;
482 else
483 val &= ~SPRD_SPI_DMA_EN;
485 writel_relaxed(val, ss->base + SPRD_SPI_CTL2);
488 static int sprd_spi_dma_submit(struct dma_chan *dma_chan,
489 struct dma_slave_config *c,
490 struct sg_table *sg,
491 enum dma_transfer_direction dir)
493 struct dma_async_tx_descriptor *desc;
494 dma_cookie_t cookie;
495 unsigned long flags;
496 int ret;
498 ret = dmaengine_slave_config(dma_chan, c);
499 if (ret < 0)
500 return ret;
502 flags = SPRD_DMA_FLAGS(SPRD_DMA_CHN_MODE_NONE, SPRD_DMA_NO_TRG,
503 SPRD_DMA_FRAG_REQ, SPRD_DMA_TRANS_INT);
504 desc = dmaengine_prep_slave_sg(dma_chan, sg->sgl, sg->nents, dir, flags);
505 if (!desc)
506 return -ENODEV;
508 cookie = dmaengine_submit(desc);
509 if (dma_submit_error(cookie))
510 return dma_submit_error(cookie);
512 dma_async_issue_pending(dma_chan);
514 return 0;
517 static int sprd_spi_dma_rx_config(struct sprd_spi *ss, struct spi_transfer *t)
519 struct dma_chan *dma_chan = ss->dma.dma_chan[SPRD_SPI_RX];
520 struct dma_slave_config config = {
521 .src_addr = ss->phy_base,
522 .src_addr_width = ss->dma.width,
523 .dst_addr_width = ss->dma.width,
524 .dst_maxburst = ss->dma.fragmens_len,
526 int ret;
528 ret = sprd_spi_dma_submit(dma_chan, &config, &t->rx_sg, DMA_DEV_TO_MEM);
529 if (ret)
530 return ret;
532 return ss->dma.rx_len;
535 static int sprd_spi_dma_tx_config(struct sprd_spi *ss, struct spi_transfer *t)
537 struct dma_chan *dma_chan = ss->dma.dma_chan[SPRD_SPI_TX];
538 struct dma_slave_config config = {
539 .dst_addr = ss->phy_base,
540 .src_addr_width = ss->dma.width,
541 .dst_addr_width = ss->dma.width,
542 .src_maxburst = ss->dma.fragmens_len,
544 int ret;
546 ret = sprd_spi_dma_submit(dma_chan, &config, &t->tx_sg, DMA_MEM_TO_DEV);
547 if (ret)
548 return ret;
550 return t->len;
553 static int sprd_spi_dma_request(struct sprd_spi *ss)
555 ss->dma.dma_chan[SPRD_SPI_RX] = dma_request_chan(ss->dev, "rx_chn");
556 if (IS_ERR_OR_NULL(ss->dma.dma_chan[SPRD_SPI_RX])) {
557 if (PTR_ERR(ss->dma.dma_chan[SPRD_SPI_RX]) == -EPROBE_DEFER)
558 return PTR_ERR(ss->dma.dma_chan[SPRD_SPI_RX]);
560 dev_err(ss->dev, "request RX DMA channel failed!\n");
561 return PTR_ERR(ss->dma.dma_chan[SPRD_SPI_RX]);
564 ss->dma.dma_chan[SPRD_SPI_TX] = dma_request_chan(ss->dev, "tx_chn");
565 if (IS_ERR_OR_NULL(ss->dma.dma_chan[SPRD_SPI_TX])) {
566 if (PTR_ERR(ss->dma.dma_chan[SPRD_SPI_TX]) == -EPROBE_DEFER)
567 return PTR_ERR(ss->dma.dma_chan[SPRD_SPI_TX]);
569 dev_err(ss->dev, "request TX DMA channel failed!\n");
570 dma_release_channel(ss->dma.dma_chan[SPRD_SPI_RX]);
571 return PTR_ERR(ss->dma.dma_chan[SPRD_SPI_TX]);
574 return 0;
577 static void sprd_spi_dma_release(struct sprd_spi *ss)
579 if (ss->dma.dma_chan[SPRD_SPI_RX])
580 dma_release_channel(ss->dma.dma_chan[SPRD_SPI_RX]);
582 if (ss->dma.dma_chan[SPRD_SPI_TX])
583 dma_release_channel(ss->dma.dma_chan[SPRD_SPI_TX]);
586 static int sprd_spi_dma_txrx_bufs(struct spi_device *sdev,
587 struct spi_transfer *t)
589 struct sprd_spi *ss = spi_master_get_devdata(sdev->master);
590 u32 trans_len = ss->trans_len;
591 int ret, write_size = 0;
593 reinit_completion(&ss->xfer_completion);
594 sprd_spi_irq_enable(ss);
595 if (ss->trans_mode & SPRD_SPI_TX_MODE) {
596 write_size = sprd_spi_dma_tx_config(ss, t);
597 sprd_spi_set_tx_length(ss, trans_len);
600 * For our 3 wires mode or dual TX line mode, we need
601 * to request the controller to transfer.
603 if (ss->hw_mode & SPI_3WIRE || ss->hw_mode & SPI_TX_DUAL)
604 sprd_spi_tx_req(ss);
605 } else {
606 sprd_spi_set_rx_length(ss, trans_len);
609 * For our 3 wires mode or dual TX line mode, we need
610 * to request the controller to read.
612 if (ss->hw_mode & SPI_3WIRE || ss->hw_mode & SPI_TX_DUAL)
613 sprd_spi_rx_req(ss);
614 else
615 write_size = ss->write_bufs(ss, trans_len);
618 if (write_size < 0) {
619 ret = write_size;
620 dev_err(ss->dev, "failed to write, ret = %d\n", ret);
621 goto trans_complete;
624 if (ss->trans_mode & SPRD_SPI_RX_MODE) {
626 * Set up the DMA receive data length, which must be an
627 * integral multiple of fragment length. But when the length
628 * of received data is less than fragment length, DMA can be
629 * configured to receive data according to the actual length
630 * of received data.
632 ss->dma.rx_len = t->len > ss->dma.fragmens_len ?
633 (t->len - t->len % ss->dma.fragmens_len) :
634 t->len;
635 ret = sprd_spi_dma_rx_config(ss, t);
636 if (ret < 0) {
637 dev_err(&sdev->dev,
638 "failed to configure rx DMA, ret = %d\n", ret);
639 goto trans_complete;
643 sprd_spi_dma_enable(ss, true);
644 wait_for_completion(&(ss->xfer_completion));
646 if (ss->trans_mode & SPRD_SPI_TX_MODE)
647 ret = write_size;
648 else
649 ret = ss->dma.rx_len;
651 trans_complete:
652 sprd_spi_dma_enable(ss, false);
653 sprd_spi_enter_idle(ss);
654 sprd_spi_irq_disable(ss);
656 return ret;
659 static void sprd_spi_set_speed(struct sprd_spi *ss, u32 speed_hz)
662 * From SPI datasheet, the prescale calculation formula:
663 * prescale = SPI source clock / (2 * SPI_freq) - 1;
665 u32 clk_div = DIV_ROUND_UP(ss->src_clk, speed_hz << 1) - 1;
667 /* Save the real hardware speed */
668 ss->hw_speed_hz = (ss->src_clk >> 1) / (clk_div + 1);
669 writel_relaxed(clk_div, ss->base + SPRD_SPI_CLKD);
672 static int sprd_spi_init_hw(struct sprd_spi *ss, struct spi_transfer *t)
674 struct spi_delay *d = &t->word_delay;
675 u16 word_delay, interval;
676 u32 val;
678 if (d->unit != SPI_DELAY_UNIT_SCK)
679 return -EINVAL;
681 val = readl_relaxed(ss->base + SPRD_SPI_CTL0);
682 val &= ~(SPRD_SPI_SCK_REV | SPRD_SPI_NG_TX | SPRD_SPI_NG_RX);
683 /* Set default chip selection, clock phase and clock polarity */
684 val |= ss->hw_mode & SPI_CPHA ? SPRD_SPI_NG_RX : SPRD_SPI_NG_TX;
685 val |= ss->hw_mode & SPI_CPOL ? SPRD_SPI_SCK_REV : 0;
686 writel_relaxed(val, ss->base + SPRD_SPI_CTL0);
689 * Set the intervals of two SPI frames, and the inteval calculation
690 * formula as below per datasheet:
691 * interval time (source clock cycles) = interval * 4 + 10.
693 word_delay = clamp_t(u16, d->value, SPRD_SPI_MIN_DELAY_CYCLE,
694 SPRD_SPI_MAX_DELAY_CYCLE);
695 interval = DIV_ROUND_UP(word_delay - 10, 4);
696 ss->word_delay = interval * 4 + 10;
697 writel_relaxed(interval, ss->base + SPRD_SPI_CTL5);
699 /* Reset SPI fifo */
700 writel_relaxed(1, ss->base + SPRD_SPI_FIFO_RST);
701 writel_relaxed(0, ss->base + SPRD_SPI_FIFO_RST);
703 /* Set SPI work mode */
704 val = readl_relaxed(ss->base + SPRD_SPI_CTL7);
705 val &= ~SPRD_SPI_MODE_MASK;
707 if (ss->hw_mode & SPI_3WIRE)
708 val |= SPRD_SPI_3WIRE_MODE << SPRD_SPI_MODE_OFFSET;
709 else
710 val |= SPRD_SPI_4WIRE_MODE << SPRD_SPI_MODE_OFFSET;
712 if (ss->hw_mode & SPI_TX_DUAL)
713 val |= SPRD_SPI_DATA_LINE2_EN;
714 else
715 val &= ~SPRD_SPI_DATA_LINE2_EN;
717 writel_relaxed(val, ss->base + SPRD_SPI_CTL7);
719 return 0;
722 static int sprd_spi_setup_transfer(struct spi_device *sdev,
723 struct spi_transfer *t)
725 struct sprd_spi *ss = spi_controller_get_devdata(sdev->controller);
726 u8 bits_per_word = t->bits_per_word;
727 u32 val, mode = 0;
728 int ret;
730 ss->len = t->len;
731 ss->tx_buf = t->tx_buf;
732 ss->rx_buf = t->rx_buf;
734 ss->hw_mode = sdev->mode;
735 ret = sprd_spi_init_hw(ss, t);
736 if (ret)
737 return ret;
739 /* Set tansfer speed and valid bits */
740 sprd_spi_set_speed(ss, t->speed_hz);
741 sprd_spi_set_transfer_bits(ss, bits_per_word);
743 if (bits_per_word > 16)
744 bits_per_word = round_up(bits_per_word, 16);
745 else
746 bits_per_word = round_up(bits_per_word, 8);
748 switch (bits_per_word) {
749 case 8:
750 ss->trans_len = t->len;
751 ss->read_bufs = sprd_spi_read_bufs_u8;
752 ss->write_bufs = sprd_spi_write_bufs_u8;
753 ss->dma.width = DMA_SLAVE_BUSWIDTH_1_BYTE;
754 ss->dma.fragmens_len = SPRD_SPI_DMA_STEP;
755 break;
756 case 16:
757 ss->trans_len = t->len >> 1;
758 ss->read_bufs = sprd_spi_read_bufs_u16;
759 ss->write_bufs = sprd_spi_write_bufs_u16;
760 ss->dma.width = DMA_SLAVE_BUSWIDTH_2_BYTES;
761 ss->dma.fragmens_len = SPRD_SPI_DMA_STEP << 1;
762 break;
763 case 32:
764 ss->trans_len = t->len >> 2;
765 ss->read_bufs = sprd_spi_read_bufs_u32;
766 ss->write_bufs = sprd_spi_write_bufs_u32;
767 ss->dma.width = DMA_SLAVE_BUSWIDTH_4_BYTES;
768 ss->dma.fragmens_len = SPRD_SPI_DMA_STEP << 2;
769 break;
770 default:
771 return -EINVAL;
774 /* Set transfer read or write mode */
775 val = readl_relaxed(ss->base + SPRD_SPI_CTL1);
776 val &= ~SPRD_SPI_RTX_MD_MASK;
777 if (t->tx_buf)
778 mode |= SPRD_SPI_TX_MODE;
779 if (t->rx_buf)
780 mode |= SPRD_SPI_RX_MODE;
782 writel_relaxed(val | mode, ss->base + SPRD_SPI_CTL1);
784 ss->trans_mode = mode;
787 * If in only receive mode, we need to trigger the SPI controller to
788 * receive data automatically.
790 if (ss->trans_mode == SPRD_SPI_RX_MODE)
791 ss->write_bufs = sprd_spi_write_only_receive;
793 return 0;
796 static int sprd_spi_transfer_one(struct spi_controller *sctlr,
797 struct spi_device *sdev,
798 struct spi_transfer *t)
800 int ret;
802 ret = sprd_spi_setup_transfer(sdev, t);
803 if (ret)
804 goto setup_err;
806 if (sctlr->can_dma(sctlr, sdev, t))
807 ret = sprd_spi_dma_txrx_bufs(sdev, t);
808 else
809 ret = sprd_spi_txrx_bufs(sdev, t);
811 if (ret == t->len)
812 ret = 0;
813 else if (ret >= 0)
814 ret = -EREMOTEIO;
816 setup_err:
817 spi_finalize_current_transfer(sctlr);
819 return ret;
822 static irqreturn_t sprd_spi_handle_irq(int irq, void *data)
824 struct sprd_spi *ss = (struct sprd_spi *)data;
825 u32 val = readl_relaxed(ss->base + SPRD_SPI_INT_MASK_STS);
827 if (val & SPRD_SPI_MASK_TX_END) {
828 writel_relaxed(SPRD_SPI_TX_END_CLR, ss->base + SPRD_SPI_INT_CLR);
829 if (!(ss->trans_mode & SPRD_SPI_RX_MODE))
830 complete(&ss->xfer_completion);
832 return IRQ_HANDLED;
835 if (val & SPRD_SPI_MASK_RX_END) {
836 writel_relaxed(SPRD_SPI_RX_END_CLR, ss->base + SPRD_SPI_INT_CLR);
837 if (ss->dma.rx_len < ss->len) {
838 ss->rx_buf += ss->dma.rx_len;
839 ss->dma.rx_len +=
840 ss->read_bufs(ss, ss->len - ss->dma.rx_len);
842 complete(&ss->xfer_completion);
844 return IRQ_HANDLED;
847 return IRQ_NONE;
850 static int sprd_spi_irq_init(struct platform_device *pdev, struct sprd_spi *ss)
852 int ret;
854 ss->irq = platform_get_irq(pdev, 0);
855 if (ss->irq < 0)
856 return ss->irq;
858 ret = devm_request_irq(&pdev->dev, ss->irq, sprd_spi_handle_irq,
859 0, pdev->name, ss);
860 if (ret)
861 dev_err(&pdev->dev, "failed to request spi irq %d, ret = %d\n",
862 ss->irq, ret);
864 return ret;
867 static int sprd_spi_clk_init(struct platform_device *pdev, struct sprd_spi *ss)
869 struct clk *clk_spi, *clk_parent;
871 clk_spi = devm_clk_get(&pdev->dev, "spi");
872 if (IS_ERR(clk_spi)) {
873 dev_warn(&pdev->dev, "can't get the spi clock\n");
874 clk_spi = NULL;
877 clk_parent = devm_clk_get(&pdev->dev, "source");
878 if (IS_ERR(clk_parent)) {
879 dev_warn(&pdev->dev, "can't get the source clock\n");
880 clk_parent = NULL;
883 ss->clk = devm_clk_get(&pdev->dev, "enable");
884 if (IS_ERR(ss->clk)) {
885 dev_err(&pdev->dev, "can't get the enable clock\n");
886 return PTR_ERR(ss->clk);
889 if (!clk_set_parent(clk_spi, clk_parent))
890 ss->src_clk = clk_get_rate(clk_spi);
891 else
892 ss->src_clk = SPRD_SPI_DEFAULT_SOURCE;
894 return 0;
897 static bool sprd_spi_can_dma(struct spi_controller *sctlr,
898 struct spi_device *spi, struct spi_transfer *t)
900 struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
902 return ss->dma.enable && (t->len > SPRD_SPI_FIFO_SIZE);
905 static int sprd_spi_dma_init(struct platform_device *pdev, struct sprd_spi *ss)
907 int ret;
909 ret = sprd_spi_dma_request(ss);
910 if (ret) {
911 if (ret == -EPROBE_DEFER)
912 return ret;
914 dev_warn(&pdev->dev,
915 "failed to request dma, enter no dma mode, ret = %d\n",
916 ret);
918 return 0;
921 ss->dma.enable = true;
923 return 0;
926 static int sprd_spi_probe(struct platform_device *pdev)
928 struct spi_controller *sctlr;
929 struct resource *res;
930 struct sprd_spi *ss;
931 int ret;
933 pdev->id = of_alias_get_id(pdev->dev.of_node, "spi");
934 sctlr = spi_alloc_master(&pdev->dev, sizeof(*ss));
935 if (!sctlr)
936 return -ENOMEM;
938 ss = spi_controller_get_devdata(sctlr);
939 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
940 ss->base = devm_ioremap_resource(&pdev->dev, res);
941 if (IS_ERR(ss->base)) {
942 ret = PTR_ERR(ss->base);
943 goto free_controller;
946 ss->phy_base = res->start;
947 ss->dev = &pdev->dev;
948 sctlr->dev.of_node = pdev->dev.of_node;
949 sctlr->mode_bits = SPI_CPOL | SPI_CPHA | SPI_3WIRE | SPI_TX_DUAL;
950 sctlr->bus_num = pdev->id;
951 sctlr->set_cs = sprd_spi_chipselect;
952 sctlr->transfer_one = sprd_spi_transfer_one;
953 sctlr->can_dma = sprd_spi_can_dma;
954 sctlr->auto_runtime_pm = true;
955 sctlr->max_speed_hz = min_t(u32, ss->src_clk >> 1,
956 SPRD_SPI_MAX_SPEED_HZ);
958 init_completion(&ss->xfer_completion);
959 platform_set_drvdata(pdev, sctlr);
960 ret = sprd_spi_clk_init(pdev, ss);
961 if (ret)
962 goto free_controller;
964 ret = sprd_spi_irq_init(pdev, ss);
965 if (ret)
966 goto free_controller;
968 ret = sprd_spi_dma_init(pdev, ss);
969 if (ret)
970 goto free_controller;
972 ret = clk_prepare_enable(ss->clk);
973 if (ret)
974 goto release_dma;
976 ret = pm_runtime_set_active(&pdev->dev);
977 if (ret < 0)
978 goto disable_clk;
980 pm_runtime_set_autosuspend_delay(&pdev->dev,
981 SPRD_SPI_AUTOSUSPEND_DELAY);
982 pm_runtime_use_autosuspend(&pdev->dev);
983 pm_runtime_enable(&pdev->dev);
984 ret = pm_runtime_get_sync(&pdev->dev);
985 if (ret < 0) {
986 dev_err(&pdev->dev, "failed to resume SPI controller\n");
987 goto err_rpm_put;
990 ret = devm_spi_register_controller(&pdev->dev, sctlr);
991 if (ret)
992 goto err_rpm_put;
994 pm_runtime_mark_last_busy(&pdev->dev);
995 pm_runtime_put_autosuspend(&pdev->dev);
997 return 0;
999 err_rpm_put:
1000 pm_runtime_put_noidle(&pdev->dev);
1001 pm_runtime_disable(&pdev->dev);
1002 disable_clk:
1003 clk_disable_unprepare(ss->clk);
1004 release_dma:
1005 sprd_spi_dma_release(ss);
1006 free_controller:
1007 spi_controller_put(sctlr);
1009 return ret;
1012 static int sprd_spi_remove(struct platform_device *pdev)
1014 struct spi_controller *sctlr = platform_get_drvdata(pdev);
1015 struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
1016 int ret;
1018 ret = pm_runtime_get_sync(ss->dev);
1019 if (ret < 0) {
1020 dev_err(ss->dev, "failed to resume SPI controller\n");
1021 return ret;
1024 spi_controller_suspend(sctlr);
1026 if (ss->dma.enable)
1027 sprd_spi_dma_release(ss);
1028 clk_disable_unprepare(ss->clk);
1029 pm_runtime_put_noidle(&pdev->dev);
1030 pm_runtime_disable(&pdev->dev);
1032 return 0;
1035 static int __maybe_unused sprd_spi_runtime_suspend(struct device *dev)
1037 struct spi_controller *sctlr = dev_get_drvdata(dev);
1038 struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
1040 if (ss->dma.enable)
1041 sprd_spi_dma_release(ss);
1043 clk_disable_unprepare(ss->clk);
1045 return 0;
1048 static int __maybe_unused sprd_spi_runtime_resume(struct device *dev)
1050 struct spi_controller *sctlr = dev_get_drvdata(dev);
1051 struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
1052 int ret;
1054 ret = clk_prepare_enable(ss->clk);
1055 if (ret)
1056 return ret;
1058 if (!ss->dma.enable)
1059 return 0;
1061 ret = sprd_spi_dma_request(ss);
1062 if (ret)
1063 clk_disable_unprepare(ss->clk);
1065 return ret;
1068 static const struct dev_pm_ops sprd_spi_pm_ops = {
1069 SET_RUNTIME_PM_OPS(sprd_spi_runtime_suspend,
1070 sprd_spi_runtime_resume, NULL)
1073 static const struct of_device_id sprd_spi_of_match[] = {
1074 { .compatible = "sprd,sc9860-spi", },
1075 { /* sentinel */ }
1078 static struct platform_driver sprd_spi_driver = {
1079 .driver = {
1080 .name = "sprd-spi",
1081 .of_match_table = sprd_spi_of_match,
1082 .pm = &sprd_spi_pm_ops,
1084 .probe = sprd_spi_probe,
1085 .remove = sprd_spi_remove,
1088 module_platform_driver(sprd_spi_driver);
1090 MODULE_DESCRIPTION("Spreadtrum SPI Controller driver");
1091 MODULE_AUTHOR("Lanqing Liu <lanqing.liu@spreadtrum.com>");
1092 MODULE_LICENSE("GPL v2");