/*
 * SPI bus driver for CSR SiRFprimaII
 *
 * Copyright (c) 2011 Cambridge Silicon Radio Limited, a CSR plc group company.
 *
 * Licensed under GPLv2 or later.
 */
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/clk.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/of.h>
#include <linux/bitops.h>
#include <linux/err.h>
#include <linux/platform_device.h>
#include <linux/of_gpio.h>
#include <linux/spi/spi.h>
#include <linux/spi/spi_bitbang.h>
#include <linux/dmaengine.h>
#include <linux/dma-direction.h>
#include <linux/dma-mapping.h>
#include <linux/sirfsoc_dma.h>
#define DRIVER_NAME "sirfsoc_spi"

#define SIRFSOC_SPI_CTRL 0x0000
#define SIRFSOC_SPI_CMD 0x0004
#define SIRFSOC_SPI_TX_RX_EN 0x0008
#define SIRFSOC_SPI_INT_EN 0x000C
#define SIRFSOC_SPI_INT_STATUS 0x0010
#define SIRFSOC_SPI_TX_DMA_IO_CTRL 0x0100
#define SIRFSOC_SPI_TX_DMA_IO_LEN 0x0104
#define SIRFSOC_SPI_TXFIFO_CTRL 0x0108
#define SIRFSOC_SPI_TXFIFO_LEVEL_CHK 0x010C
#define SIRFSOC_SPI_TXFIFO_OP 0x0110
#define SIRFSOC_SPI_TXFIFO_STATUS 0x0114
#define SIRFSOC_SPI_TXFIFO_DATA 0x0118
#define SIRFSOC_SPI_RX_DMA_IO_CTRL 0x0120
#define SIRFSOC_SPI_RX_DMA_IO_LEN 0x0124
#define SIRFSOC_SPI_RXFIFO_CTRL 0x0128
#define SIRFSOC_SPI_RXFIFO_LEVEL_CHK 0x012C
#define SIRFSOC_SPI_RXFIFO_OP 0x0130
#define SIRFSOC_SPI_RXFIFO_STATUS 0x0134
#define SIRFSOC_SPI_RXFIFO_DATA 0x0138
#define SIRFSOC_SPI_DUMMY_DELAY_CTL 0x0144

/* SPI CTRL register defines */
#define SIRFSOC_SPI_SLV_MODE BIT(16)
#define SIRFSOC_SPI_CMD_MODE BIT(17)
#define SIRFSOC_SPI_CS_IO_OUT BIT(18)
#define SIRFSOC_SPI_CS_IO_MODE BIT(19)
#define SIRFSOC_SPI_CLK_IDLE_STAT BIT(20)
#define SIRFSOC_SPI_CS_IDLE_STAT BIT(21)
#define SIRFSOC_SPI_TRAN_MSB BIT(22)
#define SIRFSOC_SPI_DRV_POS_EDGE BIT(23)
#define SIRFSOC_SPI_CS_HOLD_TIME BIT(24)
#define SIRFSOC_SPI_CLK_SAMPLE_MODE BIT(25)
#define SIRFSOC_SPI_TRAN_DAT_FORMAT_8 (0 << 26)
#define SIRFSOC_SPI_TRAN_DAT_FORMAT_12 (1 << 26)
#define SIRFSOC_SPI_TRAN_DAT_FORMAT_16 (2 << 26)
#define SIRFSOC_SPI_TRAN_DAT_FORMAT_32 (3 << 26)
#define SIRFSOC_SPI_CMD_BYTE_NUM(x) ((x & 3) << 28)
#define SIRFSOC_SPI_ENA_AUTO_CLR BIT(30)
#define SIRFSOC_SPI_MUL_DAT_MODE BIT(31)

/* Interrupt Enable */
#define SIRFSOC_SPI_RX_DONE_INT_EN BIT(0)
#define SIRFSOC_SPI_TX_DONE_INT_EN BIT(1)
#define SIRFSOC_SPI_RX_OFLOW_INT_EN BIT(2)
#define SIRFSOC_SPI_TX_UFLOW_INT_EN BIT(3)
#define SIRFSOC_SPI_RX_IO_DMA_INT_EN BIT(4)
#define SIRFSOC_SPI_TX_IO_DMA_INT_EN BIT(5)
#define SIRFSOC_SPI_RXFIFO_FULL_INT_EN BIT(6)
#define SIRFSOC_SPI_TXFIFO_EMPTY_INT_EN BIT(7)
#define SIRFSOC_SPI_RXFIFO_THD_INT_EN BIT(8)
#define SIRFSOC_SPI_TXFIFO_THD_INT_EN BIT(9)
#define SIRFSOC_SPI_FRM_END_INT_EN BIT(10)

#define SIRFSOC_SPI_INT_MASK_ALL 0x1FFF

/* Interrupt status */
#define SIRFSOC_SPI_RX_DONE BIT(0)
#define SIRFSOC_SPI_TX_DONE BIT(1)
#define SIRFSOC_SPI_RX_OFLOW BIT(2)
#define SIRFSOC_SPI_TX_UFLOW BIT(3)
#define SIRFSOC_SPI_RX_FIFO_FULL BIT(6)
#define SIRFSOC_SPI_TXFIFO_EMPTY BIT(7)
#define SIRFSOC_SPI_RXFIFO_THD_REACH BIT(8)
#define SIRFSOC_SPI_TXFIFO_THD_REACH BIT(9)
#define SIRFSOC_SPI_FRM_END BIT(10)
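
/* TX/RX enable register (SIRFSOC_SPI_TX_RX_EN) bits */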
#define SIRFSOC_SPI_RX_EN BIT(0)
#define SIRFSOC_SPI_TX_EN BIT(1)
#define SIRFSOC_SPI_CMD_TX_EN BIT(2)
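
/* TX/RX DMA/IO control register bits */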
#define SIRFSOC_SPI_IO_MODE_SEL BIT(0)
#define SIRFSOC_SPI_RX_DMA_FLUSH BIT(2)
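
/* TX/RX FIFO op register bits */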
#define SIRFSOC_SPI_FIFO_RESET BIT(0)
#define SIRFSOC_SPI_FIFO_START BIT(1)
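
/* TX/RX FIFO ctrl register: FIFO data unit width */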
#define SIRFSOC_SPI_FIFO_WIDTH_BYTE (0 << 0)
#define SIRFSOC_SPI_FIFO_WIDTH_WORD (1 << 0)
#define SIRFSOC_SPI_FIFO_WIDTH_DWORD (2 << 0)
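
/* TX/RX FIFO status register fields */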
#define SIRFSOC_SPI_FIFO_LEVEL_MASK 0xFF
#define SIRFSOC_SPI_FIFO_FULL BIT(8)
#define SIRFSOC_SPI_FIFO_EMPTY BIT(9)

/* 256 bytes rx/tx FIFO */
#define SIRFSOC_SPI_FIFO_SIZE 256
#define SIRFSOC_SPI_DAT_FRM_LEN_MAX (64 * 1024)
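
/* FIFO level-check (SC/LC/HC) fields and the FIFO ctrl threshold field */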
#define SIRFSOC_SPI_FIFO_SC(x) ((x) & 0x3F)
#define SIRFSOC_SPI_FIFO_LC(x) (((x) & 0x3F) << 10)
#define SIRFSOC_SPI_FIFO_HC(x) (((x) & 0x3F) << 20)
#define SIRFSOC_SPI_FIFO_THD(x) (((x) & 0xFF) << 2)

/*
 * We use DMA only if the rx/tx buffers and the transfer length are all
 * 4-byte aligned, due to a limitation of the DMA controller.
 */
#define ALIGNED(x) (!((u32)x & 0x3))
#define IS_DMA_VALID(x) (x && ALIGNED(x->tx_buf) && ALIGNED(x->rx_buf) && \
    ALIGNED(x->len) && (x->len < 2 * PAGE_SIZE))

struct sirfsoc_spi {
    struct spi_bitbang bitbang;
    struct completion rx_done;
    struct completion tx_done;

    void __iomem *base;
    u32 ctrl_freq;  /* SPI controller clock speed */
    struct clk *clk;

    /* rx & tx bufs from the spi_transfer */
    const void *tx;
    void *rx;

    /* place received word into rx buffer */
    void (*rx_word)(struct sirfsoc_spi *);
    /* get word from tx buffer for sending */
    void (*tx_word)(struct sirfsoc_spi *);

    /* number of words left to be transmitted/received */
    unsigned int left_tx_word;
    unsigned int left_rx_word;

    /* rx & tx DMA channels */
    struct dma_chan *rx_chan;
    struct dma_chan *tx_chan;
    dma_addr_t src_start;
    dma_addr_t dst_start;
    void *dummypage;
    int word_width; /* in bytes */

    int chipselect[0];
};
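
/*
 * PIO helpers: move one word between the spi_transfer buffers and the
 * RX/TX FIFO data registers, one variant per supported word width.
 */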
static void spi_sirfsoc_rx_word_u8(struct sirfsoc_spi *sspi)
{
    u32 data;
    u8 *rx = sspi->rx;

    data = readl(sspi->base + SIRFSOC_SPI_RXFIFO_DATA);
    if (rx) {
        *rx++ = (u8)data;
        sspi->rx = rx;
    }
    sspi->left_rx_word--;
}

static void spi_sirfsoc_tx_word_u8(struct sirfsoc_spi *sspi)
{
    u32 data = 0;
    const u8 *tx = sspi->tx;

    if (tx) {
        data = *tx++;
        sspi->tx = tx;
    }
    writel(data, sspi->base + SIRFSOC_SPI_TXFIFO_DATA);
    sspi->left_tx_word--;
}

static void spi_sirfsoc_rx_word_u16(struct sirfsoc_spi *sspi)
{
    u32 data;
    u16 *rx = sspi->rx;

    data = readl(sspi->base + SIRFSOC_SPI_RXFIFO_DATA);
    if (rx) {
        *rx++ = (u16)data;
        sspi->rx = rx;
    }
    sspi->left_rx_word--;
}

static void spi_sirfsoc_tx_word_u16(struct sirfsoc_spi *sspi)
{
    u32 data = 0;
    const u16 *tx = sspi->tx;

    if (tx) {
        data = *tx++;
        sspi->tx = tx;
    }
    writel(data, sspi->base + SIRFSOC_SPI_TXFIFO_DATA);
    sspi->left_tx_word--;
}

static void spi_sirfsoc_rx_word_u32(struct sirfsoc_spi *sspi)
{
    u32 data;
    u32 *rx = sspi->rx;

    data = readl(sspi->base + SIRFSOC_SPI_RXFIFO_DATA);
    if (rx) {
        *rx++ = data;
        sspi->rx = rx;
    }
    sspi->left_rx_word--;
}

static void spi_sirfsoc_tx_word_u32(struct sirfsoc_spi *sspi)
{
    u32 data = 0;
    const u32 *tx = sspi->tx;

    if (tx) {
        data = *tx++;
        sspi->tx = tx;
    }
    writel(data, sspi->base + SIRFSOC_SPI_TXFIFO_DATA);
    sspi->left_tx_word--;
}

static irqreturn_t spi_sirfsoc_irq(int irq, void *dev_id)
{
    struct sirfsoc_spi *sspi = dev_id;
    u32 spi_stat = readl(sspi->base + SIRFSOC_SPI_INT_STATUS);

    writel(spi_stat, sspi->base + SIRFSOC_SPI_INT_STATUS);

    /* Error Conditions */
    if (spi_stat & SIRFSOC_SPI_RX_OFLOW ||
            spi_stat & SIRFSOC_SPI_TX_UFLOW) {
        complete(&sspi->rx_done);
        writel(0x0, sspi->base + SIRFSOC_SPI_INT_EN);
    }

    /* Drain the RX FIFO into the rx buffer */
    if (spi_stat & (SIRFSOC_SPI_FRM_END
            | SIRFSOC_SPI_RXFIFO_THD_REACH))
        while (!((readl(sspi->base + SIRFSOC_SPI_RXFIFO_STATUS)
                & SIRFSOC_SPI_FIFO_EMPTY)) &&
                sspi->left_rx_word)
            sspi->rx_word(sspi);

    /* Refill the TX FIFO from the tx buffer */
    if (spi_stat & (SIRFSOC_SPI_FIFO_EMPTY
            | SIRFSOC_SPI_TXFIFO_THD_REACH))
        while (!((readl(sspi->base + SIRFSOC_SPI_TXFIFO_STATUS)
                & SIRFSOC_SPI_FIFO_FULL)) &&
                sspi->left_tx_word)
            sspi->tx_word(sspi);

    /* Received all words */
    if ((sspi->left_rx_word == 0) && (sspi->left_tx_word == 0)) {
        complete(&sspi->rx_done);
        writel(0x0, sspi->base + SIRFSOC_SPI_INT_EN);
    }

    return IRQ_HANDLED;
}

static void spi_sirfsoc_dma_fini_callback(void *data)
{
    struct completion *dma_complete = data;

    complete(dma_complete);
}
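
/*
 * Handle one spi_transfer: set the word counters and frame length, then
 * move data either by DMA (when IS_DMA_VALID(t)) or by PIO driven from
 * the FIFO threshold / frame-end interrupts, and wait for completion.
 */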
static int spi_sirfsoc_transfer(struct spi_device *spi, struct spi_transfer *t)
{
    struct sirfsoc_spi *sspi;
    int timeout = t->len * 10;

    sspi = spi_master_get_devdata(spi->master);

    sspi->tx = t->tx_buf ? t->tx_buf : sspi->dummypage;
    sspi->rx = t->rx_buf ? t->rx_buf : sspi->dummypage;
    sspi->left_tx_word = sspi->left_rx_word = t->len / sspi->word_width;
    INIT_COMPLETION(sspi->rx_done);
    INIT_COMPLETION(sspi->tx_done);

    writel(SIRFSOC_SPI_INT_MASK_ALL, sspi->base + SIRFSOC_SPI_INT_STATUS);

    /* program auto-clear / multi-data mode and the frame length registers */
    if (sspi->left_tx_word == 1) {
        writel(readl(sspi->base + SIRFSOC_SPI_CTRL) |
            SIRFSOC_SPI_ENA_AUTO_CLR,
            sspi->base + SIRFSOC_SPI_CTRL);
        writel(0, sspi->base + SIRFSOC_SPI_TX_DMA_IO_LEN);
        writel(0, sspi->base + SIRFSOC_SPI_RX_DMA_IO_LEN);
    } else if ((sspi->left_tx_word > 1) && (sspi->left_tx_word <
            SIRFSOC_SPI_DAT_FRM_LEN_MAX)) {
        writel(readl(sspi->base + SIRFSOC_SPI_CTRL) |
            SIRFSOC_SPI_MUL_DAT_MODE |
            SIRFSOC_SPI_ENA_AUTO_CLR,
            sspi->base + SIRFSOC_SPI_CTRL);
        writel(sspi->left_tx_word - 1,
            sspi->base + SIRFSOC_SPI_TX_DMA_IO_LEN);
        writel(sspi->left_tx_word - 1,
            sspi->base + SIRFSOC_SPI_RX_DMA_IO_LEN);
    } else {
        writel(readl(sspi->base + SIRFSOC_SPI_CTRL),
            sspi->base + SIRFSOC_SPI_CTRL);
        writel(0, sspi->base + SIRFSOC_SPI_TX_DMA_IO_LEN);
        writel(0, sspi->base + SIRFSOC_SPI_RX_DMA_IO_LEN);
    }

    writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
    writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
    writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
    writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_TXFIFO_OP);

    if (IS_DMA_VALID(t)) {
        struct dma_async_tx_descriptor *rx_desc, *tx_desc;

        sspi->dst_start = dma_map_single(&spi->dev, sspi->rx, t->len, DMA_FROM_DEVICE);
        rx_desc = dmaengine_prep_slave_single(sspi->rx_chan,
            sspi->dst_start, t->len, DMA_DEV_TO_MEM,
            DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        rx_desc->callback = spi_sirfsoc_dma_fini_callback;
        rx_desc->callback_param = &sspi->rx_done;

        sspi->src_start = dma_map_single(&spi->dev, (void *)sspi->tx, t->len, DMA_TO_DEVICE);
        tx_desc = dmaengine_prep_slave_single(sspi->tx_chan,
            sspi->src_start, t->len, DMA_MEM_TO_DEV,
            DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        tx_desc->callback = spi_sirfsoc_dma_fini_callback;
        tx_desc->callback_param = &sspi->tx_done;

        dmaengine_submit(tx_desc);
        dmaengine_submit(rx_desc);
        dma_async_issue_pending(sspi->tx_chan);
        dma_async_issue_pending(sspi->rx_chan);
    } else {
        /* Send the first word to trigger the whole tx/rx process */
        sspi->tx_word(sspi);

        writel(SIRFSOC_SPI_RX_OFLOW_INT_EN | SIRFSOC_SPI_TX_UFLOW_INT_EN |
            SIRFSOC_SPI_RXFIFO_THD_INT_EN | SIRFSOC_SPI_TXFIFO_THD_INT_EN |
            SIRFSOC_SPI_FRM_END_INT_EN | SIRFSOC_SPI_RXFIFO_FULL_INT_EN |
            SIRFSOC_SPI_TXFIFO_EMPTY_INT_EN, sspi->base + SIRFSOC_SPI_INT_EN);
    }

    writel(SIRFSOC_SPI_RX_EN | SIRFSOC_SPI_TX_EN, sspi->base + SIRFSOC_SPI_TX_RX_EN);

    if (!IS_DMA_VALID(t)) { /* for PIO */
        if (wait_for_completion_timeout(&sspi->rx_done, timeout) == 0)
            dev_err(&spi->dev, "transfer timeout\n");
    } else if (wait_for_completion_timeout(&sspi->rx_done, timeout) == 0) {
        dev_err(&spi->dev, "transfer timeout\n");
        dmaengine_terminate_all(sspi->rx_chan);
    } else
        sspi->left_rx_word = 0;

    /*
     * We only wait for the tx-done event when transferring by DMA. For PIO,
     * rx data is produced by writing tx data, so if rx is done, tx has
     * finished as well.
     */
    if (IS_DMA_VALID(t)) {
        if (wait_for_completion_timeout(&sspi->tx_done, timeout) == 0) {
            dev_err(&spi->dev, "transfer timeout\n");
            dmaengine_terminate_all(sspi->tx_chan);
        }
    }

    if (IS_DMA_VALID(t)) {
        dma_unmap_single(&spi->dev, sspi->src_start, t->len, DMA_TO_DEVICE);
        dma_unmap_single(&spi->dev, sspi->dst_start, t->len, DMA_FROM_DEVICE);
    }

    /* TX, RX FIFO stop */
    writel(0, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
    writel(0, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
    writel(0, sspi->base + SIRFSOC_SPI_TX_RX_EN);
    writel(0, sspi->base + SIRFSOC_SPI_INT_EN);

    return t->len - sspi->left_rx_word * sspi->word_width;
}
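
/*
 * Assert or deassert chip select, either through the controller's CS
 * output bit or through a GPIO taken from the "cs-gpios" DT property.
 */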
static void spi_sirfsoc_chipselect(struct spi_device *spi, int value)
{
    struct sirfsoc_spi *sspi = spi_master_get_devdata(spi->master);

    if (sspi->chipselect[spi->chip_select] == 0) {
        u32 regval = readl(sspi->base + SIRFSOC_SPI_CTRL);

        switch (value) {
        case BITBANG_CS_ACTIVE:
            if (spi->mode & SPI_CS_HIGH)
                regval |= SIRFSOC_SPI_CS_IO_OUT;
            else
                regval &= ~SIRFSOC_SPI_CS_IO_OUT;
            break;
        case BITBANG_CS_INACTIVE:
            if (spi->mode & SPI_CS_HIGH)
                regval &= ~SIRFSOC_SPI_CS_IO_OUT;
            else
                regval |= SIRFSOC_SPI_CS_IO_OUT;
            break;
        }
        writel(regval, sspi->base + SIRFSOC_SPI_CTRL);
    } else {
        int gpio = sspi->chipselect[spi->chip_select];

        gpio_direction_output(gpio, spi->mode & SPI_CS_HIGH ? 0 : 1);
    }
}
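
/*
 * Program the clock divisor, word width, SPI mode bits and FIFO
 * thresholds for a transfer (or for the device defaults when t is NULL).
 */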
static int
spi_sirfsoc_setup_transfer(struct spi_device *spi, struct spi_transfer *t)
{
    struct sirfsoc_spi *sspi;
    u8 bits_per_word = 0;
    int hz = 0;
    u32 regval;
    u32 txfifo_ctrl, rxfifo_ctrl;
    u32 fifo_size = SIRFSOC_SPI_FIFO_SIZE / 4;

    sspi = spi_master_get_devdata(spi->master);

    bits_per_word = (t) ? t->bits_per_word : spi->bits_per_word;
    hz = t && t->speed_hz ? t->speed_hz : spi->max_speed_hz;

    regval = (sspi->ctrl_freq / (2 * hz)) - 1;
    if (regval > 0xFFFF || regval < 0) {
        dev_err(&spi->dev, "Speed %d not supported\n", hz);
        return -EINVAL;
    }

    switch (bits_per_word) {
    case 8:
        regval |= SIRFSOC_SPI_TRAN_DAT_FORMAT_8;
        sspi->rx_word = spi_sirfsoc_rx_word_u8;
        sspi->tx_word = spi_sirfsoc_tx_word_u8;
        txfifo_ctrl = SIRFSOC_SPI_FIFO_THD(SIRFSOC_SPI_FIFO_SIZE / 2) |
            SIRFSOC_SPI_FIFO_WIDTH_BYTE;
        rxfifo_ctrl = SIRFSOC_SPI_FIFO_THD(SIRFSOC_SPI_FIFO_SIZE / 2) |
            SIRFSOC_SPI_FIFO_WIDTH_BYTE;
        sspi->word_width = 1;
        break;
    case 12:
    case 16:
        regval |= (bits_per_word == 12) ? SIRFSOC_SPI_TRAN_DAT_FORMAT_12 :
            SIRFSOC_SPI_TRAN_DAT_FORMAT_16;
        sspi->rx_word = spi_sirfsoc_rx_word_u16;
        sspi->tx_word = spi_sirfsoc_tx_word_u16;
        txfifo_ctrl = SIRFSOC_SPI_FIFO_THD(SIRFSOC_SPI_FIFO_SIZE / 2) |
            SIRFSOC_SPI_FIFO_WIDTH_WORD;
        rxfifo_ctrl = SIRFSOC_SPI_FIFO_THD(SIRFSOC_SPI_FIFO_SIZE / 2) |
            SIRFSOC_SPI_FIFO_WIDTH_WORD;
        sspi->word_width = 2;
        break;
    case 32:
        regval |= SIRFSOC_SPI_TRAN_DAT_FORMAT_32;
        sspi->rx_word = spi_sirfsoc_rx_word_u32;
        sspi->tx_word = spi_sirfsoc_tx_word_u32;
        txfifo_ctrl = SIRFSOC_SPI_FIFO_THD(SIRFSOC_SPI_FIFO_SIZE / 2) |
            SIRFSOC_SPI_FIFO_WIDTH_DWORD;
        rxfifo_ctrl = SIRFSOC_SPI_FIFO_THD(SIRFSOC_SPI_FIFO_SIZE / 2) |
            SIRFSOC_SPI_FIFO_WIDTH_DWORD;
        sspi->word_width = 4;
        break;
    default:
        BUG();
    }

    if (!(spi->mode & SPI_CS_HIGH))
        regval |= SIRFSOC_SPI_CS_IDLE_STAT;
    if (!(spi->mode & SPI_LSB_FIRST))
        regval |= SIRFSOC_SPI_TRAN_MSB;
    if (spi->mode & SPI_CPOL)
        regval |= SIRFSOC_SPI_CLK_IDLE_STAT;

    /*
     * Data should be driven at least 1/2 cycle before the fetch edge
     * to make sure that data gets stable at the fetch edge.
     */
    if (((spi->mode & SPI_CPOL) && (spi->mode & SPI_CPHA)) ||
            (!(spi->mode & SPI_CPOL) && !(spi->mode & SPI_CPHA)))
        regval &= ~SIRFSOC_SPI_DRV_POS_EDGE;
    else
        regval |= SIRFSOC_SPI_DRV_POS_EDGE;

    writel(SIRFSOC_SPI_FIFO_SC(fifo_size - 2) |
            SIRFSOC_SPI_FIFO_LC(fifo_size / 2) |
            SIRFSOC_SPI_FIFO_HC(2),
        sspi->base + SIRFSOC_SPI_TXFIFO_LEVEL_CHK);
    writel(SIRFSOC_SPI_FIFO_SC(2) |
            SIRFSOC_SPI_FIFO_LC(fifo_size / 2) |
            SIRFSOC_SPI_FIFO_HC(fifo_size - 2),
        sspi->base + SIRFSOC_SPI_RXFIFO_LEVEL_CHK);
    writel(txfifo_ctrl, sspi->base + SIRFSOC_SPI_TXFIFO_CTRL);
    writel(rxfifo_ctrl, sspi->base + SIRFSOC_SPI_RXFIFO_CTRL);

    writel(regval, sspi->base + SIRFSOC_SPI_CTRL);

    if (IS_DMA_VALID(t)) {
        /* Enable DMA mode for RX, TX */
        writel(0, sspi->base + SIRFSOC_SPI_TX_DMA_IO_CTRL);
        writel(SIRFSOC_SPI_RX_DMA_FLUSH, sspi->base + SIRFSOC_SPI_RX_DMA_IO_CTRL);
    } else {
        /* Enable IO mode for RX, TX */
        writel(SIRFSOC_SPI_IO_MODE_SEL, sspi->base + SIRFSOC_SPI_TX_DMA_IO_CTRL);
        writel(SIRFSOC_SPI_IO_MODE_SEL, sspi->base + SIRFSOC_SPI_RX_DMA_IO_CTRL);
    }

    return 0;
}
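
/* Per-device setup: default to 8 bits_per_word and program the controller */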
static int spi_sirfsoc_setup(struct spi_device *spi)
{
    struct sirfsoc_spi *sspi;

    if (!spi->max_speed_hz)
        return -EINVAL;

    sspi = spi_master_get_devdata(spi->master);

    if (!spi->bits_per_word)
        spi->bits_per_word = 8;

    return spi_sirfsoc_setup_transfer(spi, NULL);
}
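
/*
 * Probe: read the chip-select and DMA channel numbers from the device
 * tree, map the registers, request the IRQ and DMA channels, enable the
 * clock and register the bitbang master.
 */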
static int spi_sirfsoc_probe(struct platform_device *pdev)
{
    struct sirfsoc_spi *sspi;
    struct spi_master *master;
    struct resource *mem_res;
    int num_cs, cs_gpio, irq;
    u32 rx_dma_ch, tx_dma_ch;
    dma_cap_mask_t dma_cap_mask;
    int i;
    int ret;

    ret = of_property_read_u32(pdev->dev.of_node,
            "sirf,spi-num-chipselects", &num_cs);
    if (ret < 0) {
        dev_err(&pdev->dev, "Unable to get chip select number\n");
        goto err_cs;
    }

    ret = of_property_read_u32(pdev->dev.of_node,
            "sirf,spi-dma-rx-channel", &rx_dma_ch);
    if (ret < 0) {
        dev_err(&pdev->dev, "Unable to get rx dma channel\n");
        goto err_cs;
    }

    ret = of_property_read_u32(pdev->dev.of_node,
            "sirf,spi-dma-tx-channel", &tx_dma_ch);
    if (ret < 0) {
        dev_err(&pdev->dev, "Unable to get tx dma channel\n");
        goto err_cs;
    }

    master = spi_alloc_master(&pdev->dev, sizeof(*sspi) + sizeof(int) * num_cs);
    if (!master) {
        dev_err(&pdev->dev, "Unable to allocate SPI master\n");
        return -ENOMEM;
    }
    platform_set_drvdata(pdev, master);
    sspi = spi_master_get_devdata(master);

    master->num_chipselect = num_cs;

    for (i = 0; i < master->num_chipselect; i++) {
        cs_gpio = of_get_named_gpio(pdev->dev.of_node, "cs-gpios", i);
        if (cs_gpio < 0) {
            dev_err(&pdev->dev, "can't get cs gpio from DT\n");
            ret = -ENODEV;
            goto free_master;
        }

        sspi->chipselect[i] = cs_gpio;
        if (cs_gpio == 0)
            continue; /* use cs from spi controller */

        ret = gpio_request(cs_gpio, DRIVER_NAME);
        if (ret) {
            while (i > 0) {
                i--;
                if (sspi->chipselect[i] > 0)
                    gpio_free(sspi->chipselect[i]);
            }
            dev_err(&pdev->dev, "fail to request cs gpios\n");
            goto free_master;
        }
    }

    mem_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
    sspi->base = devm_ioremap_resource(&pdev->dev, mem_res);
    if (IS_ERR(sspi->base)) {
        ret = PTR_ERR(sspi->base);
        goto free_master;
    }

    irq = platform_get_irq(pdev, 0);
    if (irq < 0) {
        ret = -ENXIO;
        goto free_master;
    }
    ret = devm_request_irq(&pdev->dev, irq, spi_sirfsoc_irq, 0,
            DRIVER_NAME, sspi);
    if (ret)
        goto free_master;

    sspi->bitbang.master = spi_master_get(master);
    sspi->bitbang.chipselect = spi_sirfsoc_chipselect;
    sspi->bitbang.setup_transfer = spi_sirfsoc_setup_transfer;
    sspi->bitbang.txrx_bufs = spi_sirfsoc_transfer;
    sspi->bitbang.master->setup = spi_sirfsoc_setup;
    master->bus_num = pdev->id;
    master->mode_bits = SPI_CPOL | SPI_CPHA | SPI_LSB_FIRST | SPI_CS_HIGH;
    master->bits_per_word_mask = SPI_BPW_MASK(8) | SPI_BPW_MASK(12) |
            SPI_BPW_MASK(16) | SPI_BPW_MASK(32);
    sspi->bitbang.master->dev.of_node = pdev->dev.of_node;

    /* request DMA channels */
    dma_cap_zero(dma_cap_mask);
    dma_cap_set(DMA_INTERLEAVE, dma_cap_mask);

    sspi->rx_chan = dma_request_channel(dma_cap_mask, (dma_filter_fn)sirfsoc_dma_filter_id,
        (void *)rx_dma_ch);
    if (!sspi->rx_chan) {
        dev_err(&pdev->dev, "can not allocate rx dma channel\n");
        ret = -ENODEV;
        goto free_master;
    }
    sspi->tx_chan = dma_request_channel(dma_cap_mask, (dma_filter_fn)sirfsoc_dma_filter_id,
        (void *)tx_dma_ch);
    if (!sspi->tx_chan) {
        dev_err(&pdev->dev, "can not allocate tx dma channel\n");
        ret = -ENODEV;
        goto free_rx_dma;
    }

    sspi->clk = clk_get(&pdev->dev, NULL);
    if (IS_ERR(sspi->clk)) {
        ret = PTR_ERR(sspi->clk);
        goto free_tx_dma;
    }
    clk_prepare_enable(sspi->clk);
    sspi->ctrl_freq = clk_get_rate(sspi->clk);

    init_completion(&sspi->rx_done);
    init_completion(&sspi->tx_done);

    writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
    writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
    writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
    writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
    /* We are not using dummy delay between command and data */
    writel(0, sspi->base + SIRFSOC_SPI_DUMMY_DELAY_CTL);

    sspi->dummypage = kmalloc(2 * PAGE_SIZE, GFP_KERNEL);
    if (!sspi->dummypage) {
        ret = -ENOMEM;
        goto free_clk;
    }

    ret = spi_bitbang_start(&sspi->bitbang);
    if (ret)
        goto free_dummypage;

    dev_info(&pdev->dev, "registered, bus number = %d\n", master->bus_num);

    return 0;
free_dummypage:
    kfree(sspi->dummypage);
free_clk:
    clk_disable_unprepare(sspi->clk);
    clk_put(sspi->clk);
free_tx_dma:
    dma_release_channel(sspi->tx_chan);
free_rx_dma:
    dma_release_channel(sspi->rx_chan);
free_master:
    spi_master_put(master);
err_cs:
    return ret;
}

static int spi_sirfsoc_remove(struct platform_device *pdev)
{
    struct spi_master *master;
    struct sirfsoc_spi *sspi;
    int i;

    master = platform_get_drvdata(pdev);
    sspi = spi_master_get_devdata(master);

    spi_bitbang_stop(&sspi->bitbang);
    for (i = 0; i < master->num_chipselect; i++) {
        if (sspi->chipselect[i] > 0)
            gpio_free(sspi->chipselect[i]);
    }
    kfree(sspi->dummypage);
    clk_disable_unprepare(sspi->clk);
    clk_put(sspi->clk);
    dma_release_channel(sspi->rx_chan);
    dma_release_channel(sspi->tx_chan);
    spi_master_put(master);
    return 0;
}

static int spi_sirfsoc_suspend(struct device *dev)
{
    struct spi_master *master = dev_get_drvdata(dev);
    struct sirfsoc_spi *sspi = spi_master_get_devdata(master);

    clk_disable(sspi->clk);
    return 0;
}

static int spi_sirfsoc_resume(struct device *dev)
{
    struct spi_master *master = dev_get_drvdata(dev);
    struct sirfsoc_spi *sspi = spi_master_get_devdata(master);

    clk_enable(sspi->clk);
    writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
    writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
    writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
    writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_TXFIFO_OP);

    return 0;
}

static const struct dev_pm_ops spi_sirfsoc_pm_ops = {
    .suspend = spi_sirfsoc_suspend,
    .resume = spi_sirfsoc_resume,
};

static const struct of_device_id spi_sirfsoc_of_match[] = {
    { .compatible = "sirf,prima2-spi", },
    { .compatible = "sirf,marco-spi", },
    {}
};
MODULE_DEVICE_TABLE(of, spi_sirfsoc_of_match);

static struct platform_driver spi_sirfsoc_driver = {
    .driver = {
        .name = DRIVER_NAME,
        .owner = THIS_MODULE,
        .pm = &spi_sirfsoc_pm_ops,
        .of_match_table = spi_sirfsoc_of_match,
    },
    .probe = spi_sirfsoc_probe,
    .remove = spi_sirfsoc_remove,
};
module_platform_driver(spi_sirfsoc_driver);
785 MODULE_DESCRIPTION("SiRF SoC SPI master driver");
786 MODULE_AUTHOR("Zhiwu Song <Zhiwu.Song@csr.com>, "
787 "Barry Song <Baohua.Song@csr.com>");
788 MODULE_LICENSE("GPL v2");