// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * au1550 psc spi controller driver
 * may work also with au1200, au1210, au1250
 * will not work on au1000, au1100 and au1500 (no full spi controller there)
 *
 * Copyright (c) 2006 ATRON electronic GmbH
 * Author: Jan Nikitenko <jan.nikitenko@gmail.com>
 */

#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/slab.h>
#include <linux/errno.h>
#include <linux/module.h>
#include <linux/device.h>
#include <linux/platform_device.h>
#include <linux/resource.h>
#include <linux/spi/spi.h>
#include <linux/spi/spi_bitbang.h>
#include <linux/dma-mapping.h>
#include <linux/completion.h>
#include <asm/mach-au1x00/au1000.h>
#include <asm/mach-au1x00/au1xxx_psc.h>
#include <asm/mach-au1x00/au1xxx_dbdma.h>

#include <asm/mach-au1x00/au1550_spi.h>

static unsigned int usedma = 1;
module_param(usedma, uint, 0644);
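
/*
 * Usage note (added for illustration, not part of the original source):
 * usedma selects between DBDMA and PIO transfer modes and can be overridden
 * at load time, e.g. by passing "usedma=0" as a module parameter; the exact
 * module name depends on how the kernel build names this driver.
 */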

/*#define AU1550_SPI_DEBUG_LOOPBACK*/

#define AU1550_SPI_DBDMA_DESCRIPTORS 1
#define AU1550_SPI_DMA_RXTMP_MINSIZE 2048U

struct au1550_spi {
	struct spi_bitbang bitbang;

	volatile psc_spi_t __iomem *regs;
	int irq;

	unsigned int len;
	unsigned int tx_count;
	unsigned int rx_count;
	const u8 *tx;
	u8 *rx;

	void (*rx_word)(struct au1550_spi *hw);
	void (*tx_word)(struct au1550_spi *hw);
	int (*txrx_bufs)(struct spi_device *spi, struct spi_transfer *t);
	irqreturn_t (*irq_callback)(struct au1550_spi *hw);

	struct completion host_done;

	unsigned int usedma;
	u32 dma_tx_id;
	u32 dma_rx_id;
	u32 dma_tx_ch;
	u32 dma_rx_ch;

	u8 *dma_rx_tmpbuf;
	unsigned int dma_rx_tmpbuf_size;
	u32 dma_rx_tmpbuf_addr;

	struct spi_controller *host;
	struct device *dev;
	struct au1550_spi_info *pdata;
	struct resource *ioarea;
};

/* we use an 8-bit memory device for dma transfers to/from spi fifo */
static dbdev_tab_t au1550_spi_mem_dbdev = {
	.dev_id			= DBDMA_MEM_CHAN,
	.dev_flags		= DEV_FLAGS_ANYUSE|DEV_FLAGS_SYNC,
	.dev_tsize		= 0,
	.dev_devwidth		= 8,
	.dev_physaddr		= 0x00000000,
	.dev_intlevel		= 0,
	.dev_intpolarity	= 0
};

static int ddma_memid;	/* id to above mem dma device */

static void au1550_spi_bits_handlers_set(struct au1550_spi *hw, int bpw);

/*
 * compute BRG and DIV bits to setup spi clock based on main input clock rate
 * that was specified in platform data structure
 * according to au1550 datasheet:
 *   psc_tempclk = psc_mainclk / (2 << DIV)
 *   spiclk = psc_tempclk / (2 * (BRG + 1))
 *   BRG valid range is 4..63
 *   DIV valid range is 0..3
 */
static u32
au1550_spi_baudcfg(struct au1550_spi *hw, unsigned int speed_hz)
{
	u32 mainclk_hz = hw->pdata->mainclk_hz;
	u32 div, brg;

	for (div = 0; div < 4; div++) {
		brg = mainclk_hz / speed_hz / (4 << div);
		/* now we have BRG+1 in brg, so count with that */
		if (brg < (4 + 1)) {
			brg = (4 + 1);	/* speed_hz too big */
			break;		/* set lowest brg (div is == 0) */
		}
		if (brg <= (63 + 1))
			break;		/* we have valid brg and div */
	}
	if (div == 4) {
		div = 3;		/* speed_hz too small */
		brg = (63 + 1);		/* set highest brg and div */
	}
	brg--;
	return PSC_SPICFG_SET_BAUD(brg) | PSC_SPICFG_SET_DIV(div);
}
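
/*
 * Worked example (illustrative numbers only, not taken from the datasheet):
 * with mainclk_hz = 48000000 and speed_hz = 1000000 the first loop pass
 * gives brg = 48000000 / 1000000 / (4 << 0) = 12, which lies in 5..64, so
 * div = 0 and BRG = 12 - 1 = 11. That yields
 * spiclk = 48 MHz / (2 << 0) / (2 * (11 + 1)) = 1 MHz, as requested.
 */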

static inline void au1550_spi_mask_ack_all(struct au1550_spi *hw)
{
	hw->regs->psc_spimsk =
		  PSC_SPIMSK_MM | PSC_SPIMSK_RR | PSC_SPIMSK_RO
		| PSC_SPIMSK_RU | PSC_SPIMSK_TR | PSC_SPIMSK_TO
		| PSC_SPIMSK_TU | PSC_SPIMSK_SD | PSC_SPIMSK_MD;
	wmb(); /* drain writebuffer */

	hw->regs->psc_spievent =
		  PSC_SPIEVNT_MM | PSC_SPIEVNT_RR | PSC_SPIEVNT_RO
		| PSC_SPIEVNT_RU | PSC_SPIEVNT_TR | PSC_SPIEVNT_TO
		| PSC_SPIEVNT_TU | PSC_SPIEVNT_SD | PSC_SPIEVNT_MD;
	wmb(); /* drain writebuffer */
}

static void au1550_spi_reset_fifos(struct au1550_spi *hw)
{
	u32 pcr;

	hw->regs->psc_spipcr = PSC_SPIPCR_RC | PSC_SPIPCR_TC;
	wmb(); /* drain writebuffer */

	do {
		pcr = hw->regs->psc_spipcr;
		wmb(); /* drain writebuffer */
	} while (pcr != 0);
}

/*
 * dma transfers are used for the most common spi word size of 8-bits
 * we cannot easily change already set up dma channels' width, so if we wanted
 * dma support for more than 8-bit words (up to 24 bits), we would need to
 * setup dma channels from scratch on each spi transfer, based on bits_per_word
 * instead we have pre set up 8 bit dma channels supporting spi 4 to 8 bits
 * transfers, and 9 to 24 bits spi transfers will be done in pio irq based mode
 * callbacks to handle dma or pio are set up in au1550_spi_bits_handlers_set()
 */

static void au1550_spi_chipsel(struct spi_device *spi, int value)
{
	struct au1550_spi *hw = spi_controller_get_devdata(spi->controller);
	unsigned int cspol = spi->mode & SPI_CS_HIGH ? 1 : 0;
	u32 cfg, stat;

	switch (value) {
	case BITBANG_CS_INACTIVE:
		if (hw->pdata->deactivate_cs)
			hw->pdata->deactivate_cs(hw->pdata, spi_get_chipselect(spi, 0),
					cspol);
		break;

	case BITBANG_CS_ACTIVE:
		au1550_spi_bits_handlers_set(hw, spi->bits_per_word);

		cfg = hw->regs->psc_spicfg;
		wmb(); /* drain writebuffer */
		hw->regs->psc_spicfg = cfg & ~PSC_SPICFG_DE_ENABLE;
		wmb(); /* drain writebuffer */

		if (spi->mode & SPI_CPOL)
			cfg |= PSC_SPICFG_BI;
		else
			cfg &= ~PSC_SPICFG_BI;
		if (spi->mode & SPI_CPHA)
			cfg &= ~PSC_SPICFG_CDE;
		else
			cfg |= PSC_SPICFG_CDE;

		if (spi->mode & SPI_LSB_FIRST)
			cfg |= PSC_SPICFG_MLF;
		else
			cfg &= ~PSC_SPICFG_MLF;

		if (hw->usedma && spi->bits_per_word <= 8)
			cfg &= ~PSC_SPICFG_DD_DISABLE;
		else
			cfg |= PSC_SPICFG_DD_DISABLE;
		cfg = PSC_SPICFG_CLR_LEN(cfg);
		cfg |= PSC_SPICFG_SET_LEN(spi->bits_per_word);

		cfg = PSC_SPICFG_CLR_BAUD(cfg);
		cfg &= ~PSC_SPICFG_SET_DIV(3);
		cfg |= au1550_spi_baudcfg(hw, spi->max_speed_hz);

		hw->regs->psc_spicfg = cfg | PSC_SPICFG_DE_ENABLE;
		wmb(); /* drain writebuffer */
		do {
			stat = hw->regs->psc_spistat;
			wmb(); /* drain writebuffer */
		} while ((stat & PSC_SPISTAT_DR) == 0);

		if (hw->pdata->activate_cs)
			hw->pdata->activate_cs(hw->pdata, spi_get_chipselect(spi, 0),
					cspol);
		break;
	}
}

static int au1550_spi_setupxfer(struct spi_device *spi, struct spi_transfer *t)
{
	struct au1550_spi *hw = spi_controller_get_devdata(spi->controller);
	unsigned int bpw, hz;
	u32 cfg, stat;

	if (t) {
		bpw = t->bits_per_word;
		hz = t->speed_hz;
	} else {
		bpw = spi->bits_per_word;
		hz = spi->max_speed_hz;
	}

	if (!hz)
		return -EINVAL;

	au1550_spi_bits_handlers_set(hw, spi->bits_per_word);

	cfg = hw->regs->psc_spicfg;
	wmb(); /* drain writebuffer */
	hw->regs->psc_spicfg = cfg & ~PSC_SPICFG_DE_ENABLE;
	wmb(); /* drain writebuffer */

	if (hw->usedma && bpw <= 8)
		cfg &= ~PSC_SPICFG_DD_DISABLE;
	else
		cfg |= PSC_SPICFG_DD_DISABLE;
	cfg = PSC_SPICFG_CLR_LEN(cfg);
	cfg |= PSC_SPICFG_SET_LEN(bpw);

	cfg = PSC_SPICFG_CLR_BAUD(cfg);
	cfg &= ~PSC_SPICFG_SET_DIV(3);
	cfg |= au1550_spi_baudcfg(hw, hz);

	hw->regs->psc_spicfg = cfg;
	wmb(); /* drain writebuffer */

	if (cfg & PSC_SPICFG_DE_ENABLE) {
		do {
			stat = hw->regs->psc_spistat;
			wmb(); /* drain writebuffer */
		} while ((stat & PSC_SPISTAT_DR) == 0);
	}

	au1550_spi_reset_fifos(hw);
	au1550_spi_mask_ack_all(hw);

	return 0;
}

/*
 * for dma spi transfers, we have to setup rx channel, otherwise there is
 * no reliable way to recognize that the spi transfer is done
 * dma complete callbacks are called before the real spi transfer is finished
 * and if only the tx dma channel is set up (and the rx fifo overflow event masked)
 * the spi host done event irq is not generated unless the rx fifo is empty (emptied)
 * so we need an rx tmp buffer to use for rx dma if the user does not provide one
 */
static int au1550_spi_dma_rxtmp_alloc(struct au1550_spi *hw, unsigned int size)
{
	hw->dma_rx_tmpbuf = kmalloc(size, GFP_KERNEL);
	if (!hw->dma_rx_tmpbuf)
		return -ENOMEM;
	hw->dma_rx_tmpbuf_size = size;
	hw->dma_rx_tmpbuf_addr = dma_map_single(hw->dev, hw->dma_rx_tmpbuf,
			size, DMA_FROM_DEVICE);
	if (dma_mapping_error(hw->dev, hw->dma_rx_tmpbuf_addr)) {
		kfree(hw->dma_rx_tmpbuf);
		hw->dma_rx_tmpbuf = NULL;
		hw->dma_rx_tmpbuf_size = 0;
		return -EFAULT;
	}
	return 0;
}

static void au1550_spi_dma_rxtmp_free(struct au1550_spi *hw)
{
	dma_unmap_single(hw->dev, hw->dma_rx_tmpbuf_addr,
			hw->dma_rx_tmpbuf_size, DMA_FROM_DEVICE);
	kfree(hw->dma_rx_tmpbuf);
	hw->dma_rx_tmpbuf = NULL;
	hw->dma_rx_tmpbuf_size = 0;
}

static int au1550_spi_dma_txrxb(struct spi_device *spi, struct spi_transfer *t)
{
	struct au1550_spi *hw = spi_controller_get_devdata(spi->controller);
	dma_addr_t dma_tx_addr;
	dma_addr_t dma_rx_addr;
	u32 res;

	hw->len = t->len;
	hw->tx_count = 0;
	hw->rx_count = 0;

	hw->tx = t->tx_buf;
	hw->rx = t->rx_buf;

	/*
	 * map the buffers for dma:
	 * - first map the TX buffer, so cache data gets written to memory
	 * - then map the RX buffer, so that cache entries (with
	 *   soon-to-be-stale data) get removed
	 * use rx buffer in place of tx if tx buffer was not provided
	 * use temp rx buffer (preallocated or realloc to fit) for rx dma
	 */
	if (t->tx_buf) {
		dma_tx_addr = dma_map_single(hw->dev, (void *)t->tx_buf,
				t->len, DMA_TO_DEVICE);
		if (dma_mapping_error(hw->dev, dma_tx_addr))
			dev_err(hw->dev, "tx dma map error\n");
	}

	if (t->rx_buf) {
		dma_rx_addr = dma_map_single(hw->dev, (void *)t->rx_buf,
				t->len, DMA_FROM_DEVICE);
		if (dma_mapping_error(hw->dev, dma_rx_addr))
			dev_err(hw->dev, "rx dma map error\n");
	} else {
		if (t->len > hw->dma_rx_tmpbuf_size) {
			int ret;

			au1550_spi_dma_rxtmp_free(hw);
			ret = au1550_spi_dma_rxtmp_alloc(hw, max(t->len,
					AU1550_SPI_DMA_RXTMP_MINSIZE));
			if (ret < 0)
				return ret;
		}
		hw->rx = hw->dma_rx_tmpbuf;
		dma_rx_addr = hw->dma_rx_tmpbuf_addr;
		dma_sync_single_for_device(hw->dev, dma_rx_addr,
				t->len, DMA_FROM_DEVICE);
	}

	if (!t->tx_buf) {
		dma_sync_single_for_device(hw->dev, dma_rx_addr,
				t->len, DMA_BIDIRECTIONAL);
		hw->tx = hw->rx;
	}

	/* put buffers on the ring */
	res = au1xxx_dbdma_put_dest(hw->dma_rx_ch, virt_to_phys(hw->rx),
				    t->len, DDMA_FLAGS_IE);
	if (!res)
		dev_err(hw->dev, "rx dma put dest error\n");

	res = au1xxx_dbdma_put_source(hw->dma_tx_ch, virt_to_phys(hw->tx),
				      t->len, DDMA_FLAGS_IE);
	if (!res)
		dev_err(hw->dev, "tx dma put source error\n");

	au1xxx_dbdma_start(hw->dma_rx_ch);
	au1xxx_dbdma_start(hw->dma_tx_ch);

	/* by default enable nearly all events interrupt */
	hw->regs->psc_spimsk = PSC_SPIMSK_SD;
	wmb(); /* drain writebuffer */

	/* start the transfer */
	hw->regs->psc_spipcr = PSC_SPIPCR_MS;
	wmb(); /* drain writebuffer */

	wait_for_completion(&hw->host_done);

	au1xxx_dbdma_stop(hw->dma_tx_ch);
	au1xxx_dbdma_stop(hw->dma_rx_ch);

	if (!t->rx_buf) {
		/* using the temporary preallocated and premapped buffer */
		dma_sync_single_for_cpu(hw->dev, dma_rx_addr, t->len,
				DMA_FROM_DEVICE);
	}
	/* unmap buffers if mapped above */
	if (t->rx_buf)
		dma_unmap_single(hw->dev, dma_rx_addr, t->len,
				DMA_FROM_DEVICE);
	if (t->tx_buf)
		dma_unmap_single(hw->dev, dma_tx_addr, t->len,
				DMA_TO_DEVICE);

	return min(hw->rx_count, hw->tx_count);
}

static irqreturn_t
au1550_spi_dma_irq_callback(struct au1550_spi *hw)
{
	u32 stat, evnt;

	stat = hw->regs->psc_spistat;
	evnt = hw->regs->psc_spievent;
	wmb(); /* drain writebuffer */
	if ((stat & PSC_SPISTAT_DI) == 0) {
		dev_err(hw->dev, "Unexpected IRQ!\n");
		return IRQ_NONE;
	}

	if ((evnt & (PSC_SPIEVNT_MM | PSC_SPIEVNT_RO
				| PSC_SPIEVNT_RU | PSC_SPIEVNT_TO
				| PSC_SPIEVNT_TU | PSC_SPIEVNT_SD))
			!= 0) {
		/*
		 * due to an spi error we consider transfer as done,
		 * so mask all events until before next transfer start
		 * and stop the possibly running dma immediately
		 */
		au1550_spi_mask_ack_all(hw);
		au1xxx_dbdma_stop(hw->dma_rx_ch);
		au1xxx_dbdma_stop(hw->dma_tx_ch);

		/* get number of transferred bytes */
		hw->rx_count = hw->len - au1xxx_get_dma_residue(hw->dma_rx_ch);
		hw->tx_count = hw->len - au1xxx_get_dma_residue(hw->dma_tx_ch);

		au1xxx_dbdma_reset(hw->dma_rx_ch);
		au1xxx_dbdma_reset(hw->dma_tx_ch);
		au1550_spi_reset_fifos(hw);

		if (evnt == PSC_SPIEVNT_RO)
			dev_err(hw->dev,
				"dma transfer: receive FIFO overflow!\n");
		else
			dev_err(hw->dev,
				"dma transfer: unexpected SPI error (event=0x%x stat=0x%x)!\n",
				evnt, stat);

		complete(&hw->host_done);
		return IRQ_HANDLED;
	}

	if ((evnt & PSC_SPIEVNT_MD) != 0) {
		/* transfer completed successfully */
		au1550_spi_mask_ack_all(hw);
		hw->rx_count = hw->len;
		hw->tx_count = hw->len;
		complete(&hw->host_done);
	}
	return IRQ_HANDLED;
}

/* routines to handle different word sizes in pio mode */
#define AU1550_SPI_RX_WORD(size, mask)					\
static void au1550_spi_rx_word_##size(struct au1550_spi *hw)		\
{									\
	u32 fifoword = hw->regs->psc_spitxrx & (u32)(mask);		\
	wmb(); /* drain writebuffer */					\
	if (hw->rx) {							\
		*(u##size *)hw->rx = (u##size)fifoword;			\
		hw->rx += (size) / 8;					\
	}								\
	hw->rx_count += (size) / 8;					\
}

#define AU1550_SPI_TX_WORD(size, mask)					\
static void au1550_spi_tx_word_##size(struct au1550_spi *hw)		\
{									\
	u32 fifoword = 0;						\
	if (hw->tx) {							\
		fifoword = *(u##size *)hw->tx & (u32)(mask);		\
		hw->tx += (size) / 8;					\
	}								\
	hw->tx_count += (size) / 8;					\
	if (hw->tx_count >= hw->len)					\
		fifoword |= PSC_SPITXRX_LC;				\
	hw->regs->psc_spitxrx = fifoword;				\
	wmb(); /* drain writebuffer */					\
}

AU1550_SPI_RX_WORD(8, 0xff)
AU1550_SPI_RX_WORD(16, 0xffff)
AU1550_SPI_RX_WORD(32, 0xffffff)
AU1550_SPI_TX_WORD(8, 0xff)
AU1550_SPI_TX_WORD(16, 0xffff)
AU1550_SPI_TX_WORD(32, 0xffffff)
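
/*
 * For illustration (this is implied by the macros above, not a separate
 * definition): AU1550_SPI_RX_WORD(8, 0xff) produces au1550_spi_rx_word_8(),
 * which reads one masked byte from psc_spitxrx, stores it at hw->rx when an
 * rx buffer is present, and advances hw->rx and hw->rx_count by one byte;
 * the 16- and 32-bit variants step in 2- and 4-byte units and mask to 16
 * and 24 data bits respectively.
 */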

static int au1550_spi_pio_txrxb(struct spi_device *spi, struct spi_transfer *t)
{
	u32 stat, mask;
	struct au1550_spi *hw = spi_controller_get_devdata(spi->controller);

	hw->tx = t->tx_buf;
	hw->rx = t->rx_buf;
	hw->len = t->len;
	hw->tx_count = 0;
	hw->rx_count = 0;

	/* by default enable nearly all events after filling tx fifo */
	mask = PSC_SPIMSK_SD;

	/* fill the transmit FIFO */
	while (hw->tx_count < hw->len) {

		hw->tx_word(hw);

		if (hw->tx_count >= hw->len) {
			/* mask tx fifo request interrupt as we are done */
			mask |= PSC_SPIMSK_TR;
		}

		stat = hw->regs->psc_spistat;
		wmb(); /* drain writebuffer */
		if (stat & PSC_SPISTAT_TF)
			break;
	}

	/* enable event interrupts */
	hw->regs->psc_spimsk = mask;
	wmb(); /* drain writebuffer */

	/* start the transfer */
	hw->regs->psc_spipcr = PSC_SPIPCR_MS;
	wmb(); /* drain writebuffer */

	wait_for_completion(&hw->host_done);

	return min(hw->rx_count, hw->tx_count);
}

static irqreturn_t
au1550_spi_pio_irq_callback(struct au1550_spi *hw)
{
	int busy;
	u32 stat, evnt;

	stat = hw->regs->psc_spistat;
	evnt = hw->regs->psc_spievent;
	wmb(); /* drain writebuffer */
	if ((stat & PSC_SPISTAT_DI) == 0) {
		dev_err(hw->dev, "Unexpected IRQ!\n");
		return IRQ_NONE;
	}

	if ((evnt & (PSC_SPIEVNT_MM | PSC_SPIEVNT_RO
				| PSC_SPIEVNT_RU | PSC_SPIEVNT_TO
				)) != 0) {
		/*
		 * due to an error we consider transfer as done,
		 * so mask all events until before next transfer start
		 */
		au1550_spi_mask_ack_all(hw);
		au1550_spi_reset_fifos(hw);
		dev_err(hw->dev,
			"pio transfer: unexpected SPI error (event=0x%x stat=0x%x)!\n",
			evnt, stat);
		complete(&hw->host_done);
		return IRQ_HANDLED;
	}

	/*
	 * while there is something to read from rx fifo
	 * or there is a space to write to tx fifo:
	 */
	do {
		busy = 0;
		stat = hw->regs->psc_spistat;
		wmb(); /* drain writebuffer */

		/*
		 * Take care to not let the Rx FIFO overflow.
		 *
		 * We only write a byte if we have read one at least. Initially,
		 * the write fifo is full, so we should read from the read fifo
		 * first.
		 * In case we miss a word from the read fifo, we should get a
		 * RO event and should back out.
		 */
		if (!(stat & PSC_SPISTAT_RE) && hw->rx_count < hw->len) {
			hw->rx_word(hw);
			busy = 1;

			if (!(stat & PSC_SPISTAT_TF) && hw->tx_count < hw->len)
				hw->tx_word(hw);
		}
	} while (busy);

	hw->regs->psc_spievent = PSC_SPIEVNT_RR | PSC_SPIEVNT_TR;
	wmb(); /* drain writebuffer */

	/*
	 * Restart the SPI transmission in case of a transmit underflow.
	 * This seems to work despite the notes in the Au1550 data book
	 * of Figure 8-4 with flowchart for SPI host operation:
	 *
	 * """Note 1: An XFR Error Interrupt occurs, unless masked,
	 * for any of the following events: Tx FIFO Underflow,
	 * Rx FIFO Overflow, or Multiple-host Error
	 *    Note 2: In case of a Tx Underflow Error, all zeroes are
	 * transmitted."""
	 *
	 * By simply restarting the spi transfer on Tx Underflow Error,
	 * we assume that the spi transfer was paused instead of the zeroes
	 * transmission mentioned in Note 2 of the Au1550 data book.
	 */
	if (evnt & PSC_SPIEVNT_TU) {
		hw->regs->psc_spievent = PSC_SPIEVNT_TU | PSC_SPIEVNT_MD;
		wmb(); /* drain writebuffer */
		hw->regs->psc_spipcr = PSC_SPIPCR_MS;
		wmb(); /* drain writebuffer */
	}

	if (hw->rx_count >= hw->len) {
		/* transfer completed successfully */
		au1550_spi_mask_ack_all(hw);
		complete(&hw->host_done);
	}
	return IRQ_HANDLED;
}

static int au1550_spi_txrx_bufs(struct spi_device *spi, struct spi_transfer *t)
{
	struct au1550_spi *hw = spi_controller_get_devdata(spi->controller);

	return hw->txrx_bufs(spi, t);
}

static irqreturn_t
au1550_spi_irq(int irq, void *dev)
{
	struct au1550_spi *hw = dev;

	return hw->irq_callback(hw);
}

static void au1550_spi_bits_handlers_set(struct au1550_spi *hw, int bpw)
{
	if (bpw <= 8) {
		if (hw->usedma) {
			hw->txrx_bufs = &au1550_spi_dma_txrxb;
			hw->irq_callback = &au1550_spi_dma_irq_callback;
		} else {
			hw->rx_word = &au1550_spi_rx_word_8;
			hw->tx_word = &au1550_spi_tx_word_8;
			hw->txrx_bufs = &au1550_spi_pio_txrxb;
			hw->irq_callback = &au1550_spi_pio_irq_callback;
		}
	} else if (bpw <= 16) {
		hw->rx_word = &au1550_spi_rx_word_16;
		hw->tx_word = &au1550_spi_tx_word_16;
		hw->txrx_bufs = &au1550_spi_pio_txrxb;
		hw->irq_callback = &au1550_spi_pio_irq_callback;
	} else {
		hw->rx_word = &au1550_spi_rx_word_32;
		hw->tx_word = &au1550_spi_tx_word_32;
		hw->txrx_bufs = &au1550_spi_pio_txrxb;
		hw->irq_callback = &au1550_spi_pio_irq_callback;
	}
}

static void au1550_spi_setup_psc_as_spi(struct au1550_spi *hw)
{
	u32 stat, cfg;

	/* set up the PSC for SPI mode */
	hw->regs->psc_ctrl = PSC_CTRL_DISABLE;
	wmb(); /* drain writebuffer */
	hw->regs->psc_sel = PSC_SEL_PS_SPIMODE;
	wmb(); /* drain writebuffer */

	hw->regs->psc_spicfg = 0;
	wmb(); /* drain writebuffer */

	hw->regs->psc_ctrl = PSC_CTRL_ENABLE;
	wmb(); /* drain writebuffer */

	do {
		stat = hw->regs->psc_spistat;
		wmb(); /* drain writebuffer */
	} while ((stat & PSC_SPISTAT_SR) == 0);

	cfg = hw->usedma ? 0 : PSC_SPICFG_DD_DISABLE;
	cfg |= PSC_SPICFG_SET_LEN(8);
	cfg |= PSC_SPICFG_RT_FIFO8 | PSC_SPICFG_TT_FIFO8;
	/* use minimal allowed brg and div values as initial setting: */
	cfg |= PSC_SPICFG_SET_BAUD(4) | PSC_SPICFG_SET_DIV(0);

#ifdef AU1550_SPI_DEBUG_LOOPBACK
	cfg |= PSC_SPICFG_LB;
#endif

	hw->regs->psc_spicfg = cfg;
	wmb(); /* drain writebuffer */

	au1550_spi_mask_ack_all(hw);

	hw->regs->psc_spicfg |= PSC_SPICFG_DE_ENABLE;
	wmb(); /* drain writebuffer */

	do {
		stat = hw->regs->psc_spistat;
		wmb(); /* drain writebuffer */
	} while ((stat & PSC_SPISTAT_DR) == 0);

	au1550_spi_reset_fifos(hw);
}

static int au1550_spi_probe(struct platform_device *pdev)
{
	struct au1550_spi *hw;
	struct spi_controller *host;
	struct resource *r;
	int err = 0;

	host = spi_alloc_host(&pdev->dev, sizeof(struct au1550_spi));
	if (host == NULL) {
		dev_err(&pdev->dev, "No memory for spi_controller\n");
		err = -ENOMEM;
		goto err_nomem;
	}

	/* the spi->mode bits understood by this driver: */
	host->mode_bits = SPI_CPOL | SPI_CPHA | SPI_CS_HIGH | SPI_LSB_FIRST;
	host->bits_per_word_mask = SPI_BPW_RANGE_MASK(4, 24);

	hw = spi_controller_get_devdata(host);

	hw->host = host;
	hw->pdata = dev_get_platdata(&pdev->dev);
	hw->dev = &pdev->dev;

	if (hw->pdata == NULL) {
		dev_err(&pdev->dev, "No platform data supplied\n");
		err = -ENOENT;
		goto err_no_pdata;
	}

	r = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!r) {
		dev_err(&pdev->dev, "no IRQ\n");
		err = -ENODEV;
		goto err_no_iores;
	}
	hw->irq = r->start;

	hw->usedma = 0;
	r = platform_get_resource(pdev, IORESOURCE_DMA, 0);
	if (r) {
		hw->dma_tx_id = r->start;
		r = platform_get_resource(pdev, IORESOURCE_DMA, 1);
		if (r) {
			hw->dma_rx_id = r->start;
			if (usedma && ddma_memid) {
				if (pdev->dev.dma_mask == NULL)
					dev_warn(&pdev->dev, "no dma mask\n");
				else
					hw->usedma = 1;
			}
		}
	}

	r = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!r) {
		dev_err(&pdev->dev, "no mmio resource\n");
		err = -ENODEV;
		goto err_no_iores;
	}

	hw->ioarea = request_mem_region(r->start, sizeof(psc_spi_t),
					pdev->name);
	if (!hw->ioarea) {
		dev_err(&pdev->dev, "Cannot reserve iomem region\n");
		err = -ENXIO;
		goto err_no_iores;
	}

	hw->regs = (psc_spi_t __iomem *)ioremap(r->start, sizeof(psc_spi_t));
	if (!hw->regs) {
		dev_err(&pdev->dev, "cannot ioremap\n");
		err = -ENXIO;
		goto err_ioremap;
	}

	platform_set_drvdata(pdev, hw);

	init_completion(&hw->host_done);

	hw->bitbang.ctlr = hw->host;
	hw->bitbang.setup_transfer = au1550_spi_setupxfer;
	hw->bitbang.chipselect = au1550_spi_chipsel;
	hw->bitbang.txrx_bufs = au1550_spi_txrx_bufs;

	if (hw->usedma) {
		hw->dma_tx_ch = au1xxx_dbdma_chan_alloc(ddma_memid,
			hw->dma_tx_id, NULL, (void *)hw);
		if (hw->dma_tx_ch == 0) {
			dev_err(&pdev->dev,
				"Cannot allocate tx dma channel\n");
			err = -ENXIO;
			goto err_no_txdma;
		}
		au1xxx_dbdma_set_devwidth(hw->dma_tx_ch, 8);
		if (au1xxx_dbdma_ring_alloc(hw->dma_tx_ch,
			AU1550_SPI_DBDMA_DESCRIPTORS) == 0) {
			dev_err(&pdev->dev,
				"Cannot allocate tx dma descriptors\n");
			err = -ENXIO;
			goto err_no_txdma_descr;
		}

		hw->dma_rx_ch = au1xxx_dbdma_chan_alloc(hw->dma_rx_id,
			ddma_memid, NULL, (void *)hw);
		if (hw->dma_rx_ch == 0) {
			dev_err(&pdev->dev,
				"Cannot allocate rx dma channel\n");
			err = -ENXIO;
			goto err_no_rxdma;
		}
		au1xxx_dbdma_set_devwidth(hw->dma_rx_ch, 8);
		if (au1xxx_dbdma_ring_alloc(hw->dma_rx_ch,
			AU1550_SPI_DBDMA_DESCRIPTORS) == 0) {
			dev_err(&pdev->dev,
				"Cannot allocate rx dma descriptors\n");
			err = -ENXIO;
			goto err_no_rxdma_descr;
		}

		err = au1550_spi_dma_rxtmp_alloc(hw,
			AU1550_SPI_DMA_RXTMP_MINSIZE);
		if (err < 0) {
			dev_err(&pdev->dev,
				"Cannot allocate initial rx dma tmp buffer\n");
			goto err_dma_rxtmp_alloc;
		}
	}

	au1550_spi_bits_handlers_set(hw, 8);

	err = request_irq(hw->irq, au1550_spi_irq, 0, pdev->name, hw);
	if (err) {
		dev_err(&pdev->dev, "Cannot claim IRQ\n");
		goto err_no_irq;
	}

	host->bus_num = pdev->id;
	host->num_chipselect = hw->pdata->num_chipselect;

	/*
	 * precompute valid range for spi freq - from au1550 datasheet:
	 *   psc_tempclk = psc_mainclk / (2 << DIV)
	 *   spiclk = psc_tempclk / (2 * (BRG + 1))
	 *   BRG valid range is 4..63
	 *   DIV valid range is 0..3
	 * round the min and max frequencies to values that would still
	 * produce valid brg and div
	 */
	{
		int min_div = (2 << 0) * (2 * (4 + 1));
		int max_div = (2 << 3) * (2 * (63 + 1));

		host->max_speed_hz = hw->pdata->mainclk_hz / min_div;
		host->min_speed_hz =
				hw->pdata->mainclk_hz / (max_div + 1) + 1;
	}
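
	/*
	 * Worked example (illustrative clock only): for mainclk_hz = 48000000
	 * this gives min_div = 20 and max_div = 2048, so
	 * max_speed_hz = 48000000 / 20 = 2400000 Hz and
	 * min_speed_hz = 48000000 / 2049 + 1 = 23427 Hz.
	 */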

	au1550_spi_setup_psc_as_spi(hw);

	err = spi_bitbang_start(&hw->bitbang);
	if (err) {
		dev_err(&pdev->dev, "Failed to register SPI host\n");
		goto err_register;
	}

	dev_info(&pdev->dev,
		"spi host registered: bus_num=%d num_chipselect=%d\n",
		host->bus_num, host->num_chipselect);

	return 0;

err_register:
	free_irq(hw->irq, hw);

err_no_irq:
	au1550_spi_dma_rxtmp_free(hw);

err_dma_rxtmp_alloc:
err_no_rxdma_descr:
	if (hw->usedma)
		au1xxx_dbdma_chan_free(hw->dma_rx_ch);

err_no_rxdma:
err_no_txdma_descr:
	if (hw->usedma)
		au1xxx_dbdma_chan_free(hw->dma_tx_ch);

err_no_txdma:
	iounmap((void __iomem *)hw->regs);

err_ioremap:
	release_mem_region(r->start, sizeof(psc_spi_t));

err_no_iores:
err_no_pdata:
	spi_controller_put(hw->host);

err_nomem:
	return err;
}

static void au1550_spi_remove(struct platform_device *pdev)
{
	struct au1550_spi *hw = platform_get_drvdata(pdev);

	dev_info(&pdev->dev, "spi host remove: bus_num=%d\n",
		hw->host->bus_num);

	spi_bitbang_stop(&hw->bitbang);
	free_irq(hw->irq, hw);
	iounmap((void __iomem *)hw->regs);
	release_mem_region(hw->ioarea->start, sizeof(psc_spi_t));

	if (hw->usedma) {
		au1550_spi_dma_rxtmp_free(hw);
		au1xxx_dbdma_chan_free(hw->dma_rx_ch);
		au1xxx_dbdma_chan_free(hw->dma_tx_ch);
	}

	spi_controller_put(hw->host);
}

/* work with hotplug and coldplug */
MODULE_ALIAS("platform:au1550-spi");

static struct platform_driver au1550_spi_drv = {
	.probe = au1550_spi_probe,
	.remove = au1550_spi_remove,
	.driver = {
		.name = "au1550-spi",
	},
};

static int __init
au1550_spi_init(void)
{
	/*
	 * create memory device with 8 bits dev_devwidth
	 * needed for proper byte ordering to spi fifo
	 */
	switch (alchemy_get_cputype()) {
	case ALCHEMY_CPU_AU1550:
	case ALCHEMY_CPU_AU1200:
	case ALCHEMY_CPU_AU1300:
		break;
	default:
		return -ENODEV;
	}

	if (usedma) {
		ddma_memid = au1xxx_ddma_add_device(&au1550_spi_mem_dbdev);
		if (!ddma_memid)
			printk(KERN_ERR "au1550-spi: cannot add memory dbdma device\n");
	}
	return platform_driver_register(&au1550_spi_drv);
}
module_init(au1550_spi_init);

static void __exit
au1550_spi_exit(void)
{
	if (usedma && ddma_memid)
		au1xxx_ddma_del_device(ddma_memid);
	platform_driver_unregister(&au1550_spi_drv);
}
module_exit(au1550_spi_exit);

MODULE_DESCRIPTION("Au1550 PSC SPI Driver");
MODULE_AUTHOR("Jan Nikitenko <jan.nikitenko@gmail.com>");
MODULE_LICENSE("GPL");