/*
 * au1550 psc spi controller driver
 * may work also with au1200, au1210, au1250
 * will not work on au1000, au1100 and au1500 (no full spi controller there)
 *
 * Copyright (c) 2006 ATRON electronic GmbH
 * Author: Jan Nikitenko <jan.nikitenko@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/slab.h>
#include <linux/errno.h>
#include <linux/module.h>
#include <linux/device.h>
#include <linux/platform_device.h>
#include <linux/resource.h>
#include <linux/spi/spi.h>
#include <linux/spi/spi_bitbang.h>
#include <linux/dma-mapping.h>
#include <linux/completion.h>
#include <asm/mach-au1x00/au1000.h>
#include <asm/mach-au1x00/au1xxx_psc.h>
#include <asm/mach-au1x00/au1xxx_dbdma.h>

#include <asm/mach-au1x00/au1550_spi.h>
static unsigned usedma = 1;
module_param(usedma, uint, 0644);
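/*
 * When usedma is nonzero (the default) and a DBDMA memory device could be
 * registered at module init, transfers of up to 8 bits per word go through
 * the two DBDMA channels allocated in au1550_spi_probe(); wider words, or
 * usedma=0, fall back to the PIO/IRQ path chosen in
 * au1550_spi_bits_handlers_set().
 */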
/*
 * uncomment to run the PSC in internal loopback mode for debugging:
#define AU1550_SPI_DEBUG_LOOPBACK
*/

#define AU1550_SPI_DBDMA_DESCRIPTORS 1
#define AU1550_SPI_DMA_RXTMP_MINSIZE 2048U
struct au1550_spi {
	struct spi_bitbang bitbang;

	volatile psc_spi_t __iomem *regs;
	int irq;

	unsigned len;
	unsigned tx_count;
	unsigned rx_count;
	const u8 *tx;
	u8 *rx;

	void (*rx_word)(struct au1550_spi *hw);
	void (*tx_word)(struct au1550_spi *hw);
	int (*txrx_bufs)(struct spi_device *spi, struct spi_transfer *t);
	irqreturn_t (*irq_callback)(struct au1550_spi *hw);

	struct completion master_done;

	unsigned usedma;
	u32 dma_tx_id;
	u32 dma_rx_id;
	u32 dma_tx_ch;
	u32 dma_rx_ch;

	u8 *dma_rx_tmpbuf;
	unsigned dma_rx_tmpbuf_size;
	u32 dma_rx_tmpbuf_addr;

	struct spi_master *master;
	struct device *dev;
	struct au1550_spi_info *pdata;
	struct resource *ioarea;
};
/* we use an 8-bit memory device for dma transfers to/from spi fifo */
static dbdev_tab_t au1550_spi_mem_dbdev = {
	.dev_id			= DBDMA_MEM_CHAN,
	.dev_flags		= DEV_FLAGS_ANYUSE | DEV_FLAGS_SYNC,
	.dev_tsize		= 0,
	.dev_devwidth		= 8,
	.dev_physaddr		= 0x00000000,
	.dev_intlevel		= 0,
	.dev_intpolarity	= 0
};

static int ddma_memid;	/* id to above mem dma device */
static void au1550_spi_bits_handlers_set(struct au1550_spi *hw, int bpw);
/*
 * compute BRG and DIV bits to setup spi clock based on main input clock rate
 * that was specified in platform data structure
 * according to au1550 datasheet:
 *    psc_tempclk = psc_mainclk / (2 << DIV)
 *    spiclk = psc_tempclk / (2 * (BRG + 1))
 *    BRG valid range is 4..63
 *    DIV valid range is 0..3
 */
static u32 au1550_spi_baudcfg(struct au1550_spi *hw, unsigned speed_hz)
{
	u32 mainclk_hz = hw->pdata->mainclk_hz;
	u32 div, brg;

	for (div = 0; div < 4; div++) {
		brg = mainclk_hz / speed_hz / (4 << div);
		/* now we have BRG+1 in brg, so count with that */
		if (brg < (4 + 1)) {
			brg = (4 + 1);	/* speed_hz too big */
			break;		/* set lowest brg (div is == 0) */
		}
		if (brg <= (63 + 1))
			break;		/* we have valid brg and div */
	}
	if (div == 4) {
		div = 3;		/* speed_hz too small */
		brg = (63 + 1);		/* set highest brg and div */
	}

	brg--;
	return PSC_SPICFG_SET_BAUD(brg) | PSC_SPICFG_SET_DIV(div);
}
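/*
 * Worked example (illustrative only, with an assumed mainclk_hz of 48 MHz
 * and a requested speed_hz of 1 MHz): the div = 0 pass gives
 * brg = 48000000 / 1000000 / 4 = 12, which lies within 5..64, so the loop
 * stops there; after the final decrement BRG = 11 and DIV = 0, and the
 * resulting spiclk is 48 MHz / (4 * 12) = 1 MHz.
 */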
static inline void au1550_spi_mask_ack_all(struct au1550_spi *hw)
{
	hw->regs->psc_spimsk =
		  PSC_SPIMSK_MM | PSC_SPIMSK_RR | PSC_SPIMSK_RO
		| PSC_SPIMSK_RU | PSC_SPIMSK_TR | PSC_SPIMSK_TO
		| PSC_SPIMSK_TU | PSC_SPIMSK_SD | PSC_SPIMSK_MD;
	wmb(); /* drain writebuffer */

	hw->regs->psc_spievent =
		  PSC_SPIEVNT_MM | PSC_SPIEVNT_RR | PSC_SPIEVNT_RO
		| PSC_SPIEVNT_RU | PSC_SPIEVNT_TR | PSC_SPIEVNT_TO
		| PSC_SPIEVNT_TU | PSC_SPIEVNT_SD | PSC_SPIEVNT_MD;
	wmb(); /* drain writebuffer */
}
static void au1550_spi_reset_fifos(struct au1550_spi *hw)
{
	u32 pcr;

	hw->regs->psc_spipcr = PSC_SPIPCR_RC | PSC_SPIPCR_TC;
	wmb(); /* drain writebuffer */
	do {
		pcr = hw->regs->psc_spipcr;
		wmb(); /* drain writebuffer */
	} while (pcr != 0);
}
/*
 * dma transfers are used for the most common spi word size of 8-bits
 * we cannot easily change already set up dma channels' width, so if we wanted
 * dma support for more than 8-bit words (up to 24 bits), we would need to
 * set up dma channels from scratch on each spi transfer, based on bits_per_word
 * instead we have pre-set-up 8-bit dma channels supporting 4 to 8 bit spi
 * transfers, while 9 to 24 bit spi transfers are done in pio irq based mode
 * callbacks to handle dma or pio are set up in au1550_spi_bits_handlers_set()
 */
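/*
 * Concretely, au1550_spi_bits_handlers_set() maps 4..8 bits per word to the
 * dma handlers when dma is enabled (or to the 8-bit pio accessors otherwise),
 * 9..16 bits to the 16-bit pio accessors and 17..24 bits to the 32-bit ones.
 */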
static void au1550_spi_chipsel(struct spi_device *spi, int value)
{
	struct au1550_spi *hw = spi_master_get_devdata(spi->master);
	unsigned cspol = spi->mode & SPI_CS_HIGH ? 1 : 0;
	u32 cfg, stat;

	switch (value) {
	case BITBANG_CS_INACTIVE:
		if (hw->pdata->deactivate_cs)
			hw->pdata->deactivate_cs(hw->pdata, spi->chip_select,
					cspol);
		break;

	case BITBANG_CS_ACTIVE:
		au1550_spi_bits_handlers_set(hw, spi->bits_per_word);

		cfg = hw->regs->psc_spicfg;
		wmb(); /* drain writebuffer */
		hw->regs->psc_spicfg = cfg & ~PSC_SPICFG_DE_ENABLE;
		wmb(); /* drain writebuffer */

		if (spi->mode & SPI_CPOL)
			cfg |= PSC_SPICFG_BI;
		else
			cfg &= ~PSC_SPICFG_BI;
		if (spi->mode & SPI_CPHA)
			cfg &= ~PSC_SPICFG_CDE;
		else
			cfg |= PSC_SPICFG_CDE;

		if (spi->mode & SPI_LSB_FIRST)
			cfg |= PSC_SPICFG_MLF;
		else
			cfg &= ~PSC_SPICFG_MLF;

		if (hw->usedma && spi->bits_per_word <= 8)
			cfg &= ~PSC_SPICFG_DD_DISABLE;
		else
			cfg |= PSC_SPICFG_DD_DISABLE;
		cfg = PSC_SPICFG_CLR_LEN(cfg);
		cfg |= PSC_SPICFG_SET_LEN(spi->bits_per_word);

		cfg = PSC_SPICFG_CLR_BAUD(cfg);
		cfg &= ~PSC_SPICFG_SET_DIV(3);
		cfg |= au1550_spi_baudcfg(hw, spi->max_speed_hz);

		hw->regs->psc_spicfg = cfg | PSC_SPICFG_DE_ENABLE;
		wmb(); /* drain writebuffer */
		do {
			stat = hw->regs->psc_spistat;
			wmb(); /* drain writebuffer */
		} while ((stat & PSC_SPISTAT_DR) == 0);

		if (hw->pdata->activate_cs)
			hw->pdata->activate_cs(hw->pdata, spi->chip_select,
					cspol);
		break;
	}
}
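/*
 * Note: the spi_bitbang core is expected to call the chipselect() hook above
 * with BITBANG_CS_ACTIVE before running a message's transfers and with
 * BITBANG_CS_INACTIVE afterwards, so the PSC configuration (mode, word
 * length, clock) is reprogrammed each time a device is selected.
 */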
static int au1550_spi_setupxfer(struct spi_device *spi, struct spi_transfer *t)
{
	struct au1550_spi *hw = spi_master_get_devdata(spi->master);
	unsigned bpw, hz;
	u32 cfg, stat;

	bpw = spi->bits_per_word;
	hz = spi->max_speed_hz;
	if (t) {
		if (t->bits_per_word)
			bpw = t->bits_per_word;
		if (t->speed_hz)
			hz = t->speed_hz;
	}

	au1550_spi_bits_handlers_set(hw, spi->bits_per_word);

	cfg = hw->regs->psc_spicfg;
	wmb(); /* drain writebuffer */
	hw->regs->psc_spicfg = cfg & ~PSC_SPICFG_DE_ENABLE;
	wmb(); /* drain writebuffer */

	if (hw->usedma && bpw <= 8)
		cfg &= ~PSC_SPICFG_DD_DISABLE;
	else
		cfg |= PSC_SPICFG_DD_DISABLE;
	cfg = PSC_SPICFG_CLR_LEN(cfg);
	cfg |= PSC_SPICFG_SET_LEN(bpw);

	cfg = PSC_SPICFG_CLR_BAUD(cfg);
	cfg &= ~PSC_SPICFG_SET_DIV(3);
	cfg |= au1550_spi_baudcfg(hw, hz);

	hw->regs->psc_spicfg = cfg;
	wmb(); /* drain writebuffer */

	if (cfg & PSC_SPICFG_DE_ENABLE) {
		do {
			stat = hw->regs->psc_spistat;
			wmb(); /* drain writebuffer */
		} while ((stat & PSC_SPISTAT_DR) == 0);
	}

	au1550_spi_reset_fifos(hw);
	au1550_spi_mask_ack_all(hw);

	return 0;
}
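/*
 * setup_transfer() above is also expected to be called by the spi_bitbang
 * core with t == NULL from its setup path, which is why the per-device
 * bits_per_word and max_speed_hz are used as defaults and only overridden
 * by per-transfer values when t is provided.
 */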
/*
 * for dma spi transfers we have to set up the rx channel, otherwise there is
 * no reliable way to recognize that the spi transfer is done;
 * dma complete callbacks are called before the real spi transfer is finished
 * and if only the tx dma channel is set up (and the rx fifo overflow event
 * masked) the spi master done event irq is not generated unless the rx fifo
 * is empty (emptied), so we need an rx tmp buffer to use for rx dma if the
 * user does not provide one
 */
static int au1550_spi_dma_rxtmp_alloc(struct au1550_spi *hw, unsigned size)
{
	hw->dma_rx_tmpbuf = kmalloc(size, GFP_KERNEL);
	if (!hw->dma_rx_tmpbuf)
		return -ENOMEM;
	hw->dma_rx_tmpbuf_size = size;
	hw->dma_rx_tmpbuf_addr = dma_map_single(hw->dev, hw->dma_rx_tmpbuf,
			size, DMA_FROM_DEVICE);
	if (dma_mapping_error(hw->dev, hw->dma_rx_tmpbuf_addr)) {
		kfree(hw->dma_rx_tmpbuf);
		hw->dma_rx_tmpbuf = 0;
		hw->dma_rx_tmpbuf_size = 0;
		return -EFAULT;
	}
	return 0;
}
static void au1550_spi_dma_rxtmp_free(struct au1550_spi *hw)
{
	dma_unmap_single(hw->dev, hw->dma_rx_tmpbuf_addr,
			hw->dma_rx_tmpbuf_size, DMA_FROM_DEVICE);
	kfree(hw->dma_rx_tmpbuf);
	hw->dma_rx_tmpbuf = 0;
	hw->dma_rx_tmpbuf_size = 0;
}
static int au1550_spi_dma_txrxb(struct spi_device *spi, struct spi_transfer *t)
{
	struct au1550_spi *hw = spi_master_get_devdata(spi->master);
	dma_addr_t dma_tx_addr;
	dma_addr_t dma_rx_addr;
	u32 res;

	hw->len = t->len;
	hw->tx_count = 0;
	hw->rx_count = 0;

	hw->tx = t->tx_buf;
	hw->rx = t->rx_buf;
	dma_tx_addr = t->tx_dma;
	dma_rx_addr = t->rx_dma;

	/*
	 * check if buffers are already dma mapped, map them otherwise:
	 * - first map the TX buffer, so cache data gets written to memory
	 * - then map the RX buffer, so that cache entries (with
	 *   soon-to-be-stale data) get removed
	 * use rx buffer in place of tx if tx buffer was not provided
	 * use temp rx buffer (preallocated or realloc to fit) for rx dma
	 */
	if (t->tx_buf) {
		if (t->tx_dma == 0) {	/* if DMA_ADDR_INVALID, map it */
			dma_tx_addr = dma_map_single(hw->dev,
					(void *)t->tx_buf,
					t->len, DMA_TO_DEVICE);
			if (dma_mapping_error(hw->dev, dma_tx_addr))
				dev_err(hw->dev, "tx dma map error\n");
		}
	}

	if (t->rx_buf) {
		if (t->rx_dma == 0) {	/* if DMA_ADDR_INVALID, map it */
			dma_rx_addr = dma_map_single(hw->dev,
					(void *)t->rx_buf,
					t->len, DMA_FROM_DEVICE);
			if (dma_mapping_error(hw->dev, dma_rx_addr))
				dev_err(hw->dev, "rx dma map error\n");
		}
	} else {
		if (t->len > hw->dma_rx_tmpbuf_size) {
			int ret;

			au1550_spi_dma_rxtmp_free(hw);
			ret = au1550_spi_dma_rxtmp_alloc(hw, max(t->len,
					AU1550_SPI_DMA_RXTMP_MINSIZE));
			if (ret < 0)
				return ret;
		}
		hw->rx = hw->dma_rx_tmpbuf;
		dma_rx_addr = hw->dma_rx_tmpbuf_addr;
		dma_sync_single_for_device(hw->dev, dma_rx_addr,
				t->len, DMA_FROM_DEVICE);
	}

	if (!t->tx_buf) {
		dma_sync_single_for_device(hw->dev, dma_rx_addr,
				t->len, DMA_BIDIRECTIONAL);
		hw->tx = hw->rx;
	}

	/* put buffers on the ring */
	res = au1xxx_dbdma_put_dest(hw->dma_rx_ch, virt_to_phys(hw->rx),
				    t->len, DDMA_FLAGS_IE);
	if (!res)
		dev_err(hw->dev, "rx dma put dest error\n");

	res = au1xxx_dbdma_put_source(hw->dma_tx_ch, virt_to_phys(hw->tx),
				      t->len, DDMA_FLAGS_IE);
	if (!res)
		dev_err(hw->dev, "tx dma put source error\n");

	au1xxx_dbdma_start(hw->dma_rx_ch);
	au1xxx_dbdma_start(hw->dma_tx_ch);

	/* by default enable nearly all events interrupt */
	hw->regs->psc_spimsk = PSC_SPIMSK_SD;
	wmb(); /* drain writebuffer */

	/* start the transfer */
	hw->regs->psc_spipcr = PSC_SPIPCR_MS;
	wmb(); /* drain writebuffer */

	wait_for_completion(&hw->master_done);

	au1xxx_dbdma_stop(hw->dma_tx_ch);
	au1xxx_dbdma_stop(hw->dma_rx_ch);

	if (!t->rx_buf) {
		/* using the temporary preallocated and premapped buffer */
		dma_sync_single_for_cpu(hw->dev, dma_rx_addr, t->len,
				DMA_FROM_DEVICE);
	}
	/* unmap buffers if mapped above */
	if (t->rx_buf && t->rx_dma == 0)
		dma_unmap_single(hw->dev, dma_rx_addr, t->len,
				DMA_FROM_DEVICE);
	if (t->tx_buf && t->tx_dma == 0)
		dma_unmap_single(hw->dev, dma_tx_addr, t->len,
				DMA_TO_DEVICE);

	return hw->rx_count < hw->tx_count ? hw->rx_count : hw->tx_count;
}
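/*
 * The value returned above is the number of bytes actually clocked through
 * (the smaller of the rx and tx counts); the spi_bitbang core compares it
 * against t->len to detect short transfers. When the caller supplies no rx
 * buffer, the received bytes simply end up in the driver's temporary rx
 * buffer and are discarded.
 */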
static irqreturn_t au1550_spi_dma_irq_callback(struct au1550_spi *hw)
{
	u32 stat, evnt;

	stat = hw->regs->psc_spistat;
	evnt = hw->regs->psc_spievent;
	wmb(); /* drain writebuffer */
	if ((stat & PSC_SPISTAT_DI) == 0) {
		dev_err(hw->dev, "Unexpected IRQ!\n");
		return IRQ_NONE;
	}

	if ((evnt & (PSC_SPIEVNT_MM | PSC_SPIEVNT_RO
				| PSC_SPIEVNT_RU | PSC_SPIEVNT_TO
				| PSC_SPIEVNT_TU | PSC_SPIEVNT_SD))
			!= 0) {
		/*
		 * due to an SPI error we consider the transfer done,
		 * so mask all events until before the next transfer start
		 * and stop the possibly running dma immediately
		 */
		au1550_spi_mask_ack_all(hw);
		au1xxx_dbdma_stop(hw->dma_rx_ch);
		au1xxx_dbdma_stop(hw->dma_tx_ch);

		/* get number of transferred bytes */
		hw->rx_count = hw->len - au1xxx_get_dma_residue(hw->dma_rx_ch);
		hw->tx_count = hw->len - au1xxx_get_dma_residue(hw->dma_tx_ch);

		au1xxx_dbdma_reset(hw->dma_rx_ch);
		au1xxx_dbdma_reset(hw->dma_tx_ch);
		au1550_spi_reset_fifos(hw);

		if (evnt == PSC_SPIEVNT_RO)
			dev_err(hw->dev,
				"dma transfer: receive FIFO overflow!\n");
		else
			dev_err(hw->dev,
				"dma transfer: unexpected SPI error "
				"(event=0x%x stat=0x%x)!\n", evnt, stat);

		complete(&hw->master_done);
		return IRQ_HANDLED;
	}

	if ((evnt & PSC_SPIEVNT_MD) != 0) {
		/* transfer completed successfully */
		au1550_spi_mask_ack_all(hw);
		hw->rx_count = hw->len;
		hw->tx_count = hw->len;
		complete(&hw->master_done);
	}
	return IRQ_HANDLED;
}
/* routines to handle different word sizes in pio mode */
#define AU1550_SPI_RX_WORD(size, mask)					\
static void au1550_spi_rx_word_##size(struct au1550_spi *hw)		\
{									\
	u32 fifoword = hw->regs->psc_spitxrx & (u32)(mask);		\
	wmb(); /* drain writebuffer */					\
	if (hw->rx) {							\
		*(u##size *)hw->rx = (u##size)fifoword;			\
		hw->rx += (size) / 8;					\
	}								\
	hw->rx_count += (size) / 8;					\
}

#define AU1550_SPI_TX_WORD(size, mask)					\
static void au1550_spi_tx_word_##size(struct au1550_spi *hw)		\
{									\
	u32 fifoword = 0;						\
	if (hw->tx) {							\
		fifoword = *(u##size *)hw->tx & (u32)(mask);		\
		hw->tx += (size) / 8;					\
	}								\
	hw->tx_count += (size) / 8;					\
	if (hw->tx_count >= hw->len)					\
		fifoword |= PSC_SPITXRX_LC;				\
	hw->regs->psc_spitxrx = fifoword;				\
	wmb(); /* drain writebuffer */					\
}

AU1550_SPI_RX_WORD(8, 0xff)
AU1550_SPI_RX_WORD(16, 0xffff)
AU1550_SPI_RX_WORD(32, 0xffffff)
AU1550_SPI_TX_WORD(8, 0xff)
AU1550_SPI_TX_WORD(16, 0xffff)
AU1550_SPI_TX_WORD(32, 0xffffff)
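/*
 * For reference, AU1550_SPI_RX_WORD(8, 0xff) above expands to roughly:
 *
 *	static void au1550_spi_rx_word_8(struct au1550_spi *hw)
 *	{
 *		u32 fifoword = hw->regs->psc_spitxrx & (u32)(0xff);
 *		wmb();
 *		if (hw->rx) {
 *			*(u8 *)hw->rx = (u8)fifoword;
 *			hw->rx += 1;
 *		}
 *		hw->rx_count += 1;
 *	}
 */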
static int au1550_spi_pio_txrxb(struct spi_device *spi, struct spi_transfer *t)
{
	u32 stat, mask;
	struct au1550_spi *hw = spi_master_get_devdata(spi->master);

	hw->tx = t->tx_buf;
	hw->rx = t->rx_buf;
	hw->len = t->len;
	hw->tx_count = 0;
	hw->rx_count = 0;

	/* by default enable nearly all events after filling tx fifo */
	mask = PSC_SPIMSK_SD;

	/* fill the transmit FIFO */
	while (hw->tx_count < hw->len) {

		hw->tx_word(hw);

		if (hw->tx_count >= hw->len) {
			/* mask tx fifo request interrupt as we are done */
			mask |= PSC_SPIMSK_TR;
		}

		stat = hw->regs->psc_spistat;
		wmb(); /* drain writebuffer */
		if (stat & PSC_SPISTAT_TF)
			break;
	}

	/* enable event interrupts */
	hw->regs->psc_spimsk = mask;
	wmb(); /* drain writebuffer */

	/* start the transfer */
	hw->regs->psc_spipcr = PSC_SPIPCR_MS;
	wmb(); /* drain writebuffer */

	wait_for_completion(&hw->master_done);

	return hw->rx_count < hw->tx_count ? hw->rx_count : hw->tx_count;
}
static irqreturn_t au1550_spi_pio_irq_callback(struct au1550_spi *hw)
{
	int busy;
	u32 stat, evnt;

	stat = hw->regs->psc_spistat;
	evnt = hw->regs->psc_spievent;
	wmb(); /* drain writebuffer */
	if ((stat & PSC_SPISTAT_DI) == 0) {
		dev_err(hw->dev, "Unexpected IRQ!\n");
		return IRQ_NONE;
	}

	if ((evnt & (PSC_SPIEVNT_MM | PSC_SPIEVNT_RO
				| PSC_SPIEVNT_RU | PSC_SPIEVNT_TO
				)) != 0) {
		/*
		 * due to an error we consider the transfer done,
		 * so mask all events until before the next transfer start
		 */
		au1550_spi_mask_ack_all(hw);
		au1550_spi_reset_fifos(hw);
		dev_err(hw->dev,
			"pio transfer: unexpected SPI error "
			"(event=0x%x stat=0x%x)!\n", evnt, stat);
		complete(&hw->master_done);
		return IRQ_HANDLED;
	}

	/*
	 * while there is something to read from rx fifo
	 * or there is a space to write to tx fifo:
	 */
	do {
		busy = 0;
		stat = hw->regs->psc_spistat;
		wmb(); /* drain writebuffer */

		/*
		 * Take care to not let the Rx FIFO overflow.
		 *
		 * We only write a byte if we have read one at least. Initially,
		 * the write fifo is full, so we should read from the read fifo
		 * first.
		 * In case we miss a word from the read fifo, we should get a
		 * RO event and should back out.
		 */
		if (!(stat & PSC_SPISTAT_RE) && hw->rx_count < hw->len) {
			hw->rx_word(hw);
			busy = 1;

			if (!(stat & PSC_SPISTAT_TF) && hw->tx_count < hw->len)
				hw->tx_word(hw);
		}
	} while (busy);

	hw->regs->psc_spievent = PSC_SPIEVNT_RR | PSC_SPIEVNT_TR;
	wmb(); /* drain writebuffer */

	/*
	 * Restart the SPI transmission in case of a transmit underflow.
	 * This seems to work despite the notes in the Au1550 data book
	 * of Figure 8-4 with flowchart for SPI master operation:
	 *
	 * """Note 1: An XFR Error Interrupt occurs, unless masked,
	 * for any of the following events: Tx FIFO Underflow,
	 * Rx FIFO Overflow, or Multiple-master Error
	 *    Note 2: In case of a Tx Underflow Error, all zeroes are
	 * transmitted."""
	 *
	 * By simply restarting the spi transfer on a Tx Underflow Error,
	 * we assume that the transfer was paused rather than padded with
	 * the zeroes mentioned in Note 2 of the Au1550 data book.
	 */
	if (evnt & PSC_SPIEVNT_TU) {
		hw->regs->psc_spievent = PSC_SPIEVNT_TU | PSC_SPIEVNT_MD;
		wmb(); /* drain writebuffer */
		hw->regs->psc_spipcr = PSC_SPIPCR_MS;
		wmb(); /* drain writebuffer */
	}

	if (hw->rx_count >= hw->len) {
		/* transfer completed successfully */
		au1550_spi_mask_ack_all(hw);
		complete(&hw->master_done);
	}
	return IRQ_HANDLED;
}
static int au1550_spi_txrx_bufs(struct spi_device *spi, struct spi_transfer *t)
{
	struct au1550_spi *hw = spi_master_get_devdata(spi->master);
	return hw->txrx_bufs(spi, t);
}
static irqreturn_t au1550_spi_irq(int irq, void *dev)
{
	struct au1550_spi *hw = dev;
	return hw->irq_callback(hw);
}
static void au1550_spi_bits_handlers_set(struct au1550_spi *hw, int bpw)
{
	if (bpw <= 8) {
		if (hw->usedma) {
			hw->txrx_bufs = &au1550_spi_dma_txrxb;
			hw->irq_callback = &au1550_spi_dma_irq_callback;
		} else {
			hw->rx_word = &au1550_spi_rx_word_8;
			hw->tx_word = &au1550_spi_tx_word_8;
			hw->txrx_bufs = &au1550_spi_pio_txrxb;
			hw->irq_callback = &au1550_spi_pio_irq_callback;
		}
	} else if (bpw <= 16) {
		hw->rx_word = &au1550_spi_rx_word_16;
		hw->tx_word = &au1550_spi_tx_word_16;
		hw->txrx_bufs = &au1550_spi_pio_txrxb;
		hw->irq_callback = &au1550_spi_pio_irq_callback;
	} else {
		hw->rx_word = &au1550_spi_rx_word_32;
		hw->tx_word = &au1550_spi_tx_word_32;
		hw->txrx_bufs = &au1550_spi_pio_txrxb;
		hw->irq_callback = &au1550_spi_pio_irq_callback;
	}
}
static void au1550_spi_setup_psc_as_spi(struct au1550_spi *hw)
{
	u32 stat, cfg;

	/* set up the PSC for SPI mode */
	hw->regs->psc_ctrl = PSC_CTRL_DISABLE;
	wmb(); /* drain writebuffer */
	hw->regs->psc_sel = PSC_SEL_PS_SPIMODE;
	wmb(); /* drain writebuffer */

	hw->regs->psc_spicfg = 0;
	wmb(); /* drain writebuffer */

	hw->regs->psc_ctrl = PSC_CTRL_ENABLE;
	wmb(); /* drain writebuffer */

	do {
		stat = hw->regs->psc_spistat;
		wmb(); /* drain writebuffer */
	} while ((stat & PSC_SPISTAT_SR) == 0);

	cfg = hw->usedma ? 0 : PSC_SPICFG_DD_DISABLE;
	cfg |= PSC_SPICFG_SET_LEN(8);
	cfg |= PSC_SPICFG_RT_FIFO8 | PSC_SPICFG_TT_FIFO8;
	/* use minimal allowed brg and div values as initial setting: */
	cfg |= PSC_SPICFG_SET_BAUD(4) | PSC_SPICFG_SET_DIV(0);

#ifdef AU1550_SPI_DEBUG_LOOPBACK
	cfg |= PSC_SPICFG_LB;
#endif

	hw->regs->psc_spicfg = cfg;
	wmb(); /* drain writebuffer */

	au1550_spi_mask_ack_all(hw);

	hw->regs->psc_spicfg |= PSC_SPICFG_DE_ENABLE;
	wmb(); /* drain writebuffer */

	do {
		stat = hw->regs->psc_spistat;
		wmb(); /* drain writebuffer */
	} while ((stat & PSC_SPISTAT_DR) == 0);

	au1550_spi_reset_fifos(hw);
}
static int au1550_spi_probe(struct platform_device *pdev)
{
	struct au1550_spi *hw;
	struct spi_master *master;
	struct resource *r;
	int err = 0;

	master = spi_alloc_master(&pdev->dev, sizeof(struct au1550_spi));
	if (master == NULL) {
		dev_err(&pdev->dev, "No memory for spi_master\n");
		err = -ENOMEM;
		goto err_nomem;
	}

	/* the spi->mode bits understood by this driver: */
	master->mode_bits = SPI_CPOL | SPI_CPHA | SPI_CS_HIGH | SPI_LSB_FIRST;
	master->bits_per_word_mask = SPI_BPW_RANGE_MASK(4, 24);

	hw = spi_master_get_devdata(master);

	hw->master = master;
	hw->pdata = dev_get_platdata(&pdev->dev);
	hw->dev = &pdev->dev;

	if (hw->pdata == NULL) {
		dev_err(&pdev->dev, "No platform data supplied\n");
		err = -ENOENT;
		goto err_no_pdata;
	}

	r = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!r) {
		dev_err(&pdev->dev, "no IRQ\n");
		err = -ENODEV;
		goto err_no_iores;
	}
	hw->irq = r->start;

	hw->usedma = 0;
	r = platform_get_resource(pdev, IORESOURCE_DMA, 0);
	if (r) {
		hw->dma_tx_id = r->start;
		r = platform_get_resource(pdev, IORESOURCE_DMA, 1);
		if (r) {
			hw->dma_rx_id = r->start;
			if (usedma && ddma_memid) {
				if (pdev->dev.dma_mask == NULL)
					dev_warn(&pdev->dev, "no dma mask\n");
				else
					hw->usedma = 1;
			}
		}
	}

	r = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!r) {
		dev_err(&pdev->dev, "no mmio resource\n");
		err = -ENODEV;
		goto err_no_iores;
	}

	hw->ioarea = request_mem_region(r->start, sizeof(psc_spi_t),
					pdev->name);
	if (!hw->ioarea) {
		dev_err(&pdev->dev, "Cannot reserve iomem region\n");
		err = -ENXIO;
		goto err_no_iores;
	}

	hw->regs = (psc_spi_t __iomem *)ioremap(r->start, sizeof(psc_spi_t));
	if (!hw->regs) {
		dev_err(&pdev->dev, "cannot ioremap\n");
		err = -ENXIO;
		goto err_ioremap;
	}

	platform_set_drvdata(pdev, hw);

	init_completion(&hw->master_done);

	hw->bitbang.master = hw->master;
	hw->bitbang.setup_transfer = au1550_spi_setupxfer;
	hw->bitbang.chipselect = au1550_spi_chipsel;
	hw->bitbang.txrx_bufs = au1550_spi_txrx_bufs;

	if (hw->usedma) {
		hw->dma_tx_ch = au1xxx_dbdma_chan_alloc(ddma_memid,
			hw->dma_tx_id, NULL, (void *)hw);
		if (hw->dma_tx_ch == 0) {
			dev_err(&pdev->dev,
				"Cannot allocate tx dma channel\n");
			err = -ENXIO;
			goto err_no_txdma;
		}
		au1xxx_dbdma_set_devwidth(hw->dma_tx_ch, 8);
		if (au1xxx_dbdma_ring_alloc(hw->dma_tx_ch,
			AU1550_SPI_DBDMA_DESCRIPTORS) == 0) {
			dev_err(&pdev->dev,
				"Cannot allocate tx dma descriptors\n");
			err = -ENXIO;
			goto err_no_txdma_descr;
		}

		hw->dma_rx_ch = au1xxx_dbdma_chan_alloc(hw->dma_rx_id,
			ddma_memid, NULL, (void *)hw);
		if (hw->dma_rx_ch == 0) {
			dev_err(&pdev->dev,
				"Cannot allocate rx dma channel\n");
			err = -ENXIO;
			goto err_no_rxdma;
		}
		au1xxx_dbdma_set_devwidth(hw->dma_rx_ch, 8);
		if (au1xxx_dbdma_ring_alloc(hw->dma_rx_ch,
			AU1550_SPI_DBDMA_DESCRIPTORS) == 0) {
			dev_err(&pdev->dev,
				"Cannot allocate rx dma descriptors\n");
			err = -ENXIO;
			goto err_no_rxdma_descr;
		}

		err = au1550_spi_dma_rxtmp_alloc(hw,
			AU1550_SPI_DMA_RXTMP_MINSIZE);
		if (err < 0) {
			dev_err(&pdev->dev,
				"Cannot allocate initial rx dma tmp buffer\n");
			goto err_dma_rxtmp_alloc;
		}
	}

	au1550_spi_bits_handlers_set(hw, 8);

	err = request_irq(hw->irq, au1550_spi_irq, 0, pdev->name, hw);
	if (err) {
		dev_err(&pdev->dev, "Cannot claim IRQ\n");
		goto err_no_irq;
	}

	master->bus_num = pdev->id;
	master->num_chipselect = hw->pdata->num_chipselect;

	/*
	 * precompute valid range for spi freq - from au1550 datasheet:
	 *    psc_tempclk = psc_mainclk / (2 << DIV)
	 *    spiclk = psc_tempclk / (2 * (BRG + 1))
	 *    BRG valid range is 4..63
	 *    DIV valid range is 0..3
	 * round the min and max frequencies to values that would still
	 * produce valid brg and div
	 */
	{
		int min_div = (2 << 0) * (2 * (4 + 1));
		int max_div = (2 << 3) * (2 * (63 + 1));
		master->max_speed_hz = hw->pdata->mainclk_hz / min_div;
		master->min_speed_hz =
				hw->pdata->mainclk_hz / (max_div + 1) + 1;
	}
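	/*
	 * Worked example (illustrative only, assuming the platform supplies
	 * mainclk_hz = 48 MHz): min_div = 2 * 2 * 5 = 20 and
	 * max_div = 16 * 2 * 64 = 2048, so max_speed_hz becomes 2.4 MHz and
	 * min_speed_hz about 23.4 kHz.
	 */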
	au1550_spi_setup_psc_as_spi(hw);

	err = spi_bitbang_start(&hw->bitbang);
	if (err) {
		dev_err(&pdev->dev, "Failed to register SPI master\n");
		goto err_register;
	}

	dev_info(&pdev->dev,
		"spi master registered: bus_num=%d num_chipselect=%d\n",
		master->bus_num, master->num_chipselect);

	return 0;

err_register:
	free_irq(hw->irq, hw);

err_no_irq:
	au1550_spi_dma_rxtmp_free(hw);

err_dma_rxtmp_alloc:
err_no_rxdma_descr:
	if (hw->usedma)
		au1xxx_dbdma_chan_free(hw->dma_rx_ch);

err_no_rxdma:
err_no_txdma_descr:
	if (hw->usedma)
		au1xxx_dbdma_chan_free(hw->dma_tx_ch);

err_no_txdma:
	iounmap((void __iomem *)hw->regs);

err_ioremap:
	release_mem_region(r->start, sizeof(psc_spi_t));

err_no_iores:
err_no_pdata:
	spi_master_put(hw->master);

err_nomem:
	return err;
}
static int au1550_spi_remove(struct platform_device *pdev)
{
	struct au1550_spi *hw = platform_get_drvdata(pdev);

	dev_info(&pdev->dev, "spi master remove: bus_num=%d\n",
		hw->master->bus_num);

	spi_bitbang_stop(&hw->bitbang);
	free_irq(hw->irq, hw);
	iounmap((void __iomem *)hw->regs);
	release_mem_region(hw->ioarea->start, sizeof(psc_spi_t));

	if (hw->usedma) {
		au1550_spi_dma_rxtmp_free(hw);
		au1xxx_dbdma_chan_free(hw->dma_rx_ch);
		au1xxx_dbdma_chan_free(hw->dma_tx_ch);
	}

	spi_master_put(hw->master);
	return 0;
}
/* work with hotplug and coldplug */
MODULE_ALIAS("platform:au1550-spi");

static struct platform_driver au1550_spi_drv = {
	.probe = au1550_spi_probe,
	.remove = au1550_spi_remove,
	.driver = {
		.name = "au1550-spi",
	},
};
static int __init au1550_spi_init(void)
{
	/*
	 * create memory device with 8 bits dev_devwidth
	 * needed for proper byte ordering to spi fifo
	 */
	switch (alchemy_get_cputype()) {
	case ALCHEMY_CPU_AU1550:
	case ALCHEMY_CPU_AU1200:
	case ALCHEMY_CPU_AU1300:
		break;
	default:
		return -ENODEV;
	}

	if (usedma) {
		ddma_memid = au1xxx_ddma_add_device(&au1550_spi_mem_dbdev);
		if (!ddma_memid)
			printk(KERN_ERR "au1550-spi: cannot add memory "
					"dbdma device\n");
	}
	return platform_driver_register(&au1550_spi_drv);
}
module_init(au1550_spi_init);
static void __exit au1550_spi_exit(void)
{
	if (usedma && ddma_memid)
		au1xxx_ddma_del_device(ddma_memid);
	platform_driver_unregister(&au1550_spi_drv);
}
module_exit(au1550_spi_exit);
MODULE_DESCRIPTION("Au1550 PSC SPI Driver");
MODULE_AUTHOR("Jan Nikitenko <jan.nikitenko@gmail.com>");
MODULE_LICENSE("GPL");