/*
 * Analog Devices SPI3 controller driver
 *
 * Copyright (c) 2014 Analog Devices Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gpio.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/ioport.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include <linux/spi/spi.h>
#include <linux/spi/adi_spi3.h>
#include <linux/types.h>

#include <asm/dma.h>
#include <asm/portmux.h>

enum adi_spi_state {
        START_STATE,
        RUNNING_STATE,
        DONE_STATE,
        ERROR_STATE,
};

struct adi_spi_master;

struct adi_spi_transfer_ops {
        void (*write)(struct adi_spi_master *);
        void (*read)(struct adi_spi_master *);
        void (*duplex)(struct adi_spi_master *);
};

/* runtime info for spi master */
struct adi_spi_master {
        /* SPI framework hookup */
        struct spi_master *master;

        /* Regs base of SPI controller */
        struct adi_spi_regs __iomem *regs;

        /* Pin request list */
        u16 *pin_req;

        /* Message Transfer pump */
        struct tasklet_struct pump_transfers;

        /* Current message transfer state info */
        struct spi_message *cur_msg;
        struct spi_transfer *cur_transfer;
        struct adi_spi_device *cur_chip;
        unsigned transfer_len;

        /* transfer buffers */
        void *tx;
        void *tx_end;
        void *rx;
        void *rx_end;

        /* DMA info */
        unsigned int tx_dma;
        unsigned int rx_dma;
        dma_addr_t tx_dma_addr;
        dma_addr_t rx_dma_addr;
        unsigned long dummy_buffer; /* used in unidirectional transfer */
        unsigned long tx_dma_size;
        unsigned long rx_dma_size;
        int tx_num;
        int rx_num;

        /* store register value for suspend/resume */
        u32 control;
        u32 ssel;

        u32 sclk;
        enum adi_spi_state state;

        const struct adi_spi_transfer_ops *ops;
};

struct adi_spi_device {
        u32 control;
        u32 clock;
        u32 ssel;

        u8 cs;
        u16 cs_chg_udelay; /* Some devices require > 255usec delay */
        u32 cs_gpio;
        u32 tx_dummy_val; /* tx value for rx only transfer */
        bool enable_dma;
        const struct adi_spi_transfer_ops *ops;
};
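
/* Turn the SPI controller core on or off via the enable bit in control */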
static void adi_spi_enable(struct adi_spi_master *drv_data)
{
        u32 ctl;

        ctl = ioread32(&drv_data->regs->control);
        ctl |= SPI_CTL_EN;
        iowrite32(ctl, &drv_data->regs->control);
}

static void adi_spi_disable(struct adi_spi_master *drv_data)
{
        u32 ctl;

        ctl = ioread32(&drv_data->regs->control);
        ctl &= ~SPI_CTL_EN;
        iowrite32(ctl, &drv_data->regs->control);
}

/* Calculate the SPI_CLOCK register value based on input HZ */
static u32 hz_to_spi_clock(u32 sclk, u32 speed_hz)
{
        u32 spi_clock = sclk / speed_hz;

        if (spi_clock)
                spi_clock--;
        return spi_clock;
}
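
/*
 * Wait for the controller to finish the current word, then clear the status
 * register; returns 0 if the wait timed out.
 */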
static int adi_spi_flush(struct adi_spi_master *drv_data)
{
        unsigned long limit = loops_per_jiffy << 1;

        /* wait for stop and clear stat */
        while (!(ioread32(&drv_data->regs->status) & SPI_STAT_SPIF) && --limit)
                cpu_relax();

        iowrite32(0xFFFFFFFF, &drv_data->regs->status);

        return limit;
}

/* Chip select operation functions for cs_change flag */
static void adi_spi_cs_active(struct adi_spi_master *drv_data,
                              struct adi_spi_device *chip)
{
        if (likely(chip->cs < MAX_CTRL_CS)) {
                u32 reg;

                reg = ioread32(&drv_data->regs->ssel);
                reg &= ~chip->ssel;
                iowrite32(reg, &drv_data->regs->ssel);
        } else {
                gpio_set_value(chip->cs_gpio, 0);
        }
}

static void adi_spi_cs_deactive(struct adi_spi_master *drv_data,
                                struct adi_spi_device *chip)
{
        if (likely(chip->cs < MAX_CTRL_CS)) {
                u32 reg;

                reg = ioread32(&drv_data->regs->ssel);
                reg |= chip->ssel;
                iowrite32(reg, &drv_data->regs->ssel);
        } else {
                gpio_set_value(chip->cs_gpio, 1);
        }

        /* Move delay here for consistency */
        if (chip->cs_chg_udelay)
                udelay(chip->cs_chg_udelay);
}

/* enable or disable the pin muxed by GPIO and SPI CS to work as SPI CS */
static inline void adi_spi_cs_enable(struct adi_spi_master *drv_data,
                                     struct adi_spi_device *chip)
{
        if (chip->cs < MAX_CTRL_CS) {
                u32 reg;

                reg = ioread32(&drv_data->regs->ssel);
                reg |= chip->ssel >> 8;
                iowrite32(reg, &drv_data->regs->ssel);
        }
}

static inline void adi_spi_cs_disable(struct adi_spi_master *drv_data,
                                      struct adi_spi_device *chip)
{
        if (chip->cs < MAX_CTRL_CS) {
                u32 reg;

                reg = ioread32(&drv_data->regs->ssel);
                reg &= ~(chip->ssel >> 8);
                iowrite32(reg, &drv_data->regs->ssel);
        }
}

/* stop controller and re-config current chip */
static void adi_spi_restore_state(struct adi_spi_master *drv_data)
{
        struct adi_spi_device *chip = drv_data->cur_chip;

        /* Clear status and disable clock */
        iowrite32(0xFFFFFFFF, &drv_data->regs->status);
        iowrite32(0x0, &drv_data->regs->rx_control);
        iowrite32(0x0, &drv_data->regs->tx_control);
        adi_spi_disable(drv_data);

        /* Load the registers */
        iowrite32(chip->control, &drv_data->regs->control);
        iowrite32(chip->clock, &drv_data->regs->clock);

        adi_spi_enable(drv_data);
        drv_data->tx_num = drv_data->rx_num = 0;
        /* we always choose tx transfer initiate */
        iowrite32(SPI_RXCTL_REN, &drv_data->regs->rx_control);
        iowrite32(SPI_TXCTL_TEN | SPI_TXCTL_TTI, &drv_data->regs->tx_control);
        adi_spi_cs_active(drv_data, chip);
}

/* discard invalid rx data and empty rfifo */
static inline void dummy_read(struct adi_spi_master *drv_data)
{
        while (!(ioread32(&drv_data->regs->status) & SPI_STAT_RFE))
                ioread32(&drv_data->regs->rfifo);
}
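
/*
 * PIO transfer handlers for 8-bit words: every word written to the TX FIFO
 * is matched by a read from the RX FIFO, using a dummy value on the unused
 * side for half-duplex transfers.
 */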
static void adi_spi_u8_write(struct adi_spi_master *drv_data)
{
        dummy_read(drv_data);
        while (drv_data->tx < drv_data->tx_end) {
                iowrite32(*(u8 *)(drv_data->tx++), &drv_data->regs->tfifo);
                while (ioread32(&drv_data->regs->status) & SPI_STAT_RFE)
                        cpu_relax();
                ioread32(&drv_data->regs->rfifo);
        }
}

static void adi_spi_u8_read(struct adi_spi_master *drv_data)
{
        u32 tx_val = drv_data->cur_chip->tx_dummy_val;

        dummy_read(drv_data);
        while (drv_data->rx < drv_data->rx_end) {
                iowrite32(tx_val, &drv_data->regs->tfifo);
                while (ioread32(&drv_data->regs->status) & SPI_STAT_RFE)
                        cpu_relax();
                *(u8 *)(drv_data->rx++) = ioread32(&drv_data->regs->rfifo);
        }
}

static void adi_spi_u8_duplex(struct adi_spi_master *drv_data)
{
        dummy_read(drv_data);
        while (drv_data->rx < drv_data->rx_end) {
                iowrite32(*(u8 *)(drv_data->tx++), &drv_data->regs->tfifo);
                while (ioread32(&drv_data->regs->status) & SPI_STAT_RFE)
                        cpu_relax();
                *(u8 *)(drv_data->rx++) = ioread32(&drv_data->regs->rfifo);
        }
}

static const struct adi_spi_transfer_ops adi_spi_transfer_ops_u8 = {
        .write  = adi_spi_u8_write,
        .read   = adi_spi_u8_read,
        .duplex = adi_spi_u8_duplex,
};
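
/*
 * The 16-bit and 32-bit handlers below follow the same pattern as the 8-bit
 * ones, stepping the buffer pointers by the word size after each FIFO access.
 */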
static void adi_spi_u16_write(struct adi_spi_master *drv_data)
{
        dummy_read(drv_data);
        while (drv_data->tx < drv_data->tx_end) {
                iowrite32(*(u16 *)drv_data->tx, &drv_data->regs->tfifo);
                drv_data->tx += 2;
                while (ioread32(&drv_data->regs->status) & SPI_STAT_RFE)
                        cpu_relax();
                ioread32(&drv_data->regs->rfifo);
        }
}

static void adi_spi_u16_read(struct adi_spi_master *drv_data)
{
        u32 tx_val = drv_data->cur_chip->tx_dummy_val;

        dummy_read(drv_data);
        while (drv_data->rx < drv_data->rx_end) {
                iowrite32(tx_val, &drv_data->regs->tfifo);
                while (ioread32(&drv_data->regs->status) & SPI_STAT_RFE)
                        cpu_relax();
                *(u16 *)drv_data->rx = ioread32(&drv_data->regs->rfifo);
                drv_data->rx += 2;
        }
}

static void adi_spi_u16_duplex(struct adi_spi_master *drv_data)
{
        dummy_read(drv_data);
        while (drv_data->rx < drv_data->rx_end) {
                iowrite32(*(u16 *)drv_data->tx, &drv_data->regs->tfifo);
                drv_data->tx += 2;
                while (ioread32(&drv_data->regs->status) & SPI_STAT_RFE)
                        cpu_relax();
                *(u16 *)drv_data->rx = ioread32(&drv_data->regs->rfifo);
                drv_data->rx += 2;
        }
}

static const struct adi_spi_transfer_ops adi_spi_transfer_ops_u16 = {
        .write  = adi_spi_u16_write,
        .read   = adi_spi_u16_read,
        .duplex = adi_spi_u16_duplex,
};

static void adi_spi_u32_write(struct adi_spi_master *drv_data)
{
        dummy_read(drv_data);
        while (drv_data->tx < drv_data->tx_end) {
                iowrite32(*(u32 *)drv_data->tx, &drv_data->regs->tfifo);
                drv_data->tx += 4;
                while (ioread32(&drv_data->regs->status) & SPI_STAT_RFE)
                        cpu_relax();
                ioread32(&drv_data->regs->rfifo);
        }
}

static void adi_spi_u32_read(struct adi_spi_master *drv_data)
{
        u32 tx_val = drv_data->cur_chip->tx_dummy_val;

        dummy_read(drv_data);
        while (drv_data->rx < drv_data->rx_end) {
                iowrite32(tx_val, &drv_data->regs->tfifo);
                while (ioread32(&drv_data->regs->status) & SPI_STAT_RFE)
                        cpu_relax();
                *(u32 *)drv_data->rx = ioread32(&drv_data->regs->rfifo);
                drv_data->rx += 4;
        }
}

static void adi_spi_u32_duplex(struct adi_spi_master *drv_data)
{
        dummy_read(drv_data);
        while (drv_data->rx < drv_data->rx_end) {
                iowrite32(*(u32 *)drv_data->tx, &drv_data->regs->tfifo);
                drv_data->tx += 4;
                while (ioread32(&drv_data->regs->status) & SPI_STAT_RFE)
                        cpu_relax();
                *(u32 *)drv_data->rx = ioread32(&drv_data->regs->rfifo);
                drv_data->rx += 4;
        }
}

static const struct adi_spi_transfer_ops adi_spi_transfer_ops_u32 = {
        .write  = adi_spi_u32_write,
        .read   = adi_spi_u32_read,
        .duplex = adi_spi_u32_duplex,
};

/* test if there is more transfer to be done */
static void adi_spi_next_transfer(struct adi_spi_master *drv)
{
        struct spi_message *msg = drv->cur_msg;
        struct spi_transfer *t = drv->cur_transfer;

        /* Move to next transfer */
        if (t->transfer_list.next != &msg->transfers) {
                drv->cur_transfer = list_entry(t->transfer_list.next,
                                               struct spi_transfer,
                                               transfer_list);
                drv->state = RUNNING_STATE;
        } else {
                drv->state = DONE_STATE;
                drv->cur_transfer = NULL;
        }
}

static void adi_spi_giveback(struct adi_spi_master *drv_data)
{
        struct adi_spi_device *chip = drv_data->cur_chip;

        adi_spi_cs_deactive(drv_data, chip);
        spi_finalize_current_message(drv_data->master);
}
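
/* Program buffers, word size and bit clock for the current spi_transfer */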
static int adi_spi_setup_transfer(struct adi_spi_master *drv)
{
        struct spi_transfer *t = drv->cur_transfer;
        u32 cr, cr_width;

        if (t->tx_buf) {
                drv->tx = (void *)t->tx_buf;
                drv->tx_end = drv->tx + t->len;
        } else {
                drv->tx = NULL;
        }

        if (t->rx_buf) {
                drv->rx = t->rx_buf;
                drv->rx_end = drv->rx + t->len;
        } else {
                drv->rx = NULL;
        }

        drv->transfer_len = t->len;

        /* bits per word setup */
        switch (t->bits_per_word) {
        case 8:
                cr_width = SPI_CTL_SIZE08;
                drv->ops = &adi_spi_transfer_ops_u8;
                break;
        case 16:
                cr_width = SPI_CTL_SIZE16;
                drv->ops = &adi_spi_transfer_ops_u16;
                break;
        case 32:
                cr_width = SPI_CTL_SIZE32;
                drv->ops = &adi_spi_transfer_ops_u32;
                break;
        default:
                return -EINVAL;
        }

        cr = ioread32(&drv->regs->control) & ~SPI_CTL_SIZE;
        cr |= cr_width;
        iowrite32(cr, &drv->regs->control);

        /* speed setup */
        iowrite32(hz_to_spi_clock(drv->sclk, t->speed_hz), &drv->regs->clock);
        return 0;
}
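
/*
 * Start the current transfer on the two peripheral DMA channels.  Half-duplex
 * transfers substitute the driver's dummy_buffer on the unused side with a
 * zero address modify, so both channels always run for the same word count.
 */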
static int adi_spi_dma_xfer(struct adi_spi_master *drv_data)
{
        struct spi_transfer *t = drv_data->cur_transfer;
        struct spi_message *msg = drv_data->cur_msg;
        struct adi_spi_device *chip = drv_data->cur_chip;
        u32 dma_config;
        unsigned long word_count, word_size;
        void *tx_buf, *rx_buf;

        switch (t->bits_per_word) {
        case 8:
                dma_config = WDSIZE_8 | PSIZE_8;
                word_count = drv_data->transfer_len;
                word_size = 1;
                break;
        case 16:
                dma_config = WDSIZE_16 | PSIZE_16;
                word_count = drv_data->transfer_len / 2;
                word_size = 2;
                break;
        default:
                dma_config = WDSIZE_32 | PSIZE_32;
                word_count = drv_data->transfer_len / 4;
                word_size = 4;
                break;
        }

        if (!drv_data->rx) {
                tx_buf = drv_data->tx;
                rx_buf = &drv_data->dummy_buffer;
                drv_data->tx_dma_size = drv_data->transfer_len;
                drv_data->rx_dma_size = sizeof(drv_data->dummy_buffer);
                set_dma_x_modify(drv_data->tx_dma, word_size);
                set_dma_x_modify(drv_data->rx_dma, 0);
        } else if (!drv_data->tx) {
                drv_data->dummy_buffer = chip->tx_dummy_val;
                tx_buf = &drv_data->dummy_buffer;
                rx_buf = drv_data->rx;
                drv_data->tx_dma_size = sizeof(drv_data->dummy_buffer);
                drv_data->rx_dma_size = drv_data->transfer_len;
                set_dma_x_modify(drv_data->tx_dma, 0);
                set_dma_x_modify(drv_data->rx_dma, word_size);
        } else {
                tx_buf = drv_data->tx;
                rx_buf = drv_data->rx;
                drv_data->tx_dma_size = drv_data->rx_dma_size
                                        = drv_data->transfer_len;
                set_dma_x_modify(drv_data->tx_dma, word_size);
                set_dma_x_modify(drv_data->rx_dma, word_size);
        }

        drv_data->tx_dma_addr = dma_map_single(&msg->spi->dev,
                                        tx_buf,
                                        drv_data->tx_dma_size,
                                        DMA_TO_DEVICE);
        if (dma_mapping_error(&msg->spi->dev,
                                drv_data->tx_dma_addr))
                return -ENOMEM;

        drv_data->rx_dma_addr = dma_map_single(&msg->spi->dev,
                                        rx_buf,
                                        drv_data->rx_dma_size,
                                        DMA_FROM_DEVICE);
        if (dma_mapping_error(&msg->spi->dev,
                                drv_data->rx_dma_addr)) {
                dma_unmap_single(&msg->spi->dev,
                                drv_data->tx_dma_addr,
                                drv_data->tx_dma_size,
                                DMA_TO_DEVICE);
                return -ENOMEM;
        }

        dummy_read(drv_data);
        set_dma_x_count(drv_data->tx_dma, word_count);
        set_dma_x_count(drv_data->rx_dma, word_count);
        set_dma_start_addr(drv_data->tx_dma, drv_data->tx_dma_addr);
        set_dma_start_addr(drv_data->rx_dma, drv_data->rx_dma_addr);
        dma_config |= DMAFLOW_STOP | RESTART | DI_EN;
        set_dma_config(drv_data->tx_dma, dma_config);
        set_dma_config(drv_data->rx_dma, dma_config | WNR);
        enable_dma(drv_data->tx_dma);
        enable_dma(drv_data->rx_dma);

        iowrite32(SPI_RXCTL_REN | SPI_RXCTL_RDR_NE,
                        &drv_data->regs->rx_control);
        iowrite32(SPI_TXCTL_TEN | SPI_TXCTL_TTI | SPI_TXCTL_TDR_NF,
                        &drv_data->regs->tx_control);

        return 0;
}
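
/* Run the current transfer word by word with the CPU (no DMA) */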
static int adi_spi_pio_xfer(struct adi_spi_master *drv_data)
{
        struct spi_message *msg = drv_data->cur_msg;

        if (!drv_data->rx) {
                /* write only half duplex */
                drv_data->ops->write(drv_data);
                if (drv_data->tx != drv_data->tx_end)
                        return -EIO;
        } else if (!drv_data->tx) {
                /* read only half duplex */
                drv_data->ops->read(drv_data);
                if (drv_data->rx != drv_data->rx_end)
                        return -EIO;
        } else {
                /* full duplex mode */
                drv_data->ops->duplex(drv_data);
                if (drv_data->tx != drv_data->tx_end)
                        return -EIO;
        }

        if (!adi_spi_flush(drv_data))
                return -EIO;
        msg->actual_length += drv_data->transfer_len;
        tasklet_schedule(&drv_data->pump_transfers);
        return 0;
}
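
/*
 * Tasklet that walks the transfer list of the current message: it advances
 * the state machine, programs each transfer, starts it in PIO or DMA mode,
 * and finalizes the message on completion or error.
 */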
static void adi_spi_pump_transfers(unsigned long data)
{
        struct adi_spi_master *drv_data = (struct adi_spi_master *)data;
        struct spi_message *msg = NULL;
        struct spi_transfer *t = NULL;
        struct adi_spi_device *chip = NULL;
        int ret;

        /* Get current state information */
        msg = drv_data->cur_msg;
        t = drv_data->cur_transfer;
        chip = drv_data->cur_chip;

        /* Handle for abort */
        if (drv_data->state == ERROR_STATE) {
                msg->status = -EIO;
                adi_spi_giveback(drv_data);
                return;
        }

        if (drv_data->state == RUNNING_STATE) {
                if (t->delay_usecs)
                        udelay(t->delay_usecs);
                if (t->cs_change)
                        adi_spi_cs_deactive(drv_data, chip);
                adi_spi_next_transfer(drv_data);
                t = drv_data->cur_transfer;
        }
        /* Handle end of message */
        if (drv_data->state == DONE_STATE) {
                msg->status = 0;
                adi_spi_giveback(drv_data);
                return;
        }

        if ((t->len == 0) || (t->tx_buf == NULL && t->rx_buf == NULL)) {
                /* Schedule next transfer tasklet */
                tasklet_schedule(&drv_data->pump_transfers);
                return;
        }

        ret = adi_spi_setup_transfer(drv_data);
        if (ret) {
                msg->status = ret;
                adi_spi_giveback(drv_data);
                return;
        }

        iowrite32(0xFFFFFFFF, &drv_data->regs->status);
        adi_spi_cs_active(drv_data, chip);
        drv_data->state = RUNNING_STATE;

        if (chip->enable_dma)
                ret = adi_spi_dma_xfer(drv_data);
        else
                ret = adi_spi_pio_xfer(drv_data);
        if (ret) {
                msg->status = ret;
                adi_spi_giveback(drv_data);
        }
}
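
/* spi_master transfer_one_message hook: prime the state and kick the tasklet */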
static int adi_spi_transfer_one_message(struct spi_master *master,
                                        struct spi_message *m)
{
        struct adi_spi_master *drv_data = spi_master_get_devdata(master);

        drv_data->cur_msg = m;
        drv_data->cur_chip = spi_get_ctldata(drv_data->cur_msg->spi);
        adi_spi_restore_state(drv_data);

        drv_data->state = START_STATE;
        drv_data->cur_transfer = list_entry(drv_data->cur_msg->transfers.next,
                                            struct spi_transfer,
                                            transfer_list);

        tasklet_schedule(&drv_data->pump_transfers);
        return 0;
}

#define MAX_SPI_SSEL    7

static const u16 ssel[][MAX_SPI_SSEL] = {
        {P_SPI0_SSEL1, P_SPI0_SSEL2, P_SPI0_SSEL3,
         P_SPI0_SSEL4, P_SPI0_SSEL5,
         P_SPI0_SSEL6, P_SPI0_SSEL7},

        {P_SPI1_SSEL1, P_SPI1_SSEL2, P_SPI1_SSEL3,
         P_SPI1_SSEL4, P_SPI1_SSEL5,
         P_SPI1_SSEL6, P_SPI1_SSEL7},

        {P_SPI2_SSEL1, P_SPI2_SSEL2, P_SPI2_SSEL3,
         P_SPI2_SSEL4, P_SPI2_SSEL5,
         P_SPI2_SSEL6, P_SPI2_SSEL7},
};
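
/*
 * Per-device setup: allocate the chip state on first use, validate the
 * board-supplied controller_data, claim the hardware or GPIO chip select,
 * and derive the control and clock register values from the SPI mode.
 *
 * Illustrative board hookup only (a sketch, not a reference): it assumes the
 * adi_spi3_master/adi_spi3_chip platform structures from
 * <linux/spi/adi_spi3.h>; the pin macros, channel numbers and speeds below
 * are placeholders.
 *
 *      static u16 spi0_pins[] = {P_SPI0_SCK, P_SPI0_MISO, P_SPI0_MOSI, 0};
 *
 *      static struct adi_spi3_master spi0_info = {
 *              .num_chipselect = 4,
 *              .pin_req        = spi0_pins,
 *      };
 *
 *      static struct adi_spi3_chip flash_chip_info = {
 *              .enable_dma     = true,
 *              .tx_dummy_val   = 0xffff,
 *      };
 *
 *      static struct spi_board_info board_info[] __initdata = {
 *              {
 *                      .modalias        = "m25p80",
 *                      .max_speed_hz    = 25000000,
 *                      .bus_num         = 0,
 *                      .chip_select     = 1,
 *                      .controller_data = &flash_chip_info,
 *              },
 *      };
 *
 * The matching platform device carries spi0_info as platform data plus one
 * IORESOURCE_MEM resource for the register block and two IORESOURCE_DMA
 * resources (tx channel first, then rx), which is what adi_spi_probe()
 * expects to find.
 */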
static int adi_spi_setup(struct spi_device *spi)
{
        struct adi_spi_master *drv_data = spi_master_get_devdata(spi->master);
        struct adi_spi_device *chip = spi_get_ctldata(spi);
        u32 ctl_reg = SPI_CTL_ODM | SPI_CTL_PSSE;
        int ret = -EINVAL;

        if (!chip) {
                struct adi_spi3_chip *chip_info = spi->controller_data;

                chip = kzalloc(sizeof(*chip), GFP_KERNEL);
                if (!chip)
                        return -ENOMEM;

                if (chip_info) {
                        if (chip_info->control & ~ctl_reg) {
                                dev_err(&spi->dev,
                                        "do not set bits that the SPI framework manages\n");
                                goto error;
                        }
                        chip->control = chip_info->control;
                        chip->cs_chg_udelay = chip_info->cs_chg_udelay;
                        chip->tx_dummy_val = chip_info->tx_dummy_val;
                        chip->enable_dma = chip_info->enable_dma;
                }

                chip->cs = spi->chip_select;

                if (chip->cs < MAX_CTRL_CS) {
                        chip->ssel = (1 << chip->cs) << 8;
                        ret = peripheral_request(ssel[spi->master->bus_num]
                                        [chip->cs-1], dev_name(&spi->dev));
                        if (ret) {
                                dev_err(&spi->dev, "peripheral_request() error\n");
                                goto error;
                        }
                } else {
                        chip->cs_gpio = chip->cs - MAX_CTRL_CS;
                        ret = gpio_request_one(chip->cs_gpio, GPIOF_OUT_INIT_HIGH,
                                                dev_name(&spi->dev));
                        if (ret) {
                                dev_err(&spi->dev, "gpio_request_one() error\n");
                                goto error;
                        }
                }
                spi_set_ctldata(spi, chip);
        }

        /* force a default base state */
        chip->control &= ctl_reg;

        if (spi->mode & SPI_CPOL)
                chip->control |= SPI_CTL_CPOL;
        if (spi->mode & SPI_CPHA)
                chip->control |= SPI_CTL_CPHA;
        if (spi->mode & SPI_LSB_FIRST)
                chip->control |= SPI_CTL_LSBF;
        chip->control |= SPI_CTL_MSTR;
        /* we choose software to control cs */
        chip->control &= ~SPI_CTL_ASSEL;

        chip->clock = hz_to_spi_clock(drv_data->sclk, spi->max_speed_hz);

        adi_spi_cs_enable(drv_data, chip);
        adi_spi_cs_deactive(drv_data, chip);

        return 0;

error:
        kfree(chip);
        spi_set_ctldata(spi, NULL);
        return ret;
}

static void adi_spi_cleanup(struct spi_device *spi)
{
        struct adi_spi_device *chip = spi_get_ctldata(spi);
        struct adi_spi_master *drv_data = spi_master_get_devdata(spi->master);

        if (!chip)
                return;

        if (chip->cs < MAX_CTRL_CS) {
                peripheral_free(ssel[spi->master->bus_num]
                                        [chip->cs-1]);
                adi_spi_cs_disable(drv_data, chip);
        } else {
                gpio_free(chip->cs_gpio);
        }

        kfree(chip);
        spi_set_ctldata(spi, NULL);
}
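
/*
 * DMA completion callbacks: the TX handler accounts for completion and
 * throttles the TX trigger, while the RX handler stops the controller and
 * schedules the pump_transfers tasklet for the next transfer.
 */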
static irqreturn_t adi_spi_tx_dma_isr(int irq, void *dev_id)
{
        struct adi_spi_master *drv_data = dev_id;
        u32 dma_stat = get_dma_curr_irqstat(drv_data->tx_dma);
        u32 tx_ctl;

        clear_dma_irqstat(drv_data->tx_dma);
        if (dma_stat & DMA_DONE) {
                drv_data->tx_num++;
        } else {
                dev_err(&drv_data->master->dev,
                                "spi tx dma error: %d\n", dma_stat);
                drv_data->state = ERROR_STATE;
        }
        tx_ctl = ioread32(&drv_data->regs->tx_control);
        tx_ctl &= ~SPI_TXCTL_TDR_NF;
        iowrite32(tx_ctl, &drv_data->regs->tx_control);
        return IRQ_HANDLED;
}

static irqreturn_t adi_spi_rx_dma_isr(int irq, void *dev_id)
{
        struct adi_spi_master *drv_data = dev_id;
        struct spi_message *msg = drv_data->cur_msg;
        u32 dma_stat = get_dma_curr_irqstat(drv_data->rx_dma);

        clear_dma_irqstat(drv_data->rx_dma);
        if (dma_stat & DMA_DONE) {
                drv_data->rx_num++;
                /* we may fail on tx dma */
                if (drv_data->state != ERROR_STATE)
                        msg->actual_length += drv_data->transfer_len;
        } else {
                drv_data->state = ERROR_STATE;
                dev_err(&drv_data->master->dev,
                                "spi rx dma error: %d\n", dma_stat);
        }
        iowrite32(0, &drv_data->regs->tx_control);
        iowrite32(0, &drv_data->regs->rx_control);
        if (drv_data->rx_num != drv_data->tx_num)
                dev_dbg(&drv_data->master->dev,
                                "dma interrupt missing: tx=%d,rx=%d\n",
                                drv_data->tx_num, drv_data->rx_num);
        tasklet_schedule(&drv_data->pump_transfers);
        return IRQ_HANDLED;
}
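
/*
 * Probe: pick up platform data, clock and resources, configure the
 * spi_master, claim both DMA channels and the pin list, then register with
 * the SPI core.
 */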
static int adi_spi_probe(struct platform_device *pdev)
{
        struct device *dev = &pdev->dev;
        struct adi_spi3_master *info = dev_get_platdata(dev);
        struct spi_master *master;
        struct adi_spi_master *drv_data;
        struct resource *mem, *res;
        unsigned int tx_dma, rx_dma;
        struct clk *sclk;
        int ret;

        if (!info) {
                dev_err(dev, "platform data missing!\n");
                return -ENODEV;
        }

        sclk = devm_clk_get(dev, "spi");
        if (IS_ERR(sclk)) {
                dev_err(dev, "can not get spi clock\n");
                return PTR_ERR(sclk);
        }

        res = platform_get_resource(pdev, IORESOURCE_DMA, 0);
        if (!res) {
                dev_err(dev, "can not get tx dma resource\n");
                return -ENXIO;
        }
        tx_dma = res->start;

        res = platform_get_resource(pdev, IORESOURCE_DMA, 1);
        if (!res) {
                dev_err(dev, "can not get rx dma resource\n");
                return -ENXIO;
        }
        rx_dma = res->start;

        /* allocate master with space for drv_data */
        master = spi_alloc_master(dev, sizeof(*drv_data));
        if (!master) {
                dev_err(dev, "can not alloc spi_master\n");
                return -ENOMEM;
        }
        platform_set_drvdata(pdev, master);

        /* the mode bits supported by this driver */
        master->mode_bits = SPI_CPOL | SPI_CPHA | SPI_LSB_FIRST;

        master->bus_num = pdev->id;
        master->num_chipselect = info->num_chipselect;
        master->cleanup = adi_spi_cleanup;
        master->setup = adi_spi_setup;
        master->transfer_one_message = adi_spi_transfer_one_message;
        master->bits_per_word_mask = SPI_BPW_MASK(32) | SPI_BPW_MASK(16) |
                                     SPI_BPW_MASK(8);

        drv_data = spi_master_get_devdata(master);
        drv_data->master = master;
        drv_data->tx_dma = tx_dma;
        drv_data->rx_dma = rx_dma;
        drv_data->pin_req = info->pin_req;
        drv_data->sclk = clk_get_rate(sclk);

        mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        drv_data->regs = devm_ioremap_resource(dev, mem);
        if (IS_ERR(drv_data->regs)) {
                ret = PTR_ERR(drv_data->regs);
                goto err_put_master;
        }

        /* request tx and rx dma */
        ret = request_dma(tx_dma, "SPI_TX_DMA");
        if (ret) {
                dev_err(dev, "can not request SPI TX DMA channel\n");
                goto err_put_master;
        }
        set_dma_callback(tx_dma, adi_spi_tx_dma_isr, drv_data);

        ret = request_dma(rx_dma, "SPI_RX_DMA");
        if (ret) {
                dev_err(dev, "can not request SPI RX DMA channel\n");
                goto err_free_tx_dma;
        }
        set_dma_callback(drv_data->rx_dma, adi_spi_rx_dma_isr, drv_data);

        /* request CLK, MOSI and MISO */
        ret = peripheral_request_list(drv_data->pin_req, "adi-spi3");
        if (ret) {
                dev_err(dev, "can not request spi pins\n");
                goto err_free_rx_dma;
        }

        iowrite32(SPI_CTL_MSTR | SPI_CTL_CPHA, &drv_data->regs->control);
        iowrite32(0x0000FE00, &drv_data->regs->ssel);
        iowrite32(0x0, &drv_data->regs->delay);

        tasklet_init(&drv_data->pump_transfers,
                        adi_spi_pump_transfers, (unsigned long)drv_data);
        /* register with the SPI framework */
        ret = devm_spi_register_master(dev, master);
        if (ret) {
                dev_err(dev, "can not register spi master\n");
                goto err_free_peripheral;
        }

        return ret;

err_free_peripheral:
        peripheral_free_list(drv_data->pin_req);
err_free_rx_dma:
        free_dma(rx_dma);
err_free_tx_dma:
        free_dma(tx_dma);
err_put_master:
        spi_master_put(master);

        return ret;
}

static int adi_spi_remove(struct platform_device *pdev)
{
        struct spi_master *master = platform_get_drvdata(pdev);
        struct adi_spi_master *drv_data = spi_master_get_devdata(master);

        adi_spi_disable(drv_data);
        peripheral_free_list(drv_data->pin_req);
        free_dma(drv_data->rx_dma);
        free_dma(drv_data->tx_dma);
        return 0;
}
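
/*
 * System suspend/resume: save and restore the control and ssel registers,
 * park the controller in a default state, and gate the DMA channel
 * interrupts while the system is asleep.
 */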
static int adi_spi_suspend(struct device *dev)
{
        struct spi_master *master = dev_get_drvdata(dev);
        struct adi_spi_master *drv_data = spi_master_get_devdata(master);

        spi_master_suspend(master);

        drv_data->control = ioread32(&drv_data->regs->control);
        drv_data->ssel = ioread32(&drv_data->regs->ssel);

        iowrite32(SPI_CTL_MSTR | SPI_CTL_CPHA, &drv_data->regs->control);
        iowrite32(0x0000FE00, &drv_data->regs->ssel);
        dma_disable_irq(drv_data->rx_dma);
        dma_disable_irq(drv_data->tx_dma);

        return 0;
}

static int adi_spi_resume(struct device *dev)
{
        struct spi_master *master = dev_get_drvdata(dev);
        struct adi_spi_master *drv_data = spi_master_get_devdata(master);
        int ret;

        /* bootrom may modify spi and dma status when resume in spi boot mode */
        disable_dma(drv_data->rx_dma);

        dma_enable_irq(drv_data->rx_dma);
        dma_enable_irq(drv_data->tx_dma);
        iowrite32(drv_data->control, &drv_data->regs->control);
        iowrite32(drv_data->ssel, &drv_data->regs->ssel);

        ret = spi_master_resume(master);
        if (ret) {
                free_dma(drv_data->rx_dma);
                free_dma(drv_data->tx_dma);
        }

        return ret;
}

static const struct dev_pm_ops adi_spi_pm_ops = {
        SET_SYSTEM_SLEEP_PM_OPS(adi_spi_suspend, adi_spi_resume)
};

MODULE_ALIAS("platform:adi-spi3");
static struct platform_driver adi_spi_driver = {
        .driver = {
                .name   = "adi-spi3",
                .pm     = &adi_spi_pm_ops,
        },
        .remove         = adi_spi_remove,
};
module_platform_driver_probe(adi_spi_driver, adi_spi_probe);

MODULE_DESCRIPTION("Analog Devices SPI3 controller driver");
MODULE_AUTHOR("Scott Jiang <Scott.Jiang.Linux@gmail.com>");
MODULE_LICENSE("GPL v2");