// SPDX-License-Identifier: GPL-2.0-only
/*
 * Microchip PIC32 SPI controller driver.
 *
 * Purna Chandra Mandal <purna.mandal@microchip.com>
 * Copyright (c) 2016, Microchip Technology Inc.
 */

#include <linux/clk.h>
#include <linux/clkdev.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/highmem.h>
#include <linux/module.h>
#include <linux/io.h>
#include <linux/interrupt.h>
#include <linux/of.h>
#include <linux/of_irq.h>
#include <linux/of_gpio.h>
#include <linux/of_address.h>
#include <linux/platform_device.h>
#include <linux/spi/spi.h>
/* SPI controller registers */
struct pic32_spi_regs {
        u32 ctrl;
        u32 ctrl_clr;
        u32 ctrl_set;
        u32 ctrl_inv;
        u32 status;
        u32 status_clr;
        u32 status_set;
        u32 status_inv;
        u32 buf;
        u32 dontuse[3];
        u32 baud;
        u32 dontuse2[3];
        u32 ctrl2;
        u32 ctrl2_clr;
        u32 ctrl2_set;
        u32 ctrl2_inv;
};
/* Bit fields of SPI Control Register */
#define CTRL_RX_INT_SHIFT   0 /* Rx interrupt generation */
#define RX_FIFO_EMPTY       0
#define RX_FIFO_NOT_EMPTY   1 /* not empty */
#define RX_FIFO_HALF_FULL   2 /* full by half or more */
#define RX_FIFO_FULL        3 /* completely full */

#define CTRL_TX_INT_SHIFT   2 /* TX interrupt generation */
#define TX_FIFO_ALL_EMPTY   0 /* completely empty */
#define TX_FIFO_EMPTY       1 /* empty */
#define TX_FIFO_HALF_EMPTY  2 /* empty by half or more */
#define TX_FIFO_NOT_FULL    3 /* at least one empty */

#define CTRL_MSTEN     BIT(5)  /* enable master mode */
#define CTRL_CKP       BIT(6)  /* active low */
#define CTRL_CKE       BIT(8)  /* Tx on falling edge */
#define CTRL_SMP       BIT(9)  /* Rx at middle or end of tx */
#define CTRL_BPW_MASK  0x03    /* bits per word/sample */
#define CTRL_BPW_SHIFT 10
#define PIC32_BPW_8    0
#define PIC32_BPW_16   1
#define PIC32_BPW_32   2
#define CTRL_SIDL      BIT(13) /* sleep when idle */
#define CTRL_ON        BIT(15) /* enable macro */
#define CTRL_ENHBUF    BIT(16) /* enable enhanced buffering */
#define CTRL_MCLKSEL   BIT(23) /* select clock source */
#define CTRL_MSSEN     BIT(28) /* macro driven /SS */
#define CTRL_FRMEN     BIT(31) /* enable framing mode */

/* Bit fields of SPI Status Register */
#define STAT_RF_EMPTY      BIT(5)  /* RX Fifo empty */
#define STAT_RX_OV         BIT(6)  /* err, s/w needs to clear */
#define STAT_TX_UR         BIT(8)  /* UR in Framed SPI modes */
#define STAT_FRM_ERR       BIT(12) /* Multiple Frame Sync pulse */
#define STAT_TF_LVL_MASK   0x1F
#define STAT_TF_LVL_SHIFT  16
#define STAT_RF_LVL_MASK   0x1F
#define STAT_RF_LVL_SHIFT  24

/* Bit fields of SPI Baud Register */
#define BAUD_MASK          0x1ff

/* Bit fields of SPI Control2 Register */
#define CTRL2_TX_UR_EN     BIT(10) /* Enable int on Tx under-run */
#define CTRL2_RX_OV_EN     BIT(11) /* Enable int on Rx over-run */
#define CTRL2_FRM_ERR_EN   BIT(12) /* Enable frame err int */

/* Minimum DMA transfer size */
#define PIC32_DMA_LEN_MIN  64
struct pic32_spi {
        dma_addr_t dma_base;
        struct pic32_spi_regs __iomem *regs;
        int fault_irq;
        int rx_irq;
        int tx_irq;
        u32 fifo_n_byte; /* FIFO depth in bytes */
        struct clk *clk;
        struct spi_master *master;
        /* Current controller setting */
        u32 speed_hz; /* spi-clk rate */
        u32 mode;
        u32 bits_per_word;
        u32 fifo_n_elm; /* FIFO depth in words */
#define PIC32F_DMA_PREP 0 /* DMA chnls configured */
        unsigned long flags;
        /* Current transfer state */
        struct completion xfer_done;
        /* PIO transfer specific */
        const void *tx;
        const void *tx_end;
        const void *rx;
        const void *rx_end;
        u32 len;
        void (*rx_fifo)(struct pic32_spi *);
        void (*tx_fifo)(struct pic32_spi *);
};
static inline void pic32_spi_enable(struct pic32_spi *pic32s)
{
        writel(CTRL_ON | CTRL_SIDL, &pic32s->regs->ctrl_set);
}

static inline void pic32_spi_disable(struct pic32_spi *pic32s)
{
        writel(CTRL_ON | CTRL_SIDL, &pic32s->regs->ctrl_clr);

        /* avoid SPI registers read/write at immediate next CPU clock */
        ndelay(20);
}
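
/*
 * Program the baud-rate generator from the peripheral clock: the
 * divider below follows spi_ck = clk_in / (2 * (div + 1)), i.e.
 * div = clk_in / (2 * spi_ck) - 1, masked to the 9-bit BAUD field.
 */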
static void pic32_spi_set_clk_rate(struct pic32_spi *pic32s, u32 spi_ck)
{
        u32 div;

        /* div = clk_in / (2 * spi_ck) - 1 */
        div = DIV_ROUND_CLOSEST(clk_get_rate(pic32s->clk), 2 * spi_ck) - 1;

        writel(div & BAUD_MASK, &pic32s->regs->baud);
}
static inline u32 pic32_rx_fifo_level(struct pic32_spi *pic32s)
{
        u32 sr = readl(&pic32s->regs->status);

        return (sr >> STAT_RF_LVL_SHIFT) & STAT_RF_LVL_MASK;
}

static inline u32 pic32_tx_fifo_level(struct pic32_spi *pic32s)
{
        u32 sr = readl(&pic32s->regs->status);

        return (sr >> STAT_TF_LVL_SHIFT) & STAT_TF_LVL_MASK;
}
/* Return the max entries we can fill into tx fifo */
static u32 pic32_tx_max(struct pic32_spi *pic32s, int n_bytes)
{
        u32 tx_left, tx_room, rxtx_gap;

        tx_left = (pic32s->tx_end - pic32s->tx) / n_bytes;
        tx_room = pic32s->fifo_n_elm - pic32_tx_fifo_level(pic32s);

        /*
         * Another concern is the tx/rx mismatch: we thought about using
         * (pic32s->fifo_n_byte - rxfl - txfl) as one maximum value for
         * tx, but it doesn't cover the data which is out of the tx/rx
         * fifo and inside the shift registers. So a control from the
         * software point of view is taken instead.
         */
        rxtx_gap = ((pic32s->rx_end - pic32s->rx) -
                    (pic32s->tx_end - pic32s->tx)) / n_bytes;

        return min3(tx_left, tx_room, (u32)(pic32s->fifo_n_elm - rxtx_gap));
}
/* Return the max entries we should read out of rx fifo */
static u32 pic32_rx_max(struct pic32_spi *pic32s, int n_bytes)
{
        u32 rx_left = (pic32s->rx_end - pic32s->rx) / n_bytes;

        return min_t(u32, rx_left, pic32_rx_fifo_level(pic32s));
}
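
/*
 * Generate PIO FIFO access helpers for 8-, 16- and 32-bit word sizes.
 * The rx helper drains at most pic32_rx_max() words from the receive
 * FIFO and the tx helper refills at most pic32_tx_max() words, so the
 * receive FIFO cannot overflow while the transmit FIFO is being fed.
 */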
#define BUILD_SPI_FIFO_RW(__name, __type, __bwl)                \
static void pic32_spi_rx_##__name(struct pic32_spi *pic32s)     \
{                                                               \
        __type v;                                               \
        u32 mx = pic32_rx_max(pic32s, sizeof(__type));          \
                                                                \
        for (; mx; mx--) {                                      \
                v = read##__bwl(&pic32s->regs->buf);            \
                if (pic32s->rx_end - pic32s->len)               \
                        *(__type *)(pic32s->rx) = v;            \
                pic32s->rx += sizeof(__type);                   \
        }                                                       \
}                                                               \
                                                                \
static void pic32_spi_tx_##__name(struct pic32_spi *pic32s)     \
{                                                               \
        __type v;                                               \
        u32 mx = pic32_tx_max(pic32s, sizeof(__type));          \
                                                                \
        for (; mx; mx--) {                                      \
                v = 0;                                          \
                if (pic32s->tx_end - pic32s->len)               \
                        v = *(__type *)(pic32s->tx);            \
                write##__bwl(v, &pic32s->regs->buf);            \
                pic32s->tx += sizeof(__type);                   \
        }                                                       \
}

BUILD_SPI_FIFO_RW(byte, u8, b);
BUILD_SPI_FIFO_RW(word, u16, w);
BUILD_SPI_FIFO_RW(dword, u32, l);
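
/*
 * Abort the ongoing transfer on a fatal error: mask all controller
 * interrupts, mark the current message with -EIO and wake up the
 * waiter in pic32_spi_one_transfer().
 */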
static void pic32_err_stop(struct pic32_spi *pic32s, const char *msg)
{
        /* disable all interrupts */
        disable_irq_nosync(pic32s->fault_irq);
        disable_irq_nosync(pic32s->rx_irq);
        disable_irq_nosync(pic32s->tx_irq);

        /* Show err message and abort xfer with err */
        dev_err(&pic32s->master->dev, "%s\n", msg);
        if (pic32s->master->cur_msg)
                pic32s->master->cur_msg->status = -EIO;
        complete(&pic32s->xfer_done);
}
static irqreturn_t pic32_spi_fault_irq(int irq, void *dev_id)
{
        struct pic32_spi *pic32s = dev_id;
        u32 status;

        status = readl(&pic32s->regs->status);

        if (status & (STAT_RX_OV | STAT_TX_UR)) {
                writel(STAT_RX_OV, &pic32s->regs->status_clr);
                writel(STAT_TX_UR, &pic32s->regs->status_clr);
                pic32_err_stop(pic32s, "err_irq: fifo ov/ur-run");
                return IRQ_HANDLED;
        }

        if (status & STAT_FRM_ERR) {
                pic32_err_stop(pic32s, "err_irq: frame error");
                return IRQ_HANDLED;
        }

        if (!pic32s->master->cur_msg) {
                pic32_err_stop(pic32s, "err_irq: no message");
                return IRQ_NONE;
        }

        return IRQ_NONE;
}
static irqreturn_t pic32_spi_rx_irq(int irq, void *dev_id)
{
        struct pic32_spi *pic32s = dev_id;

        pic32s->rx_fifo(pic32s);

        if (pic32s->rx_end == pic32s->rx) {
                /* disable all interrupts */
                disable_irq_nosync(pic32s->fault_irq);
                disable_irq_nosync(pic32s->rx_irq);

                /* complete current xfer */
                complete(&pic32s->xfer_done);
        }

        return IRQ_HANDLED;
}
static irqreturn_t pic32_spi_tx_irq(int irq, void *dev_id)
{
        struct pic32_spi *pic32s = dev_id;

        pic32s->tx_fifo(pic32s);

        /* tx complete? disable tx interrupt */
        if (pic32s->tx_end == pic32s->tx)
                disable_irq_nosync(pic32s->tx_irq);

        return IRQ_HANDLED;
}
static void pic32_spi_dma_rx_notify(void *data)
{
        struct pic32_spi *pic32s = data;

        complete(&pic32s->xfer_done);
}
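
/*
 * Kick off a transfer in DMA mode: one slave_sg descriptor per
 * direction, with the completion callback attached to the RX
 * descriptor since it is the one that finishes last.
 */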
static int pic32_spi_dma_transfer(struct pic32_spi *pic32s,
                                  struct spi_transfer *xfer)
{
        struct spi_master *master = pic32s->master;
        struct dma_async_tx_descriptor *desc_rx;
        struct dma_async_tx_descriptor *desc_tx;
        dma_cookie_t cookie;
        int ret;

        if (!master->dma_rx || !master->dma_tx)
                return -ENODEV;

        desc_rx = dmaengine_prep_slave_sg(master->dma_rx,
                                          xfer->rx_sg.sgl,
                                          xfer->rx_sg.nents,
                                          DMA_DEV_TO_MEM,
                                          DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc_rx) {
                ret = -EINVAL;
                goto err_dma;
        }

        desc_tx = dmaengine_prep_slave_sg(master->dma_tx,
                                          xfer->tx_sg.sgl,
                                          xfer->tx_sg.nents,
                                          DMA_MEM_TO_DEV,
                                          DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc_tx) {
                ret = -EINVAL;
                goto err_dma;
        }

        /* Put callback on the RX transfer, that should finish last */
        desc_rx->callback = pic32_spi_dma_rx_notify;
        desc_rx->callback_param = pic32s;

        cookie = dmaengine_submit(desc_rx);
        ret = dma_submit_error(cookie);
        if (ret)
                goto err_dma;

        cookie = dmaengine_submit(desc_tx);
        ret = dma_submit_error(cookie);
        if (ret)
                goto err_dma_tx;

        dma_async_issue_pending(master->dma_rx);
        dma_async_issue_pending(master->dma_tx);

        return 0;

err_dma_tx:
        dmaengine_terminate_all(master->dma_rx);
err_dma:
        return ret;
}
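
/*
 * Point both DMA channels at the SPI data buffer register and let them
 * burst in units of half the FIFO depth, matching the FIFO threshold
 * interrupts; the rx/tx interrupt lines are passed as slave_id so they
 * act as the DMA request triggers.
 */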
static int pic32_spi_dma_config(struct pic32_spi *pic32s, u32 dma_width)
{
        int buf_offset = offsetof(struct pic32_spi_regs, buf);
        struct spi_master *master = pic32s->master;
        struct dma_slave_config cfg;
        int ret;

        memset(&cfg, 0, sizeof(cfg));
        cfg.device_fc = true;
        cfg.src_addr = pic32s->dma_base + buf_offset;
        cfg.dst_addr = pic32s->dma_base + buf_offset;
        cfg.src_maxburst = pic32s->fifo_n_elm / 2; /* fill one-half */
        cfg.dst_maxburst = pic32s->fifo_n_elm / 2; /* drain one-half */
        cfg.src_addr_width = dma_width;
        cfg.dst_addr_width = dma_width;
        /* tx channel */
        cfg.slave_id = pic32s->tx_irq;
        cfg.direction = DMA_MEM_TO_DEV;
        ret = dmaengine_slave_config(master->dma_tx, &cfg);
        if (ret) {
                dev_err(&master->dev, "tx channel setup failed\n");
                return ret;
        }
        /* rx channel */
        cfg.slave_id = pic32s->rx_irq;
        cfg.direction = DMA_DEV_TO_MEM;
        ret = dmaengine_slave_config(master->dma_rx, &cfg);
        if (ret)
                dev_err(&master->dev, "rx channel setup failed\n");

        return ret;
}
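
/*
 * Select the PIO FIFO accessors, controller word size and DMA bus
 * width for the requested bits_per_word, and recompute how many words
 * the FIFOs can hold at that width.
 */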
static int pic32_spi_set_word_size(struct pic32_spi *pic32s, u8 bits_per_word)
{
        enum dma_slave_buswidth dmawidth;
        u32 buswidth, v;

        switch (bits_per_word) {
        case 8:
                pic32s->rx_fifo = pic32_spi_rx_byte;
                pic32s->tx_fifo = pic32_spi_tx_byte;
                buswidth = PIC32_BPW_8;
                dmawidth = DMA_SLAVE_BUSWIDTH_1_BYTE;
                break;
        case 16:
                pic32s->rx_fifo = pic32_spi_rx_word;
                pic32s->tx_fifo = pic32_spi_tx_word;
                buswidth = PIC32_BPW_16;
                dmawidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
                break;
        case 32:
                pic32s->rx_fifo = pic32_spi_rx_dword;
                pic32s->tx_fifo = pic32_spi_tx_dword;
                buswidth = PIC32_BPW_32;
                dmawidth = DMA_SLAVE_BUSWIDTH_4_BYTES;
                break;
        default:
                /* not supported */
                return -EINVAL;
        }

        /* calculate maximum number of words fifos can hold */
        pic32s->fifo_n_elm = DIV_ROUND_UP(pic32s->fifo_n_byte,
                                          bits_per_word / 8);
        /* set word size */
        v = readl(&pic32s->regs->ctrl);
        v &= ~(CTRL_BPW_MASK << CTRL_BPW_SHIFT);
        v |= buswidth << CTRL_BPW_SHIFT;
        writel(v, &pic32s->regs->ctrl);

        /* re-configure dma width, if required */
        if (test_bit(PIC32F_DMA_PREP, &pic32s->flags))
                pic32_spi_dma_config(pic32s, dmawidth);

        return 0;
}
static int pic32_spi_prepare_hardware(struct spi_master *master)
{
        struct pic32_spi *pic32s = spi_master_get_devdata(master);

        pic32_spi_enable(pic32s);

        return 0;
}
static int pic32_spi_prepare_message(struct spi_master *master,
                                     struct spi_message *msg)
{
        struct pic32_spi *pic32s = spi_master_get_devdata(master);
        struct spi_device *spi = msg->spi;
        u32 val;

        /* set device specific bits_per_word */
        if (pic32s->bits_per_word != spi->bits_per_word) {
                pic32_spi_set_word_size(pic32s, spi->bits_per_word);
                pic32s->bits_per_word = spi->bits_per_word;
        }

        /* device specific speed change */
        if (pic32s->speed_hz != spi->max_speed_hz) {
                pic32_spi_set_clk_rate(pic32s, spi->max_speed_hz);
                pic32s->speed_hz = spi->max_speed_hz;
        }

        /* device specific mode change */
        if (pic32s->mode != spi->mode) {
                val = readl(&pic32s->regs->ctrl);
                /* active low */
                if (spi->mode & SPI_CPOL)
                        val |= CTRL_CKP;
                else
                        val &= ~CTRL_CKP;

                /* tx on rising edge */
                if (spi->mode & SPI_CPHA)
                        val &= ~CTRL_CKE;
                else
                        val |= CTRL_CKE;

                /* rx at end of tx */
                val |= CTRL_SMP;
                writel(val, &pic32s->regs->ctrl);
                pic32s->mode = spi->mode;
        }

        return 0;
}
static bool pic32_spi_can_dma(struct spi_master *master,
                              struct spi_device *spi,
                              struct spi_transfer *xfer)
{
        struct pic32_spi *pic32s = spi_master_get_devdata(master);

        /* skip using DMA on small size transfer to avoid overhead */
        return (xfer->len >= PIC32_DMA_LEN_MIN) &&
                test_bit(PIC32F_DMA_PREP, &pic32s->flags);
}
static int pic32_spi_one_transfer(struct spi_master *master,
                                  struct spi_device *spi,
                                  struct spi_transfer *transfer)
{
        struct pic32_spi *pic32s;
        bool dma_issued = false;
        unsigned long timeout;
        int ret;

        pic32s = spi_master_get_devdata(master);

        /* handle transfer specific word size change */
        if (transfer->bits_per_word &&
            (transfer->bits_per_word != pic32s->bits_per_word)) {
                ret = pic32_spi_set_word_size(pic32s, transfer->bits_per_word);
                if (ret)
                        return ret;
                pic32s->bits_per_word = transfer->bits_per_word;
        }

        /* handle transfer specific speed change */
        if (transfer->speed_hz && (transfer->speed_hz != pic32s->speed_hz)) {
                pic32_spi_set_clk_rate(pic32s, transfer->speed_hz);
                pic32s->speed_hz = transfer->speed_hz;
        }

        reinit_completion(&pic32s->xfer_done);

        /* transact by DMA mode */
        if (transfer->rx_sg.nents && transfer->tx_sg.nents) {
                ret = pic32_spi_dma_transfer(pic32s, transfer);
                if (ret) {
                        dev_err(&spi->dev, "dma submit error\n");
                        return ret;
                }

                /* DMA issued */
                dma_issued = true;
        } else {
                /* set current transfer information */
                pic32s->tx = (const void *)transfer->tx_buf;
                pic32s->rx = (const void *)transfer->rx_buf;
                pic32s->tx_end = pic32s->tx + transfer->len;
                pic32s->rx_end = pic32s->rx + transfer->len;
                pic32s->len = transfer->len;

                /* transact by interrupt driven PIO */
                enable_irq(pic32s->fault_irq);
                enable_irq(pic32s->rx_irq);
                enable_irq(pic32s->tx_irq);
        }

        /* wait for completion */
        timeout = wait_for_completion_timeout(&pic32s->xfer_done, 2 * HZ);
        if (timeout == 0) {
                dev_err(&spi->dev, "wait error/timedout\n");
                if (dma_issued) {
                        dmaengine_terminate_all(master->dma_rx);
                        dmaengine_terminate_all(master->dma_tx);
                }
                ret = -ETIMEDOUT;
        } else {
                ret = 0;
        }

        return ret;
}
static int pic32_spi_unprepare_message(struct spi_master *master,
                                       struct spi_message *msg)
{
        /* nothing to do */
        return 0;
}

static int pic32_spi_unprepare_hardware(struct spi_master *master)
{
        struct pic32_spi *pic32s = spi_master_get_devdata(master);

        pic32_spi_disable(pic32s);

        return 0;
}
/* This may be called multiple times by same spi dev */
static int pic32_spi_setup(struct spi_device *spi)
{
        if (!spi->max_speed_hz) {
                dev_err(&spi->dev, "No max speed HZ parameter\n");
                return -EINVAL;
        }

        /* The PIC32 SPI controller can drive /CS during a transfer
         * depending on the tx fifo fill-level. /CS stays asserted as
         * long as the TX fifo is non-empty and is deasserted once the
         * fifo drains, indicating completion of the ongoing transfer.
         * This can result in unreliable/erroneous SPI transactions, so
         * /CS is always handled by toggling a GPIO instead.
         */
        if (!gpio_is_valid(spi->cs_gpio))
                return -EINVAL;

        gpio_direction_output(spi->cs_gpio, !(spi->mode & SPI_CS_HIGH));

        return 0;
}

static void pic32_spi_cleanup(struct spi_device *spi)
{
        /* de-activate cs-gpio */
        gpio_direction_output(spi->cs_gpio, !(spi->mode & SPI_CS_HIGH));
}
static int pic32_spi_dma_prep(struct pic32_spi *pic32s, struct device *dev)
{
        struct spi_master *master = pic32s->master;
        int ret = 0;

        master->dma_rx = dma_request_chan(dev, "spi-rx");
        if (IS_ERR(master->dma_rx)) {
                if (PTR_ERR(master->dma_rx) == -EPROBE_DEFER)
                        ret = -EPROBE_DEFER;
                else
                        dev_warn(dev, "RX channel not found.\n");

                master->dma_rx = NULL;
                goto out_err;
        }

        master->dma_tx = dma_request_chan(dev, "spi-tx");
        if (IS_ERR(master->dma_tx)) {
                if (PTR_ERR(master->dma_tx) == -EPROBE_DEFER)
                        ret = -EPROBE_DEFER;
                else
                        dev_warn(dev, "TX channel not found.\n");

                master->dma_tx = NULL;
                goto out_err;
        }

        if (pic32_spi_dma_config(pic32s, DMA_SLAVE_BUSWIDTH_1_BYTE))
                goto out_err;

        /* DMA chnls allocated and prepared */
        set_bit(PIC32F_DMA_PREP, &pic32s->flags);

        return 0;

out_err:
        if (master->dma_rx) {
                dma_release_channel(master->dma_rx);
                master->dma_rx = NULL;
        }

        if (master->dma_tx) {
                dma_release_channel(master->dma_tx);
                master->dma_tx = NULL;
        }

        return ret;
}
static void pic32_spi_dma_unprep(struct pic32_spi *pic32s)
{
        if (!test_bit(PIC32F_DMA_PREP, &pic32s->flags))
                return;

        clear_bit(PIC32F_DMA_PREP, &pic32s->flags);
        if (pic32s->master->dma_rx)
                dma_release_channel(pic32s->master->dma_rx);

        if (pic32s->master->dma_tx)
                dma_release_channel(pic32s->master->dma_tx);
}
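
/*
 * One-time controller setup: enhanced 16-byte FIFOs, master mode,
 * framing disabled, FIFO threshold interrupts, default peripheral
 * clock source, GPIO-driven /CS and error-interrupt reporting.
 */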
static void pic32_spi_hw_init(struct pic32_spi *pic32s)
{
        u32 ctrl;

        /* disable hardware */
        pic32_spi_disable(pic32s);

        ctrl = readl(&pic32s->regs->ctrl);
        /* enable enhanced fifo of 128bit deep */
        ctrl |= CTRL_ENHBUF;
        pic32s->fifo_n_byte = 16;

        /* disable framing mode */
        ctrl &= ~CTRL_FRMEN;

        /* enable master mode while disabled */
        ctrl |= CTRL_MSTEN;

        /* set tx fifo threshold interrupt */
        ctrl &= ~(0x3 << CTRL_TX_INT_SHIFT);
        ctrl |= (TX_FIFO_HALF_EMPTY << CTRL_TX_INT_SHIFT);

        /* set rx fifo threshold interrupt */
        ctrl &= ~(0x3 << CTRL_RX_INT_SHIFT);
        ctrl |= (RX_FIFO_NOT_EMPTY << CTRL_RX_INT_SHIFT);

        /* select clk source */
        ctrl &= ~CTRL_MCLKSEL;

        /* set manual /CS mode */
        ctrl &= ~CTRL_MSSEN;

        writel(ctrl, &pic32s->regs->ctrl);

        /* enable error reporting */
        ctrl = CTRL2_TX_UR_EN | CTRL2_RX_OV_EN | CTRL2_FRM_ERR_EN;
        writel(ctrl, &pic32s->regs->ctrl2_set);
}
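
/*
 * Acquire all platform resources: registers, the fault/rx/tx
 * interrupts and the "mck0" clock, then bring the controller into a
 * known state via pic32_spi_hw_init().
 */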
static int pic32_spi_hw_probe(struct platform_device *pdev,
                              struct pic32_spi *pic32s)
{
        struct resource *mem;
        int ret;

        mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        pic32s->regs = devm_ioremap_resource(&pdev->dev, mem);
        if (IS_ERR(pic32s->regs))
                return PTR_ERR(pic32s->regs);

        pic32s->dma_base = mem->start;

        /* get irq resources: err-irq, rx-irq, tx-irq */
        pic32s->fault_irq = platform_get_irq_byname(pdev, "fault");
        if (pic32s->fault_irq < 0)
                return pic32s->fault_irq;

        pic32s->rx_irq = platform_get_irq_byname(pdev, "rx");
        if (pic32s->rx_irq < 0)
                return pic32s->rx_irq;

        pic32s->tx_irq = platform_get_irq_byname(pdev, "tx");
        if (pic32s->tx_irq < 0)
                return pic32s->tx_irq;

        /* get clock */
        pic32s->clk = devm_clk_get(&pdev->dev, "mck0");
        if (IS_ERR(pic32s->clk)) {
                dev_err(&pdev->dev, "clk not found\n");
                ret = PTR_ERR(pic32s->clk);
                goto err_out;
        }

        ret = clk_prepare_enable(pic32s->clk);
        if (ret)
                goto err_out;

        pic32_spi_hw_init(pic32s);

        return 0;

err_out:
        dev_err(&pdev->dev, "%s failed, err %d\n", __func__, ret);
        return ret;
}
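
/*
 * Probe: allocate the SPI master, hook up the controller callbacks,
 * set up optional DMA and the three interrupt handlers (left disabled
 * until a PIO transfer starts), then register with the SPI core.
 */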
static int pic32_spi_probe(struct platform_device *pdev)
{
        struct spi_master *master;
        struct pic32_spi *pic32s;
        int ret;

        master = spi_alloc_master(&pdev->dev, sizeof(*pic32s));
        if (!master)
                return -ENOMEM;

        pic32s = spi_master_get_devdata(master);
        pic32s->master = master;

        ret = pic32_spi_hw_probe(pdev, pic32s);
        if (ret)
                goto err_master;

        master->dev.of_node = pdev->dev.of_node;
        master->mode_bits = SPI_MODE_3 | SPI_MODE_0 | SPI_CS_HIGH;
        master->num_chipselect = 1; /* single chip-select */
        master->max_speed_hz = clk_get_rate(pic32s->clk);
        master->setup = pic32_spi_setup;
        master->cleanup = pic32_spi_cleanup;
        master->flags = SPI_MASTER_MUST_TX | SPI_MASTER_MUST_RX;
        master->bits_per_word_mask = SPI_BPW_MASK(8) | SPI_BPW_MASK(16) |
                                     SPI_BPW_MASK(32);
        master->transfer_one = pic32_spi_one_transfer;
        master->prepare_message = pic32_spi_prepare_message;
        master->unprepare_message = pic32_spi_unprepare_message;
        master->prepare_transfer_hardware = pic32_spi_prepare_hardware;
        master->unprepare_transfer_hardware = pic32_spi_unprepare_hardware;

        /* optional DMA support */
        ret = pic32_spi_dma_prep(pic32s, &pdev->dev);
        if (ret)
                goto err_bailout;

        if (test_bit(PIC32F_DMA_PREP, &pic32s->flags))
                master->can_dma = pic32_spi_can_dma;

        init_completion(&pic32s->xfer_done);
        pic32s->mode = -1;

        /* install irq handlers (with irq-disabled) */
        irq_set_status_flags(pic32s->fault_irq, IRQ_NOAUTOEN);
        ret = devm_request_irq(&pdev->dev, pic32s->fault_irq,
                               pic32_spi_fault_irq, IRQF_NO_THREAD,
                               dev_name(&pdev->dev), pic32s);
        if (ret < 0) {
                dev_err(&pdev->dev, "request fault-irq %d\n", pic32s->fault_irq);
                goto err_bailout;
        }

        /* receive interrupt handler */
        irq_set_status_flags(pic32s->rx_irq, IRQ_NOAUTOEN);
        ret = devm_request_irq(&pdev->dev, pic32s->rx_irq,
                               pic32_spi_rx_irq, IRQF_NO_THREAD,
                               dev_name(&pdev->dev), pic32s);
        if (ret < 0) {
                dev_err(&pdev->dev, "request rx-irq %d\n", pic32s->rx_irq);
                goto err_bailout;
        }

        /* transmit interrupt handler */
        irq_set_status_flags(pic32s->tx_irq, IRQ_NOAUTOEN);
        ret = devm_request_irq(&pdev->dev, pic32s->tx_irq,
                               pic32_spi_tx_irq, IRQF_NO_THREAD,
                               dev_name(&pdev->dev), pic32s);
        if (ret < 0) {
                dev_err(&pdev->dev, "request tx-irq %d\n", pic32s->tx_irq);
                goto err_bailout;
        }

        /* register master */
        ret = devm_spi_register_master(&pdev->dev, master);
        if (ret) {
                dev_err(&master->dev, "failed registering spi master\n");
                goto err_bailout;
        }

        platform_set_drvdata(pdev, pic32s);

        return 0;

err_bailout:
        pic32_spi_dma_unprep(pic32s);
        clk_disable_unprepare(pic32s->clk);
err_master:
        spi_master_put(master);
        return ret;
}
static int pic32_spi_remove(struct platform_device *pdev)
{
        struct pic32_spi *pic32s;

        pic32s = platform_get_drvdata(pdev);
        pic32_spi_disable(pic32s);
        clk_disable_unprepare(pic32s->clk);
        pic32_spi_dma_unprep(pic32s);

        return 0;
}
static const struct of_device_id pic32_spi_of_match[] = {
        {.compatible = "microchip,pic32mzda-spi",},
        {},
};
MODULE_DEVICE_TABLE(of, pic32_spi_of_match);

static struct platform_driver pic32_spi_driver = {
        .driver = {
                .name = "spi-pic32",
                .of_match_table = of_match_ptr(pic32_spi_of_match),
        },
        .probe = pic32_spi_probe,
        .remove = pic32_spi_remove,
};

module_platform_driver(pic32_spi_driver);

MODULE_AUTHOR("Purna Chandra Mandal <purna.mandal@microchip.com>");
MODULE_DESCRIPTION("Microchip SPI driver for PIC32 SPI controller.");
MODULE_LICENSE("GPL v2");