/*
 * Copyright (C) 2015, Michael Lee <igvtee@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 */
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/irq.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/reset.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

#include "virt-dma.h"
/* offset of the HSDMA register block inside the mapped resource */
#define HSDMA_BASE_OFFSET		0x800

/* HSDMA registers */
#define HSDMA_REG_TX_BASE		0x00
#define HSDMA_REG_TX_CNT		0x04
#define HSDMA_REG_TX_CTX		0x08
#define HSDMA_REG_TX_DTX		0x0c
#define HSDMA_REG_RX_BASE		0x100
#define HSDMA_REG_RX_CNT		0x104
#define HSDMA_REG_RX_CRX		0x108
#define HSDMA_REG_RX_DRX		0x10c
#define HSDMA_REG_INFO			0x200
#define HSDMA_REG_GLO_CFG		0x204
#define HSDMA_REG_RST_CFG		0x208
#define HSDMA_REG_DELAY_INT		0x20c
#define HSDMA_REG_FREEQ_THRES		0x210
#define HSDMA_REG_INT_STATUS		0x220
#define HSDMA_REG_INT_MASK		0x228
#define HSDMA_REG_SCH_Q01		0x280
#define HSDMA_REG_SCH_Q23		0x284

/* descriptor ring geometry; NUM must stay a power of two for the masks */
#define HSDMA_DESCS_MAX			0xfff
#define HSDMA_DESCS_NUM			8
#define HSDMA_DESCS_MASK		(HSDMA_DESCS_NUM - 1)
#define HSDMA_NEXT_DESC(x)		(((x) + 1) & HSDMA_DESCS_MASK)

/* HSDMA_REG_INFO */
#define HSDMA_INFO_INDEX_MASK		0xf
#define HSDMA_INFO_INDEX_SHIFT		24
#define HSDMA_INFO_BASE_MASK		0xff
#define HSDMA_INFO_BASE_SHIFT		16
#define HSDMA_INFO_RX_MASK		0xff
#define HSDMA_INFO_RX_SHIFT		8
#define HSDMA_INFO_TX_MASK		0xff
#define HSDMA_INFO_TX_SHIFT		0

/* HSDMA_REG_GLO_CFG */
#define HSDMA_GLO_TX_2B_OFFSET		BIT(31)
#define HSDMA_GLO_CLK_GATE		BIT(30)
#define HSDMA_GLO_BYTE_SWAP		BIT(29)
#define HSDMA_GLO_MULTI_DMA		BIT(10)
#define HSDMA_GLO_TWO_BUF		BIT(9)
#define HSDMA_GLO_32B_DESC		BIT(8)
#define HSDMA_GLO_BIG_ENDIAN		BIT(7)
#define HSDMA_GLO_TX_DONE		BIT(6)
#define HSDMA_GLO_BT_MASK		0x3
#define HSDMA_GLO_BT_SHIFT		4
#define HSDMA_GLO_RX_BUSY		BIT(3)
#define HSDMA_GLO_RX_DMA		BIT(2)
#define HSDMA_GLO_TX_BUSY		BIT(1)
#define HSDMA_GLO_TX_DMA		BIT(0)

/* burst-size selection for HSDMA_GLO_BT field */
#define HSDMA_BT_SIZE_16BYTES		(0 << HSDMA_GLO_BT_SHIFT)
#define HSDMA_BT_SIZE_32BYTES		(1 << HSDMA_GLO_BT_SHIFT)
#define HSDMA_BT_SIZE_64BYTES		(2 << HSDMA_GLO_BT_SHIFT)
#define HSDMA_BT_SIZE_128BYTES		(3 << HSDMA_GLO_BT_SHIFT)

#define HSDMA_GLO_DEFAULT		(HSDMA_GLO_MULTI_DMA | \
		HSDMA_GLO_RX_DMA | HSDMA_GLO_TX_DMA | HSDMA_BT_SIZE_32BYTES)

/* HSDMA_REG_RST_CFG */
#define HSDMA_RST_RX_SHIFT		16
#define HSDMA_RST_TX_SHIFT		0

/* HSDMA_REG_DELAY_INT */
#define HSDMA_DELAY_INT_EN		BIT(15)
#define HSDMA_DELAY_PEND_OFFSET		8
#define HSDMA_DELAY_TIME_OFFSET		0
#define HSDMA_DELAY_TX_OFFSET		16
#define HSDMA_DELAY_RX_OFFSET		0

#define HSDMA_DELAY_INIT(x)		(HSDMA_DELAY_INT_EN | \
		((x) << HSDMA_DELAY_PEND_OFFSET))
#define HSDMA_DELAY(x)			((HSDMA_DELAY_INIT(x) << \
		HSDMA_DELAY_TX_OFFSET) | HSDMA_DELAY_INIT(x))

/* HSDMA_REG_INT_STATUS */
#define HSDMA_INT_DELAY_RX_COH		BIT(31)
#define HSDMA_INT_DELAY_RX_INT		BIT(30)
#define HSDMA_INT_DELAY_TX_COH		BIT(29)
#define HSDMA_INT_DELAY_TX_INT		BIT(28)
#define HSDMA_INT_RX_MASK		0x3
#define HSDMA_INT_RX_SHIFT		16
#define HSDMA_INT_RX_Q0			BIT(16)
#define HSDMA_INT_TX_MASK		0xf
#define HSDMA_INT_TX_SHIFT		0
#define HSDMA_INT_TX_Q0			BIT(0)

/* tx/rx dma desc flags */
#define HSDMA_PLEN_MASK			0x3fff
#define HSDMA_DESC_DONE			BIT(31)
#define HSDMA_DESC_LS0			BIT(30)
#define HSDMA_DESC_PLEN0(_x)		(((_x) & HSDMA_PLEN_MASK) << 16)
#define HSDMA_DESC_TAG			BIT(15)
#define HSDMA_DESC_LS1			BIT(14)
#define HSDMA_DESC_PLEN1(_x)		((_x) & HSDMA_PLEN_MASK)

/* align 4 bytes */
#define HSDMA_ALIGN_SIZE		3
/* align size 128bytes */
#define HSDMA_MAX_PLEN			0x3f80
136 struct mtk_hsdma_sg
{
142 struct mtk_hsdma_desc
{
143 struct virt_dma_desc vdesc
;
144 unsigned int num_sgs
;
145 struct mtk_hsdma_sg sg
[1];
148 struct mtk_hsdma_chan
{
149 struct virt_dma_chan vchan
;
151 dma_addr_t desc_addr
;
154 struct hsdma_desc
*tx_ring
;
155 struct hsdma_desc
*rx_ring
;
156 struct mtk_hsdma_desc
*desc
;
157 unsigned int next_sg
;
160 struct mtk_hsdam_engine
{
161 struct dma_device ddev
;
162 struct device_dma_parameters dma_parms
;
164 struct tasklet_struct task
;
165 volatile unsigned long chan_issued
;
167 struct mtk_hsdma_chan chan
[1];
170 static inline struct mtk_hsdam_engine
*mtk_hsdma_chan_get_dev(
171 struct mtk_hsdma_chan
*chan
)
173 return container_of(chan
->vchan
.chan
.device
, struct mtk_hsdam_engine
,
177 static inline struct mtk_hsdma_chan
*to_mtk_hsdma_chan(struct dma_chan
*c
)
179 return container_of(c
, struct mtk_hsdma_chan
, vchan
.chan
);
182 static inline struct mtk_hsdma_desc
*to_mtk_hsdma_desc(
183 struct virt_dma_desc
*vdesc
)
185 return container_of(vdesc
, struct mtk_hsdma_desc
, vdesc
);
188 static inline u32
mtk_hsdma_read(struct mtk_hsdam_engine
*hsdma
, u32 reg
)
190 return readl(hsdma
->base
+ reg
);
193 static inline void mtk_hsdma_write(struct mtk_hsdam_engine
*hsdma
,
194 unsigned reg
, u32 val
)
196 writel(val
, hsdma
->base
+ reg
);
199 static void mtk_hsdma_reset_chan(struct mtk_hsdam_engine
*hsdma
,
200 struct mtk_hsdma_chan
*chan
)
203 chan
->rx_idx
= HSDMA_DESCS_NUM
- 1;
205 mtk_hsdma_write(hsdma
, HSDMA_REG_TX_CTX
, chan
->tx_idx
);
206 mtk_hsdma_write(hsdma
, HSDMA_REG_RX_CRX
, chan
->rx_idx
);
208 mtk_hsdma_write(hsdma
, HSDMA_REG_RST_CFG
,
209 0x1 << (chan
->id
+ HSDMA_RST_TX_SHIFT
));
210 mtk_hsdma_write(hsdma
, HSDMA_REG_RST_CFG
,
211 0x1 << (chan
->id
+ HSDMA_RST_RX_SHIFT
));
214 static void hsdma_dump_reg(struct mtk_hsdam_engine
*hsdma
)
216 dev_dbg(hsdma
->ddev
.dev
, "tbase %08x, tcnt %08x, " \
217 "tctx %08x, tdtx: %08x, rbase %08x, " \
218 "rcnt %08x, rctx %08x, rdtx %08x\n",
219 mtk_hsdma_read(hsdma
, HSDMA_REG_TX_BASE
),
220 mtk_hsdma_read(hsdma
, HSDMA_REG_TX_CNT
),
221 mtk_hsdma_read(hsdma
, HSDMA_REG_TX_CTX
),
222 mtk_hsdma_read(hsdma
, HSDMA_REG_TX_DTX
),
223 mtk_hsdma_read(hsdma
, HSDMA_REG_RX_BASE
),
224 mtk_hsdma_read(hsdma
, HSDMA_REG_RX_CNT
),
225 mtk_hsdma_read(hsdma
, HSDMA_REG_RX_CRX
),
226 mtk_hsdma_read(hsdma
, HSDMA_REG_RX_DRX
));
228 dev_dbg(hsdma
->ddev
.dev
, "info %08x, glo %08x, delay %08x, " \
229 "intr_stat %08x, intr_mask %08x\n",
230 mtk_hsdma_read(hsdma
, HSDMA_REG_INFO
),
231 mtk_hsdma_read(hsdma
, HSDMA_REG_GLO_CFG
),
232 mtk_hsdma_read(hsdma
, HSDMA_REG_DELAY_INT
),
233 mtk_hsdma_read(hsdma
, HSDMA_REG_INT_STATUS
),
234 mtk_hsdma_read(hsdma
, HSDMA_REG_INT_MASK
));
237 static void hsdma_dump_desc(struct mtk_hsdam_engine
*hsdma
,
238 struct mtk_hsdma_chan
*chan
)
240 struct hsdma_desc
*tx_desc
;
241 struct hsdma_desc
*rx_desc
;
244 dev_dbg(hsdma
->ddev
.dev
, "tx idx: %d, rx idx: %d\n",
245 chan
->tx_idx
, chan
->rx_idx
);
247 for (i
= 0; i
< HSDMA_DESCS_NUM
; i
++) {
248 tx_desc
= &chan
->tx_ring
[i
];
249 rx_desc
= &chan
->rx_ring
[i
];
251 dev_dbg(hsdma
->ddev
.dev
, "%d tx addr0: %08x, flags %08x, " \
252 "tx addr1: %08x, rx addr0 %08x, flags %08x\n",
253 i
, tx_desc
->addr0
, tx_desc
->flags
, \
254 tx_desc
->addr1
, rx_desc
->addr0
, rx_desc
->flags
);
258 static void mtk_hsdma_reset(struct mtk_hsdam_engine
*hsdma
,
259 struct mtk_hsdma_chan
*chan
)
264 mtk_hsdma_write(hsdma
, HSDMA_REG_GLO_CFG
, 0);
267 mtk_hsdma_write(hsdma
, HSDMA_REG_INT_MASK
, 0);
269 /* init desc value */
270 for (i
= 0; i
< HSDMA_DESCS_NUM
; i
++) {
271 chan
->tx_ring
[i
].addr0
= 0;
272 chan
->tx_ring
[i
].flags
= HSDMA_DESC_LS0
|
275 for (i
= 0; i
< HSDMA_DESCS_NUM
; i
++) {
276 chan
->rx_ring
[i
].addr0
= 0;
277 chan
->rx_ring
[i
].flags
= 0;
281 mtk_hsdma_reset_chan(hsdma
, chan
);
284 mtk_hsdma_write(hsdma
, HSDMA_REG_INT_MASK
, HSDMA_INT_RX_Q0
);
287 mtk_hsdma_write(hsdma
, HSDMA_REG_GLO_CFG
, HSDMA_GLO_DEFAULT
);
290 static int mtk_hsdma_terminate_all(struct dma_chan
*c
)
292 struct mtk_hsdma_chan
*chan
= to_mtk_hsdma_chan(c
);
293 struct mtk_hsdam_engine
*hsdma
= mtk_hsdma_chan_get_dev(chan
);
294 unsigned long timeout
;
297 spin_lock_bh(&chan
->vchan
.lock
);
299 clear_bit(chan
->id
, &hsdma
->chan_issued
);
300 vchan_get_all_descriptors(&chan
->vchan
, &head
);
301 spin_unlock_bh(&chan
->vchan
.lock
);
303 vchan_dma_desc_free_list(&chan
->vchan
, &head
);
305 /* wait dma transfer complete */
306 timeout
= jiffies
+ msecs_to_jiffies(2000);
307 while (mtk_hsdma_read(hsdma
, HSDMA_REG_GLO_CFG
) &
308 (HSDMA_GLO_RX_BUSY
| HSDMA_GLO_TX_BUSY
)) {
309 if (time_after_eq(jiffies
, timeout
)) {
310 hsdma_dump_desc(hsdma
, chan
);
311 mtk_hsdma_reset(hsdma
, chan
);
312 dev_err(hsdma
->ddev
.dev
, "timeout, reset it\n");
321 static int mtk_hsdma_start_transfer(struct mtk_hsdam_engine
*hsdma
,
322 struct mtk_hsdma_chan
*chan
)
326 struct hsdma_desc
*tx_desc
, *rx_desc
;
327 struct mtk_hsdma_sg
*sg
;
331 sg
= &chan
->desc
->sg
[0];
333 chan
->desc
->num_sgs
= DIV_ROUND_UP(len
, HSDMA_MAX_PLEN
);
337 for (i
= 0; i
< chan
->desc
->num_sgs
; i
++) {
338 tx_desc
= &chan
->tx_ring
[chan
->tx_idx
];
340 if (len
> HSDMA_MAX_PLEN
)
341 tlen
= HSDMA_MAX_PLEN
;
346 tx_desc
->addr1
= src
;
347 tx_desc
->flags
|= HSDMA_DESC_PLEN1(tlen
);
349 tx_desc
->addr0
= src
;
350 tx_desc
->flags
= HSDMA_DESC_PLEN0(tlen
);
353 chan
->tx_idx
= HSDMA_NEXT_DESC(chan
->tx_idx
);
360 tx_desc
->flags
|= HSDMA_DESC_LS0
;
362 tx_desc
->flags
|= HSDMA_DESC_LS1
;
365 rx_idx
= HSDMA_NEXT_DESC(chan
->rx_idx
);
368 for (i
= 0; i
< chan
->desc
->num_sgs
; i
++) {
369 rx_desc
= &chan
->rx_ring
[rx_idx
];
370 if (len
> HSDMA_MAX_PLEN
)
371 tlen
= HSDMA_MAX_PLEN
;
375 rx_desc
->addr0
= dst
;
376 rx_desc
->flags
= HSDMA_DESC_PLEN0(tlen
);
382 rx_idx
= HSDMA_NEXT_DESC(rx_idx
);
385 /* make sure desc and index all up to date */
387 mtk_hsdma_write(hsdma
, HSDMA_REG_TX_CTX
, chan
->tx_idx
);
392 static int gdma_next_desc(struct mtk_hsdma_chan
*chan
)
394 struct virt_dma_desc
*vdesc
;
396 vdesc
= vchan_next_desc(&chan
->vchan
);
401 chan
->desc
= to_mtk_hsdma_desc(vdesc
);
407 static void mtk_hsdma_chan_done(struct mtk_hsdam_engine
*hsdma
,
408 struct mtk_hsdma_chan
*chan
)
410 struct mtk_hsdma_desc
*desc
;
414 spin_lock_bh(&chan
->vchan
.lock
);
417 if (chan
->next_sg
== desc
->num_sgs
) {
418 list_del(&desc
->vdesc
.node
);
419 vchan_cookie_complete(&desc
->vdesc
);
420 chan_issued
= gdma_next_desc(chan
);
423 dev_dbg(hsdma
->ddev
.dev
, "no desc to complete\n");
426 set_bit(chan
->id
, &hsdma
->chan_issued
);
427 spin_unlock_bh(&chan
->vchan
.lock
);
430 static irqreturn_t
mtk_hsdma_irq(int irq
, void *devid
)
432 struct mtk_hsdam_engine
*hsdma
= devid
;
435 status
= mtk_hsdma_read(hsdma
, HSDMA_REG_INT_STATUS
);
436 if (unlikely(!status
))
439 if (likely(status
& HSDMA_INT_RX_Q0
))
440 tasklet_schedule(&hsdma
->task
);
442 dev_dbg(hsdma
->ddev
.dev
, "unhandle irq status %08x\n",
444 /* clean intr bits */
445 mtk_hsdma_write(hsdma
, HSDMA_REG_INT_STATUS
, status
);
450 static void mtk_hsdma_issue_pending(struct dma_chan
*c
)
452 struct mtk_hsdma_chan
*chan
= to_mtk_hsdma_chan(c
);
453 struct mtk_hsdam_engine
*hsdma
= mtk_hsdma_chan_get_dev(chan
);
455 spin_lock_bh(&chan
->vchan
.lock
);
456 if (vchan_issue_pending(&chan
->vchan
) && !chan
->desc
) {
457 if (gdma_next_desc(chan
)) {
458 set_bit(chan
->id
, &hsdma
->chan_issued
);
459 tasklet_schedule(&hsdma
->task
);
461 dev_dbg(hsdma
->ddev
.dev
, "no desc to issue\n");
463 spin_unlock_bh(&chan
->vchan
.lock
);
466 static struct dma_async_tx_descriptor
*mtk_hsdma_prep_dma_memcpy(
467 struct dma_chan
*c
, dma_addr_t dest
, dma_addr_t src
,
468 size_t len
, unsigned long flags
)
470 struct mtk_hsdma_chan
*chan
= to_mtk_hsdma_chan(c
);
471 struct mtk_hsdma_desc
*desc
;
476 desc
= kzalloc(sizeof(struct mtk_hsdma_desc
), GFP_ATOMIC
);
478 dev_err(c
->device
->dev
, "alloc memcpy decs error\n");
482 desc
->sg
[0].src_addr
= src
;
483 desc
->sg
[0].dst_addr
= dest
;
484 desc
->sg
[0].len
= len
;
486 return vchan_tx_prep(&chan
->vchan
, &desc
->vdesc
, flags
);
489 static enum dma_status
mtk_hsdma_tx_status(struct dma_chan
*c
,
491 struct dma_tx_state
*state
)
493 return dma_cookie_status(c
, cookie
, state
);
/* dmaengine device_free_chan_resources callback: delegate to virt-dma. */
static void mtk_hsdma_free_chan_resources(struct dma_chan *c)
{
	vchan_free_chan_resources(to_virt_chan(c));
}
501 static void mtk_hsdma_desc_free(struct virt_dma_desc
*vdesc
)
503 kfree(container_of(vdesc
, struct mtk_hsdma_desc
, vdesc
));
506 static void mtk_hsdma_tx(struct mtk_hsdam_engine
*hsdma
)
508 struct mtk_hsdma_chan
*chan
;
510 if (test_and_clear_bit(0, &hsdma
->chan_issued
)) {
511 chan
= &hsdma
->chan
[0];
513 mtk_hsdma_start_transfer(hsdma
, chan
);
515 dev_dbg(hsdma
->ddev
.dev
, "chan 0 no desc to issue\n");
519 static void mtk_hsdma_rx(struct mtk_hsdam_engine
*hsdma
)
521 struct mtk_hsdma_chan
*chan
;
522 int next_idx
, drx_idx
, cnt
;
524 chan
= &hsdma
->chan
[0];
525 next_idx
= HSDMA_NEXT_DESC(chan
->rx_idx
);
526 drx_idx
= mtk_hsdma_read(hsdma
, HSDMA_REG_RX_DRX
);
528 cnt
= (drx_idx
- next_idx
) & HSDMA_DESCS_MASK
;
532 chan
->next_sg
+= cnt
;
533 chan
->rx_idx
= (chan
->rx_idx
+ cnt
) & HSDMA_DESCS_MASK
;
537 mtk_hsdma_write(hsdma
, HSDMA_REG_RX_CRX
, chan
->rx_idx
);
539 mtk_hsdma_chan_done(hsdma
, chan
);
/*
 * Bottom half scheduled from the irq handler: reap completed rx work,
 * then start any newly issued transfer.
 * NOTE(review): body lines were dropped by the extraction; the rx-then-tx
 * dispatch is reconstructed — confirm against the upstream driver.
 */
static void mtk_hsdma_tasklet(unsigned long arg)
{
	struct mtk_hsdam_engine *hsdma = (struct mtk_hsdam_engine *)arg;

	mtk_hsdma_rx(hsdma);
	mtk_hsdma_tx(hsdma);
}
550 static int mtk_hsdam_alloc_desc(struct mtk_hsdam_engine
*hsdma
,
551 struct mtk_hsdma_chan
*chan
)
555 chan
->tx_ring
= dma_alloc_coherent(hsdma
->ddev
.dev
,
556 2 * HSDMA_DESCS_NUM
* sizeof(*chan
->tx_ring
),
557 &chan
->desc_addr
, GFP_ATOMIC
| __GFP_ZERO
);
561 chan
->rx_ring
= &chan
->tx_ring
[HSDMA_DESCS_NUM
];
563 /* init tx ring value */
564 for (i
= 0; i
< HSDMA_DESCS_NUM
; i
++)
565 chan
->tx_ring
[i
].flags
= HSDMA_DESC_LS0
| HSDMA_DESC_DONE
;
572 static void mtk_hsdam_free_desc(struct mtk_hsdam_engine
*hsdma
,
573 struct mtk_hsdma_chan
*chan
)
576 dma_free_coherent(hsdma
->ddev
.dev
,
577 2 * HSDMA_DESCS_NUM
* sizeof(*chan
->tx_ring
),
578 chan
->tx_ring
, chan
->desc_addr
);
579 chan
->tx_ring
= NULL
;
580 chan
->rx_ring
= NULL
;
584 static int mtk_hsdma_init(struct mtk_hsdam_engine
*hsdma
)
586 struct mtk_hsdma_chan
*chan
;
591 chan
= &hsdma
->chan
[0];
592 ret
= mtk_hsdam_alloc_desc(hsdma
, chan
);
597 mtk_hsdma_write(hsdma
, HSDMA_REG_TX_BASE
, chan
->desc_addr
);
598 mtk_hsdma_write(hsdma
, HSDMA_REG_TX_CNT
, HSDMA_DESCS_NUM
);
600 mtk_hsdma_write(hsdma
, HSDMA_REG_RX_BASE
, chan
->desc_addr
+
601 (sizeof(struct hsdma_desc
) * HSDMA_DESCS_NUM
));
602 mtk_hsdma_write(hsdma
, HSDMA_REG_RX_CNT
, HSDMA_DESCS_NUM
);
604 mtk_hsdma_reset_chan(hsdma
, chan
);
607 mtk_hsdma_write(hsdma
, HSDMA_REG_INT_MASK
, HSDMA_INT_RX_Q0
);
610 mtk_hsdma_write(hsdma
, HSDMA_REG_GLO_CFG
, HSDMA_GLO_DEFAULT
);
613 reg
= mtk_hsdma_read(hsdma
, HSDMA_REG_INFO
);
614 dev_info(hsdma
->ddev
.dev
, "rx: %d, tx: %d\n",
615 (reg
>> HSDMA_INFO_RX_SHIFT
) & HSDMA_INFO_RX_MASK
,
616 (reg
>> HSDMA_INFO_TX_SHIFT
) & HSDMA_INFO_TX_MASK
);
618 hsdma_dump_reg(hsdma
);
623 static void mtk_hsdma_uninit(struct mtk_hsdam_engine
*hsdma
)
625 struct mtk_hsdma_chan
*chan
;
628 mtk_hsdma_write(hsdma
, HSDMA_REG_GLO_CFG
, 0);
631 mtk_hsdma_write(hsdma
, HSDMA_REG_INT_MASK
, 0);
634 chan
= &hsdma
->chan
[0];
635 mtk_hsdam_free_desc(hsdma
, chan
);
638 mtk_hsdma_write(hsdma
, HSDMA_REG_TX_BASE
, 0);
639 mtk_hsdma_write(hsdma
, HSDMA_REG_TX_CNT
, 0);
641 mtk_hsdma_write(hsdma
, HSDMA_REG_RX_BASE
, 0);
642 mtk_hsdma_write(hsdma
, HSDMA_REG_RX_CNT
, 0);
644 mtk_hsdma_reset_chan(hsdma
, chan
);
647 static const struct of_device_id mtk_hsdma_of_match
[] = {
648 { .compatible
= "mediatek,mt7621-hsdma" },
652 static int mtk_hsdma_probe(struct platform_device
*pdev
)
654 const struct of_device_id
*match
;
655 struct mtk_hsdma_chan
*chan
;
656 struct mtk_hsdam_engine
*hsdma
;
657 struct dma_device
*dd
;
658 struct resource
*res
;
663 ret
= dma_set_mask_and_coherent(&pdev
->dev
, DMA_BIT_MASK(32));
667 match
= of_match_device(mtk_hsdma_of_match
, &pdev
->dev
);
671 hsdma
= devm_kzalloc(&pdev
->dev
, sizeof(*hsdma
), GFP_KERNEL
);
673 dev_err(&pdev
->dev
, "alloc dma device failed\n");
677 res
= platform_get_resource(pdev
, IORESOURCE_MEM
, 0);
678 base
= devm_ioremap_resource(&pdev
->dev
, res
);
680 return PTR_ERR(base
);
681 hsdma
->base
= base
+ HSDMA_BASE_OFFSET
;
682 tasklet_init(&hsdma
->task
, mtk_hsdma_tasklet
, (unsigned long)hsdma
);
684 irq
= platform_get_irq(pdev
, 0);
686 dev_err(&pdev
->dev
, "failed to get irq\n");
689 ret
= devm_request_irq(&pdev
->dev
, irq
, mtk_hsdma_irq
,
690 0, dev_name(&pdev
->dev
), hsdma
);
692 dev_err(&pdev
->dev
, "failed to request irq\n");
696 device_reset(&pdev
->dev
);
699 dma_cap_set(DMA_MEMCPY
, dd
->cap_mask
);
700 dd
->copy_align
= HSDMA_ALIGN_SIZE
;
701 dd
->device_free_chan_resources
= mtk_hsdma_free_chan_resources
;
702 dd
->device_prep_dma_memcpy
= mtk_hsdma_prep_dma_memcpy
;
703 dd
->device_terminate_all
= mtk_hsdma_terminate_all
;
704 dd
->device_tx_status
= mtk_hsdma_tx_status
;
705 dd
->device_issue_pending
= mtk_hsdma_issue_pending
;
706 dd
->dev
= &pdev
->dev
;
707 dd
->dev
->dma_parms
= &hsdma
->dma_parms
;
708 dma_set_max_seg_size(dd
->dev
, HSDMA_MAX_PLEN
);
709 INIT_LIST_HEAD(&dd
->channels
);
711 chan
= &hsdma
->chan
[0];
713 chan
->vchan
.desc_free
= mtk_hsdma_desc_free
;
714 vchan_init(&chan
->vchan
, dd
);
717 ret
= mtk_hsdma_init(hsdma
);
719 dev_err(&pdev
->dev
, "failed to alloc ring descs\n");
723 ret
= dma_async_device_register(dd
);
725 dev_err(&pdev
->dev
, "failed to register dma device\n");
729 ret
= of_dma_controller_register(pdev
->dev
.of_node
,
730 of_dma_xlate_by_chan_id
, hsdma
);
732 dev_err(&pdev
->dev
, "failed to register of dma controller\n");
736 platform_set_drvdata(pdev
, hsdma
);
741 dma_async_device_unregister(dd
);
745 static int mtk_hsdma_remove(struct platform_device
*pdev
)
747 struct mtk_hsdam_engine
*hsdma
= platform_get_drvdata(pdev
);
749 mtk_hsdma_uninit(hsdma
);
751 of_dma_controller_free(pdev
->dev
.of_node
);
752 dma_async_device_unregister(&hsdma
->ddev
);
757 static struct platform_driver mtk_hsdma_driver
= {
758 .probe
= mtk_hsdma_probe
,
759 .remove
= mtk_hsdma_remove
,
761 .name
= "hsdma-mt7621",
762 .of_match_table
= mtk_hsdma_of_match
,
765 module_platform_driver(mtk_hsdma_driver
);
767 MODULE_AUTHOR("Michael Lee <igvtee@gmail.com>");
768 MODULE_DESCRIPTION("MTK HSDMA driver");
769 MODULE_LICENSE("GPL v2");