/*
 * Copyright (C) 2017 Spreadtrum Communications Inc.
 *
 * SPDX-License-Identifier: GPL-2.0
 */

#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_dma.h>
#include <linux/of_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>

#include "virt-dma.h"

#define SPRD_DMA_CHN_REG_OFFSET		0x1000
#define SPRD_DMA_CHN_REG_LENGTH		0x40
#define SPRD_DMA_MEMCPY_MIN_SIZE	64
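
/*
 * Channel register windows follow the global registers: channel i sits at
 * glb_base + SPRD_DMA_CHN_REG_OFFSET + i * SPRD_DMA_CHN_REG_LENGTH (see the
 * channel setup loop in sprd_dma_probe()).
 */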

/* DMA global registers definition */
#define SPRD_DMA_GLB_PAUSE		0x0
#define SPRD_DMA_GLB_FRAG_WAIT		0x4
#define SPRD_DMA_GLB_REQ_PEND0_EN	0x8
#define SPRD_DMA_GLB_REQ_PEND1_EN	0xc
#define SPRD_DMA_GLB_INT_RAW_STS	0x10
#define SPRD_DMA_GLB_INT_MSK_STS	0x14
#define SPRD_DMA_GLB_REQ_STS		0x18
#define SPRD_DMA_GLB_CHN_EN_STS		0x1c
#define SPRD_DMA_GLB_DEBUG_STS		0x20
#define SPRD_DMA_GLB_ARB_SEL_STS	0x24
#define SPRD_DMA_GLB_REQ_UID(uid)	(0x4 * ((uid) - 1))
#define SPRD_DMA_GLB_REQ_UID_OFFSET	0x2000

/* DMA channel registers definition */
#define SPRD_DMA_CHN_PAUSE		0x0
#define SPRD_DMA_CHN_REQ		0x4
#define SPRD_DMA_CHN_CFG		0x8
#define SPRD_DMA_CHN_INTC		0xc
#define SPRD_DMA_CHN_SRC_ADDR		0x10
#define SPRD_DMA_CHN_DES_ADDR		0x14
#define SPRD_DMA_CHN_FRG_LEN		0x18
#define SPRD_DMA_CHN_BLK_LEN		0x1c
#define SPRD_DMA_CHN_TRSC_LEN		0x20
#define SPRD_DMA_CHN_TRSF_STEP		0x24
#define SPRD_DMA_CHN_WARP_PTR		0x28
#define SPRD_DMA_CHN_WARP_TO		0x2c
#define SPRD_DMA_CHN_LLIST_PTR		0x30
#define SPRD_DMA_CHN_FRAG_STEP		0x34
#define SPRD_DMA_CHN_SRC_BLK_STEP	0x38
#define SPRD_DMA_CHN_DES_BLK_STEP	0x3c

/* SPRD_DMA_CHN_INTC register definition */
#define SPRD_DMA_INT_MASK		GENMASK(4, 0)
#define SPRD_DMA_INT_CLR_OFFSET		24
#define SPRD_DMA_FRAG_INT_EN		BIT(0)
#define SPRD_DMA_BLK_INT_EN		BIT(1)
#define SPRD_DMA_TRANS_INT_EN		BIT(2)
#define SPRD_DMA_LIST_INT_EN		BIT(3)
#define SPRD_DMA_CFG_ERR_INT_EN		BIT(4)

/* SPRD_DMA_CHN_CFG register definition */
#define SPRD_DMA_CHN_EN			BIT(0)
#define SPRD_DMA_WAIT_BDONE_OFFSET	24
#define SPRD_DMA_DONOT_WAIT_BDONE	1

/* SPRD_DMA_CHN_REQ register definition */
#define SPRD_DMA_REQ_EN			BIT(0)

/* SPRD_DMA_CHN_PAUSE register definition */
#define SPRD_DMA_PAUSE_EN		BIT(0)
#define SPRD_DMA_PAUSE_STS		BIT(2)
#define SPRD_DMA_PAUSE_CNT		0x2000

/* DMA_CHN_WARP_* register definition */
#define SPRD_DMA_HIGH_ADDR_MASK		GENMASK(31, 28)
#define SPRD_DMA_LOW_ADDR_MASK		GENMASK(31, 0)
#define SPRD_DMA_HIGH_ADDR_OFFSET	4

/* SPRD_DMA_CHN_INTC register definition */
#define SPRD_DMA_FRAG_INT_STS		BIT(16)
#define SPRD_DMA_BLK_INT_STS		BIT(17)
#define SPRD_DMA_TRSC_INT_STS		BIT(18)
#define SPRD_DMA_LIST_INT_STS		BIT(19)
#define SPRD_DMA_CFGERR_INT_STS		BIT(20)
#define SPRD_DMA_CHN_INT_STS	\
	(SPRD_DMA_FRAG_INT_STS | SPRD_DMA_BLK_INT_STS |	\
	 SPRD_DMA_TRSC_INT_STS | SPRD_DMA_LIST_INT_STS | \
	 SPRD_DMA_CFGERR_INT_STS)

/* SPRD_DMA_CHN_FRG_LEN register definition */
#define SPRD_DMA_SRC_DATAWIDTH_OFFSET	30
#define SPRD_DMA_DES_DATAWIDTH_OFFSET	28
#define SPRD_DMA_SWT_MODE_OFFSET	26
#define SPRD_DMA_REQ_MODE_OFFSET	24
#define SPRD_DMA_REQ_MODE_MASK		GENMASK(1, 0)
#define SPRD_DMA_FIX_SEL_OFFSET		21
#define SPRD_DMA_FIX_EN_OFFSET		20
#define SPRD_DMA_LLIST_END_OFFSET	19
#define SPRD_DMA_FRG_LEN_MASK		GENMASK(16, 0)

/* SPRD_DMA_CHN_BLK_LEN register definition */
#define SPRD_DMA_BLK_LEN_MASK		GENMASK(16, 0)

/* SPRD_DMA_CHN_TRSC_LEN register definition */
#define SPRD_DMA_TRSC_LEN_MASK		GENMASK(27, 0)

/* SPRD_DMA_CHN_TRSF_STEP register definition */
#define SPRD_DMA_DEST_TRSF_STEP_OFFSET	16
#define SPRD_DMA_SRC_TRSF_STEP_OFFSET	0
#define SPRD_DMA_TRSF_STEP_MASK		GENMASK(15, 0)

#define SPRD_DMA_SOFTWARE_UID		0

/*
 * enum sprd_dma_req_mode: define the DMA request mode
 * @SPRD_DMA_FRAG_REQ: fragment request mode
 * @SPRD_DMA_BLK_REQ: block request mode
 * @SPRD_DMA_TRANS_REQ: transaction request mode
 * @SPRD_DMA_LIST_REQ: link-list request mode
 *
 * We have 4 request modes: fragment mode, block mode, transaction mode and
 * link-list mode. One transaction can contain several blocks, and one block
 * can contain several fragments. Link-list mode means several DMA
 * configurations can be saved in one reserved memory area, and the DMA engine
 * then fetches each configuration automatically to start the next transfer.
 */
enum sprd_dma_req_mode {
	SPRD_DMA_FRAG_REQ,
	SPRD_DMA_BLK_REQ,
	SPRD_DMA_TRANS_REQ,
	SPRD_DMA_LIST_REQ,
};

/*
 * enum sprd_dma_int_type: define the DMA interrupt type
 * @SPRD_DMA_NO_INT: do not need to generate DMA interrupts.
 * @SPRD_DMA_FRAG_INT: fragment done interrupt when one fragment request
 * is done.
 * @SPRD_DMA_BLK_INT: block done interrupt when one block request is done.
 * @SPRD_DMA_BLK_FRAG_INT: block and fragment interrupt when one fragment
 * or one block request is done.
 * @SPRD_DMA_TRANS_INT: transaction done interrupt when one transaction
 * request is done.
 * @SPRD_DMA_TRANS_FRAG_INT: transaction and fragment interrupt when one
 * transaction request or fragment request is done.
 * @SPRD_DMA_TRANS_BLK_INT: transaction and block interrupt when one
 * transaction request or block request is done.
 * @SPRD_DMA_LIST_INT: link-list done interrupt when one link-list request
 * is done.
 * @SPRD_DMA_CFGERR_INT: configure error interrupt when the configuration is
 * incorrect.
 */
enum sprd_dma_int_type {
	SPRD_DMA_NO_INT,
	SPRD_DMA_FRAG_INT,
	SPRD_DMA_BLK_INT,
	SPRD_DMA_BLK_FRAG_INT,
	SPRD_DMA_TRANS_INT,
	SPRD_DMA_TRANS_FRAG_INT,
	SPRD_DMA_TRANS_BLK_INT,
	SPRD_DMA_LIST_INT,
	SPRD_DMA_CFGERR_INT,
};

/* dma channel hardware configuration */
struct sprd_dma_chn_hw {
	u32 pause;
	u32 req;
	u32 cfg;
	u32 intc;
	u32 src_addr;
	u32 des_addr;
	u32 frg_len;
	u32 blk_len;
	u32 trsc_len;
	u32 trsf_step;
	u32 wrap_ptr;
	u32 wrap_to;
	u32 llist_ptr;
	u32 frg_step;
	u32 src_blk_step;
	u32 des_blk_step;
};

/* dma request description */
struct sprd_dma_desc {
	struct virt_dma_desc	vd;
	struct sprd_dma_chn_hw	chn_hw;
};

/* dma channel description */
struct sprd_dma_chn {
	struct virt_dma_chan	vc;
	void __iomem		*chn_base;
	u32			chn_num;
	u32			dev_id;
	struct sprd_dma_desc	*cur_desc;
};

/* SPRD dma device */
struct sprd_dma_dev {
	struct dma_device	dma_dev;
	void __iomem		*glb_base;
	struct clk		*clk;
	struct clk		*ashb_clk;
	int			irq;
	u32			total_chns;
	struct sprd_dma_chn	channels[0];
};

static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param);
static struct of_dma_filter_info sprd_dma_info = {
	.filter_fn = sprd_dma_filter_fn,
};

static inline struct sprd_dma_chn *to_sprd_dma_chan(struct dma_chan *c)
{
	return container_of(c, struct sprd_dma_chn, vc.chan);
}

static inline struct sprd_dma_dev *to_sprd_dma_dev(struct dma_chan *c)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(c);

	return container_of(schan, struct sprd_dma_dev, channels[c->chan_id]);
}

static inline struct sprd_dma_desc *to_sprd_dma_desc(struct virt_dma_desc *vd)
{
	return container_of(vd, struct sprd_dma_desc, vd);
}

static void sprd_dma_chn_update(struct sprd_dma_chn *schan, u32 reg,
				u32 mask, u32 val)
{
	u32 orig = readl(schan->chn_base + reg);
	u32 tmp;

	tmp = (orig & ~mask) | val;
	writel(tmp, schan->chn_base + reg);
}

static int sprd_dma_enable(struct sprd_dma_dev *sdev)
{
	int ret;

	ret = clk_prepare_enable(sdev->clk);
	if (ret)
		return ret;

	/*
	 * The ashb_clk is optional and only used by the AGCP DMA controller,
	 * so only enable it when it was actually found.
	 */
	if (!IS_ERR(sdev->ashb_clk))
		ret = clk_prepare_enable(sdev->ashb_clk);

	return ret;
}

static void sprd_dma_disable(struct sprd_dma_dev *sdev)
{
	clk_disable_unprepare(sdev->clk);

	/* Also disable the optional ashb_clk used by the AGCP DMA. */
	if (!IS_ERR(sdev->ashb_clk))
		clk_disable_unprepare(sdev->ashb_clk);
}
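
/*
 * Hardware request routing: for a channel bound to a hardware uid, the
 * 1-based channel number is written into the uid's slot in the global
 * request-uid registers; SPRD_DMA_SOFTWARE_UID marks a channel driven purely
 * by software requests.
 */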

static void sprd_dma_set_uid(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 dev_id = schan->dev_id;

	if (dev_id != SPRD_DMA_SOFTWARE_UID) {
		u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
				 SPRD_DMA_GLB_REQ_UID(dev_id);

		writel(schan->chn_num + 1, sdev->glb_base + uid_offset);
	}
}

static void sprd_dma_unset_uid(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 dev_id = schan->dev_id;

	if (dev_id != SPRD_DMA_SOFTWARE_UID) {
		u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
				 SPRD_DMA_GLB_REQ_UID(dev_id);

		writel(0, sdev->glb_base + uid_offset);
	}
}

static void sprd_dma_clear_int(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_INTC,
			    SPRD_DMA_INT_MASK << SPRD_DMA_INT_CLR_OFFSET,
			    SPRD_DMA_INT_MASK << SPRD_DMA_INT_CLR_OFFSET);
}

static void sprd_dma_enable_chn(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_CFG, SPRD_DMA_CHN_EN,
			    SPRD_DMA_CHN_EN);
}

static void sprd_dma_disable_chn(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_CFG, SPRD_DMA_CHN_EN, 0);
}

static void sprd_dma_soft_request(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_REQ, SPRD_DMA_REQ_EN,
			    SPRD_DMA_REQ_EN);
}
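
/*
 * Pausing is not instantaneous: after setting SPRD_DMA_PAUSE_EN the hardware
 * is polled (up to SPRD_DMA_PAUSE_CNT iterations) until it reports
 * SPRD_DMA_PAUSE_STS, at which point the channel is quiesced.
 */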

static void sprd_dma_pause_resume(struct sprd_dma_chn *schan, bool enable)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 pause, timeout = SPRD_DMA_PAUSE_CNT;

	if (enable) {
		sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
				    SPRD_DMA_PAUSE_EN, SPRD_DMA_PAUSE_EN);

		do {
			pause = readl(schan->chn_base + SPRD_DMA_CHN_PAUSE);
			if (pause & SPRD_DMA_PAUSE_STS)
				break;

			cpu_relax();
		} while (--timeout > 0);

		if (!timeout)
			dev_warn(sdev->dma_dev.dev,
				 "pause dma controller timeout\n");
	} else {
		sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
				    SPRD_DMA_PAUSE_EN, 0);
	}
}

static void sprd_dma_stop_and_disable(struct sprd_dma_chn *schan)
{
	u32 cfg = readl(schan->chn_base + SPRD_DMA_CHN_CFG);

	if (!(cfg & SPRD_DMA_CHN_EN))
		return;

	sprd_dma_pause_resume(schan, true);
	sprd_dma_disable_chn(schan);
}
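
/*
 * The controller uses 36-bit addresses: the low 32 bits of the destination
 * live in SPRD_DMA_CHN_DES_ADDR and the high 4 bits in bits 31:28 of
 * SPRD_DMA_CHN_WARP_TO, so both registers are combined here.
 */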

static unsigned long sprd_dma_get_dst_addr(struct sprd_dma_chn *schan)
{
	unsigned long addr, addr_high;

	addr = readl(schan->chn_base + SPRD_DMA_CHN_DES_ADDR);
	addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_TO) &
		    SPRD_DMA_HIGH_ADDR_MASK;

	return addr | (addr_high << SPRD_DMA_HIGH_ADDR_OFFSET);
}

static enum sprd_dma_int_type sprd_dma_get_int_type(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 intc_sts = readl(schan->chn_base + SPRD_DMA_CHN_INTC) &
		       SPRD_DMA_CHN_INT_STS;

	switch (intc_sts) {
	case SPRD_DMA_CFGERR_INT_STS:
		return SPRD_DMA_CFGERR_INT;

	case SPRD_DMA_LIST_INT_STS:
		return SPRD_DMA_LIST_INT;

	case SPRD_DMA_TRSC_INT_STS:
		return SPRD_DMA_TRANS_INT;

	case SPRD_DMA_BLK_INT_STS:
		return SPRD_DMA_BLK_INT;

	case SPRD_DMA_FRAG_INT_STS:
		return SPRD_DMA_FRAG_INT;

	default:
		dev_warn(sdev->dma_dev.dev, "incorrect dma interrupt type\n");
		return SPRD_DMA_NO_INT;
	}
}

static enum sprd_dma_req_mode sprd_dma_get_req_type(struct sprd_dma_chn *schan)
{
	u32 frag_reg = readl(schan->chn_base + SPRD_DMA_CHN_FRG_LEN);

	return (frag_reg >> SPRD_DMA_REQ_MODE_OFFSET) & SPRD_DMA_REQ_MODE_MASK;
}

static void sprd_dma_set_chn_config(struct sprd_dma_chn *schan,
				    struct sprd_dma_desc *sdesc)
{
	struct sprd_dma_chn_hw *cfg = &sdesc->chn_hw;

	writel(cfg->pause, schan->chn_base + SPRD_DMA_CHN_PAUSE);
	writel(cfg->cfg, schan->chn_base + SPRD_DMA_CHN_CFG);
	writel(cfg->intc, schan->chn_base + SPRD_DMA_CHN_INTC);
	writel(cfg->src_addr, schan->chn_base + SPRD_DMA_CHN_SRC_ADDR);
	writel(cfg->des_addr, schan->chn_base + SPRD_DMA_CHN_DES_ADDR);
	writel(cfg->frg_len, schan->chn_base + SPRD_DMA_CHN_FRG_LEN);
	writel(cfg->blk_len, schan->chn_base + SPRD_DMA_CHN_BLK_LEN);
	writel(cfg->trsc_len, schan->chn_base + SPRD_DMA_CHN_TRSC_LEN);
	writel(cfg->trsf_step, schan->chn_base + SPRD_DMA_CHN_TRSF_STEP);
	writel(cfg->wrap_ptr, schan->chn_base + SPRD_DMA_CHN_WARP_PTR);
	writel(cfg->wrap_to, schan->chn_base + SPRD_DMA_CHN_WARP_TO);
	writel(cfg->llist_ptr, schan->chn_base + SPRD_DMA_CHN_LLIST_PTR);
	writel(cfg->frg_step, schan->chn_base + SPRD_DMA_CHN_FRAG_STEP);
	writel(cfg->src_blk_step, schan->chn_base + SPRD_DMA_CHN_SRC_BLK_STEP);
	writel(cfg->des_blk_step, schan->chn_base + SPRD_DMA_CHN_DES_BLK_STEP);
	writel(cfg->req, schan->chn_base + SPRD_DMA_CHN_REQ);
}
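
/*
 * Pop the next issued descriptor from the virt-dma list and program it into
 * the channel; channels without a hardware uid (SPRD_DMA_SOFTWARE_UID) must
 * be kicked by an explicit software request.
 */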

static void sprd_dma_start(struct sprd_dma_chn *schan)
{
	struct virt_dma_desc *vd = vchan_next_desc(&schan->vc);

	if (!vd)
		return;

	list_del(&vd->node);
	schan->cur_desc = to_sprd_dma_desc(vd);

	/*
	 * Copy the DMA configuration from DMA descriptor to this hardware
	 * channel.
	 */
	sprd_dma_set_chn_config(schan, schan->cur_desc);
	sprd_dma_set_uid(schan);
	sprd_dma_enable_chn(schan);

	if (schan->dev_id == SPRD_DMA_SOFTWARE_UID)
		sprd_dma_soft_request(schan);
}

static void sprd_dma_stop(struct sprd_dma_chn *schan)
{
	sprd_dma_stop_and_disable(schan);
	sprd_dma_unset_uid(schan);
	sprd_dma_clear_int(schan);
}
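
/*
 * The interrupt and request mode enums are ordered so that the interrupt
 * raised at completion of a given request mode always has a numerically
 * larger value: a transfer is considered done once the observed interrupt
 * type reaches req_mode + 1.
 */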

static bool sprd_dma_check_trans_done(struct sprd_dma_desc *sdesc,
				      enum sprd_dma_int_type int_type,
				      enum sprd_dma_req_mode req_mode)
{
	if (int_type == SPRD_DMA_NO_INT)
		return false;

	if (int_type >= req_mode + 1)
		return true;

	return false;
}
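
/*
 * All channels share one interrupt line: SPRD_DMA_GLB_INT_MSK_STS holds one
 * pending bit per channel, so the handler below walks the set bits with
 * __ffs() and completes the current descriptor of each signalled channel.
 */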

static irqreturn_t dma_irq_handle(int irq, void *dev_id)
{
	struct sprd_dma_dev *sdev = (struct sprd_dma_dev *)dev_id;
	u32 irq_status = readl(sdev->glb_base + SPRD_DMA_GLB_INT_MSK_STS);
	struct sprd_dma_chn *schan;
	struct sprd_dma_desc *sdesc;
	enum sprd_dma_req_mode req_type;
	enum sprd_dma_int_type int_type;
	bool trans_done = false;
	u32 i;

	while (irq_status) {
		i = __ffs(irq_status);
		irq_status &= (irq_status - 1);
		schan = &sdev->channels[i];

		spin_lock(&schan->vc.lock);
		int_type = sprd_dma_get_int_type(schan);
		req_type = sprd_dma_get_req_type(schan);
		sprd_dma_clear_int(schan);

		sdesc = schan->cur_desc;

		/* Check if the dma request descriptor is done. */
		trans_done = sprd_dma_check_trans_done(sdesc, int_type,
						       req_type);
		if (trans_done) {
			vchan_cookie_complete(&sdesc->vd);
			schan->cur_desc = NULL;
			sprd_dma_start(schan);
		}
		spin_unlock(&schan->vc.lock);
	}

	return IRQ_HANDLED;
}

static int sprd_dma_alloc_chan_resources(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	int ret;

	ret = pm_runtime_get_sync(chan->device->dev);
	if (ret < 0)
		return ret;

	schan->dev_id = SPRD_DMA_SOFTWARE_UID;
	return 0;
}

static void sprd_dma_free_chan_resources(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	sprd_dma_stop(schan);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	vchan_free_chan_resources(&schan->vc);
	pm_runtime_put(chan->device->dev);
}
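
/*
 * Residue reporting below: a descriptor still waiting in the issued list
 * reports its total programmed length (transaction, block or fragment
 * length, whichever is set), while the descriptor currently running on the
 * hardware reports the channel's current destination address instead.
 */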

static enum dma_status sprd_dma_tx_status(struct dma_chan *chan,
					  dma_cookie_t cookie,
					  struct dma_tx_state *txstate)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct virt_dma_desc *vd;
	unsigned long flags;
	enum dma_status ret;
	u32 pos = 0;

	ret = dma_cookie_status(chan, cookie, txstate);
	if (ret == DMA_COMPLETE || !txstate)
		return ret;

	spin_lock_irqsave(&schan->vc.lock, flags);
	vd = vchan_find_desc(&schan->vc, cookie);
	if (vd) {
		struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);
		struct sprd_dma_chn_hw *hw = &sdesc->chn_hw;

		if (hw->trsc_len > 0)
			pos = hw->trsc_len;
		else if (hw->blk_len > 0)
			pos = hw->blk_len;
		else if (hw->frg_len > 0)
			pos = hw->frg_len;
		else
			pos = 0;
	} else if (schan->cur_desc && schan->cur_desc->vd.tx.cookie == cookie) {
		pos = sprd_dma_get_dst_addr(schan);
	} else {
		pos = 0;
	}
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	dma_set_residue(txstate, pos);
	return ret;
}

static void sprd_dma_issue_pending(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	if (vchan_issue_pending(&schan->vc) && !schan->cur_desc)
		sprd_dma_start(schan);
	spin_unlock_irqrestore(&schan->vc.lock, flags);
}
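
/*
 * Build the hardware configuration for a memory-to-memory copy: a transfer
 * that fits in SPRD_DMA_BLK_LEN_MASK bytes is programmed as a single block
 * (block request/interrupt); anything larger becomes a transaction made of
 * SPRD_DMA_MEMCPY_MIN_SIZE byte blocks (transaction request/interrupt).
 */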

static int sprd_dma_config(struct dma_chan *chan, struct sprd_dma_desc *sdesc,
			   dma_addr_t dest, dma_addr_t src, size_t len)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(chan);
	struct sprd_dma_chn_hw *hw = &sdesc->chn_hw;
	u32 datawidth, src_step, des_step, fragment_len;
	u32 block_len, req_mode, irq_mode, transaction_len;
	u32 fix_mode = 0, fix_en = 0;

	if (IS_ALIGNED(len, 4)) {
		datawidth = 2;
		src_step = 4;
		des_step = 4;
	} else if (IS_ALIGNED(len, 2)) {
		datawidth = 1;
		src_step = 2;
		des_step = 2;
	} else {
		datawidth = 0;
		src_step = 1;
		des_step = 1;
	}

	fragment_len = SPRD_DMA_MEMCPY_MIN_SIZE;
	if (len <= SPRD_DMA_BLK_LEN_MASK) {
		block_len = len;
		transaction_len = 0;
		req_mode = SPRD_DMA_BLK_REQ;
		irq_mode = SPRD_DMA_BLK_INT;
	} else {
		block_len = SPRD_DMA_MEMCPY_MIN_SIZE;
		transaction_len = len;
		req_mode = SPRD_DMA_TRANS_REQ;
		irq_mode = SPRD_DMA_TRANS_INT;
	}

	hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET;
	hw->wrap_ptr = (u32)((src >> SPRD_DMA_HIGH_ADDR_OFFSET) &
			     SPRD_DMA_HIGH_ADDR_MASK);
	hw->wrap_to = (u32)((dest >> SPRD_DMA_HIGH_ADDR_OFFSET) &
			    SPRD_DMA_HIGH_ADDR_MASK);

	hw->src_addr = (u32)(src & SPRD_DMA_LOW_ADDR_MASK);
	hw->des_addr = (u32)(dest & SPRD_DMA_LOW_ADDR_MASK);

	if ((src_step != 0 && des_step != 0) || (src_step | des_step) == 0) {
		fix_en = 0;
	} else {
		fix_en = 1;
		if (src_step)
			fix_mode = 1;
		else
			fix_mode = 0;
	}

	hw->frg_len = datawidth << SPRD_DMA_SRC_DATAWIDTH_OFFSET |
		      datawidth << SPRD_DMA_DES_DATAWIDTH_OFFSET |
		      req_mode << SPRD_DMA_REQ_MODE_OFFSET |
		      fix_mode << SPRD_DMA_FIX_SEL_OFFSET |
		      fix_en << SPRD_DMA_FIX_EN_OFFSET |
		      (fragment_len & SPRD_DMA_FRG_LEN_MASK);
	hw->blk_len = block_len & SPRD_DMA_BLK_LEN_MASK;

	hw->intc = SPRD_DMA_CFG_ERR_INT_EN;

	switch (irq_mode) {
	case SPRD_DMA_NO_INT:
		break;

	case SPRD_DMA_FRAG_INT:
		hw->intc |= SPRD_DMA_FRAG_INT_EN;
		break;

	case SPRD_DMA_BLK_INT:
		hw->intc |= SPRD_DMA_BLK_INT_EN;
		break;

	case SPRD_DMA_BLK_FRAG_INT:
		hw->intc |= SPRD_DMA_BLK_INT_EN | SPRD_DMA_FRAG_INT_EN;
		break;

	case SPRD_DMA_TRANS_INT:
		hw->intc |= SPRD_DMA_TRANS_INT_EN;
		break;

	case SPRD_DMA_TRANS_FRAG_INT:
		hw->intc |= SPRD_DMA_TRANS_INT_EN | SPRD_DMA_FRAG_INT_EN;
		break;

	case SPRD_DMA_TRANS_BLK_INT:
		hw->intc |= SPRD_DMA_TRANS_INT_EN | SPRD_DMA_BLK_INT_EN;
		break;

	case SPRD_DMA_LIST_INT:
		hw->intc |= SPRD_DMA_LIST_INT_EN;
		break;

	case SPRD_DMA_CFGERR_INT:
		hw->intc |= SPRD_DMA_CFG_ERR_INT_EN;
		break;

	default:
		dev_err(sdev->dma_dev.dev, "invalid irq mode\n");
		return -EINVAL;
	}

	if (transaction_len == 0)
		hw->trsc_len = block_len & SPRD_DMA_TRSC_LEN_MASK;
	else
		hw->trsc_len = transaction_len & SPRD_DMA_TRSC_LEN_MASK;

	hw->trsf_step = (des_step & SPRD_DMA_TRSF_STEP_MASK) <<
			SPRD_DMA_DEST_TRSF_STEP_OFFSET |
			(src_step & SPRD_DMA_TRSF_STEP_MASK) <<
			SPRD_DMA_SRC_TRSF_STEP_OFFSET;

	hw->frg_step = 0;
	hw->src_blk_step = 0;
	hw->des_blk_step = 0;
	return 0;
}

static struct dma_async_tx_descriptor *
sprd_dma_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
			 size_t len, unsigned long flags)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct sprd_dma_desc *sdesc;
	int ret;

	sdesc = kzalloc(sizeof(*sdesc), GFP_NOWAIT);
	if (!sdesc)
		return NULL;

	ret = sprd_dma_config(chan, sdesc, dest, src, len);
	if (ret) {
		kfree(sdesc);
		return NULL;
	}

	return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
}
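
/*
 * Illustrative client usage (not part of this driver): a memcpy prepared
 * above would typically be driven through the generic dmaengine API, e.g.:
 *
 *	txd = dmaengine_prep_dma_memcpy(chan, dst_dma, src_dma, len,
 *					DMA_PREP_INTERRUPT);
 *	cookie = dmaengine_submit(txd);
 *	dma_async_issue_pending(chan);
 *
 * dst_dma, src_dma and len here are hypothetical client-owned values.
 */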

static int sprd_dma_pause(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	sprd_dma_pause_resume(schan, true);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	return 0;
}

static int sprd_dma_resume(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	sprd_dma_pause_resume(schan, false);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	return 0;
}

static int sprd_dma_terminate_all(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&schan->vc.lock, flags);
	sprd_dma_stop(schan);

	vchan_get_all_descriptors(&schan->vc, &head);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	vchan_dma_desc_free_list(&schan->vc, &head);
	return 0;
}

static void sprd_dma_free_desc(struct virt_dma_desc *vd)
{
	struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);

	kfree(sdesc);
}

static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 req = *(u32 *)param;

	if (req < sdev->total_chns)
		return req == schan->chn_num + 1;

	return false;
}
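
/*
 * Illustrative (not part of this driver): a client could also pick a channel
 * by hardware request number with the filter above, e.g.:
 *
 *	dma_cap_mask_t mask;
 *	u32 req_num = 3;	// hypothetical hardware request/uid number
 *
 *	dma_cap_zero(mask);
 *	dma_cap_set(DMA_MEMCPY, mask);
 *	chan = dma_request_channel(mask, sprd_dma_filter_fn, &req_num);
 */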

static int sprd_dma_probe(struct platform_device *pdev)
{
	struct device_node *np = pdev->dev.of_node;
	struct sprd_dma_dev *sdev;
	struct sprd_dma_chn *dma_chn;
	struct resource *res;
	u32 chn_count;
	int ret, i;

	ret = device_property_read_u32(&pdev->dev, "#dma-channels", &chn_count);
	if (ret) {
		dev_err(&pdev->dev, "get dma channels count failed\n");
		return ret;
	}

	sdev = devm_kzalloc(&pdev->dev, sizeof(*sdev) +
			    sizeof(*dma_chn) * chn_count,
			    GFP_KERNEL);
	if (!sdev)
		return -ENOMEM;

	sdev->clk = devm_clk_get(&pdev->dev, "enable");
	if (IS_ERR(sdev->clk)) {
		dev_err(&pdev->dev, "get enable clock failed\n");
		return PTR_ERR(sdev->clk);
	}

	/* ashb clock is optional for AGCP DMA */
	sdev->ashb_clk = devm_clk_get(&pdev->dev, "ashb_eb");
	if (IS_ERR(sdev->ashb_clk))
		dev_warn(&pdev->dev, "no optional ashb eb clock\n");

	/*
	 * We have three DMA controllers: AP DMA, AON DMA and AGCP DMA. The
	 * AGCP DMA controller may run without requesting its irq, so that the
	 * system is not woken up by DMA interrupts, which saves power. Thus
	 * the DMA interrupts property is optional.
	 */
	sdev->irq = platform_get_irq(pdev, 0);
	if (sdev->irq > 0) {
		ret = devm_request_irq(&pdev->dev, sdev->irq, dma_irq_handle,
				       0, "sprd_dma", (void *)sdev);
		if (ret < 0) {
			dev_err(&pdev->dev, "request dma irq failed\n");
			return ret;
		}
	} else {
		dev_warn(&pdev->dev, "no interrupts for the dma controller\n");
	}

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	sdev->glb_base = devm_ioremap_nocache(&pdev->dev, res->start,
					      resource_size(res));
	if (!sdev->glb_base)
		return -ENOMEM;

	dma_cap_set(DMA_MEMCPY, sdev->dma_dev.cap_mask);
	sdev->total_chns = chn_count;
	sdev->dma_dev.chancnt = chn_count;
	INIT_LIST_HEAD(&sdev->dma_dev.channels);
	INIT_LIST_HEAD(&sdev->dma_dev.global_node);
	sdev->dma_dev.dev = &pdev->dev;
	sdev->dma_dev.device_alloc_chan_resources = sprd_dma_alloc_chan_resources;
	sdev->dma_dev.device_free_chan_resources = sprd_dma_free_chan_resources;
	sdev->dma_dev.device_tx_status = sprd_dma_tx_status;
	sdev->dma_dev.device_issue_pending = sprd_dma_issue_pending;
	sdev->dma_dev.device_prep_dma_memcpy = sprd_dma_prep_dma_memcpy;
	sdev->dma_dev.device_pause = sprd_dma_pause;
	sdev->dma_dev.device_resume = sprd_dma_resume;
	sdev->dma_dev.device_terminate_all = sprd_dma_terminate_all;

	for (i = 0; i < chn_count; i++) {
		dma_chn = &sdev->channels[i];
		dma_chn->chn_num = i;
		dma_chn->cur_desc = NULL;
		/* get each channel's registers base address. */
		dma_chn->chn_base = sdev->glb_base + SPRD_DMA_CHN_REG_OFFSET +
				    SPRD_DMA_CHN_REG_LENGTH * i;

		dma_chn->vc.desc_free = sprd_dma_free_desc;
		vchan_init(&dma_chn->vc, &sdev->dma_dev);
	}

	platform_set_drvdata(pdev, sdev);
	ret = sprd_dma_enable(sdev);
	if (ret)
		return ret;

	pm_runtime_set_active(&pdev->dev);
	pm_runtime_enable(&pdev->dev);

	ret = pm_runtime_get_sync(&pdev->dev);
	if (ret < 0)
		goto err_rpm;

	ret = dma_async_device_register(&sdev->dma_dev);
	if (ret < 0) {
		dev_err(&pdev->dev, "register dma device failed:%d\n", ret);
		goto err_register;
	}

	sprd_dma_info.dma_cap = sdev->dma_dev.cap_mask;
	ret = of_dma_controller_register(np, of_dma_simple_xlate,
					 &sprd_dma_info);
	if (ret)
		goto err_of_register;

	pm_runtime_put(&pdev->dev);
	return 0;

err_of_register:
	dma_async_device_unregister(&sdev->dma_dev);
err_register:
	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
err_rpm:
	sprd_dma_disable(sdev);
	return ret;
}

static int sprd_dma_remove(struct platform_device *pdev)
{
	struct sprd_dma_dev *sdev = platform_get_drvdata(pdev);
	struct sprd_dma_chn *c, *cn;
	int ret;

	ret = pm_runtime_get_sync(&pdev->dev);
	if (ret < 0)
		return ret;

	/* explicitly free the irq */
	if (sdev->irq > 0)
		devm_free_irq(&pdev->dev, sdev->irq, sdev);

	list_for_each_entry_safe(c, cn, &sdev->dma_dev.channels,
				 vc.chan.device_node) {
		list_del(&c->vc.chan.device_node);
		tasklet_kill(&c->vc.task);
	}

	of_dma_controller_free(pdev->dev.of_node);
	dma_async_device_unregister(&sdev->dma_dev);
	sprd_dma_disable(sdev);

	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
	return 0;
}

static const struct of_device_id sprd_dma_match[] = {
	{ .compatible = "sprd,sc9860-dma", },
	{},
};

static int __maybe_unused sprd_dma_runtime_suspend(struct device *dev)
{
	struct sprd_dma_dev *sdev = dev_get_drvdata(dev);

	sprd_dma_disable(sdev);
	return 0;
}

static int __maybe_unused sprd_dma_runtime_resume(struct device *dev)
{
	struct sprd_dma_dev *sdev = dev_get_drvdata(dev);
	int ret;

	ret = sprd_dma_enable(sdev);
	if (ret)
		dev_err(sdev->dma_dev.dev, "enable dma failed\n");

	return ret;
}

static const struct dev_pm_ops sprd_dma_pm_ops = {
	SET_RUNTIME_PM_OPS(sprd_dma_runtime_suspend,
			   sprd_dma_runtime_resume,
			   NULL)
};

static struct platform_driver sprd_dma_driver = {
	.probe = sprd_dma_probe,
	.remove = sprd_dma_remove,
	.driver = {
		.name = "sprd-dma",
		.of_match_table = sprd_dma_match,
		.pm = &sprd_dma_pm_ops,
	},
};
module_platform_driver(sprd_dma_driver);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("DMA driver for Spreadtrum");
MODULE_AUTHOR("Baolin Wang <baolin.wang@spreadtrum.com>");
MODULE_ALIAS("platform:sprd-dma");