/*
 * Copyright (C) 2017 Spreadtrum Communications Inc.
 *
 * SPDX-License-Identifier: GPL-2.0
 */

#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/dma/sprd-dma.h>
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_dma.h>
#include <linux/of_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>

#include "../virt-dma.h"

#define SPRD_DMA_CHN_REG_OFFSET		0x1000
#define SPRD_DMA_CHN_REG_LENGTH		0x40
#define SPRD_DMA_MEMCPY_MIN_SIZE	64

/* DMA global registers definition */
#define SPRD_DMA_GLB_PAUSE		0x0
#define SPRD_DMA_GLB_FRAG_WAIT		0x4
#define SPRD_DMA_GLB_REQ_PEND0_EN	0x8
#define SPRD_DMA_GLB_REQ_PEND1_EN	0xc
#define SPRD_DMA_GLB_INT_RAW_STS	0x10
#define SPRD_DMA_GLB_INT_MSK_STS	0x14
#define SPRD_DMA_GLB_REQ_STS		0x18
#define SPRD_DMA_GLB_CHN_EN_STS		0x1c
#define SPRD_DMA_GLB_DEBUG_STS		0x20
#define SPRD_DMA_GLB_ARB_SEL_STS	0x24
#define SPRD_DMA_GLB_2STAGE_GRP1	0x28
#define SPRD_DMA_GLB_2STAGE_GRP2	0x2c
#define SPRD_DMA_GLB_REQ_UID(uid)	(0x4 * ((uid) - 1))
#define SPRD_DMA_GLB_REQ_UID_OFFSET	0x2000

/* DMA channel registers definition */
#define SPRD_DMA_CHN_PAUSE		0x0
#define SPRD_DMA_CHN_REQ		0x4
#define SPRD_DMA_CHN_CFG		0x8
#define SPRD_DMA_CHN_INTC		0xc
#define SPRD_DMA_CHN_SRC_ADDR		0x10
#define SPRD_DMA_CHN_DES_ADDR		0x14
#define SPRD_DMA_CHN_FRG_LEN		0x18
#define SPRD_DMA_CHN_BLK_LEN		0x1c
#define SPRD_DMA_CHN_TRSC_LEN		0x20
#define SPRD_DMA_CHN_TRSF_STEP		0x24
#define SPRD_DMA_CHN_WARP_PTR		0x28
#define SPRD_DMA_CHN_WARP_TO		0x2c
#define SPRD_DMA_CHN_LLIST_PTR		0x30
#define SPRD_DMA_CHN_FRAG_STEP		0x34
#define SPRD_DMA_CHN_SRC_BLK_STEP	0x38
#define SPRD_DMA_CHN_DES_BLK_STEP	0x3c

/* SPRD_DMA_GLB_2STAGE_GRP register definition */
#define SPRD_DMA_GLB_2STAGE_EN		BIT(24)
#define SPRD_DMA_GLB_CHN_INT_MASK	GENMASK(23, 20)
#define SPRD_DMA_GLB_DEST_INT		BIT(22)
#define SPRD_DMA_GLB_SRC_INT		BIT(20)
#define SPRD_DMA_GLB_LIST_DONE_TRG	BIT(19)
#define SPRD_DMA_GLB_TRANS_DONE_TRG	BIT(18)
#define SPRD_DMA_GLB_BLOCK_DONE_TRG	BIT(17)
#define SPRD_DMA_GLB_FRAG_DONE_TRG	BIT(16)
#define SPRD_DMA_GLB_TRG_OFFSET		16
#define SPRD_DMA_GLB_DEST_CHN_MASK	GENMASK(13, 8)
#define SPRD_DMA_GLB_DEST_CHN_OFFSET	8
#define SPRD_DMA_GLB_SRC_CHN_MASK	GENMASK(5, 0)

/* SPRD_DMA_CHN_INTC register definition */
#define SPRD_DMA_INT_MASK		GENMASK(4, 0)
#define SPRD_DMA_INT_CLR_OFFSET		24
#define SPRD_DMA_FRAG_INT_EN		BIT(0)
#define SPRD_DMA_BLK_INT_EN		BIT(1)
#define SPRD_DMA_TRANS_INT_EN		BIT(2)
#define SPRD_DMA_LIST_INT_EN		BIT(3)
#define SPRD_DMA_CFG_ERR_INT_EN		BIT(4)

/* SPRD_DMA_CHN_CFG register definition */
#define SPRD_DMA_CHN_EN			BIT(0)
#define SPRD_DMA_LINKLIST_EN		BIT(4)
#define SPRD_DMA_WAIT_BDONE_OFFSET	24
#define SPRD_DMA_DONOT_WAIT_BDONE	1

/* SPRD_DMA_CHN_REQ register definition */
#define SPRD_DMA_REQ_EN			BIT(0)

/* SPRD_DMA_CHN_PAUSE register definition */
#define SPRD_DMA_PAUSE_EN		BIT(0)
#define SPRD_DMA_PAUSE_STS		BIT(2)
#define SPRD_DMA_PAUSE_CNT		0x2000

/* DMA_CHN_WARP_* register definition */
#define SPRD_DMA_HIGH_ADDR_MASK		GENMASK(31, 28)
#define SPRD_DMA_LOW_ADDR_MASK		GENMASK(31, 0)
#define SPRD_DMA_WRAP_ADDR_MASK		GENMASK(27, 0)
#define SPRD_DMA_HIGH_ADDR_OFFSET	4

/* SPRD_DMA_CHN_INTC register definition */
#define SPRD_DMA_FRAG_INT_STS		BIT(16)
#define SPRD_DMA_BLK_INT_STS		BIT(17)
#define SPRD_DMA_TRSC_INT_STS		BIT(18)
#define SPRD_DMA_LIST_INT_STS		BIT(19)
#define SPRD_DMA_CFGERR_INT_STS		BIT(20)
#define SPRD_DMA_CHN_INT_STS					\
	(SPRD_DMA_FRAG_INT_STS | SPRD_DMA_BLK_INT_STS |		\
	 SPRD_DMA_TRSC_INT_STS | SPRD_DMA_LIST_INT_STS |	\
	 SPRD_DMA_CFGERR_INT_STS)

/* SPRD_DMA_CHN_FRG_LEN register definition */
#define SPRD_DMA_SRC_DATAWIDTH_OFFSET	30
#define SPRD_DMA_DES_DATAWIDTH_OFFSET	28
#define SPRD_DMA_SWT_MODE_OFFSET	26
#define SPRD_DMA_REQ_MODE_OFFSET	24
#define SPRD_DMA_REQ_MODE_MASK		GENMASK(1, 0)
#define SPRD_DMA_WRAP_SEL_DEST		BIT(23)
#define SPRD_DMA_WRAP_EN		BIT(22)
#define SPRD_DMA_FIX_SEL_OFFSET		21
#define SPRD_DMA_FIX_EN_OFFSET		20
#define SPRD_DMA_LLIST_END		BIT(19)
#define SPRD_DMA_FRG_LEN_MASK		GENMASK(16, 0)

/* SPRD_DMA_CHN_BLK_LEN register definition */
#define SPRD_DMA_BLK_LEN_MASK		GENMASK(16, 0)

/* SPRD_DMA_CHN_TRSC_LEN register definition */
#define SPRD_DMA_TRSC_LEN_MASK		GENMASK(27, 0)

/* SPRD_DMA_CHN_TRSF_STEP register definition */
#define SPRD_DMA_DEST_TRSF_STEP_OFFSET	16
#define SPRD_DMA_SRC_TRSF_STEP_OFFSET	0
#define SPRD_DMA_TRSF_STEP_MASK		GENMASK(15, 0)

/* SPRD DMA_SRC_BLK_STEP register definition */
#define SPRD_DMA_LLIST_HIGH_MASK	GENMASK(31, 28)
#define SPRD_DMA_LLIST_HIGH_SHIFT	28

/* define DMA channel mode & trigger mode mask */
#define SPRD_DMA_CHN_MODE_MASK		GENMASK(7, 0)
#define SPRD_DMA_TRG_MODE_MASK		GENMASK(7, 0)
#define SPRD_DMA_INT_TYPE_MASK		GENMASK(7, 0)

/* define the DMA transfer step type */
#define SPRD_DMA_NONE_STEP		0
#define SPRD_DMA_BYTE_STEP		1
#define SPRD_DMA_SHORT_STEP		2
#define SPRD_DMA_WORD_STEP		4
#define SPRD_DMA_DWORD_STEP		8

#define SPRD_DMA_SOFTWARE_UID		0

/* dma data width values */
enum sprd_dma_datawidth {
	SPRD_DMA_DATAWIDTH_1_BYTE,
	SPRD_DMA_DATAWIDTH_2_BYTES,
	SPRD_DMA_DATAWIDTH_4_BYTES,
	SPRD_DMA_DATAWIDTH_8_BYTES,
};

/* dma channel hardware configuration */
struct sprd_dma_chn_hw {
	u32 pause;
	u32 req;
	u32 cfg;
	u32 intc;
	u32 src_addr;
	u32 des_addr;
	u32 frg_len;
	u32 blk_len;
	u32 trsc_len;
	u32 trsf_step;
	u32 wrap_ptr;
	u32 wrap_to;
	u32 llist_ptr;
	u32 frg_step;
	u32 src_blk_step;
	u32 des_blk_step;
};

/* dma request description */
struct sprd_dma_desc {
	struct virt_dma_desc	vd;
	struct sprd_dma_chn_hw	chn_hw;
	enum dma_transfer_direction dir;
};

/* dma channel description */
struct sprd_dma_chn {
	struct virt_dma_chan	vc;
	void __iomem		*chn_base;
	struct sprd_dma_linklist	linklist;
	struct dma_slave_config	slave_cfg;
	u32			chn_num;
	u32			dev_id;
	enum sprd_dma_chn_mode	chn_mode;
	enum sprd_dma_trg_mode	trg_mode;
	enum sprd_dma_int_type	int_type;
	struct sprd_dma_desc	*cur_desc;
};

/* SPRD dma device */
struct sprd_dma_dev {
	struct dma_device	dma_dev;
	void __iomem		*glb_base;
	struct clk		*clk;
	struct clk		*ashb_clk;
	int			irq;
	u32			total_chns;
	struct sprd_dma_chn	channels[0];
};

static void sprd_dma_free_desc(struct virt_dma_desc *vd);
static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param);
static struct of_dma_filter_info sprd_dma_info = {
	.filter_fn = sprd_dma_filter_fn,
};

static inline struct sprd_dma_chn *to_sprd_dma_chan(struct dma_chan *c)
{
	return container_of(c, struct sprd_dma_chn, vc.chan);
}

static inline struct sprd_dma_dev *to_sprd_dma_dev(struct dma_chan *c)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(c);

	return container_of(schan, struct sprd_dma_dev, channels[c->chan_id]);
}

static inline struct sprd_dma_desc *to_sprd_dma_desc(struct virt_dma_desc *vd)
{
	return container_of(vd, struct sprd_dma_desc, vd);
}

static void sprd_dma_glb_update(struct sprd_dma_dev *sdev, u32 reg,
				u32 mask, u32 val)
{
	u32 orig = readl(sdev->glb_base + reg);
	u32 tmp;

	tmp = (orig & ~mask) | val;
	writel(tmp, sdev->glb_base + reg);
}

static void sprd_dma_chn_update(struct sprd_dma_chn *schan, u32 reg,
				u32 mask, u32 val)
{
	u32 orig = readl(schan->chn_base + reg);
	u32 tmp;

	tmp = (orig & ~mask) | val;
	writel(tmp, schan->chn_base + reg);
}

static int sprd_dma_enable(struct sprd_dma_dev *sdev)
{
	int ret;

	ret = clk_prepare_enable(sdev->clk);
	if (ret)
		return ret;

	/*
	 * The ashb_clk is optional and only used by the AGCP DMA controller,
	 * so check whether it needs to be enabled.
	 */
	if (!IS_ERR(sdev->ashb_clk))
		ret = clk_prepare_enable(sdev->ashb_clk);

	return ret;
}

static void sprd_dma_disable(struct sprd_dma_dev *sdev)
{
	clk_disable_unprepare(sdev->clk);

	/* Check whether the optional ashb_clk for AGCP DMA needs disabling. */
	if (!IS_ERR(sdev->ashb_clk))
		clk_disable_unprepare(sdev->ashb_clk);
}

static void sprd_dma_set_uid(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 dev_id = schan->dev_id;

	if (dev_id != SPRD_DMA_SOFTWARE_UID) {
		u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
				 SPRD_DMA_GLB_REQ_UID(dev_id);

		writel(schan->chn_num + 1, sdev->glb_base + uid_offset);
	}
}

static void sprd_dma_unset_uid(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 dev_id = schan->dev_id;

	if (dev_id != SPRD_DMA_SOFTWARE_UID) {
		u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
				 SPRD_DMA_GLB_REQ_UID(dev_id);

		writel(0, sdev->glb_base + uid_offset);
	}
}

static void sprd_dma_clear_int(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_INTC,
			    SPRD_DMA_INT_MASK << SPRD_DMA_INT_CLR_OFFSET,
			    SPRD_DMA_INT_MASK << SPRD_DMA_INT_CLR_OFFSET);
}

static void sprd_dma_enable_chn(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_CFG, SPRD_DMA_CHN_EN,
			    SPRD_DMA_CHN_EN);
}

static void sprd_dma_disable_chn(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_CFG, SPRD_DMA_CHN_EN, 0);
}

static void sprd_dma_soft_request(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_REQ, SPRD_DMA_REQ_EN,
			    SPRD_DMA_REQ_EN);
}

static void sprd_dma_pause_resume(struct sprd_dma_chn *schan, bool enable)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 pause, timeout = SPRD_DMA_PAUSE_CNT;

	if (enable) {
		sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
				    SPRD_DMA_PAUSE_EN, SPRD_DMA_PAUSE_EN);

		do {
			pause = readl(schan->chn_base + SPRD_DMA_CHN_PAUSE);
			if (pause & SPRD_DMA_PAUSE_STS)
				break;

			cpu_relax();
		} while (--timeout > 0);

		if (!timeout)
			dev_warn(sdev->dma_dev.dev,
				 "pause dma controller timeout\n");
	} else {
		sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
				    SPRD_DMA_PAUSE_EN, 0);
	}
}

static void sprd_dma_stop_and_disable(struct sprd_dma_chn *schan)
{
	u32 cfg = readl(schan->chn_base + SPRD_DMA_CHN_CFG);

	if (!(cfg & SPRD_DMA_CHN_EN))
		return;

	sprd_dma_pause_resume(schan, true);
	sprd_dma_disable_chn(schan);
}

static unsigned long sprd_dma_get_src_addr(struct sprd_dma_chn *schan)
{
	unsigned long addr, addr_high;

	addr = readl(schan->chn_base + SPRD_DMA_CHN_SRC_ADDR);
	addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_PTR) &
		    SPRD_DMA_HIGH_ADDR_MASK;

	return addr | (addr_high << SPRD_DMA_HIGH_ADDR_OFFSET);
}

static unsigned long sprd_dma_get_dst_addr(struct sprd_dma_chn *schan)
{
	unsigned long addr, addr_high;

	addr = readl(schan->chn_base + SPRD_DMA_CHN_DES_ADDR);
	addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_TO) &
		    SPRD_DMA_HIGH_ADDR_MASK;

	return addr | (addr_high << SPRD_DMA_HIGH_ADDR_OFFSET);
}

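/*
 * Example: with a 36-bit physical address such as 0x2_3000_0000, the low
 * 32 bits live in SPRD_DMA_CHN_SRC_ADDR/DES_ADDR while bits [35:32] are
 * kept in bits [31:28] of the corresponding wrap register, so the two
 * helpers above reassemble the address as addr | (addr_high << 4).
 */
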
static enum sprd_dma_int_type sprd_dma_get_int_type(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 intc_sts = readl(schan->chn_base + SPRD_DMA_CHN_INTC) &
		       SPRD_DMA_CHN_INT_STS;

	switch (intc_sts) {
	case SPRD_DMA_CFGERR_INT_STS:
		return SPRD_DMA_CFGERR_INT;

	case SPRD_DMA_LIST_INT_STS:
		return SPRD_DMA_LIST_INT;

	case SPRD_DMA_TRSC_INT_STS:
		return SPRD_DMA_TRANS_INT;

	case SPRD_DMA_BLK_INT_STS:
		return SPRD_DMA_BLK_INT;

	case SPRD_DMA_FRAG_INT_STS:
		return SPRD_DMA_FRAG_INT;

	default:
		dev_warn(sdev->dma_dev.dev, "incorrect dma interrupt type\n");
		return SPRD_DMA_NO_INT;
	}
}

static enum sprd_dma_req_mode sprd_dma_get_req_type(struct sprd_dma_chn *schan)
{
	u32 frag_reg = readl(schan->chn_base + SPRD_DMA_CHN_FRG_LEN);

	return (frag_reg >> SPRD_DMA_REQ_MODE_OFFSET) & SPRD_DMA_REQ_MODE_MASK;
}

static int sprd_dma_set_2stage_config(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 val, chn = schan->chn_num + 1;

	switch (schan->chn_mode) {
	case SPRD_DMA_SRC_CHN0:
		val = chn & SPRD_DMA_GLB_SRC_CHN_MASK;
		val |= BIT(schan->trg_mode - 1) << SPRD_DMA_GLB_TRG_OFFSET;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_SRC_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP1, val, val);
		break;

	case SPRD_DMA_SRC_CHN1:
		val = chn & SPRD_DMA_GLB_SRC_CHN_MASK;
		val |= BIT(schan->trg_mode - 1) << SPRD_DMA_GLB_TRG_OFFSET;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_SRC_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP2, val, val);
		break;

	case SPRD_DMA_DST_CHN0:
		val = (chn << SPRD_DMA_GLB_DEST_CHN_OFFSET) &
			SPRD_DMA_GLB_DEST_CHN_MASK;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_DEST_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP1, val, val);
		break;

	case SPRD_DMA_DST_CHN1:
		val = (chn << SPRD_DMA_GLB_DEST_CHN_OFFSET) &
			SPRD_DMA_GLB_DEST_CHN_MASK;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_DEST_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP2, val, val);
		break;

	default:
		dev_err(sdev->dma_dev.dev, "invalid channel mode setting %d\n",
			schan->chn_mode);
		return -EINVAL;
	}

	return 0;
}

static void sprd_dma_set_chn_config(struct sprd_dma_chn *schan,
				    struct sprd_dma_desc *sdesc)
{
	struct sprd_dma_chn_hw *cfg = &sdesc->chn_hw;

	writel(cfg->pause, schan->chn_base + SPRD_DMA_CHN_PAUSE);
	writel(cfg->cfg, schan->chn_base + SPRD_DMA_CHN_CFG);
	writel(cfg->intc, schan->chn_base + SPRD_DMA_CHN_INTC);
	writel(cfg->src_addr, schan->chn_base + SPRD_DMA_CHN_SRC_ADDR);
	writel(cfg->des_addr, schan->chn_base + SPRD_DMA_CHN_DES_ADDR);
	writel(cfg->frg_len, schan->chn_base + SPRD_DMA_CHN_FRG_LEN);
	writel(cfg->blk_len, schan->chn_base + SPRD_DMA_CHN_BLK_LEN);
	writel(cfg->trsc_len, schan->chn_base + SPRD_DMA_CHN_TRSC_LEN);
	writel(cfg->trsf_step, schan->chn_base + SPRD_DMA_CHN_TRSF_STEP);
	writel(cfg->wrap_ptr, schan->chn_base + SPRD_DMA_CHN_WARP_PTR);
	writel(cfg->wrap_to, schan->chn_base + SPRD_DMA_CHN_WARP_TO);
	writel(cfg->llist_ptr, schan->chn_base + SPRD_DMA_CHN_LLIST_PTR);
	writel(cfg->frg_step, schan->chn_base + SPRD_DMA_CHN_FRAG_STEP);
	writel(cfg->src_blk_step, schan->chn_base + SPRD_DMA_CHN_SRC_BLK_STEP);
	writel(cfg->des_blk_step, schan->chn_base + SPRD_DMA_CHN_DES_BLK_STEP);
	writel(cfg->req, schan->chn_base + SPRD_DMA_CHN_REQ);
}

static void sprd_dma_start(struct sprd_dma_chn *schan)
{
	struct virt_dma_desc *vd = vchan_next_desc(&schan->vc);

	if (!vd)
		return;

	schan->cur_desc = to_sprd_dma_desc(vd);

	/*
	 * Set the 2-stage configuration if the channel starts a 2-stage
	 * transfer.
	 */
	if (schan->chn_mode && sprd_dma_set_2stage_config(schan))
		return;

	/*
	 * Copy the DMA configuration from the DMA descriptor to this hardware
	 * channel.
	 */
	sprd_dma_set_chn_config(schan, schan->cur_desc);
	sprd_dma_set_uid(schan);
	sprd_dma_enable_chn(schan);

	if (schan->dev_id == SPRD_DMA_SOFTWARE_UID &&
	    schan->chn_mode != SPRD_DMA_DST_CHN0 &&
	    schan->chn_mode != SPRD_DMA_DST_CHN1)
		sprd_dma_soft_request(schan);
}

static void sprd_dma_stop(struct sprd_dma_chn *schan)
{
	sprd_dma_stop_and_disable(schan);
	sprd_dma_unset_uid(schan);
	sprd_dma_clear_int(schan);
	schan->cur_desc = NULL;
}

static bool sprd_dma_check_trans_done(struct sprd_dma_desc *sdesc,
				      enum sprd_dma_int_type int_type,
				      enum sprd_dma_req_mode req_mode)
{
	if (int_type == SPRD_DMA_NO_INT)
		return false;

	if (int_type >= req_mode + 1)
		return true;
	else
		return false;
}

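/*
 * The comparison above relies on the ordering of the request modes and
 * interrupt types declared in include/linux/dma/sprd-dma.h: for example,
 * a descriptor issued with SPRD_DMA_BLK_REQ (1) is treated as done once
 * the raised interrupt is SPRD_DMA_BLK_INT (2) or any later type.
 */
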
static irqreturn_t dma_irq_handle(int irq, void *dev_id)
{
	struct sprd_dma_dev *sdev = (struct sprd_dma_dev *)dev_id;
	u32 irq_status = readl(sdev->glb_base + SPRD_DMA_GLB_INT_MSK_STS);
	struct sprd_dma_chn *schan;
	struct sprd_dma_desc *sdesc;
	enum sprd_dma_req_mode req_type;
	enum sprd_dma_int_type int_type;
	bool trans_done = false, cyclic = false;
	u32 i;

	while (irq_status) {
		i = __ffs(irq_status);
		irq_status &= (irq_status - 1);
		schan = &sdev->channels[i];

		spin_lock(&schan->vc.lock);

		sdesc = schan->cur_desc;
		if (!sdesc) {
			spin_unlock(&schan->vc.lock);
			return IRQ_HANDLED;
		}

		int_type = sprd_dma_get_int_type(schan);
		req_type = sprd_dma_get_req_type(schan);
		sprd_dma_clear_int(schan);

		/* cyclic mode schedule callback */
		cyclic = schan->linklist.phy_addr ? true : false;
		if (cyclic == true) {
			vchan_cyclic_callback(&sdesc->vd);
		} else {
			/* Check if the dma request descriptor is done. */
			trans_done = sprd_dma_check_trans_done(sdesc, int_type,
							       req_type);
			if (trans_done == true) {
				vchan_cookie_complete(&sdesc->vd);
				schan->cur_desc = NULL;
				sprd_dma_start(schan);
			}
		}
		spin_unlock(&schan->vc.lock);
	}

	return IRQ_HANDLED;
}

static int sprd_dma_alloc_chan_resources(struct dma_chan *chan)
{
	return pm_runtime_get_sync(chan->device->dev);
}

static void sprd_dma_free_chan_resources(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct virt_dma_desc *cur_vd = NULL;
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	if (schan->cur_desc)
		cur_vd = &schan->cur_desc->vd;
	sprd_dma_stop(schan);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	if (cur_vd)
		sprd_dma_free_desc(cur_vd);

	vchan_free_chan_resources(&schan->vc);
	pm_runtime_put(chan->device->dev);
}

static enum dma_status sprd_dma_tx_status(struct dma_chan *chan,
					  dma_cookie_t cookie,
					  struct dma_tx_state *txstate)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct virt_dma_desc *vd;
	unsigned long flags;
	enum dma_status ret;
	u32 pos = 0;

	ret = dma_cookie_status(chan, cookie, txstate);
	if (ret == DMA_COMPLETE || !txstate)
		return ret;

	spin_lock_irqsave(&schan->vc.lock, flags);
	vd = vchan_find_desc(&schan->vc, cookie);
	if (vd) {
		struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);
		struct sprd_dma_chn_hw *hw = &sdesc->chn_hw;

		if (hw->trsc_len > 0)
			pos = hw->trsc_len;
		else if (hw->blk_len > 0)
			pos = hw->blk_len;
		else if (hw->frg_len > 0)
			pos = hw->frg_len;
		else
			pos = 0;
	} else if (schan->cur_desc && schan->cur_desc->vd.tx.cookie == cookie) {
		struct sprd_dma_desc *sdesc = schan->cur_desc;

		if (sdesc->dir == DMA_DEV_TO_MEM)
			pos = sprd_dma_get_dst_addr(schan);
		else
			pos = sprd_dma_get_src_addr(schan);
	} else {
		pos = 0;
	}
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	dma_set_residue(txstate, pos);
	return ret;
}

static void sprd_dma_issue_pending(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	if (vchan_issue_pending(&schan->vc) && !schan->cur_desc)
		sprd_dma_start(schan);
	spin_unlock_irqrestore(&schan->vc.lock, flags);
}

static int sprd_dma_get_datawidth(enum dma_slave_buswidth buswidth)
{
	switch (buswidth) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return ffs(buswidth) - 1;

	default:
		return -EINVAL;
	}
}

static int sprd_dma_get_step(enum dma_slave_buswidth buswidth)
{
	switch (buswidth) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return buswidth;

	default:
		return -EINVAL;
	}
}

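/*
 * For the supported power-of-two bus widths, ffs(buswidth) - 1 yields the
 * log2 encoding the hardware expects (1 byte -> 0, ..., 8 bytes -> 3),
 * while the address step is simply the bus width in bytes, matching the
 * SPRD_DMA_BYTE_STEP/SHORT_STEP/WORD_STEP/DWORD_STEP defines above.
 */
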
static int sprd_dma_fill_desc(struct dma_chan *chan,
			      struct sprd_dma_chn_hw *hw,
			      unsigned int sglen, int sg_index,
			      dma_addr_t src, dma_addr_t dst, u32 len,
			      enum dma_transfer_direction dir,
			      unsigned long flags,
			      struct dma_slave_config *slave_cfg)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(chan);
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	enum sprd_dma_chn_mode chn_mode = schan->chn_mode;
	u32 req_mode = (flags >> SPRD_DMA_REQ_SHIFT) & SPRD_DMA_REQ_MODE_MASK;
	u32 int_mode = flags & SPRD_DMA_INT_MASK;
	int src_datawidth, dst_datawidth, src_step, dst_step;
	u32 temp, fix_mode = 0, fix_en = 0;
	phys_addr_t llist_ptr;

	if (dir == DMA_MEM_TO_DEV) {
		src_step = sprd_dma_get_step(slave_cfg->src_addr_width);
		if (src_step < 0) {
			dev_err(sdev->dma_dev.dev, "invalid source step\n");
			return src_step;
		}

		/*
		 * For a 2-stage transfer, the destination channel step cannot
		 * be 0, since the destination device is the AON IRAM.
		 */
		if (chn_mode == SPRD_DMA_DST_CHN0 ||
		    chn_mode == SPRD_DMA_DST_CHN1)
			dst_step = src_step;
		else
			dst_step = SPRD_DMA_NONE_STEP;
	} else {
		dst_step = sprd_dma_get_step(slave_cfg->dst_addr_width);
		if (dst_step < 0) {
			dev_err(sdev->dma_dev.dev, "invalid destination step\n");
			return dst_step;
		}
		src_step = SPRD_DMA_NONE_STEP;
	}

	src_datawidth = sprd_dma_get_datawidth(slave_cfg->src_addr_width);
	if (src_datawidth < 0) {
		dev_err(sdev->dma_dev.dev, "invalid source datawidth\n");
		return src_datawidth;
	}

	dst_datawidth = sprd_dma_get_datawidth(slave_cfg->dst_addr_width);
	if (dst_datawidth < 0) {
		dev_err(sdev->dma_dev.dev, "invalid destination datawidth\n");
		return dst_datawidth;
	}

	if (slave_cfg->slave_id)
		schan->dev_id = slave_cfg->slave_id;

	hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET;

	/*
	 * wrap_ptr and wrap_to hold the high 4 bits of the source and
	 * destination addresses.
	 */
	hw->wrap_ptr = (src >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK;
	hw->wrap_to = (dst >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK;
	hw->src_addr = src & SPRD_DMA_LOW_ADDR_MASK;
	hw->des_addr = dst & SPRD_DMA_LOW_ADDR_MASK;

	/*
	 * If the src step and dst step are both 0 or both non-zero, the fix
	 * mode cannot be enabled. If exactly one of them is 0, the fix mode
	 * can be enabled.
	 */
	if ((src_step != 0 && dst_step != 0) || (src_step | dst_step) == 0) {
		fix_en = 0;
	} else {
		fix_en = 1;
		if (src_step)
			fix_mode = 1;
		else
			fix_mode = 0;
	}

	hw->intc = int_mode | SPRD_DMA_CFG_ERR_INT_EN;

	temp = src_datawidth << SPRD_DMA_SRC_DATAWIDTH_OFFSET;
	temp |= dst_datawidth << SPRD_DMA_DES_DATAWIDTH_OFFSET;
	temp |= req_mode << SPRD_DMA_REQ_MODE_OFFSET;
	temp |= fix_mode << SPRD_DMA_FIX_SEL_OFFSET;
	temp |= fix_en << SPRD_DMA_FIX_EN_OFFSET;
	temp |= schan->linklist.wrap_addr ?
		SPRD_DMA_WRAP_EN | SPRD_DMA_WRAP_SEL_DEST : 0;
	temp |= slave_cfg->src_maxburst & SPRD_DMA_FRG_LEN_MASK;
	hw->frg_len = temp;

	hw->blk_len = slave_cfg->src_maxburst & SPRD_DMA_BLK_LEN_MASK;
	hw->trsc_len = len & SPRD_DMA_TRSC_LEN_MASK;

	temp = (dst_step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_DEST_TRSF_STEP_OFFSET;
	temp |= (src_step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_SRC_TRSF_STEP_OFFSET;
	hw->trsf_step = temp;

	/* link-list configuration */
	if (schan->linklist.phy_addr) {
		hw->cfg |= SPRD_DMA_LINKLIST_EN;

		/* link-list index */
		temp = sglen ? (sg_index + 1) % sglen : 0;

		/* Next link-list configuration's physical address offset */
		temp = temp * sizeof(*hw) + SPRD_DMA_CHN_SRC_ADDR;
		/*
		 * Set the link-list pointer to the next link-list
		 * configuration's physical address.
		 */
		llist_ptr = schan->linklist.phy_addr + temp;
		hw->llist_ptr = lower_32_bits(llist_ptr);
		hw->src_blk_step = (upper_32_bits(llist_ptr) << SPRD_DMA_LLIST_HIGH_SHIFT) &
			SPRD_DMA_LLIST_HIGH_MASK;

		if (schan->linklist.wrap_addr) {
			hw->wrap_ptr |= schan->linklist.wrap_addr &
				SPRD_DMA_WRAP_ADDR_MASK;
			hw->wrap_to |= dst & SPRD_DMA_WRAP_ADDR_MASK;
		}
	} else {
		hw->llist_ptr = 0;
		hw->src_blk_step = 0;
	}

	hw->frg_step = 0;
	hw->des_blk_step = 0;
	return 0;
}

static int sprd_dma_fill_linklist_desc(struct dma_chan *chan,
				       unsigned int sglen, int sg_index,
				       dma_addr_t src, dma_addr_t dst, u32 len,
				       enum dma_transfer_direction dir,
				       unsigned long flags,
				       struct dma_slave_config *slave_cfg)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct sprd_dma_chn_hw *hw;

	if (!schan->linklist.virt_addr)
		return -EINVAL;

	hw = (struct sprd_dma_chn_hw *)(schan->linklist.virt_addr +
					sg_index * sizeof(*hw));

	return sprd_dma_fill_desc(chan, hw, sglen, sg_index, src, dst, len,
				  dir, flags, slave_cfg);
}

static struct dma_async_tx_descriptor *
sprd_dma_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
			 size_t len, unsigned long flags)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct sprd_dma_desc *sdesc;
	struct sprd_dma_chn_hw *hw;
	enum sprd_dma_datawidth datawidth;
	u32 step, temp;

	sdesc = kzalloc(sizeof(*sdesc), GFP_NOWAIT);
	if (!sdesc)
		return NULL;

	hw = &sdesc->chn_hw;

	hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET;
	hw->intc = SPRD_DMA_TRANS_INT | SPRD_DMA_CFG_ERR_INT_EN;
	hw->src_addr = src & SPRD_DMA_LOW_ADDR_MASK;
	hw->des_addr = dest & SPRD_DMA_LOW_ADDR_MASK;
	hw->wrap_ptr = (src >> SPRD_DMA_HIGH_ADDR_OFFSET) &
		SPRD_DMA_HIGH_ADDR_MASK;
	hw->wrap_to = (dest >> SPRD_DMA_HIGH_ADDR_OFFSET) &
		SPRD_DMA_HIGH_ADDR_MASK;

	if (IS_ALIGNED(len, 8)) {
		datawidth = SPRD_DMA_DATAWIDTH_8_BYTES;
		step = SPRD_DMA_DWORD_STEP;
	} else if (IS_ALIGNED(len, 4)) {
		datawidth = SPRD_DMA_DATAWIDTH_4_BYTES;
		step = SPRD_DMA_WORD_STEP;
	} else if (IS_ALIGNED(len, 2)) {
		datawidth = SPRD_DMA_DATAWIDTH_2_BYTES;
		step = SPRD_DMA_SHORT_STEP;
	} else {
		datawidth = SPRD_DMA_DATAWIDTH_1_BYTE;
		step = SPRD_DMA_BYTE_STEP;
	}

	temp = datawidth << SPRD_DMA_SRC_DATAWIDTH_OFFSET;
	temp |= datawidth << SPRD_DMA_DES_DATAWIDTH_OFFSET;
	temp |= SPRD_DMA_TRANS_REQ << SPRD_DMA_REQ_MODE_OFFSET;
	temp |= len & SPRD_DMA_FRG_LEN_MASK;
	hw->frg_len = temp;

	hw->blk_len = len & SPRD_DMA_BLK_LEN_MASK;
	hw->trsc_len = len & SPRD_DMA_TRSC_LEN_MASK;

	temp = (step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_DEST_TRSF_STEP_OFFSET;
	temp |= (step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_SRC_TRSF_STEP_OFFSET;
	hw->trsf_step = temp;

	return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
}

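/*
 * Illustrative usage (hypothetical client code, not part of this driver):
 * memcpy users reach the callback above through the generic dmaengine API,
 * e.g.
 *
 *	dma_cap_mask_t mask;
 *
 *	dma_cap_zero(mask);
 *	dma_cap_set(DMA_MEMCPY, mask);
 *	chan = dma_request_chan_by_mask(&mask);
 *	txd = dmaengine_prep_dma_memcpy(chan, dst, src, len, DMA_CTRL_ACK);
 *	cookie = dmaengine_submit(txd);
 *	dma_async_issue_pending(chan);
 */
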
static struct dma_async_tx_descriptor *
sprd_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
		       unsigned int sglen, enum dma_transfer_direction dir,
		       unsigned long flags, void *context)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct dma_slave_config *slave_cfg = &schan->slave_cfg;
	dma_addr_t src = 0, dst = 0;
	dma_addr_t start_src = 0, start_dst = 0;
	struct sprd_dma_desc *sdesc;
	struct scatterlist *sg;
	u32 len = 0;
	int ret, i;

	if (!is_slave_direction(dir))
		return NULL;

	if (context) {
		struct sprd_dma_linklist *ll_cfg =
			(struct sprd_dma_linklist *)context;

		schan->linklist.phy_addr = ll_cfg->phy_addr;
		schan->linklist.virt_addr = ll_cfg->virt_addr;
		schan->linklist.wrap_addr = ll_cfg->wrap_addr;
	} else {
		schan->linklist.phy_addr = 0;
		schan->linklist.virt_addr = 0;
		schan->linklist.wrap_addr = 0;
	}

	/*
	 * Set channel mode, interrupt mode and trigger mode for 2-stage
	 * transfer.
	 */
	schan->chn_mode =
		(flags >> SPRD_DMA_CHN_MODE_SHIFT) & SPRD_DMA_CHN_MODE_MASK;
	schan->trg_mode =
		(flags >> SPRD_DMA_TRG_MODE_SHIFT) & SPRD_DMA_TRG_MODE_MASK;
	schan->int_type = flags & SPRD_DMA_INT_TYPE_MASK;

	sdesc = kzalloc(sizeof(*sdesc), GFP_NOWAIT);
	if (!sdesc)
		return NULL;

	sdesc->dir = dir;

	for_each_sg(sgl, sg, sglen, i) {
		len = sg_dma_len(sg);

		if (dir == DMA_MEM_TO_DEV) {
			src = sg_dma_address(sg);
			dst = slave_cfg->dst_addr;
		} else {
			src = slave_cfg->src_addr;
			dst = sg_dma_address(sg);
		}

		if (!i) {
			start_src = src;
			start_dst = dst;
		}

		/*
		 * The link-list mode needs at least 2 link-list
		 * configurations. If there is only one sg, it doesn't
		 * need to fill the link-list configuration.
		 */
		if (sglen < 2)
			break;

		ret = sprd_dma_fill_linklist_desc(chan, sglen, i, src, dst, len,
						  dir, flags, slave_cfg);
		if (ret) {
			kfree(sdesc);
			return NULL;
		}
	}

	ret = sprd_dma_fill_desc(chan, &sdesc->chn_hw, 0, 0, start_src,
				 start_dst, len, dir, flags, slave_cfg);
	if (ret) {
		kfree(sdesc);
		return NULL;
	}

	return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
}

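/*
 * Illustrative usage note (client side): slave transfers go through the
 * standard dmaengine slave API. The Spreadtrum-specific parts are the
 * 'flags' word, which packs the channel/trigger/interrupt modes via the
 * SPRD_DMA_*_SHIFT macros from include/linux/dma/sprd-dma.h, and the
 * normally-unused 'context' argument, which may carry a struct
 * sprd_dma_linklist to enable the link-list (cyclic) mode handled above.
 */
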
static int sprd_dma_slave_config(struct dma_chan *chan,
				 struct dma_slave_config *config)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct dma_slave_config *slave_cfg = &schan->slave_cfg;

	memcpy(slave_cfg, config, sizeof(*config));
	return 0;
}

static int sprd_dma_pause(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	sprd_dma_pause_resume(schan, true);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	return 0;
}

static int sprd_dma_resume(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	sprd_dma_pause_resume(schan, false);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	return 0;
}

static int sprd_dma_terminate_all(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct virt_dma_desc *cur_vd = NULL;
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&schan->vc.lock, flags);
	if (schan->cur_desc)
		cur_vd = &schan->cur_desc->vd;

	sprd_dma_stop(schan);

	vchan_get_all_descriptors(&schan->vc, &head);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	if (cur_vd)
		sprd_dma_free_desc(cur_vd);

	vchan_dma_desc_free_list(&schan->vc, &head);
	return 0;
}

static void sprd_dma_free_desc(struct virt_dma_desc *vd)
{
	struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);

	kfree(sdesc);
}

static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	u32 slave_id = *(u32 *)param;

	schan->dev_id = slave_id;
	return true;
}

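/*
 * Channel requests arrive here via of_dma_simple_xlate() and the
 * sprd_dma_info filter registered in probe below, so the cell from a
 * client's "dmas" DT property is passed in as 'param' and latched as the
 * channel's hardware request (slave) id.
 */
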
static int sprd_dma_probe(struct platform_device *pdev)
{
	struct device_node *np = pdev->dev.of_node;
	struct sprd_dma_dev *sdev;
	struct sprd_dma_chn *dma_chn;
	u32 chn_count;
	int ret, i;

	ret = device_property_read_u32(&pdev->dev, "#dma-channels", &chn_count);
	if (ret) {
		dev_err(&pdev->dev, "get dma channels count failed\n");
		return ret;
	}

	sdev = devm_kzalloc(&pdev->dev,
			    struct_size(sdev, channels, chn_count),
			    GFP_KERNEL);
	if (!sdev)
		return -ENOMEM;

	sdev->clk = devm_clk_get(&pdev->dev, "enable");
	if (IS_ERR(sdev->clk)) {
		dev_err(&pdev->dev, "get enable clock failed\n");
		return PTR_ERR(sdev->clk);
	}

	/* ashb clock is optional for AGCP DMA */
	sdev->ashb_clk = devm_clk_get(&pdev->dev, "ashb_eb");
	if (IS_ERR(sdev->ashb_clk))
		dev_warn(&pdev->dev, "no optional ashb eb clock\n");

	/*
	 * We have three DMA controllers: AP DMA, AON DMA and AGCP DMA. The
	 * AGCP DMA controller may run without requesting the irq, which saves
	 * system power by not waking the system up on DMA interrupts. Thus
	 * the DMA interrupts property is optional.
	 */
	sdev->irq = platform_get_irq(pdev, 0);
	if (sdev->irq > 0) {
		ret = devm_request_irq(&pdev->dev, sdev->irq, dma_irq_handle,
				       0, "sprd_dma", (void *)sdev);
		if (ret < 0) {
			dev_err(&pdev->dev, "request dma irq failed\n");
			return ret;
		}
	} else {
		dev_warn(&pdev->dev, "no interrupts for the dma controller\n");
	}

	sdev->glb_base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(sdev->glb_base))
		return PTR_ERR(sdev->glb_base);

	dma_cap_set(DMA_MEMCPY, sdev->dma_dev.cap_mask);
	sdev->total_chns = chn_count;
	sdev->dma_dev.chancnt = chn_count;
	INIT_LIST_HEAD(&sdev->dma_dev.channels);
	INIT_LIST_HEAD(&sdev->dma_dev.global_node);
	sdev->dma_dev.dev = &pdev->dev;
	sdev->dma_dev.device_alloc_chan_resources = sprd_dma_alloc_chan_resources;
	sdev->dma_dev.device_free_chan_resources = sprd_dma_free_chan_resources;
	sdev->dma_dev.device_tx_status = sprd_dma_tx_status;
	sdev->dma_dev.device_issue_pending = sprd_dma_issue_pending;
	sdev->dma_dev.device_prep_dma_memcpy = sprd_dma_prep_dma_memcpy;
	sdev->dma_dev.device_prep_slave_sg = sprd_dma_prep_slave_sg;
	sdev->dma_dev.device_config = sprd_dma_slave_config;
	sdev->dma_dev.device_pause = sprd_dma_pause;
	sdev->dma_dev.device_resume = sprd_dma_resume;
	sdev->dma_dev.device_terminate_all = sprd_dma_terminate_all;

	for (i = 0; i < chn_count; i++) {
		dma_chn = &sdev->channels[i];
		dma_chn->chn_num = i;
		dma_chn->cur_desc = NULL;
		/* get each channel's registers base address. */
		dma_chn->chn_base = sdev->glb_base + SPRD_DMA_CHN_REG_OFFSET +
				    SPRD_DMA_CHN_REG_LENGTH * i;

		dma_chn->vc.desc_free = sprd_dma_free_desc;
		vchan_init(&dma_chn->vc, &sdev->dma_dev);
	}

	platform_set_drvdata(pdev, sdev);
	ret = sprd_dma_enable(sdev);
	if (ret)
		return ret;

	pm_runtime_set_active(&pdev->dev);
	pm_runtime_enable(&pdev->dev);

	ret = pm_runtime_get_sync(&pdev->dev);
	if (ret < 0)
		goto err_rpm;

	ret = dma_async_device_register(&sdev->dma_dev);
	if (ret < 0) {
		dev_err(&pdev->dev, "register dma device failed:%d\n", ret);
		goto err_register;
	}

	sprd_dma_info.dma_cap = sdev->dma_dev.cap_mask;
	ret = of_dma_controller_register(np, of_dma_simple_xlate,
					 &sprd_dma_info);
	if (ret)
		goto err_of_register;

	pm_runtime_put(&pdev->dev);
	return 0;

err_of_register:
	dma_async_device_unregister(&sdev->dma_dev);
err_register:
	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
err_rpm:
	sprd_dma_disable(sdev);
	return ret;
}

static int sprd_dma_remove(struct platform_device *pdev)
{
	struct sprd_dma_dev *sdev = platform_get_drvdata(pdev);
	struct sprd_dma_chn *c, *cn;
	int ret;

	ret = pm_runtime_get_sync(&pdev->dev);
	if (ret < 0)
		return ret;

	/* explicitly free the irq */
	if (sdev->irq > 0)
		devm_free_irq(&pdev->dev, sdev->irq, sdev);

	list_for_each_entry_safe(c, cn, &sdev->dma_dev.channels,
				 vc.chan.device_node) {
		list_del(&c->vc.chan.device_node);
		tasklet_kill(&c->vc.task);
	}

	of_dma_controller_free(pdev->dev.of_node);
	dma_async_device_unregister(&sdev->dma_dev);
	sprd_dma_disable(sdev);

	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
	return 0;
}

static const struct of_device_id sprd_dma_match[] = {
	{ .compatible = "sprd,sc9860-dma", },
	{},
};

static int __maybe_unused sprd_dma_runtime_suspend(struct device *dev)
{
	struct sprd_dma_dev *sdev = dev_get_drvdata(dev);

	sprd_dma_disable(sdev);
	return 0;
}

static int __maybe_unused sprd_dma_runtime_resume(struct device *dev)
{
	struct sprd_dma_dev *sdev = dev_get_drvdata(dev);
	int ret;

	ret = sprd_dma_enable(sdev);
	if (ret)
		dev_err(sdev->dma_dev.dev, "enable dma failed\n");

	return ret;
}

static const struct dev_pm_ops sprd_dma_pm_ops = {
	SET_RUNTIME_PM_OPS(sprd_dma_runtime_suspend,
			   sprd_dma_runtime_resume,
			   NULL)
};

static struct platform_driver sprd_dma_driver = {
	.probe = sprd_dma_probe,
	.remove = sprd_dma_remove,
	.driver = {
		.name = "sprd-dma",
		.of_match_table = sprd_dma_match,
		.pm = &sprd_dma_pm_ops,
	},
};

module_platform_driver(sprd_dma_driver);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("DMA driver for Spreadtrum");
MODULE_AUTHOR("Baolin Wang <baolin.wang@spreadtrum.com>");
MODULE_AUTHOR("Eric Long <eric.long@spreadtrum.com>");
MODULE_ALIAS("platform:sprd-dma");