// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
 *
 * Driver for STM32 MDMA controller
 *
 * Inspired by stm32-dma.c and dma-jz4780.c
 */
#include <linux/bitfield.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/iopoll.h>
#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/log2.h>
#include <linux/module.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/slab.h>

#include "../virt-dma.h"
#define STM32_MDMA_GISR0		0x0000 /* MDMA Int Status Reg 1 */

/* MDMA Channel x interrupt/status register */
#define STM32_MDMA_CISR(x)		(0x40 + 0x40 * (x)) /* x = 0..62 */
#define STM32_MDMA_CISR_CRQA		BIT(16)
#define STM32_MDMA_CISR_TCIF		BIT(4)
#define STM32_MDMA_CISR_BTIF		BIT(3)
#define STM32_MDMA_CISR_BRTIF		BIT(2)
#define STM32_MDMA_CISR_CTCIF		BIT(1)
#define STM32_MDMA_CISR_TEIF		BIT(0)

/* MDMA Channel x interrupt flag clear register */
#define STM32_MDMA_CIFCR(x)		(0x44 + 0x40 * (x))
#define STM32_MDMA_CIFCR_CLTCIF		BIT(4)
#define STM32_MDMA_CIFCR_CBTIF		BIT(3)
#define STM32_MDMA_CIFCR_CBRTIF		BIT(2)
#define STM32_MDMA_CIFCR_CCTCIF		BIT(1)
#define STM32_MDMA_CIFCR_CTEIF		BIT(0)
#define STM32_MDMA_CIFCR_CLEAR_ALL	(STM32_MDMA_CIFCR_CLTCIF \
					| STM32_MDMA_CIFCR_CBTIF \
					| STM32_MDMA_CIFCR_CBRTIF \
					| STM32_MDMA_CIFCR_CCTCIF \
					| STM32_MDMA_CIFCR_CTEIF)

/* MDMA Channel x error status register */
#define STM32_MDMA_CESR(x)		(0x48 + 0x40 * (x))
#define STM32_MDMA_CESR_BSE		BIT(11)
#define STM32_MDMA_CESR_ASR		BIT(10)
#define STM32_MDMA_CESR_TEMD		BIT(9)
#define STM32_MDMA_CESR_TELD		BIT(8)
#define STM32_MDMA_CESR_TED		BIT(7)
#define STM32_MDMA_CESR_TEA_MASK	GENMASK(6, 0)

/* MDMA Channel x control register */
#define STM32_MDMA_CCR(x)		(0x4C + 0x40 * (x))
#define STM32_MDMA_CCR_SWRQ		BIT(16)
#define STM32_MDMA_CCR_WEX		BIT(14)
#define STM32_MDMA_CCR_HEX		BIT(13)
#define STM32_MDMA_CCR_BEX		BIT(12)
#define STM32_MDMA_CCR_SM		BIT(8)
#define STM32_MDMA_CCR_PL_MASK		GENMASK(7, 6)
#define STM32_MDMA_CCR_PL(n)		FIELD_PREP(STM32_MDMA_CCR_PL_MASK, (n))
#define STM32_MDMA_CCR_TCIE		BIT(5)
#define STM32_MDMA_CCR_BTIE		BIT(4)
#define STM32_MDMA_CCR_BRTIE		BIT(3)
#define STM32_MDMA_CCR_CTCIE		BIT(2)
#define STM32_MDMA_CCR_TEIE		BIT(1)
#define STM32_MDMA_CCR_EN		BIT(0)
#define STM32_MDMA_CCR_IRQ_MASK		(STM32_MDMA_CCR_TCIE \
					| STM32_MDMA_CCR_BTIE \
					| STM32_MDMA_CCR_BRTIE \
					| STM32_MDMA_CCR_CTCIE \
					| STM32_MDMA_CCR_TEIE)

/* MDMA Channel x transfer configuration register */
#define STM32_MDMA_CTCR(x)		(0x50 + 0x40 * (x))
#define STM32_MDMA_CTCR_BWM		BIT(31)
#define STM32_MDMA_CTCR_SWRM		BIT(30)
#define STM32_MDMA_CTCR_TRGM_MSK	GENMASK(29, 28)
#define STM32_MDMA_CTCR_TRGM(n)		FIELD_PREP(STM32_MDMA_CTCR_TRGM_MSK, (n))
#define STM32_MDMA_CTCR_TRGM_GET(n)	FIELD_GET(STM32_MDMA_CTCR_TRGM_MSK, (n))
#define STM32_MDMA_CTCR_PAM_MASK	GENMASK(27, 26)
#define STM32_MDMA_CTCR_PAM(n)		FIELD_PREP(STM32_MDMA_CTCR_PAM_MASK, (n))
#define STM32_MDMA_CTCR_PKE		BIT(25)
#define STM32_MDMA_CTCR_TLEN_MSK	GENMASK(24, 18)
#define STM32_MDMA_CTCR_TLEN(n)		FIELD_PREP(STM32_MDMA_CTCR_TLEN_MSK, (n))
#define STM32_MDMA_CTCR_TLEN_GET(n)	FIELD_GET(STM32_MDMA_CTCR_TLEN_MSK, (n))
#define STM32_MDMA_CTCR_LEN2_MSK	GENMASK(25, 18)
#define STM32_MDMA_CTCR_LEN2(n)		FIELD_PREP(STM32_MDMA_CTCR_LEN2_MSK, (n))
#define STM32_MDMA_CTCR_LEN2_GET(n)	FIELD_GET(STM32_MDMA_CTCR_LEN2_MSK, (n))
#define STM32_MDMA_CTCR_DBURST_MASK	GENMASK(17, 15)
#define STM32_MDMA_CTCR_DBURST(n)	FIELD_PREP(STM32_MDMA_CTCR_DBURST_MASK, (n))
#define STM32_MDMA_CTCR_SBURST_MASK	GENMASK(14, 12)
#define STM32_MDMA_CTCR_SBURST(n)	FIELD_PREP(STM32_MDMA_CTCR_SBURST_MASK, (n))
#define STM32_MDMA_CTCR_DINCOS_MASK	GENMASK(11, 10)
#define STM32_MDMA_CTCR_DINCOS(n)	FIELD_PREP(STM32_MDMA_CTCR_DINCOS_MASK, (n))
#define STM32_MDMA_CTCR_SINCOS_MASK	GENMASK(9, 8)
#define STM32_MDMA_CTCR_SINCOS(n)	FIELD_PREP(STM32_MDMA_CTCR_SINCOS_MASK, (n))
#define STM32_MDMA_CTCR_DSIZE_MASK	GENMASK(7, 6)
#define STM32_MDMA_CTCR_DSIZE(n)	FIELD_PREP(STM32_MDMA_CTCR_DSIZE_MASK, (n))
#define STM32_MDMA_CTCR_SSIZE_MASK	GENMASK(5, 4)
#define STM32_MDMA_CTCR_SSIZE(n)	FIELD_PREP(STM32_MDMA_CTCR_SSIZE_MASK, (n))
#define STM32_MDMA_CTCR_DINC_MASK	GENMASK(3, 2)
#define STM32_MDMA_CTCR_DINC(n)		FIELD_PREP(STM32_MDMA_CTCR_DINC_MASK, (n))
#define STM32_MDMA_CTCR_SINC_MASK	GENMASK(1, 0)
#define STM32_MDMA_CTCR_SINC(n)		FIELD_PREP(STM32_MDMA_CTCR_SINC_MASK, (n))
#define STM32_MDMA_CTCR_CFG_MASK	(STM32_MDMA_CTCR_SINC_MASK \
					| STM32_MDMA_CTCR_DINC_MASK \
					| STM32_MDMA_CTCR_SINCOS_MASK \
					| STM32_MDMA_CTCR_DINCOS_MASK \
					| STM32_MDMA_CTCR_LEN2_MSK \
					| STM32_MDMA_CTCR_TRGM_MSK)

/* MDMA Channel x block number of data register */
#define STM32_MDMA_CBNDTR(x)		(0x54 + 0x40 * (x))
#define STM32_MDMA_CBNDTR_BRC_MK	GENMASK(31, 20)
#define STM32_MDMA_CBNDTR_BRC(n)	FIELD_PREP(STM32_MDMA_CBNDTR_BRC_MK, (n))
#define STM32_MDMA_CBNDTR_BRC_GET(n)	FIELD_GET(STM32_MDMA_CBNDTR_BRC_MK, (n))
#define STM32_MDMA_CBNDTR_BRDUM		BIT(19)
#define STM32_MDMA_CBNDTR_BRSUM		BIT(18)
#define STM32_MDMA_CBNDTR_BNDT_MASK	GENMASK(16, 0)
#define STM32_MDMA_CBNDTR_BNDT(n)	FIELD_PREP(STM32_MDMA_CBNDTR_BNDT_MASK, (n))

/* MDMA Channel x source address register */
#define STM32_MDMA_CSAR(x)		(0x58 + 0x40 * (x))

/* MDMA Channel x destination address register */
#define STM32_MDMA_CDAR(x)		(0x5C + 0x40 * (x))

/* MDMA Channel x block repeat address update register */
#define STM32_MDMA_CBRUR(x)		(0x60 + 0x40 * (x))
#define STM32_MDMA_CBRUR_DUV_MASK	GENMASK(31, 16)
#define STM32_MDMA_CBRUR_DUV(n)		FIELD_PREP(STM32_MDMA_CBRUR_DUV_MASK, (n))
#define STM32_MDMA_CBRUR_SUV_MASK	GENMASK(15, 0)
#define STM32_MDMA_CBRUR_SUV(n)		FIELD_PREP(STM32_MDMA_CBRUR_SUV_MASK, (n))

/* MDMA Channel x link address register */
#define STM32_MDMA_CLAR(x)		(0x64 + 0x40 * (x))

/* MDMA Channel x trigger and bus selection register */
#define STM32_MDMA_CTBR(x)		(0x68 + 0x40 * (x))
#define STM32_MDMA_CTBR_DBUS		BIT(17)
#define STM32_MDMA_CTBR_SBUS		BIT(16)
#define STM32_MDMA_CTBR_TSEL_MASK	GENMASK(5, 0)
#define STM32_MDMA_CTBR_TSEL(n)		FIELD_PREP(STM32_MDMA_CTBR_TSEL_MASK, (n))

/* MDMA Channel x mask address register */
#define STM32_MDMA_CMAR(x)		(0x70 + 0x40 * (x))

/* MDMA Channel x mask data register */
#define STM32_MDMA_CMDR(x)		(0x74 + 0x40 * (x))

#define STM32_MDMA_MAX_BUF_LEN		128
#define STM32_MDMA_MAX_BLOCK_LEN	65536
#define STM32_MDMA_MAX_CHANNELS		32
#define STM32_MDMA_MAX_REQUESTS		256
#define STM32_MDMA_MAX_BURST		128
#define STM32_MDMA_VERY_HIGH_PRIORITY	0x3
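
/*
 * Each MDMA channel owns a 0x40-byte register window starting at offset
 * 0x40, which is why every per-channel macro above is computed as
 * "base + 0x40 * (x)": e.g. CISR(1) = 0x80 and CCR(2) = 0xCC.
 */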

enum stm32_mdma_trigger_mode {
	STM32_MDMA_BUFFER = 0,
	STM32_MDMA_BLOCK,
	STM32_MDMA_BLOCK_REP,
	STM32_MDMA_LINKED_LIST,
};

enum stm32_mdma_width {
	STM32_MDMA_BYTE = 0,
	STM32_MDMA_HALF_WORD,
	STM32_MDMA_WORD,
	STM32_MDMA_DOUBLE_WORD,
};

enum stm32_mdma_inc_mode {
	STM32_MDMA_FIXED = 0,
	STM32_MDMA_INC = 2,
	STM32_MDMA_DEC = 3,
};

struct stm32_mdma_chan_config {
	u32 request;
	u32 priority_level;
	u32 transfer_config;
	u32 mask_addr;
	u32 mask_data;
	bool m2m_hw; /* True when MDMA is triggered by STM32 DMA */
};

struct stm32_mdma_hwdesc {
	u32 ctcr;
	u32 cbndtr;
	u32 csar;
	u32 cdar;
	u32 cbrur;
	u32 clar;
	u32 ctbr;
	u32 dummy; /* keeps CMAR/CMDR at the same offsets as the registers */
	u32 cmar;
	u32 cmdr;
} __aligned(64);

struct stm32_mdma_desc_node {
	struct stm32_mdma_hwdesc *hwdesc;
	dma_addr_t hwdesc_phys;
};

struct stm32_mdma_desc {
	struct virt_dma_desc vdesc;
	u32 ccr;
	bool cyclic;
	u32 count;
	struct stm32_mdma_desc_node node[] __counted_by(count);
};

struct stm32_mdma_dma_config {
	u32 request;	/* STM32 DMA channel stream id, triggering MDMA */
	u32 cmar;	/* STM32 DMA interrupt flag clear register address */
	u32 cmdr;	/* STM32 DMA Transfer Complete flag */
};

struct stm32_mdma_chan {
	struct virt_dma_chan vchan;
	struct dma_pool *desc_pool;
	u32 id;
	struct stm32_mdma_desc *desc;
	u32 curr_hwdesc;
	struct dma_slave_config dma_config;
	struct stm32_mdma_chan_config chan_config;
	bool busy;
	u32 mem_burst;
	u32 mem_width;
};

struct stm32_mdma_device {
	struct dma_device ddev;
	void __iomem *base;
	struct clk *clk;
	int irq;
	u32 nr_channels;
	u32 nr_requests;
	u32 nr_ahb_addr_masks;
	u32 chan_reserved;
	struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
	u32 ahb_addr_masks[] __counted_by(nr_ahb_addr_masks);
};
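
/*
 * Note that struct stm32_mdma_hwdesc mirrors the per-channel CTCR..CMDR
 * registers in hardware order: in linked-list mode the controller reloads a
 * complete channel context from the descriptor pointed to by CLAR, so each
 * node must be laid out exactly like the channel register window.
 */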

static struct stm32_mdma_device *stm32_mdma_get_dev(
	struct stm32_mdma_chan *chan)
{
	return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
			    ddev);
}

static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
{
	return container_of(c, struct stm32_mdma_chan, vchan.chan);
}

static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
{
	return container_of(vdesc, struct stm32_mdma_desc, vdesc);
}

static struct device *chan2dev(struct stm32_mdma_chan *chan)
{
	return &chan->vchan.chan.dev->device;
}

static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
{
	return mdma_dev->ddev.dev;
}

static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
{
	return readl_relaxed(dmadev->base + reg);
}

static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
{
	writel_relaxed(val, dmadev->base + reg);
}

static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) | mask, addr);
}

static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) & ~mask, addr);
}

static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
		struct stm32_mdma_chan *chan, u32 count)
{
	struct stm32_mdma_desc *desc;
	int i;

	desc = kzalloc(struct_size(desc, node, count), GFP_NOWAIT);
	if (!desc)
		return NULL;
	desc->count = count;

	for (i = 0; i < count; i++) {
		desc->node[i].hwdesc =
			dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
				       &desc->node[i].hwdesc_phys);
		if (!desc->node[i].hwdesc)
			goto err;
	}

	return desc;

err:
	dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
	while (--i >= 0)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
{
	struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
	int i;

	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
}

static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
				enum dma_slave_buswidth width)
{
	switch (width) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return ffs(width) - 1;
	default:
		dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
			width);
		return -EINVAL;
	}
}

static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
							u32 buf_len, u32 tlen)
{
	enum dma_slave_buswidth max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;

	for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
	     max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
	     max_width >>= 1) {
		/*
		 * Address and buffer length both have to be aligned on
		 * bus width
		 */
		if ((((buf_len | addr) & (max_width - 1)) == 0) &&
		    tlen >= max_width)
			break;
	}

	return max_width;
}
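
/*
 * Example: for addr = 0x24000004 and buf_len = 12, the 8-byte check fails
 * ((12 | 0x4) & 7 != 0) but the 4-byte check passes, so the loop settles on
 * DMA_SLAVE_BUSWIDTH_4_BYTES, provided tlen is at least 4.
 */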

static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
				     enum dma_slave_buswidth width)
{
	u32 best_burst;

	best_burst = min((u32)1 << __ffs(tlen | buf_len),
			 max_burst * width) / width;

	return (best_burst > 0) ? best_burst : 1;
}
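
/*
 * Worked example: buf_len = 96 and tlen = 128 give __ffs(224) = 5, i.e. a
 * 32-byte chunk; with max_burst = 16 and a 4-byte bus width the cap is
 * 64 bytes, so the result is min(32, 64) / 4 = 8 beats per burst.
 */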

static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 ccr, cisr, id, reg;
	int ret;

	id = chan->id;
	reg = STM32_MDMA_CCR(id);

	/* Disable interrupts */
	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);

	ccr = stm32_mdma_read(dmadev, reg);
	if (ccr & STM32_MDMA_CCR_EN) {
		stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);

		/* Ensure that any ongoing transfer has been completed */
		ret = readl_relaxed_poll_timeout_atomic(
				dmadev->base + STM32_MDMA_CISR(id), cisr,
				(cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
		if (ret) {
			dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
			return -EBUSY;
		}
	}

	return 0;
}

static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 status;
	int ret;

	/* Disable DMA */
	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		return;

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status) {
		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
			__func__, status);
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
	}

	chan->busy = false;
}

static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
			       u32 ctbr_mask, u32 src_addr)
{
	u32 mask;
	int i;

	/* Check if memory device is on AHB or AXI */
	*ctbr &= ~ctbr_mask;
	mask = src_addr & 0xF0000000;
	for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
		if (mask == dmadev->ahb_addr_masks[i]) {
			*ctbr |= ctbr_mask;
			break;
		}
	}
}
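
/*
 * For example, assuming the device tree provides
 * st,ahb-addr-masks = <0x20000000 0x00000000>, a buffer at 0x20001000
 * matches the first mask (0x20001000 & 0xF0000000 == 0x20000000), so the
 * corresponding SBUS/DBUS bit is set and the AHB port is used for it.
 */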

static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
				     enum dma_transfer_direction direction,
				     u32 *mdma_ccr, u32 *mdma_ctcr,
				     u32 *mdma_ctbr, dma_addr_t addr,
				     u32 buf_len)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
	enum dma_slave_buswidth src_addr_width, dst_addr_width;
	phys_addr_t src_addr, dst_addr;
	int src_bus_width, dst_bus_width;
	u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
	u32 ccr, ctcr, ctbr, tlen;

	src_addr_width = chan->dma_config.src_addr_width;
	dst_addr_width = chan->dma_config.dst_addr_width;
	src_maxburst = chan->dma_config.src_maxburst;
	dst_maxburst = chan->dma_config.dst_maxburst;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & ~STM32_MDMA_CCR_EN;
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));

	/* Enable HW request mode */
	ctcr &= ~STM32_MDMA_CTCR_SWRM;

	/* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieved from DT */
	ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
	ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;

	/*
	 * For buffer transfer length (TLEN) we have to set
	 * the number of bytes - 1 in the CTCR register
	 */
	tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
	ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
	ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

	/* Disable Pack Enable */
	ctcr &= ~STM32_MDMA_CTCR_PKE;

	/* Check burst size constraints */
	if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
	    dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
		dev_err(chan2dev(chan),
			"burst size * bus width higher than %d bytes\n",
			STM32_MDMA_MAX_BURST);
		return -EINVAL;
	}

	if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
	    (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
		dev_err(chan2dev(chan), "burst size must be a power of 2\n");
		return -EINVAL;
	}

	/*
	 * Configure channel control:
	 * - Clear SW request as in this case this is a HW one
	 * - Clear WEX, HEX and BEX bits
	 * - Set priority level
	 */
	ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
	ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);

	/* Configure Trigger selection */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
	ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);

	switch (direction) {
	case DMA_MEM_TO_DEV:
		dst_addr = chan->dma_config.dst_addr;

		/* Set device data size */
		if (chan_config->m2m_hw)
			dst_addr_width = stm32_mdma_get_max_width(dst_addr, buf_len,
								  STM32_MDMA_MAX_BUF_LEN);
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);
		if (chan_config->m2m_hw) {
			ctcr &= ~STM32_MDMA_CTCR_DINCOS_MASK;
			ctcr |= STM32_MDMA_CTCR_DINCOS(dst_bus_width);
		}

		/* Set device burst value */
		if (chan_config->m2m_hw)
			dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;

		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		chan->mem_burst = dst_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Set memory data size */
		src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = src_addr_width;
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_SSIZE_MASK |
			  STM32_MDMA_CTCR_SINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set memory burst value */
		src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		chan->mem_burst = src_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set destination address */
		stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
		break;

	case DMA_DEV_TO_MEM:
		src_addr = chan->dma_config.src_addr;

		/* Set device data size */
		if (chan_config->m2m_hw)
			src_addr_width = stm32_mdma_get_max_width(src_addr, buf_len,
								  STM32_MDMA_MAX_BUF_LEN);
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);
		if (chan_config->m2m_hw) {
			ctcr &= ~STM32_MDMA_CTCR_SINCOS_MASK;
			ctcr |= STM32_MDMA_CTCR_SINCOS(src_bus_width);
		}

		/* Set device burst value */
		if (chan_config->m2m_hw)
			src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;

		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Set memory data size */
		dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = dst_addr_width;
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
			  STM32_MDMA_CTCR_DINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		/* Set memory burst value */
		dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set source address */
		stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
		break;

	default:
		dev_err(chan2dev(chan), "Dma direction is not supported\n");
		return -EINVAL;
	}

	*mdma_ccr = ccr;
	*mdma_ctcr = ctcr;
	*mdma_ctbr = ctbr;

	return 0;
}

static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
				   struct stm32_mdma_desc_node *node)
{
	dev_dbg(chan2dev(chan), "hwdesc: %pad\n", &node->hwdesc_phys);
	dev_dbg(chan2dev(chan), "CTCR:   0x%08x\n", node->hwdesc->ctcr);
	dev_dbg(chan2dev(chan), "CBNDTR: 0x%08x\n", node->hwdesc->cbndtr);
	dev_dbg(chan2dev(chan), "CSAR:   0x%08x\n", node->hwdesc->csar);
	dev_dbg(chan2dev(chan), "CDAR:   0x%08x\n", node->hwdesc->cdar);
	dev_dbg(chan2dev(chan), "CBRUR:  0x%08x\n", node->hwdesc->cbrur);
	dev_dbg(chan2dev(chan), "CLAR:   0x%08x\n", node->hwdesc->clar);
	dev_dbg(chan2dev(chan), "CTBR:   0x%08x\n", node->hwdesc->ctbr);
	dev_dbg(chan2dev(chan), "CMAR:   0x%08x\n", node->hwdesc->cmar);
	dev_dbg(chan2dev(chan), "CMDR:   0x%08x\n\n", node->hwdesc->cmdr);
}

static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
				    struct stm32_mdma_desc *desc,
				    enum dma_transfer_direction dir, u32 count,
				    dma_addr_t src_addr, dma_addr_t dst_addr,
				    u32 len, u32 ctcr, u32 ctbr, bool is_last,
				    bool is_first, bool is_cyclic)
{
	struct stm32_mdma_chan_config *config = &chan->chan_config;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 next = count + 1;

	hwdesc = desc->node[count].hwdesc;
	hwdesc->ctcr = ctcr;
	hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
			STM32_MDMA_CBNDTR_BRDUM |
			STM32_MDMA_CBNDTR_BRSUM |
			STM32_MDMA_CBNDTR_BNDT_MASK);
	hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
	hwdesc->csar = src_addr;
	hwdesc->cdar = dst_addr;
	hwdesc->cbrur = 0;
	hwdesc->ctbr = ctbr;
	hwdesc->cmar = config->mask_addr;
	hwdesc->cmdr = config->mask_data;

	if (is_last) {
		if (is_cyclic)
			hwdesc->clar = desc->node[0].hwdesc_phys;
		else
			hwdesc->clar = 0;
	} else {
		hwdesc->clar = desc->node[next].hwdesc_phys;
	}

	stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
}
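
/*
 * The CLAR chaining above is what makes cyclic transfers self-sustaining:
 * the last node of a cyclic descriptor links back to node[0], so the
 * hardware keeps looping over the ring without CPU intervention, whereas a
 * non-cyclic list ends with CLAR = 0.
 */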

static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
				 struct stm32_mdma_desc *desc,
				 struct scatterlist *sgl, u32 sg_len,
				 enum dma_transfer_direction direction)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
	struct scatterlist *sg;
	dma_addr_t src_addr, dst_addr;
	u32 m2m_hw_period, ccr, ctcr, ctbr;
	int i, ret = 0;

	if (chan_config->m2m_hw)
		m2m_hw_period = sg_dma_len(sgl);

	for_each_sg(sgl, sg, sg_len, i) {
		if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
			dev_err(chan2dev(chan), "Invalid block len\n");
			return -EINVAL;
		}

		if (direction == DMA_MEM_TO_DEV) {
			src_addr = sg_dma_address(sg);
			dst_addr = dma_config->dst_addr;
			if (chan_config->m2m_hw && (i & 1))
				dst_addr += m2m_hw_period;
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, src_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
					   src_addr);
		} else {
			src_addr = dma_config->src_addr;
			if (chan_config->m2m_hw && (i & 1))
				src_addr += m2m_hw_period;
			dst_addr = sg_dma_address(sg);
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, dst_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
					   dst_addr);
		}

		if (ret < 0)
			return ret;

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, sg_dma_len(sg), ctcr, ctbr,
					i == sg_len - 1, i == 0, false);
	}

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
	desc->ccr = ccr;

	return 0;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
			 u32 sg_len, enum dma_transfer_direction direction,
			 unsigned long flags, void *context)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
	struct stm32_mdma_desc *desc;
	int i, ret;

	/*
	 * Once the channel is set up in cyclic mode, it cannot accept further
	 * requests. The DMA channel needs to be aborted or terminated before
	 * allowing another request.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	desc = stm32_mdma_alloc_desc(chan, sg_len);
	if (!desc)
		return NULL;

	ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
	if (ret < 0)
		goto xfer_setup_err;

	/*
	 * For M2M HW transfers triggered by STM32 DMA, the transfer complete
	 * flag must not be cleared by hardware, so that the CPU can rearm the
	 * STM32 DMA with the next sg element and update the dmaengine
	 * bookkeeping.
	 */
	if (chan_config->m2m_hw && direction == DMA_MEM_TO_DEV) {
		struct stm32_mdma_hwdesc *hwdesc;

		for (i = 0; i < sg_len; i++) {
			hwdesc = desc->node[i].hwdesc;
			hwdesc->cmar = 0;
			hwdesc->cmdr = 0;
		}
	}

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
			   size_t buf_len, size_t period_len,
			   enum dma_transfer_direction direction,
			   unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
	struct stm32_mdma_desc *desc;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr, count;
	int i, ret;

	/*
	 * Once the channel is set up in cyclic mode, it cannot accept further
	 * requests. The DMA channel needs to be aborted or terminated before
	 * allowing another request.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
		return NULL;
	}

	if (buf_len % period_len) {
		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
		return NULL;
	}

	count = buf_len / period_len;

	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	/* Select bus */
	if (direction == DMA_MEM_TO_DEV) {
		src_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, src_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);
	} else {
		dst_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, dst_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);
	}

	if (ret < 0)
		goto xfer_setup_err;

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	/* Configure hwdesc list */
	for (i = 0; i < count; i++) {
		if (direction == DMA_MEM_TO_DEV) {
			src_addr = buf_addr + i * period_len;
			dst_addr = dma_config->dst_addr;
			if (chan_config->m2m_hw && (i & 1))
				dst_addr += period_len;
		} else {
			src_addr = dma_config->src_addr;
			if (chan_config->m2m_hw && (i & 1))
				src_addr += period_len;
			dst_addr = buf_addr + i * period_len;
		}

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, period_len, ctcr, ctbr,
					i == count - 1, i == 0, true);
	}

	desc->cyclic = true;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
			   size_t len, unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	enum dma_slave_buswidth max_width;
	struct stm32_mdma_desc *desc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
	u32 best_burst, tlen;
	size_t xfer_count, offset;
	int src_bus_width, dst_bus_width;
	int i;

	/*
	 * Once the channel is set up in cyclic mode, it cannot accept further
	 * requests. The DMA channel needs to be aborted or terminated to
	 * allow another request.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & ~STM32_MDMA_CCR_EN;
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));

	/* Enable sw req, some interrupts and clear other bits */
	ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
		 STM32_MDMA_CCR_IRQ_MASK);
	ccr |= STM32_MDMA_CCR_TEIE;

	/* Enable SW request mode, dest/src inc and clear other bits */
	ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
		  STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
		  STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
		  STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
		  STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
		  STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
		  STM32_MDMA_CTCR_SINC_MASK);
	ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
		STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);

	/* Reset HW request */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;

	/* Select bus */
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);

	/* Clear CBNDTR registers */
	cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
		    STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);

	if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
		cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
		if (len <= STM32_MDMA_MAX_BUF_LEN) {
			/* Setup a buffer transfer */
			ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
		} else {
			/* Setup a block transfer */
			ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
		}

		tlen = STM32_MDMA_MAX_BUF_LEN;
		ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

		/* Set source best burst size */
		max_width = stm32_mdma_get_max_width(src, len, tlen);
		src_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
			STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set destination best burst size */
		max_width = stm32_mdma_get_max_width(dest, len, tlen);
		dst_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
			STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Prepare hardware descriptor */
		hwdesc = desc->node[0].hwdesc;
		hwdesc->ctcr = ctcr;
		hwdesc->cbndtr = cbndtr;
		hwdesc->csar = src;
		hwdesc->cdar = dest;
		hwdesc->cbrur = 0;
		hwdesc->clar = 0;
		hwdesc->ctbr = ctbr;
		hwdesc->cmar = 0;
		hwdesc->cmdr = 0;

		stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
	} else {
		/* Setup a LLI transfer */
		ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
			STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
		ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
		tlen = STM32_MDMA_MAX_BUF_LEN;

		for (i = 0, offset = 0; offset < len;
		     i++, offset += xfer_count) {
			xfer_count = min_t(size_t, len - offset,
					   STM32_MDMA_MAX_BLOCK_LEN);

			/* Set source best burst size */
			max_width = stm32_mdma_get_max_width(src, len, tlen);
			src_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
				STM32_MDMA_CTCR_SSIZE(src_bus_width) |
				STM32_MDMA_CTCR_SINCOS(src_bus_width);

			/* Set destination best burst size */
			max_width = stm32_mdma_get_max_width(dest, len, tlen);
			dst_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
				STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
				STM32_MDMA_CTCR_DINCOS(dst_bus_width);

			if (dst_bus_width != src_bus_width)
				ctcr |= STM32_MDMA_CTCR_PKE;

			/* Prepare hardware descriptor */
			stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
						src + offset, dest + offset,
						xfer_count, ctcr, ctbr,
						i == count - 1, i == 0, false);
		}
	}

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}
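
/*
 * Minimal usage sketch (not part of this driver): a client drives a memcpy
 * through the generic dmaengine API, assuming a hypothetical client device
 * "dev" and already-mapped DMA addresses dst_dma/src_dma:
 *
 *	struct dma_chan *chan = dma_request_chan(dev, "rx");
 *	struct dma_async_tx_descriptor *tx;
 *
 *	tx = dmaengine_prep_dma_memcpy(chan, dst_dma, src_dma, len,
 *				       DMA_PREP_INTERRUPT);
 *	if (tx) {
 *		dmaengine_submit(tx);
 *		dma_async_issue_pending(chan);
 *	}
 *	dma_release_channel(chan);
 *
 * dmaengine_prep_dma_memcpy() ends up in stm32_mdma_prep_dma_memcpy() above.
 */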

static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

	dev_dbg(chan2dev(chan), "CCR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
	dev_dbg(chan2dev(chan), "CTCR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
	dev_dbg(chan2dev(chan), "CBNDTR: 0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
	dev_dbg(chan2dev(chan), "CSAR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
	dev_dbg(chan2dev(chan), "CDAR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
	dev_dbg(chan2dev(chan), "CBRUR:  0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
	dev_dbg(chan2dev(chan), "CLAR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
	dev_dbg(chan2dev(chan), "CTBR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
	dev_dbg(chan2dev(chan), "CMAR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
	dev_dbg(chan2dev(chan), "CMDR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
}

static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct virt_dma_desc *vdesc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 id = chan->id;
	u32 status, reg;

	vdesc = vchan_next_desc(&chan->vchan);
	if (!vdesc) {
		chan->desc = NULL;
		return;
	}

	list_del(&vdesc->node);

	chan->desc = to_stm32_mdma_desc(vdesc);
	hwdesc = chan->desc->node[0].hwdesc;
	chan->curr_hwdesc = 0;

	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
	stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
	stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
	stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
	stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
	stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
	stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
	stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
	stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
	stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);

	stm32_mdma_dump_reg(chan);

	/* Start DMA */
	stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
		reg = STM32_MDMA_CCR(id);
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
	}

	chan->busy = true;

	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
}

static void stm32_mdma_issue_pending(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	if (!vchan_issue_pending(&chan->vchan))
		goto end;

	dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);

	if (!chan->desc && !chan->busy)
		stm32_mdma_start_transfer(chan);

end:
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
}

static int stm32_mdma_pause(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&chan->vchan.lock, flags);
	ret = stm32_mdma_disable_chan(chan);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	if (!ret)
		dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);

	return ret;
}

static int stm32_mdma_resume(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc;
	unsigned long flags;
	u32 status, reg;

	/* Transfer can be terminated */
	if (!chan->desc || (stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & STM32_MDMA_CCR_EN))
		return -EPERM;

	hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	/* Re-configure control register */
	stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);

	stm32_mdma_dump_reg(chan);

	/* Re-start DMA */
	reg = STM32_MDMA_CCR(chan->id);
	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);

	return 0;
}

static int stm32_mdma_terminate_all(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&chan->vchan.lock, flags);
	if (chan->desc) {
		vchan_terminate_vdesc(&chan->desc->vdesc);
		if (chan->busy)
			stm32_mdma_stop(chan);
		chan->desc = NULL;
	}
	vchan_get_all_descriptors(&chan->vchan, &head);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	vchan_dma_desc_free_list(&chan->vchan, &head);

	return 0;
}

static void stm32_mdma_synchronize(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

	vchan_synchronize(&chan->vchan);
}

static int stm32_mdma_slave_config(struct dma_chan *c,
				   struct dma_slave_config *config)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

	memcpy(&chan->dma_config, config, sizeof(*config));

	/* Check if user is requesting STM32 DMA to trigger MDMA */
	if (config->peripheral_size) {
		struct stm32_mdma_dma_config *mdma_config;

		mdma_config = (struct stm32_mdma_dma_config *)chan->dma_config.peripheral_config;
		chan->chan_config.request = mdma_config->request;
		chan->chan_config.mask_addr = mdma_config->cmar;
		chan->chan_config.mask_data = mdma_config->cmdr;
		chan->chan_config.m2m_hw = true;
	}

	return 0;
}
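
/*
 * Minimal sketch of how a client chains STM32 DMA and MDMA (assuming it has
 * already filled a struct stm32_mdma_dma_config "mdma_cfg" with the DMA
 * stream id, its flag clear register address and Transfer Complete mask):
 *
 *	struct dma_slave_config cfg = {
 *		.peripheral_config = &mdma_cfg,
 *		.peripheral_size = sizeof(mdma_cfg),
 *	};
 *	dmaengine_slave_config(chan, &cfg);
 *
 * A non-zero peripheral_size is what flips chan_config.m2m_hw above.
 */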

static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
				      struct stm32_mdma_desc *desc,
				      u32 curr_hwdesc,
				      struct dma_tx_state *state)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc;
	u32 cisr, clar, cbndtr, residue, modulo, burst_size;
	int i;

	cisr = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));

	residue = 0;
	/* Get the next hw descriptor to process from current transfer */
	clar = stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id));
	for (i = desc->count - 1; i >= 0; i--) {
		hwdesc = desc->node[i].hwdesc;

		if (hwdesc->clar == clar)
			break;/* Current transfer found, stop cumulating */

		/* Cumulate residue of unprocessed hw descriptors */
		residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
	}
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
	residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;

	state->in_flight_bytes = 0;
	if (chan->chan_config.m2m_hw && (cisr & STM32_MDMA_CISR_CRQA))
		state->in_flight_bytes = cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;

	if (!chan->mem_burst)
		return residue;

	burst_size = chan->mem_burst * chan->mem_width;
	modulo = residue % burst_size;
	if (modulo)
		residue = residue - modulo + burst_size;

	return residue;
}
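
/*
 * Rounding the residue up to a multiple of (mem_burst * mem_width) matches
 * the DMA_RESIDUE_GRANULARITY_BURST capability advertised in probe(): the
 * reported value only moves in whole-burst steps.
 */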

static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
					    dma_cookie_t cookie,
					    struct dma_tx_state *state)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct virt_dma_desc *vdesc;
	enum dma_status status;
	unsigned long flags;
	u32 residue = 0;

	status = dma_cookie_status(c, cookie, state);
	if ((status == DMA_COMPLETE) || (!state))
		return status;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	vdesc = vchan_find_desc(&chan->vchan, cookie);
	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
		residue = stm32_mdma_desc_residue(chan, chan->desc, chan->curr_hwdesc, state);
	else if (vdesc)
		residue = stm32_mdma_desc_residue(chan, to_stm32_mdma_desc(vdesc), 0, state);

	dma_set_residue(state, residue);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	return status;
}

static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
{
	vchan_cookie_complete(&chan->desc->vdesc);
	chan->desc = NULL;
	chan->busy = false;

	/* Start the next transfer if this driver has a next desc */
	stm32_mdma_start_transfer(chan);
}

static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
{
	struct stm32_mdma_device *dmadev = devid;
	struct stm32_mdma_chan *chan;
	u32 reg, id, ccr, ien, status;

	/* Find out which channel generates the interrupt */
	status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
	if (!status) {
		dev_dbg(mdma2dev(dmadev), "spurious it\n");
		return IRQ_NONE;
	}
	id = __ffs(status);

	chan = &dmadev->chan[id];

	/* Handle interrupt for the channel */
	spin_lock(&chan->vchan.lock);
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	/* Mask Channel ReQuest Active bit which can be set in case of MEM2MEM */
	status &= ~STM32_MDMA_CISR_CRQA;
	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
	ien = (ccr & STM32_MDMA_CCR_IRQ_MASK) >> 1;

	if (!(status & ien)) {
		spin_unlock(&chan->vchan.lock);
		if (chan->busy)
			dev_warn(chan2dev(chan),
				 "spurious it (status=0x%04x, ien=0x%04x)\n",
				 status, ien);
		else
			dev_dbg(chan2dev(chan),
				"spurious it (status=0x%04x, ien=0x%04x)\n",
				status, ien);
		return IRQ_NONE;
	}

	reg = STM32_MDMA_CIFCR(id);

	if (status & STM32_MDMA_CISR_TEIF) {
		dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n",
			readl_relaxed(dmadev->base + STM32_MDMA_CESR(id)));
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
		status &= ~STM32_MDMA_CISR_TEIF;
	}

	if (status & STM32_MDMA_CISR_CTCIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
		status &= ~STM32_MDMA_CISR_CTCIF;
		stm32_mdma_xfer_end(chan);
	}

	if (status & STM32_MDMA_CISR_BRTIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
		status &= ~STM32_MDMA_CISR_BRTIF;
	}

	if (status & STM32_MDMA_CISR_BTIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
		status &= ~STM32_MDMA_CISR_BTIF;
		chan->curr_hwdesc++;
		if (chan->desc && chan->desc->cyclic) {
			if (chan->curr_hwdesc == chan->desc->count)
				chan->curr_hwdesc = 0;
			vchan_cyclic_callback(&chan->desc->vdesc);
		}
	}

	if (status & STM32_MDMA_CISR_TCIF) {
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
		status &= ~STM32_MDMA_CISR_TCIF;
	}

	if (status) {
		stm32_mdma_set_bits(dmadev, reg, status);
		dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status);
		if (!(ccr & STM32_MDMA_CCR_EN))
			dev_err(chan2dev(chan), "chan disabled by HW\n");
	}

	spin_unlock(&chan->vchan.lock);

	return IRQ_HANDLED;
}

static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	int ret;

	chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
					   c->device->dev,
					   sizeof(struct stm32_mdma_hwdesc),
					   __alignof__(struct stm32_mdma_hwdesc),
					   0);
	if (!chan->desc_pool) {
		dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
		return -ENOMEM;
	}

	ret = pm_runtime_resume_and_get(dmadev->ddev.dev);
	if (ret < 0)
		return ret;

	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		pm_runtime_put(dmadev->ddev.dev);

	return ret;
}

static void stm32_mdma_free_chan_resources(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	unsigned long flags;

	dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);

	if (chan->busy) {
		spin_lock_irqsave(&chan->vchan.lock, flags);
		stm32_mdma_stop(chan);
		chan->desc = NULL;
		spin_unlock_irqrestore(&chan->vchan.lock, flags);
	}

	pm_runtime_put(dmadev->ddev.dev);
	vchan_free_chan_resources(to_virt_chan(c));
	dmam_pool_destroy(chan->desc_pool);
	chan->desc_pool = NULL;
}

static bool stm32_mdma_filter_fn(struct dma_chan *c, void *fn_param)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

	/* Check if chan is marked Secure */
	if (dmadev->chan_reserved & BIT(chan->id))
		return false;

	return true;
}

static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
					    struct of_dma *ofdma)
{
	struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
	dma_cap_mask_t mask = dmadev->ddev.cap_mask;
	struct stm32_mdma_chan *chan;
	struct dma_chan *c;
	struct stm32_mdma_chan_config config;

	if (dma_spec->args_count < 5) {
		dev_err(mdma2dev(dmadev), "Bad number of args\n");
		return NULL;
	}

	memset(&config, 0, sizeof(config));
	config.request = dma_spec->args[0];
	config.priority_level = dma_spec->args[1];
	config.transfer_config = dma_spec->args[2];
	config.mask_addr = dma_spec->args[3];
	config.mask_data = dma_spec->args[4];

	if (config.request >= dmadev->nr_requests) {
		dev_err(mdma2dev(dmadev), "Bad request line\n");
		return NULL;
	}

	if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
		dev_err(mdma2dev(dmadev), "Priority level not supported\n");
		return NULL;
	}

	c = __dma_request_channel(&mask, stm32_mdma_filter_fn, &config, ofdma->of_node);
	if (!c) {
		dev_err(mdma2dev(dmadev), "No more channels available\n");
		return NULL;
	}

	chan = to_stm32_mdma_chan(c);
	chan->chan_config = config;

	return c;
}

static const struct of_device_id stm32_mdma_of_match[] = {
	{ .compatible = "st,stm32h7-mdma", },
	{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);
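
/*
 * stm32_mdma_of_xlate() expects the five client cells in this order:
 * request line, priority level (0..3), CTCR transfer configuration,
 * mask address (CMAR) and mask data (CMDR), e.g. in a client node:
 *
 *	dmas = <&mdma1 0x0 0x2 0x1000002 0x0 0x0>;
 *
 * The exact cell values are device-specific; the ones above are purely
 * illustrative.
 */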

static int stm32_mdma_probe(struct platform_device *pdev)
{
	struct stm32_mdma_chan *chan;
	struct stm32_mdma_device *dmadev;
	struct dma_device *dd;
	struct device_node *of_node;
	struct reset_control *rst;
	u32 nr_channels, nr_requests;
	int i, count, ret;

	of_node = pdev->dev.of_node;
	if (!of_node)
		return -ENODEV;

	ret = device_property_read_u32(&pdev->dev, "dma-channels",
				       &nr_channels);
	if (ret) {
		nr_channels = STM32_MDMA_MAX_CHANNELS;
		dev_warn(&pdev->dev, "MDMA defaulting on %i channels\n",
			 nr_channels);
	}

	ret = device_property_read_u32(&pdev->dev, "dma-requests",
				       &nr_requests);
	if (ret) {
		nr_requests = STM32_MDMA_MAX_REQUESTS;
		dev_warn(&pdev->dev, "MDMA defaulting on %i request lines\n",
			 nr_requests);
	}

	count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks");
	if (count < 0)
		count = 0;

	dmadev = devm_kzalloc(&pdev->dev,
			      struct_size(dmadev, ahb_addr_masks, count),
			      GFP_KERNEL);
	if (!dmadev)
		return -ENOMEM;
	dmadev->nr_ahb_addr_masks = count;

	dmadev->nr_channels = nr_channels;
	dmadev->nr_requests = nr_requests;
	device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
				       dmadev->ahb_addr_masks,
				       count);

	dmadev->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(dmadev->base))
		return PTR_ERR(dmadev->base);

	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(dmadev->clk))
		return dev_err_probe(&pdev->dev, PTR_ERR(dmadev->clk),
				     "Missing clock controller\n");

	ret = clk_prepare_enable(dmadev->clk);
	if (ret < 0) {
		dev_err(&pdev->dev, "clk_prep_enable error: %d\n", ret);
		return ret;
	}

	rst = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(rst)) {
		ret = PTR_ERR(rst);
		if (ret == -EPROBE_DEFER)
			goto err_clk;
	} else {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	dd = &dmadev->ddev;
	dma_cap_set(DMA_SLAVE, dd->cap_mask);
	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
	dma_cap_set(DMA_MEMCPY, dd->cap_mask);
	dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
	dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
	dd->device_tx_status = stm32_mdma_tx_status;
	dd->device_issue_pending = stm32_mdma_issue_pending;
	dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
	dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
	dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
	dd->device_config = stm32_mdma_slave_config;
	dd->device_pause = stm32_mdma_pause;
	dd->device_resume = stm32_mdma_resume;
	dd->device_terminate_all = stm32_mdma_terminate_all;
	dd->device_synchronize = stm32_mdma_synchronize;
	dd->descriptor_reuse = true;

	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
		BIT(DMA_MEM_TO_MEM);
	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
	dd->max_burst = STM32_MDMA_MAX_BURST;
	dd->dev = &pdev->dev;
	INIT_LIST_HEAD(&dd->channels);

	for (i = 0; i < dmadev->nr_channels; i++) {
		chan = &dmadev->chan[i];
		chan->id = i;

		if (stm32_mdma_read(dmadev, STM32_MDMA_CCR(i)) & STM32_MDMA_CCR_SM)
			dmadev->chan_reserved |= BIT(i);

		chan->vchan.desc_free = stm32_mdma_desc_free;
		vchan_init(&chan->vchan, dd);
	}

	dmadev->irq = platform_get_irq(pdev, 0);
	if (dmadev->irq < 0) {
		ret = dmadev->irq;
		goto err_clk;
	}

	ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
			       0, dev_name(&pdev->dev), dmadev);
	if (ret) {
		dev_err(&pdev->dev, "failed to request IRQ\n");
		goto err_clk;
	}

	ret = dmaenginem_async_device_register(dd);
	if (ret)
		goto err_clk;

	ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
	if (ret < 0) {
		dev_err(&pdev->dev,
			"STM32 MDMA DMA OF registration failed %d\n", ret);
		goto err_clk;
	}

	platform_set_drvdata(pdev, dmadev);
	pm_runtime_set_active(&pdev->dev);
	pm_runtime_enable(&pdev->dev);
	pm_runtime_get_noresume(&pdev->dev);
	pm_runtime_put(&pdev->dev);

	dev_info(&pdev->dev, "STM32 MDMA driver registered\n");

	return 0;

err_clk:
	clk_disable_unprepare(dmadev->clk);

	return ret;
}

#ifdef CONFIG_PM
static int stm32_mdma_runtime_suspend(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);

	clk_disable_unprepare(dmadev->clk);

	return 0;
}

static int stm32_mdma_runtime_resume(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(dmadev->clk);
	if (ret) {
		dev_err(dev, "failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

#ifdef CONFIG_PM_SLEEP
static int stm32_mdma_pm_suspend(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
	u32 ccr, id;
	int ret;

	ret = pm_runtime_resume_and_get(dev);
	if (ret < 0)
		return ret;

	for (id = 0; id < dmadev->nr_channels; id++) {
		ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
		if (ccr & STM32_MDMA_CCR_EN) {
			dev_warn(dev, "Suspend is prevented by Chan %i\n", id);
			return -EBUSY;
		}
	}

	pm_runtime_put_sync(dev);

	pm_runtime_force_suspend(dev);

	return 0;
}

static int stm32_mdma_pm_resume(struct device *dev)
{
	return pm_runtime_force_resume(dev);
}
#endif

static const struct dev_pm_ops stm32_mdma_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(stm32_mdma_pm_suspend, stm32_mdma_pm_resume)
	SET_RUNTIME_PM_OPS(stm32_mdma_runtime_suspend,
			   stm32_mdma_runtime_resume, NULL)
};

static struct platform_driver stm32_mdma_driver = {
	.probe = stm32_mdma_probe,
	.driver = {
		.name = "stm32-mdma",
		.of_match_table = stm32_mdma_of_match,
		.pm = &stm32_mdma_pm_ops,
	},
};

static int __init stm32_mdma_init(void)
{
	return platform_driver_register(&stm32_mdma_driver);
}

subsys_initcall(stm32_mdma_init);

MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
MODULE_LICENSE("GPL v2");