/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
 *
 * License terms: GPL V2.0.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * Driver for STM32 MDMA controller
 *
 * Inspired by stm32-dma.c and dma-jz4780.c
 */
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/iopoll.h>
#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/log2.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/reset.h>
#include <linux/slab.h>

#include "virt-dma.h"
/* MDMA Generic getter/setter */
#define STM32_MDMA_SHIFT(n)		(ffs(n) - 1)
#define STM32_MDMA_SET(n, mask)	(((n) << STM32_MDMA_SHIFT(mask)) & \
					 (mask))
#define STM32_MDMA_GET(n, mask)	(((n) & (mask)) >> \
					 STM32_MDMA_SHIFT(mask))
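
/*
 * Worked example (editorial, not from the original source): with
 * STM32_MDMA_CCR_PL_MASK = GENMASK(7, 6), STM32_MDMA_SHIFT() yields 6, so
 * STM32_MDMA_SET(2, mask) = (2 << 6) & 0xC0 = 0x80, and
 * STM32_MDMA_GET(0x80, mask) recovers 2.
 */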
#define STM32_MDMA_GISR0		0x0000 /* MDMA Int Status Reg 1 */
#define STM32_MDMA_GISR1		0x0004 /* MDMA Int Status Reg 2 */

/* MDMA Channel x interrupt/status register */
#define STM32_MDMA_CISR(x)		(0x40 + 0x40 * (x)) /* x = 0..62 */
#define STM32_MDMA_CISR_CRQA		BIT(16)
#define STM32_MDMA_CISR_TCIF		BIT(4)
#define STM32_MDMA_CISR_BTIF		BIT(3)
#define STM32_MDMA_CISR_BRTIF		BIT(2)
#define STM32_MDMA_CISR_CTCIF		BIT(1)
#define STM32_MDMA_CISR_TEIF		BIT(0)

/* MDMA Channel x interrupt flag clear register */
#define STM32_MDMA_CIFCR(x)		(0x44 + 0x40 * (x))
#define STM32_MDMA_CIFCR_CLTCIF		BIT(4)
#define STM32_MDMA_CIFCR_CBTIF		BIT(3)
#define STM32_MDMA_CIFCR_CBRTIF		BIT(2)
#define STM32_MDMA_CIFCR_CCTCIF		BIT(1)
#define STM32_MDMA_CIFCR_CTEIF		BIT(0)
#define STM32_MDMA_CIFCR_CLEAR_ALL	(STM32_MDMA_CIFCR_CLTCIF \
					| STM32_MDMA_CIFCR_CBTIF \
					| STM32_MDMA_CIFCR_CBRTIF \
					| STM32_MDMA_CIFCR_CCTCIF \
					| STM32_MDMA_CIFCR_CTEIF)
/* MDMA Channel x error status register */
#define STM32_MDMA_CESR(x)		(0x48 + 0x40 * (x))
#define STM32_MDMA_CESR_BSE		BIT(11)
#define STM32_MDMA_CESR_ASR		BIT(10)
#define STM32_MDMA_CESR_TEMD		BIT(9)
#define STM32_MDMA_CESR_TELD		BIT(8)
#define STM32_MDMA_CESR_TED		BIT(7)
#define STM32_MDMA_CESR_TEA_MASK	GENMASK(6, 0)
/* MDMA Channel x control register */
#define STM32_MDMA_CCR(x)		(0x4C + 0x40 * (x))
#define STM32_MDMA_CCR_SWRQ		BIT(16)
#define STM32_MDMA_CCR_WEX		BIT(14)
#define STM32_MDMA_CCR_HEX		BIT(13)
#define STM32_MDMA_CCR_BEX		BIT(12)
#define STM32_MDMA_CCR_PL_MASK		GENMASK(7, 6)
#define STM32_MDMA_CCR_PL(n)		STM32_MDMA_SET(n, \
						       STM32_MDMA_CCR_PL_MASK)
#define STM32_MDMA_CCR_TCIE		BIT(5)
#define STM32_MDMA_CCR_BTIE		BIT(4)
#define STM32_MDMA_CCR_BRTIE		BIT(3)
#define STM32_MDMA_CCR_CTCIE		BIT(2)
#define STM32_MDMA_CCR_TEIE		BIT(1)
#define STM32_MDMA_CCR_EN		BIT(0)
#define STM32_MDMA_CCR_IRQ_MASK		(STM32_MDMA_CCR_TCIE \
					| STM32_MDMA_CCR_BTIE \
					| STM32_MDMA_CCR_BRTIE \
					| STM32_MDMA_CCR_CTCIE \
					| STM32_MDMA_CCR_TEIE)
/* MDMA Channel x transfer configuration register */
#define STM32_MDMA_CTCR(x)		(0x50 + 0x40 * (x))
#define STM32_MDMA_CTCR_BWM		BIT(31)
#define STM32_MDMA_CTCR_SWRM		BIT(30)
#define STM32_MDMA_CTCR_TRGM_MSK	GENMASK(29, 28)
#define STM32_MDMA_CTCR_TRGM(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_TRGM_MSK)
#define STM32_MDMA_CTCR_TRGM_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_TRGM_MSK)
#define STM32_MDMA_CTCR_PAM_MASK	GENMASK(27, 26)
#define STM32_MDMA_CTCR_PAM(n)		STM32_MDMA_SET(n, \
						       STM32_MDMA_CTCR_PAM_MASK)
#define STM32_MDMA_CTCR_PKE		BIT(25)
#define STM32_MDMA_CTCR_TLEN_MSK	GENMASK(24, 18)
#define STM32_MDMA_CTCR_TLEN(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_TLEN_MSK)
#define STM32_MDMA_CTCR_TLEN_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_TLEN_MSK)
#define STM32_MDMA_CTCR_LEN2_MSK	GENMASK(25, 18)
#define STM32_MDMA_CTCR_LEN2(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_LEN2_MSK)
#define STM32_MDMA_CTCR_LEN2_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_LEN2_MSK)
#define STM32_MDMA_CTCR_DBURST_MASK	GENMASK(17, 15)
#define STM32_MDMA_CTCR_DBURST(n)	STM32_MDMA_SET(n, \
						    STM32_MDMA_CTCR_DBURST_MASK)
#define STM32_MDMA_CTCR_SBURST_MASK	GENMASK(14, 12)
#define STM32_MDMA_CTCR_SBURST(n)	STM32_MDMA_SET(n, \
						    STM32_MDMA_CTCR_SBURST_MASK)
#define STM32_MDMA_CTCR_DINCOS_MASK	GENMASK(11, 10)
#define STM32_MDMA_CTCR_DINCOS(n)	STM32_MDMA_SET((n), \
						    STM32_MDMA_CTCR_DINCOS_MASK)
#define STM32_MDMA_CTCR_SINCOS_MASK	GENMASK(9, 8)
#define STM32_MDMA_CTCR_SINCOS(n)	STM32_MDMA_SET((n), \
						    STM32_MDMA_CTCR_SINCOS_MASK)
#define STM32_MDMA_CTCR_DSIZE_MASK	GENMASK(7, 6)
#define STM32_MDMA_CTCR_DSIZE(n)	STM32_MDMA_SET(n, \
						     STM32_MDMA_CTCR_DSIZE_MASK)
#define STM32_MDMA_CTCR_SSIZE_MASK	GENMASK(5, 4)
#define STM32_MDMA_CTCR_SSIZE(n)	STM32_MDMA_SET(n, \
						     STM32_MDMA_CTCR_SSIZE_MASK)
#define STM32_MDMA_CTCR_DINC_MASK	GENMASK(3, 2)
#define STM32_MDMA_CTCR_DINC(n)		STM32_MDMA_SET((n), \
						      STM32_MDMA_CTCR_DINC_MASK)
#define STM32_MDMA_CTCR_SINC_MASK	GENMASK(1, 0)
#define STM32_MDMA_CTCR_SINC(n)		STM32_MDMA_SET((n), \
						      STM32_MDMA_CTCR_SINC_MASK)
#define STM32_MDMA_CTCR_CFG_MASK	(STM32_MDMA_CTCR_SINC_MASK \
					| STM32_MDMA_CTCR_DINC_MASK \
					| STM32_MDMA_CTCR_SINCOS_MASK \
					| STM32_MDMA_CTCR_DINCOS_MASK \
					| STM32_MDMA_CTCR_LEN2_MSK \
					| STM32_MDMA_CTCR_TRGM_MSK)
/* MDMA Channel x block number of data register */
#define STM32_MDMA_CBNDTR(x)		(0x54 + 0x40 * (x))
#define STM32_MDMA_CBNDTR_BRC_MK	GENMASK(31, 20)
#define STM32_MDMA_CBNDTR_BRC(n)	STM32_MDMA_SET(n, \
						       STM32_MDMA_CBNDTR_BRC_MK)
#define STM32_MDMA_CBNDTR_BRC_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CBNDTR_BRC_MK)

#define STM32_MDMA_CBNDTR_BRDUM		BIT(19)
#define STM32_MDMA_CBNDTR_BRSUM		BIT(18)
#define STM32_MDMA_CBNDTR_BNDT_MASK	GENMASK(16, 0)
#define STM32_MDMA_CBNDTR_BNDT(n)	STM32_MDMA_SET(n, \
						    STM32_MDMA_CBNDTR_BNDT_MASK)
/* MDMA Channel x source address register */
#define STM32_MDMA_CSAR(x)		(0x58 + 0x40 * (x))

/* MDMA Channel x destination address register */
#define STM32_MDMA_CDAR(x)		(0x5C + 0x40 * (x))

/* MDMA Channel x block repeat address update register */
#define STM32_MDMA_CBRUR(x)		(0x60 + 0x40 * (x))
#define STM32_MDMA_CBRUR_DUV_MASK	GENMASK(31, 16)
#define STM32_MDMA_CBRUR_DUV(n)		STM32_MDMA_SET(n, \
						      STM32_MDMA_CBRUR_DUV_MASK)
#define STM32_MDMA_CBRUR_SUV_MASK	GENMASK(15, 0)
#define STM32_MDMA_CBRUR_SUV(n)		STM32_MDMA_SET(n, \
						      STM32_MDMA_CBRUR_SUV_MASK)

/* MDMA Channel x link address register */
#define STM32_MDMA_CLAR(x)		(0x64 + 0x40 * (x))

/* MDMA Channel x trigger and bus selection register */
#define STM32_MDMA_CTBR(x)		(0x68 + 0x40 * (x))
#define STM32_MDMA_CTBR_DBUS		BIT(17)
#define STM32_MDMA_CTBR_SBUS		BIT(16)
#define STM32_MDMA_CTBR_TSEL_MASK	GENMASK(7, 0)
#define STM32_MDMA_CTBR_TSEL(n)		STM32_MDMA_SET(n, \
						      STM32_MDMA_CTBR_TSEL_MASK)

/* MDMA Channel x mask address register */
#define STM32_MDMA_CMAR(x)		(0x70 + 0x40 * (x))

/* MDMA Channel x mask data register */
#define STM32_MDMA_CMDR(x)		(0x74 + 0x40 * (x))
#define STM32_MDMA_MAX_BUF_LEN		128
#define STM32_MDMA_MAX_BLOCK_LEN	65536
#define STM32_MDMA_MAX_CHANNELS		63
#define STM32_MDMA_MAX_REQUESTS		256
#define STM32_MDMA_MAX_BURST		128
#define STM32_MDMA_VERY_HIGH_PRIORITY	0x11
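
/*
 * Editorial note: STM32_MDMA_MAX_BUF_LEN reflects the 7-bit TLEN field of
 * CTCR (values 0..127 encode a buffer of 1..128 bytes), and
 * STM32_MDMA_MAX_BLOCK_LEN is the largest block size in bytes that this
 * driver programs into the 17-bit BNDT field of CBNDTR.
 */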
enum stm32_mdma_trigger_mode {
	STM32_MDMA_BUFFER = 0,
	STM32_MDMA_BLOCK,
	STM32_MDMA_BLOCK_REP,
	STM32_MDMA_LINKED_LIST,
};

enum stm32_mdma_width {
	STM32_MDMA_BYTE,
	STM32_MDMA_HALF_WORD,
	STM32_MDMA_WORD,
	STM32_MDMA_DOUBLE_WORD,
};

enum stm32_mdma_inc_mode {
	STM32_MDMA_FIXED = 0,
	STM32_MDMA_INC = 2,
	STM32_MDMA_DEC = 3,
};

struct stm32_mdma_chan_config {
	u32 request;
	u32 priority_level;
	u32 transfer_config;
	u32 mask_addr;
	u32 mask_data;
};

struct stm32_mdma_hwdesc {
	u32 ctcr;
	u32 cbndtr;
	u32 csar;
	u32 cdar;
	u32 cbrur;
	u32 clar;
	u32 ctbr;
	u32 dummy;
	u32 cmar;
	u32 cmdr;
} __aligned(64);

struct stm32_mdma_desc_node {
	struct stm32_mdma_hwdesc *hwdesc;
	dma_addr_t hwdesc_phys;
};

struct stm32_mdma_desc {
	struct virt_dma_desc vdesc;
	u32 ccr;
	bool cyclic;
	u32 count;
	struct stm32_mdma_desc_node node[];
};

struct stm32_mdma_chan {
	struct virt_dma_chan vchan;
	struct dma_pool *desc_pool;
	u32 id;
	struct stm32_mdma_desc *desc;
	u32 curr_hwdesc;
	struct dma_slave_config dma_config;
	struct stm32_mdma_chan_config chan_config;
	bool busy;
	u32 mem_burst;
	u32 mem_width;
};

struct stm32_mdma_device {
	struct dma_device ddev;
	void __iomem *base;
	struct clk *clk;
	int irq;
	struct reset_control *rst;
	u32 nr_channels;
	u32 nr_requests;
	u32 nr_ahb_addr_masks;
	struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
	u32 ahb_addr_masks[];
};
static struct stm32_mdma_device *stm32_mdma_get_dev(
			struct stm32_mdma_chan *chan)
{
	return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
			    ddev);
}
static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
{
	return container_of(c, struct stm32_mdma_chan, vchan.chan);
}
static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
{
	return container_of(vdesc, struct stm32_mdma_desc, vdesc);
}
static struct device *chan2dev(struct stm32_mdma_chan *chan)
{
	return &chan->vchan.chan.dev->device;
}
static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
{
	return mdma_dev->ddev.dev;
}
static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
{
	return readl_relaxed(dmadev->base + reg);
}
static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg,
			     u32 val)
{
	writel_relaxed(val, dmadev->base + reg);
}
static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) | mask, addr);
}
static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) & ~mask, addr);
}
static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
		struct stm32_mdma_chan *chan, u32 count)
{
	struct stm32_mdma_desc *desc;
	int i;

	desc = kzalloc(offsetof(typeof(*desc), node[count]), GFP_NOWAIT);
	if (!desc)
		return NULL;

	for (i = 0; i < count; i++) {
		desc->node[i].hwdesc =
			dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
				       &desc->node[i].hwdesc_phys);
		if (!desc->node[i].hwdesc)
			goto err;
	}

	desc->count = count;

	return desc;

err:
	dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
	while (--i >= 0)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}
static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
{
	struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
	int i;

	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);

	kfree(desc);
}
static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
				enum dma_slave_buswidth width)
{
	switch (width) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return ffs(width) - 1;
	default:
		dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
			width);
		return -EINVAL;
	}
}
static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
							u32 buf_len, u32 tlen)
{
	enum dma_slave_buswidth max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;

	for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
	     max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
	     max_width >>= 1) {
		/*
		 * Address and buffer length both have to be aligned on
		 * bus width
		 */
		if ((((buf_len | addr) & (max_width - 1)) == 0) &&
		    tlen >= max_width)
			break;
	}

	return max_width;
}
static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
				     enum dma_slave_buswidth width)
{
	u32 best_burst;

	best_burst = min((u32)1 << __ffs(tlen | buf_len),
			 max_burst * width) / width;

	return (best_burst > 0) ? best_burst : 1;
}
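
/*
 * Worked example (editorial): with buf_len = 96, tlen = 128, max_burst = 32
 * and width = 4 bytes, __ffs(128 | 96) = 5 gives a 32-byte candidate;
 * min(32, 32 * 4) / 4 then yields a best burst of 8 beats.
 */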
static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 ccr, cisr, id, reg;
	int ret;

	id = chan->id;
	reg = STM32_MDMA_CCR(id);

	/* Disable interrupts */
	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);

	ccr = stm32_mdma_read(dmadev, reg);
	if (ccr & STM32_MDMA_CCR_EN) {
		stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);

		/* Ensure that any ongoing transfer has been completed */
		ret = readl_relaxed_poll_timeout_atomic(
				dmadev->base + STM32_MDMA_CISR(id), cisr,
				(cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
		if (ret) {
			dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
			return -EBUSY;
		}
	}

	return 0;
}
static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 status;
	int ret;

	/* Disable DMA */
	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		return;

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status) {
		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
			__func__, status);
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id),
				    status);
	}

	chan->busy = false;
}
static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
			       u32 ctbr_mask, u32 src_addr)
{
	u32 mask;
	int i;

	/* Check if memory device is on AHB or AXI */
	*ctbr &= ~ctbr_mask;
	mask = src_addr & 0xF0000000;
	for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
		if (mask == dmadev->ahb_addr_masks[i]) {
			*ctbr |= ctbr_mask;
			break;
		}
	}
}
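
/*
 * Editorial note: addresses that match one of the DT-provided
 * "st,ahb-addr-masks" entries are routed through the AHB/TCM bus
 * (typically the Cortex-M7 TCM RAM on STM32H7); everything else is
 * accessed through AXI.
 */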
static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
				     enum dma_transfer_direction direction,
				     u32 *mdma_ccr, u32 *mdma_ctcr,
				     u32 *mdma_ctbr, dma_addr_t addr,
				     u32 buf_len)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
	enum dma_slave_buswidth src_addr_width, dst_addr_width;
	phys_addr_t src_addr, dst_addr;
	int src_bus_width, dst_bus_width;
	u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
	u32 ccr, ctcr, ctbr, tlen;

	src_addr_width = chan->dma_config.src_addr_width;
	dst_addr_width = chan->dma_config.dst_addr_width;
	src_maxburst = chan->dma_config.src_maxburst;
	dst_maxburst = chan->dma_config.dst_maxburst;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));

	/* Enable HW request mode */
	ctcr &= ~STM32_MDMA_CTCR_SWRM;

	/* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieved from DT */
	ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
	ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;

	/*
	 * For buffer transfer length (TLEN) we have to set
	 * the number of bytes - 1 in the CTCR register
	 */
	tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
	ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
	ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

	/* Disable Pack Enable */
	ctcr &= ~STM32_MDMA_CTCR_PKE;

	/* Check burst size constraints */
	if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
	    dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
		dev_err(chan2dev(chan),
			"burst size * bus width higher than %d bytes\n",
			STM32_MDMA_MAX_BURST);
		return -EINVAL;
	}

	if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
	    (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
		dev_err(chan2dev(chan), "burst size must be a power of 2\n");
		return -EINVAL;
	}

	/*
	 * Configure channel control:
	 * - Clear SW request as in this case this is a HW one
	 * - Clear WEX, HEX and BEX bits
	 * - Set priority level
	 */
	ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX |
		 STM32_MDMA_CCR_HEX | STM32_MDMA_CCR_BEX |
		 STM32_MDMA_CCR_PL_MASK);
	ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);

	/* Configure Trigger selection */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
	ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);

	switch (direction) {
	case DMA_MEM_TO_DEV:
		dst_addr = chan->dma_config.dst_addr;

		/* Set device data size */
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);

		/* Set device burst value */
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		chan->mem_burst = dst_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Set memory data size */
		src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = src_addr_width;
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_SSIZE_MASK |
			  STM32_MDMA_CTCR_SINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set memory burst value */
		src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		chan->mem_burst = src_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set destination address */
		stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
		break;

	case DMA_DEV_TO_MEM:
		src_addr = chan->dma_config.src_addr;

		/* Set device data size */
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);

		/* Set device burst value */
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Set memory data size */
		dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = dst_addr_width;
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
			  STM32_MDMA_CTCR_DINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		/* Set memory burst value */
		dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set source address */
		stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
		break;

	default:
		dev_err(chan2dev(chan), "Dma direction is not supported\n");
		return -EINVAL;
	}

	*mdma_ccr = ccr;
	*mdma_ctcr = ctcr;
	*mdma_ctbr = ctbr;

	return 0;
}
static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
				   struct stm32_mdma_desc_node *node)
{
	dev_dbg(chan2dev(chan), "hwdesc:  %pad\n", &node->hwdesc_phys);
	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n", node->hwdesc->ctcr);
	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n", node->hwdesc->cbndtr);
	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n", node->hwdesc->csar);
	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n", node->hwdesc->cdar);
	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n", node->hwdesc->cbrur);
	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n", node->hwdesc->clar);
	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n", node->hwdesc->ctbr);
	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n", node->hwdesc->cmar);
	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n\n", node->hwdesc->cmdr);
}
static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
				    struct stm32_mdma_desc *desc,
				    enum dma_transfer_direction dir, u32 count,
				    dma_addr_t src_addr, dma_addr_t dst_addr,
				    u32 len, u32 ctcr, u32 ctbr, bool is_last,
				    bool is_first, bool is_cyclic)
{
	struct stm32_mdma_chan_config *config = &chan->chan_config;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 next = count + 1;

	hwdesc = desc->node[count].hwdesc;
	hwdesc->ctcr = ctcr;
	hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
			STM32_MDMA_CBNDTR_BRDUM |
			STM32_MDMA_CBNDTR_BRSUM |
			STM32_MDMA_CBNDTR_BNDT_MASK);
	hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
	hwdesc->csar = src_addr;
	hwdesc->cdar = dst_addr;
	hwdesc->cbrur = 0;
	hwdesc->ctbr = ctbr;
	hwdesc->cmar = config->mask_addr;
	hwdesc->cmdr = config->mask_data;

	if (is_last) {
		if (is_cyclic)
			hwdesc->clar = desc->node[0].hwdesc_phys;
		else
			hwdesc->clar = 0;
	} else {
		hwdesc->clar = desc->node[next].hwdesc_phys;
	}

	stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
}
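
/*
 * Editorial note: each hardware descriptor mirrors the channel register set,
 * and CLAR chains the nodes: descriptor N points at node N + 1, while the
 * last node either points back at node 0 (cyclic transfers) or holds 0 to
 * mark the end of the linked list.
 */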
static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
				 struct stm32_mdma_desc *desc,
				 struct scatterlist *sgl, u32 sg_len,
				 enum dma_transfer_direction direction)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct scatterlist *sg;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr;
	int i, ret = 0;

	for_each_sg(sgl, sg, sg_len, i) {
		if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
			dev_err(chan2dev(chan), "Invalid block len\n");
			return -EINVAL;
		}

		if (direction == DMA_MEM_TO_DEV) {
			src_addr = sg_dma_address(sg);
			dst_addr = dma_config->dst_addr;
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, src_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
					   src_addr);
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = sg_dma_address(sg);
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, dst_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
					   dst_addr);
		}

		if (ret < 0)
			return ret;

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, sg_dma_len(sg), ctcr, ctbr,
					i == sg_len - 1, i == 0, false);
	}

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
	if (sg_len > 1)
		ccr |= STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	return 0;
}
static struct dma_async_tx_descriptor *
stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
			 u32 sg_len, enum dma_transfer_direction direction,
			 unsigned long flags, void *context)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_desc *desc;
	int i, ret;

	/*
	 * Once the DMA channel is set up in cyclic mode, it cannot be
	 * reassigned. The channel has to be aborted or terminated first to
	 * allow another request.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	desc = stm32_mdma_alloc_desc(chan, sg_len);
	if (!desc)
		return NULL;

	ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
	if (ret < 0)
		goto xfer_setup_err;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}
static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
			   size_t buf_len, size_t period_len,
			   enum dma_transfer_direction direction,
			   unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct stm32_mdma_desc *desc;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr, count;
	int i, ret;

	/*
	 * Once the DMA channel is set up in cyclic mode, it cannot be
	 * reassigned. The channel has to be aborted or terminated first to
	 * allow another request.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
		return NULL;
	}

	if (buf_len % period_len) {
		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
		return NULL;
	}

	count = buf_len / period_len;

	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	/* Select bus */
	if (direction == DMA_MEM_TO_DEV) {
		src_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, src_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);
	} else {
		dst_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, dst_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);
	}

	if (ret < 0)
		goto xfer_setup_err;

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	/* Configure hwdesc list */
	for (i = 0; i < count; i++) {
		if (direction == DMA_MEM_TO_DEV) {
			src_addr = buf_addr + i * period_len;
			dst_addr = dma_config->dst_addr;
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = buf_addr + i * period_len;
		}

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, period_len, ctcr, ctbr,
					i == count - 1, i == 0, true);
	}

	desc->cyclic = true;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}
static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
			   size_t len, unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	enum dma_slave_buswidth max_width;
	struct stm32_mdma_desc *desc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
	u32 best_burst, tlen;
	size_t xfer_count, offset;
	int src_bus_width, dst_bus_width;
	int i;

	/*
	 * Once the DMA channel is set up in cyclic mode, it cannot be
	 * reassigned. The channel has to be aborted or terminated first to
	 * allow another request.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));

	/* Enable sw req, some interrupts and clear other bits */
	ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
		 STM32_MDMA_CCR_IRQ_MASK);
	ccr |= STM32_MDMA_CCR_TEIE;

	/* Enable SW request mode, dest/src inc and clear other bits */
	ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
		  STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
		  STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
		  STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
		  STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
		  STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
		  STM32_MDMA_CTCR_SINC_MASK);
	ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
		STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);

	/* Reset HW request */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;

	/* Select bus */
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);

	/* Clear CBNDTR registers */
	cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
		    STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);

	if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
		cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
		if (len <= STM32_MDMA_MAX_BUF_LEN) {
			/* Setup a buffer transfer */
			ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
		} else {
			/* Setup a block transfer */
			ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
		}

		tlen = STM32_MDMA_MAX_BUF_LEN;
		ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

		/* Set source best burst size */
		max_width = stm32_mdma_get_max_width(src, len, tlen);
		src_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
			STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set destination best burst size */
		max_width = stm32_mdma_get_max_width(dest, len, tlen);
		dst_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
			STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Prepare hardware descriptor */
		hwdesc = desc->node[0].hwdesc;
		hwdesc->ctcr = ctcr;
		hwdesc->cbndtr = cbndtr;
		hwdesc->csar = src;
		hwdesc->cdar = dest;
		hwdesc->cbrur = 0;
		hwdesc->clar = 0;
		hwdesc->ctbr = ctbr;
		hwdesc->cmar = 0;
		hwdesc->cmdr = 0;

		stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
	} else {
		/* Setup a LLI transfer */
		ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
			STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
		ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
		tlen = STM32_MDMA_MAX_BUF_LEN;

		for (i = 0, offset = 0; offset < len;
		     i++, offset += xfer_count) {
			xfer_count = min_t(size_t, len - offset,
					   STM32_MDMA_MAX_BLOCK_LEN);

			/* Set source best burst size */
			max_width = stm32_mdma_get_max_width(src, len, tlen);
			src_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
				STM32_MDMA_CTCR_SSIZE(src_bus_width) |
				STM32_MDMA_CTCR_SINCOS(src_bus_width);

			/* Set destination best burst size */
			max_width = stm32_mdma_get_max_width(dest, len, tlen);
			dst_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
				STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
				STM32_MDMA_CTCR_DINCOS(dst_bus_width);

			if (dst_bus_width != src_bus_width)
				ctcr |= STM32_MDMA_CTCR_PKE;

			/* Prepare hardware descriptor */
			stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
						src + offset, dest + offset,
						xfer_count, ctcr, ctbr,
						i == count - 1, i == 0, false);
		}
	}

	desc->ccr = ccr;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}
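
/*
 * Typical dmaengine client usage of the memcpy path above (a minimal,
 * hypothetical sketch; real code must DMA-map the buffers, check return
 * values and install a completion callback):
 *
 *	dma_cap_mask_t mask;
 *	struct dma_chan *chan;
 *	struct dma_async_tx_descriptor *tx;
 *
 *	dma_cap_zero(mask);
 *	dma_cap_set(DMA_MEMCPY, mask);
 *	chan = dma_request_channel(mask, NULL, NULL);
 *	tx = dmaengine_prep_dma_memcpy(chan, dst_dma, src_dma, len,
 *				       DMA_PREP_INTERRUPT);
 *	dmaengine_submit(tx);
 *	dma_async_issue_pending(chan);
 */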
static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

	dev_dbg(chan2dev(chan), "CCR:     0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
}
static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct virt_dma_desc *vdesc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 id = chan->id;
	u32 status, reg;

	vdesc = vchan_next_desc(&chan->vchan);
	if (!vdesc) {
		chan->desc = NULL;
		return;
	}

	chan->desc = to_stm32_mdma_desc(vdesc);
	hwdesc = chan->desc->node[0].hwdesc;
	chan->curr_hwdesc = 0;

	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
	stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
	stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
	stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
	stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
	stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
	stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
	stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
	stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
	stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);

	stm32_mdma_dump_reg(chan);

	/* Start DMA */
	stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
		reg = STM32_MDMA_CCR(id);
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
	}

	chan->busy = true;

	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
}
static void stm32_mdma_issue_pending(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	if (!vchan_issue_pending(&chan->vchan))
		goto end;

	dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);

	if (!chan->desc && !chan->busy)
		stm32_mdma_start_transfer(chan);

end:
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
}
static int stm32_mdma_pause(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&chan->vchan.lock, flags);
	ret = stm32_mdma_disable_chan(chan);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	if (!ret)
		dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);

	return ret;
}
static int stm32_mdma_resume(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc;
	unsigned long flags;
	u32 status, reg;

	hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	/* Re-configure control register */
	stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id),
				    status);

	stm32_mdma_dump_reg(chan);

	/* Re-start DMA */
	reg = STM32_MDMA_CCR(chan->id);
	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);

	return 0;
}
static int stm32_mdma_terminate_all(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&chan->vchan.lock, flags);
	if (chan->busy) {
		stm32_mdma_stop(chan);
		chan->desc = NULL;
	}
	vchan_get_all_descriptors(&chan->vchan, &head);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	vchan_dma_desc_free_list(&chan->vchan, &head);

	return 0;
}
static void stm32_mdma_synchronize(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

	vchan_synchronize(&chan->vchan);
}
static int stm32_mdma_slave_config(struct dma_chan *c,
				   struct dma_slave_config *config)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

	memcpy(&chan->dma_config, config, sizeof(*config));

	return 0;
}
static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
				      struct stm32_mdma_desc *desc,
				      u32 curr_hwdesc)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc = desc->node[0].hwdesc;
	u32 cbndtr, residue, modulo, burst_size;
	int i;

	residue = 0;
	for (i = curr_hwdesc + 1; i < desc->count; i++) {
		hwdesc = desc->node[i].hwdesc;
		residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
	}
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
	residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;

	if (!chan->mem_burst)
		return residue;

	burst_size = chan->mem_burst * chan->mem_width;
	modulo = residue % burst_size;
	if (modulo)
		residue = residue - modulo + burst_size;

	return residue;
}
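
/*
 * Worked example (editorial): with mem_burst = 8 beats of mem_width = 4
 * bytes, burst_size = 32; a raw residue of 100 bytes is rounded up to 128,
 * keeping the reported value consistent with the
 * DMA_RESIDUE_GRANULARITY_BURST granularity advertised in probe.
 */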
static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
					    dma_cookie_t cookie,
					    struct dma_tx_state *state)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct virt_dma_desc *vdesc;
	enum dma_status status;
	unsigned long flags;
	u32 residue = 0;

	status = dma_cookie_status(c, cookie, state);
	if ((status == DMA_COMPLETE) || (!state))
		return status;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	vdesc = vchan_find_desc(&chan->vchan, cookie);
	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
		residue = stm32_mdma_desc_residue(chan, chan->desc,
						  chan->curr_hwdesc);
	else if (vdesc)
		residue = stm32_mdma_desc_residue(chan,
						  to_stm32_mdma_desc(vdesc), 0);
	dma_set_residue(state, residue);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	return status;
}
static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
{
	list_del(&chan->desc->vdesc.node);
	vchan_cookie_complete(&chan->desc->vdesc);
	chan->desc = NULL;
	chan->busy = false;

	/* Start the next transfer if this driver has a next desc */
	stm32_mdma_start_transfer(chan);
}
static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
{
	struct stm32_mdma_device *dmadev = devid;
	struct stm32_mdma_chan *chan = devid;
	u32 reg, id, ien, status, flag;

	/* Find out which channel generates the interrupt */
	status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
	if (status) {
		id = __ffs(status);
	} else {
		status = readl_relaxed(dmadev->base + STM32_MDMA_GISR1);
		if (!status) {
			dev_dbg(mdma2dev(dmadev), "spurious it\n");
			return IRQ_NONE;
		}
		id = __ffs(status);
		/*
		 * GISR0 provides status for channels 0 to 31,
		 * GISR1 for channels 32 to 62
		 */
		id += 32;
	}

	chan = &dmadev->chan[id];
	if (!chan) {
		dev_dbg(mdma2dev(dmadev), "MDMA channel not initialized\n");
		goto exit;
	}

	/* Handle interrupt for the channel */
	spin_lock(&chan->vchan.lock);
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	ien = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ien &= STM32_MDMA_CCR_IRQ_MASK;
	ien >>= 1;

	if (!(status & ien)) {
		spin_unlock(&chan->vchan.lock);
		dev_dbg(chan2dev(chan),
			"spurious it (status=0x%04x, ien=0x%04x)\n",
			status, ien);
		return IRQ_NONE;
	}

	flag = __ffs(status & ien);
	reg = STM32_MDMA_CIFCR(chan->id);

	switch (1 << flag) {
	case STM32_MDMA_CISR_TEIF:
		id = chan->id;
		status = readl_relaxed(dmadev->base + STM32_MDMA_CESR(id));
		dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n", status);
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
		break;

	case STM32_MDMA_CISR_CTCIF:
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
		stm32_mdma_xfer_end(chan);
		break;

	case STM32_MDMA_CISR_BRTIF:
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
		break;

	case STM32_MDMA_CISR_BTIF:
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
		chan->curr_hwdesc++;
		if (chan->desc && chan->desc->cyclic) {
			if (chan->curr_hwdesc == chan->desc->count)
				chan->curr_hwdesc = 0;
			vchan_cyclic_callback(&chan->desc->vdesc);
		}
		break;

	case STM32_MDMA_CISR_TCIF:
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
		break;

	default:
		dev_err(chan2dev(chan), "it %d unhandled (status=0x%04x)\n",
			1 << flag, status);
	}

	spin_unlock(&chan->vchan.lock);

exit:
	return IRQ_HANDLED;
}
static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	int ret;

	chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
					   c->device->dev,
					   sizeof(struct stm32_mdma_hwdesc),
					   __alignof__(struct stm32_mdma_hwdesc),
					   0);
	if (!chan->desc_pool) {
		dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
		return -ENOMEM;
	}

	ret = clk_prepare_enable(dmadev->clk);
	if (ret < 0) {
		dev_err(chan2dev(chan), "clk_prepare_enable failed: %d\n", ret);
		return ret;
	}

	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		clk_disable_unprepare(dmadev->clk);

	return ret;
}
static void stm32_mdma_free_chan_resources(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	unsigned long flags;

	dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);

	if (chan->busy) {
		spin_lock_irqsave(&chan->vchan.lock, flags);
		stm32_mdma_stop(chan);
		chan->desc = NULL;
		spin_unlock_irqrestore(&chan->vchan.lock, flags);
	}

	clk_disable_unprepare(dmadev->clk);
	vchan_free_chan_resources(to_virt_chan(c));
	dmam_pool_destroy(chan->desc_pool);
	chan->desc_pool = NULL;
}
static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
					    struct of_dma *ofdma)
{
	struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
	struct stm32_mdma_chan *chan;
	struct dma_chan *c;
	struct stm32_mdma_chan_config config;

	if (dma_spec->args_count < 5) {
		dev_err(mdma2dev(dmadev), "Bad number of args\n");
		return NULL;
	}

	config.request = dma_spec->args[0];
	config.priority_level = dma_spec->args[1];
	config.transfer_config = dma_spec->args[2];
	config.mask_addr = dma_spec->args[3];
	config.mask_data = dma_spec->args[4];

	if (config.request >= dmadev->nr_requests) {
		dev_err(mdma2dev(dmadev), "Bad request line\n");
		return NULL;
	}

	if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
		dev_err(mdma2dev(dmadev), "Priority level not supported\n");
		return NULL;
	}

	c = dma_get_any_slave_channel(&dmadev->ddev);
	if (!c) {
		dev_err(mdma2dev(dmadev), "No more channels available\n");
		return NULL;
	}

	chan = to_stm32_mdma_chan(c);
	chan->chan_config = config;

	return c;
}
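
/*
 * Editorial note: the five DT cells consumed above map, in order, to the
 * request line, priority level, CTCR transfer configuration, mask address
 * and mask data. A client node would therefore carry something like
 * (values hypothetical):
 *
 *	dmas = <&mdma1 26 0x2 0x12000a02 0x00000000 0x00000000>;
 */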
static const struct of_device_id stm32_mdma_of_match[] = {
	{ .compatible = "st,stm32h7-mdma", },
	{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);
static int stm32_mdma_probe(struct platform_device *pdev)
{
	struct stm32_mdma_chan *chan;
	struct stm32_mdma_device *dmadev;
	struct dma_device *dd;
	struct device_node *of_node;
	struct resource *res;
	u32 nr_channels, nr_requests;
	int i, count, ret;

	of_node = pdev->dev.of_node;
	if (!of_node)
		return -ENODEV;

	ret = device_property_read_u32(&pdev->dev, "dma-channels",
				       &nr_channels);
	if (ret) {
		nr_channels = STM32_MDMA_MAX_CHANNELS;
		dev_warn(&pdev->dev, "MDMA defaulting on %i channels\n",
			 nr_channels);
	}

	ret = device_property_read_u32(&pdev->dev, "dma-requests",
				       &nr_requests);
	if (ret) {
		nr_requests = STM32_MDMA_MAX_REQUESTS;
		dev_warn(&pdev->dev, "MDMA defaulting on %i request lines\n",
			 nr_requests);
	}

	count = device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
					       NULL, 0);
	if (count < 0)
		count = 0;

	dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev) + sizeof(u32) * count,
			      GFP_KERNEL);
	if (!dmadev)
		return -ENOMEM;

	dmadev->nr_channels = nr_channels;
	dmadev->nr_requests = nr_requests;
	device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
				       dmadev->ahb_addr_masks,
				       count);
	dmadev->nr_ahb_addr_masks = count;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dmadev->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(dmadev->base))
		return PTR_ERR(dmadev->base);

	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(dmadev->clk)) {
		ret = PTR_ERR(dmadev->clk);
		if (ret == -EPROBE_DEFER)
			dev_info(&pdev->dev, "Missing controller clock\n");
		return ret;
	}

	dmadev->rst = devm_reset_control_get(&pdev->dev, NULL);
	if (!IS_ERR(dmadev->rst)) {
		reset_control_assert(dmadev->rst);
		udelay(2);
		reset_control_deassert(dmadev->rst);
	}

	dd = &dmadev->ddev;
	dma_cap_set(DMA_SLAVE, dd->cap_mask);
	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
	dma_cap_set(DMA_MEMCPY, dd->cap_mask);
	dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
	dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
	dd->device_tx_status = stm32_mdma_tx_status;
	dd->device_issue_pending = stm32_mdma_issue_pending;
	dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
	dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
	dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
	dd->device_config = stm32_mdma_slave_config;
	dd->device_pause = stm32_mdma_pause;
	dd->device_resume = stm32_mdma_resume;
	dd->device_terminate_all = stm32_mdma_terminate_all;
	dd->device_synchronize = stm32_mdma_synchronize;
	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
		BIT(DMA_MEM_TO_MEM);
	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
	dd->max_burst = STM32_MDMA_MAX_BURST;
	dd->dev = &pdev->dev;
	INIT_LIST_HEAD(&dd->channels);

	for (i = 0; i < dmadev->nr_channels; i++) {
		chan = &dmadev->chan[i];
		chan->id = i;
		chan->vchan.desc_free = stm32_mdma_desc_free;
		vchan_init(&chan->vchan, dd);
	}

	dmadev->irq = platform_get_irq(pdev, 0);
	if (dmadev->irq < 0) {
		dev_err(&pdev->dev, "failed to get IRQ\n");
		return dmadev->irq;
	}

	ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
			       0, dev_name(&pdev->dev), dmadev);
	if (ret) {
		dev_err(&pdev->dev, "failed to request IRQ\n");
		return ret;
	}

	ret = dma_async_device_register(dd);
	if (ret)
		return ret;

	ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
	if (ret < 0) {
		dev_err(&pdev->dev,
			"STM32 MDMA DMA OF registration failed %d\n", ret);
		goto err_unregister;
	}

	platform_set_drvdata(pdev, dmadev);

	dev_info(&pdev->dev, "STM32 MDMA driver registered\n");

	return 0;

err_unregister:
	dma_async_device_unregister(dd);

	return ret;
}
static struct platform_driver stm32_mdma_driver = {
	.probe = stm32_mdma_probe,
	.driver = {
		.name = "stm32-mdma",
		.of_match_table = stm32_mdma_of_match,
	},
};

static int __init stm32_mdma_init(void)
{
	return platform_driver_register(&stm32_mdma_driver);
}

subsys_initcall(stm32_mdma_init);
MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
MODULE_LICENSE("GPL v2");