2 * Copyright (c) 2014-2016 Jared D. McNeill <jmcneill@invisible.ca>
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
14 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
15 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
16 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
17 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
18 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
19 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
20 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
21 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
22 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
29 * Allwinner A10/A20 DMA controller
#include <sys/param.h>
#include <sys/systm.h>
#include <sys/bus.h>
#include <sys/rman.h>
#include <sys/condvar.h>
#include <sys/kernel.h>
#include <sys/lock.h>
#include <sys/module.h>
#include <sys/mutex.h>

#include <machine/bus.h>

#include <dev/ofw/ofw_bus.h>
#include <dev/ofw/ofw_bus_subr.h>

#include <arm/allwinner/a10_dmac.h>
#include <dev/clk/clk.h>

#include "sunxi_dma_if.h"
52 #define NDMA_CHANNELS 8
53 #define DDMA_CHANNELS 8
62 struct a10dmac_channel
{
63 struct a10dmac_softc
* ch_sc
;
65 enum a10dmac_type ch_type
;
66 void (*ch_callback
)(void *);
67 void * ch_callbackarg
;
71 struct a10dmac_softc
{
72 struct resource
* sc_res
[2];
76 struct a10dmac_channel sc_ndma_channels
[NDMA_CHANNELS
];
77 struct a10dmac_channel sc_ddma_channels
[DDMA_CHANNELS
];
80 static struct resource_spec a10dmac_spec
[] = {
81 { SYS_RES_MEMORY
, 0, RF_ACTIVE
},
82 { SYS_RES_IRQ
, 0, RF_ACTIVE
},
/* Controller-relative register access. */
#define	DMA_READ(sc, reg)	bus_read_4((sc)->sc_res[0], (reg))
#define	DMA_WRITE(sc, reg, val)	bus_write_4((sc)->sc_res[0], (reg), (val))
/* Channel-relative access: offsets are biased by the channel's ch_regoff. */
#define	DMACH_READ(ch, reg)		\
    DMA_READ((ch)->ch_sc, (reg) + (ch)->ch_regoff)
#define	DMACH_WRITE(ch, reg, val)	\
    DMA_WRITE((ch)->ch_sc, (reg) + (ch)->ch_regoff, (val))

static void a10dmac_intr(void *);
96 a10dmac_probe(device_t dev
)
98 if (!ofw_bus_status_okay(dev
))
101 if (!ofw_bus_is_compatible(dev
, "allwinner,sun4i-a10-dma"))
104 device_set_desc(dev
, "Allwinner DMA controller");
105 return (BUS_PROBE_DEFAULT
);
109 a10dmac_attach(device_t dev
)
111 struct a10dmac_softc
*sc
;
116 sc
= device_get_softc(dev
);
118 if (bus_alloc_resources(dev
, a10dmac_spec
, sc
->sc_res
)) {
119 device_printf(dev
, "cannot allocate resources for device\n");
123 mtx_init(&sc
->sc_mtx
, "a10 dmac", NULL
, MTX_SPIN
);
125 /* Activate DMA controller clock */
126 error
= clk_get_by_ofw_index(dev
, 0, 0, &clk
);
128 device_printf(dev
, "cannot get clock\n");
131 error
= clk_enable(clk
);
133 device_printf(dev
, "cannot enable clock\n");
137 /* Disable all interrupts and clear pending status */
138 DMA_WRITE(sc
, AWIN_DMA_IRQ_EN_REG
, 0);
139 DMA_WRITE(sc
, AWIN_DMA_IRQ_PEND_STA_REG
, ~0);
141 /* Initialize channels */
142 for (index
= 0; index
< NDMA_CHANNELS
; index
++) {
143 sc
->sc_ndma_channels
[index
].ch_sc
= sc
;
144 sc
->sc_ndma_channels
[index
].ch_index
= index
;
145 sc
->sc_ndma_channels
[index
].ch_type
= CH_NDMA
;
146 sc
->sc_ndma_channels
[index
].ch_callback
= NULL
;
147 sc
->sc_ndma_channels
[index
].ch_callbackarg
= NULL
;
148 sc
->sc_ndma_channels
[index
].ch_regoff
= AWIN_NDMA_REG(index
);
149 DMACH_WRITE(&sc
->sc_ndma_channels
[index
], AWIN_NDMA_CTL_REG
, 0);
151 for (index
= 0; index
< DDMA_CHANNELS
; index
++) {
152 sc
->sc_ddma_channels
[index
].ch_sc
= sc
;
153 sc
->sc_ddma_channels
[index
].ch_index
= index
;
154 sc
->sc_ddma_channels
[index
].ch_type
= CH_DDMA
;
155 sc
->sc_ddma_channels
[index
].ch_callback
= NULL
;
156 sc
->sc_ddma_channels
[index
].ch_callbackarg
= NULL
;
157 sc
->sc_ddma_channels
[index
].ch_regoff
= AWIN_DDMA_REG(index
);
158 DMACH_WRITE(&sc
->sc_ddma_channels
[index
], AWIN_DDMA_CTL_REG
, 0);
161 error
= bus_setup_intr(dev
, sc
->sc_res
[1], INTR_MPSAFE
| INTR_TYPE_MISC
,
162 NULL
, a10dmac_intr
, sc
, &sc
->sc_ih
);
164 device_printf(dev
, "could not setup interrupt handler\n");
165 bus_release_resources(dev
, a10dmac_spec
, sc
->sc_res
);
166 mtx_destroy(&sc
->sc_mtx
);
170 OF_device_register_xref(OF_xref_from_node(ofw_bus_get_node(dev
)), dev
);
175 a10dmac_intr(void *priv
)
177 struct a10dmac_softc
*sc
= priv
;
178 uint32_t sta
, bit
, mask
;
181 sta
= DMA_READ(sc
, AWIN_DMA_IRQ_PEND_STA_REG
);
182 DMA_WRITE(sc
, AWIN_DMA_IRQ_PEND_STA_REG
, sta
);
184 while ((bit
= ffs(sta
& AWIN_DMA_IRQ_END_MASK
)) != 0) {
185 mask
= (1U << (bit
- 1));
188 * Map status bit to channel number. The status register is
189 * encoded with two bits of status per channel (lowest bit
190 * is half transfer pending, highest bit is end transfer
191 * pending). The 8 normal DMA channel status are in the lower
192 * 16 bits and the 8 dedicated DMA channel status are in
193 * the upper 16 bits. The output is a channel number from 0-7.
195 index
= ((bit
- 1) / 2) & 7;
196 if (mask
& AWIN_DMA_IRQ_NDMA
) {
197 if (sc
->sc_ndma_channels
[index
].ch_callback
== NULL
)
199 sc
->sc_ndma_channels
[index
].ch_callback(
200 sc
->sc_ndma_channels
[index
].ch_callbackarg
);
202 if (sc
->sc_ddma_channels
[index
].ch_callback
== NULL
)
204 sc
->sc_ddma_channels
[index
].ch_callback(
205 sc
->sc_ddma_channels
[index
].ch_callbackarg
);
211 a10dmac_read_ctl(struct a10dmac_channel
*ch
)
213 if (ch
->ch_type
== CH_NDMA
) {
214 return (DMACH_READ(ch
, AWIN_NDMA_CTL_REG
));
216 return (DMACH_READ(ch
, AWIN_DDMA_CTL_REG
));
221 a10dmac_write_ctl(struct a10dmac_channel
*ch
, uint32_t val
)
223 if (ch
->ch_type
== CH_NDMA
) {
224 DMACH_WRITE(ch
, AWIN_NDMA_CTL_REG
, val
);
226 DMACH_WRITE(ch
, AWIN_DDMA_CTL_REG
, val
);
231 a10dmac_set_config(device_t dev
, void *priv
, const struct sunxi_dma_config
*cfg
)
233 struct a10dmac_channel
*ch
= priv
;
235 unsigned int dst_dw
, dst_bl
, dst_bs
, dst_wc
, dst_am
;
236 unsigned int src_dw
, src_bl
, src_bs
, src_wc
, src_am
;
238 switch (cfg
->dst_width
) {
240 dst_dw
= AWIN_DMA_CTL_DATA_WIDTH_8
;
243 dst_dw
= AWIN_DMA_CTL_DATA_WIDTH_16
;
246 dst_dw
= AWIN_DMA_CTL_DATA_WIDTH_32
;
251 switch (cfg
->dst_burst_len
) {
253 dst_bl
= AWIN_DMA_CTL_BURST_LEN_1
;
256 dst_bl
= AWIN_DMA_CTL_BURST_LEN_4
;
259 dst_bl
= AWIN_DMA_CTL_BURST_LEN_8
;
264 switch (cfg
->src_width
) {
266 src_dw
= AWIN_DMA_CTL_DATA_WIDTH_8
;
269 src_dw
= AWIN_DMA_CTL_DATA_WIDTH_16
;
272 src_dw
= AWIN_DMA_CTL_DATA_WIDTH_32
;
277 switch (cfg
->src_burst_len
) {
279 src_bl
= AWIN_DMA_CTL_BURST_LEN_1
;
282 src_bl
= AWIN_DMA_CTL_BURST_LEN_4
;
285 src_bl
= AWIN_DMA_CTL_BURST_LEN_8
;
291 val
= (dst_dw
<< AWIN_DMA_CTL_DST_DATA_WIDTH_SHIFT
) |
292 (dst_bl
<< AWIN_DMA_CTL_DST_BURST_LEN_SHIFT
) |
293 (cfg
->dst_drqtype
<< AWIN_DMA_CTL_DST_DRQ_TYPE_SHIFT
) |
294 (src_dw
<< AWIN_DMA_CTL_SRC_DATA_WIDTH_SHIFT
) |
295 (src_bl
<< AWIN_DMA_CTL_SRC_BURST_LEN_SHIFT
) |
296 (cfg
->src_drqtype
<< AWIN_DMA_CTL_SRC_DRQ_TYPE_SHIFT
);
298 if (ch
->ch_type
== CH_NDMA
) {
300 val
|= AWIN_NDMA_CTL_DST_ADDR_NOINCR
;
302 val
|= AWIN_NDMA_CTL_SRC_ADDR_NOINCR
;
304 DMACH_WRITE(ch
, AWIN_NDMA_CTL_REG
, val
);
306 dst_am
= cfg
->dst_noincr
? AWIN_DDMA_CTL_DMA_ADDR_IO
:
307 AWIN_DDMA_CTL_DMA_ADDR_LINEAR
;
308 src_am
= cfg
->src_noincr
? AWIN_DDMA_CTL_DMA_ADDR_IO
:
309 AWIN_DDMA_CTL_DMA_ADDR_LINEAR
;
311 val
|= (dst_am
<< AWIN_DDMA_CTL_DST_ADDR_MODE_SHIFT
);
312 val
|= (src_am
<< AWIN_DDMA_CTL_SRC_ADDR_MODE_SHIFT
);
314 DMACH_WRITE(ch
, AWIN_DDMA_CTL_REG
, val
);
316 dst_bs
= cfg
->dst_blksize
- 1;
317 dst_wc
= cfg
->dst_wait_cyc
- 1;
318 src_bs
= cfg
->src_blksize
- 1;
319 src_wc
= cfg
->src_wait_cyc
- 1;
321 DMACH_WRITE(ch
, AWIN_DDMA_PARA_REG
,
322 (dst_bs
<< AWIN_DDMA_PARA_DST_DATA_BLK_SIZ_SHIFT
) |
323 (dst_wc
<< AWIN_DDMA_PARA_DST_WAIT_CYC_SHIFT
) |
324 (src_bs
<< AWIN_DDMA_PARA_SRC_DATA_BLK_SIZ_SHIFT
) |
325 (src_wc
<< AWIN_DDMA_PARA_SRC_WAIT_CYC_SHIFT
));
332 a10dmac_alloc(device_t dev
, bool dedicated
, void (*cb
)(void *), void *cbarg
)
334 struct a10dmac_softc
*sc
= device_get_softc(dev
);
335 struct a10dmac_channel
*ch_list
;
336 struct a10dmac_channel
*ch
= NULL
;
338 uint8_t ch_count
, index
;
341 ch_list
= sc
->sc_ddma_channels
;
342 ch_count
= DDMA_CHANNELS
;
344 ch_list
= sc
->sc_ndma_channels
;
345 ch_count
= NDMA_CHANNELS
;
348 mtx_lock_spin(&sc
->sc_mtx
);
349 for (index
= 0; index
< ch_count
; index
++) {
350 if (ch_list
[index
].ch_callback
== NULL
) {
351 ch
= &ch_list
[index
];
352 ch
->ch_callback
= cb
;
353 ch
->ch_callbackarg
= cbarg
;
355 irqen
= DMA_READ(sc
, AWIN_DMA_IRQ_EN_REG
);
356 if (ch
->ch_type
== CH_NDMA
)
357 irqen
|= AWIN_DMA_IRQ_NDMA_END(index
);
359 irqen
|= AWIN_DMA_IRQ_DDMA_END(index
);
360 DMA_WRITE(sc
, AWIN_DMA_IRQ_EN_REG
, irqen
);
365 mtx_unlock_spin(&sc
->sc_mtx
);
371 a10dmac_free(device_t dev
, void *priv
)
373 struct a10dmac_channel
*ch
= priv
;
374 struct a10dmac_softc
*sc
= ch
->ch_sc
;
375 uint32_t irqen
, sta
, cfg
;
377 mtx_lock_spin(&sc
->sc_mtx
);
379 irqen
= DMA_READ(sc
, AWIN_DMA_IRQ_EN_REG
);
380 cfg
= a10dmac_read_ctl(ch
);
381 if (ch
->ch_type
== CH_NDMA
) {
382 sta
= AWIN_DMA_IRQ_NDMA_END(ch
->ch_index
);
383 cfg
&= ~AWIN_NDMA_CTL_DMA_LOADING
;
385 sta
= AWIN_DMA_IRQ_DDMA_END(ch
->ch_index
);
386 cfg
&= ~AWIN_DDMA_CTL_DMA_LOADING
;
389 a10dmac_write_ctl(ch
, cfg
);
390 DMA_WRITE(sc
, AWIN_DMA_IRQ_EN_REG
, irqen
);
391 DMA_WRITE(sc
, AWIN_DMA_IRQ_PEND_STA_REG
, sta
);
393 ch
->ch_callback
= NULL
;
394 ch
->ch_callbackarg
= NULL
;
396 mtx_unlock_spin(&sc
->sc_mtx
);
400 a10dmac_transfer(device_t dev
, void *priv
, bus_addr_t src
, bus_addr_t dst
,
403 struct a10dmac_channel
*ch
= priv
;
406 cfg
= a10dmac_read_ctl(ch
);
407 if (ch
->ch_type
== CH_NDMA
) {
408 if (cfg
& AWIN_NDMA_CTL_DMA_LOADING
)
411 DMACH_WRITE(ch
, AWIN_NDMA_SRC_ADDR_REG
, src
);
412 DMACH_WRITE(ch
, AWIN_NDMA_DEST_ADDR_REG
, dst
);
413 DMACH_WRITE(ch
, AWIN_NDMA_BC_REG
, nbytes
);
415 cfg
|= AWIN_NDMA_CTL_DMA_LOADING
;
416 a10dmac_write_ctl(ch
, cfg
);
418 if (cfg
& AWIN_DDMA_CTL_DMA_LOADING
)
421 DMACH_WRITE(ch
, AWIN_DDMA_SRC_START_ADDR_REG
, src
);
422 DMACH_WRITE(ch
, AWIN_DDMA_DEST_START_ADDR_REG
, dst
);
423 DMACH_WRITE(ch
, AWIN_DDMA_BC_REG
, nbytes
);
425 cfg
|= AWIN_DDMA_CTL_DMA_LOADING
;
426 a10dmac_write_ctl(ch
, cfg
);
433 a10dmac_halt(device_t dev
, void *priv
)
435 struct a10dmac_channel
*ch
= priv
;
438 cfg
= a10dmac_read_ctl(ch
);
439 if (ch
->ch_type
== CH_NDMA
) {
440 cfg
&= ~AWIN_NDMA_CTL_DMA_LOADING
;
442 cfg
&= ~AWIN_DDMA_CTL_DMA_LOADING
;
444 a10dmac_write_ctl(ch
, cfg
);
447 static device_method_t a10dmac_methods
[] = {
448 /* Device interface */
449 DEVMETHOD(device_probe
, a10dmac_probe
),
450 DEVMETHOD(device_attach
, a10dmac_attach
),
452 /* sunxi DMA interface */
453 DEVMETHOD(sunxi_dma_alloc
, a10dmac_alloc
),
454 DEVMETHOD(sunxi_dma_free
, a10dmac_free
),
455 DEVMETHOD(sunxi_dma_set_config
, a10dmac_set_config
),
456 DEVMETHOD(sunxi_dma_transfer
, a10dmac_transfer
),
457 DEVMETHOD(sunxi_dma_halt
, a10dmac_halt
),
462 static driver_t a10dmac_driver
= {
465 sizeof(struct a10dmac_softc
)
468 DRIVER_MODULE(a10dmac
, simplebus
, a10dmac_driver
, 0, 0);