/**HEADER********************************************************************
*
* Copyright (c) 2009 Freescale Semiconductor;
*
***************************************************************************
*
* THIS SOFTWARE IS PROVIDED BY FREESCALE "AS IS" AND ANY EXPRESSED OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL FREESCALE OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*
**************************************************************************
*
* Comments:
*
* This file contains the implementation of the eDMA driver provided to
* other drivers.
*
*END************************************************************************/
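/*
 * Usage sketch (not part of the driver): how a client might run a circular
 * memory-to-peripheral transfer with this API. The channel number, request
 * source, addresses, and sizes are hypothetical placeholders, and the
 * callback signature follows how the done ISR invokes it below.
 */
#if 0
static void my_done_callback(void *param, uint_8 channel)
{
    /* Runs in interrupt context: keep it short, e.g. post a semaphore */
}

static _mqx_uint example_circle_transfer(uint_32 src, uint_32 fifo,
                                         MQX_EDMA_HARDWARE_REQUEST req)
{
    _mqx_uint result;

    /* Claim channel 0 at interrupt priority 3 */
    result = edma_request_channel(0, my_done_callback, NULL, 3, NULL, NULL);
    if (result != MQX_OK)
        return result;

    /* 512-byte ring, 32-bit elements, 16-byte bursts */
    result = edma_config_circle(0, MQX_EDMA_MEM_TO_PERI, req, src, fifo,
                                MQX_EDMA_TRANS_SIZE_32_BITS, 16, 512);
    if (result != MQX_OK)
        return result;

    /* Enable the hardware request; transfers begin on peripheral demand */
    return edma_start(0);
}
#endif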
#define DMA_BASE_PTRS {(DMA_MemMapPtr)0x40008000}

#if (MQX_CPU == PSP_CPU_MK70F120M)
#define DMAMUX_BASE_PTRS {(DMAMUX_MemMapPtr)0x40021000, (DMAMUX_MemMapPtr)0x40022000}
#endif

#if (MQX_CPU == PSP_CPU_MK60D100M)
#define DMAMUX_BASE_PTRS {(DMAMUX_MemMapPtr)0x40021000}
#endif
MQX_EDMA_STRUCT_PTR g_edma = NULL;
/* Peripheral base pointers */
DMA_MemMapPtr    edma_base[]   = DMA_BASE_PTRS;
DMAMUX_MemMapPtr dmamux_base[] = DMAMUX_BASE_PTRS;
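/*
 * Bytes moved per elementary transfer, indexed by the MQX_EDMA_TRANFER_SIZE
 * code used in DMA_ATTR_SSIZE/DSIZE; index 3 is 0 because that ATTR size
 * encoding is reserved by the eDMA module.
 */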
static uint_8 edma_trans_size[6] = {1, 2, 4, 0, 16, 32};
static void edma_interrupt_handler(pointer parameter);
static void edma_err_interrupt_handler(pointer parameter);
_mqx_uint dma_init(void)
{
    DMA_MemMapPtr    base;
    DMAMUX_MemMapPtr base_mux;
    uint_32          i, j, err_int, result;
    char_ptr         identifier = "edma:";

    /* Create the global driver state; its semaphore guards the channel map */
    g_edma = _mem_alloc_system_zero((_mem_size)sizeof(MQX_EDMA_STRUCT));
    if (g_edma == NULL)
        return (MQX_OUT_OF_MEMORY);
    /* Enable the DMAMUX clock */

    /* Create a lightweight semaphore to protect the channel resources */
    _lwsem_create((LWSEM_STRUCT_PTR)&g_edma->chan_sem, 1);
    /* Initialize the channel resource bitmap and the per-channel contexts */
    _lwsem_wait((LWSEM_STRUCT_PTR)&g_edma->chan_sem);
    g_edma->context = _mem_alloc_system_zero((_mem_size)(sizeof(MQX_EDMA_CHAN_CONTEXT_PTR) * EDMA_TOTAL_CHANNELS));
    for (i = 0; i < EDMA_TOTAL_CHANNELS; i++)
    {
        g_edma->context[i] = NULL;
    }
    _lwsem_post((LWSEM_STRUCT_PTR)&g_edma->chan_sem);
    /* Reset each eDMA module and install its interrupt handlers */
    for (i = 0; i < EDMA_MODULES; i++)
    {
        /* Get the base address of the module's DMA controller registers */
        base = _bsp_get_DMACTRL_base_address(i * EDMA_CHANNELS + 1);
        /* Cancel any DMA transfer in progress */
        DMA_ERQ_REG(base) = 0;
        DMA_CR_REG(base) |= DMA_CR_CX_MASK | DMA_CR_HALT_MASK;
        while (DMA_CR_REG(base) & DMA_CR_CX_MASK) {};
        /* Clear the halt bit and set the halt-on-error bit */
        DMA_CR_REG(base) = (DMA_CR_REG(base) & ~DMA_CR_HALT_MASK) | DMA_CR_HOE_MASK;
        /* Clear all error bits */
        DMA_CERR_REG(base) |= 0x40;
        /* Disable the DMAMUX routing for every channel of this module */
        for (j = i * EDMA_CHANNELS; j < EDMA_CHANNELS * (i + 1); j++)
        {
            base_mux = _bsp_get_DMAMUX_base_address(j);
            DMAMUX_CHCFG_REG(base_mux, _bsp_get_DMAMUX_chan(j)) = 0;
        }
        /* Register the error interrupt */
        err_int = _bsp_get_edma_error_vector(i * EDMA_CHANNELS);
        _int_install_isr(err_int, edma_err_interrupt_handler, (void *)NULL);
        _bsp_int_init(err_int, 3, 0, TRUE);
#if PSP_MQX_CPU_IS_ARM_CORTEX_A5
        _int_install_isr(_bsp_get_edma_done_vector(i * EDMA_CHANNELS + 1), edma_interrupt_handler, (void *)NULL);
        _bsp_int_init(_bsp_get_edma_done_vector(i * EDMA_CHANNELS + 1), 3, 0, TRUE);
#endif

        /* Enable error interrupt */
        //DMA_SEEI_REG(base) |= 0x40;
    }
    /* Register the driver so clients can locate it by name */
    result = _io_dev_install(identifier,
                             NULL, NULL, NULL, NULL, NULL, (pointer)g_edma);

    return result;
}
_mqx_uint dma_deinit(void)
{
    uint_32 i, err_int;

    /* Close any channels that are still open */
    for (i = 0; i < EDMA_CHANNELS * EDMA_MODULES; i++)
    {
        edma_close_chan(i);
    }
    _lwsem_destroy((LWSEM_STRUCT_PTR)&g_edma->chan_sem);
    for (i = 0; i < EDMA_MODULES; i++)
    {
        err_int = _bsp_get_edma_error_vector(i * EDMA_CHANNELS);
        _bsp_int_disable(err_int);
#if PSP_MQX_CPU_IS_ARM_CORTEX_A5
        _bsp_int_disable(_bsp_get_edma_done_vector(i * EDMA_CHANNELS + 1));
#endif
    }

    _mem_free(g_edma->context);
    _mem_free(g_edma);
    g_edma = NULL;
    return MQX_OK;
}
_mqx_uint edma_verify_chan(uint_8 channel)
{
    _lwsem_wait((LWSEM_STRUCT_PTR)&g_edma->chan_sem);
    if (!EDMA_VERIFY_CHANNEL(channel))
    {
        _lwsem_post((LWSEM_STRUCT_PTR)&g_edma->chan_sem);
        return MQX_INVALID_PARAMETER;
    }
    _lwsem_post((LWSEM_STRUCT_PTR)&g_edma->chan_sem);
    return MQX_OK;
}
/* Request an eDMA channel and register its completion/error callbacks */
_mqx_uint edma_request_channel(uint_8 channel, EDMA_CALLBACK *cback, void *para,
                               uint_8 pri, EDMA_ERR_CALLBACK *err_cback, void *err_para)
{
    _lwsem_wait((LWSEM_STRUCT_PTR)&g_edma->chan_sem);
    if (EDMA_VERIFY_CHANNEL(channel))
    {
        /* Channel already allocated; release the semaphore before failing */
        _lwsem_post((LWSEM_STRUCT_PTR)&g_edma->chan_sem);
        return MQX_INVALID_PARAMETER;
    }
    EDMA_SET_CHANNEL(channel);
    g_edma->context[channel] = _mem_alloc_system_zero((_mem_size)sizeof(MQX_EDMA_CHAN_CONTEXT));
    if (!g_edma->context[channel])
    {
        /* Roll back the channel claim and release the semaphore */
        EDMA_CLR_CHANNEL(channel);
        _lwsem_post((LWSEM_STRUCT_PTR)&g_edma->chan_sem);
        return MQX_OUT_OF_MEMORY;
    }
    g_edma->context[channel]->callback      = cback;
    g_edma->context[channel]->parameter     = para;
    g_edma->context[channel]->err_callback  = err_cback;
    g_edma->context[channel]->err_parameter = err_para;
    g_edma->context[channel]->errsta        = MQX_EDMA_ERR_NONE;
    g_edma->context[channel]->channel       = channel;
#if PSP_MQX_CPU_IS_ARM_CORTEX_M4 || PSP_MQX_CPU_IS_ARM_CORTEX_M0P
    _int_install_isr(_bsp_get_edma_done_vector(channel), edma_interrupt_handler,
                     (void *)g_edma->context[channel]);
    _bsp_int_init(_bsp_get_edma_done_vector(channel), pri, 0, TRUE);
#endif
    _lwsem_post((LWSEM_STRUCT_PTR)&g_edma->chan_sem);
    return MQX_OK;
}
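/*
 * Note: the completion and error callbacks registered above are invoked from
 * interrupt context (see the handlers at the end of this file), so they
 * should only do short, non-blocking work.
 */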
void edma_free_chan(uint_8 channel)
{
    MQX_EDMA_CHAN_CONTEXT_PTR chan_info = g_edma->context[channel];

    _lwsem_wait((LWSEM_STRUCT_PTR)&g_edma->chan_sem);
    EDMA_CLR_CHANNEL(channel);

    if (chan_info->tcd_buf)
        _mem_free(chan_info->tcd_buf);
    _mem_free(chan_info);
    g_edma->context[channel] = NULL;

    _lwsem_post((LWSEM_STRUCT_PTR)&g_edma->chan_sem);
}
/*
 * Linear scatter-gather configuration (disabled): the first entry is loaded
 * directly into the channel registers, each following TCD chains to the next,
 * and the last TCD terminates the transfer. Superseded by the circular
 * version below.
 */
#if 0
_mqx_uint edma_config_scatter(
    uint_8 channel,
    MQX_EDMA_TYPE type, MQX_EDMA_HARDWARE_REQUEST request,
    MQX_EDMA_SCATTER_STRUCT_PTR scatter_list,
    MQX_EDMA_TRANFER_SIZE trans_size,
    uint_8 nents, uint_32 burst_size)
{
    MQX_EDMA_TCD_PTR p_tcd;
    DMA_MemMapPtr base;
    DMAMUX_MemMapPtr base_mux;
    uint_32 chan, chan_mux;
    uint_32 i, test_reg;
    /* Check that the channel has been allocated */
    if (edma_verify_chan(channel) != MQX_OK)
        return MQX_INVALID_PARAMETER;

    base     = _bsp_get_DMACTRL_base_address(channel);
    base_mux = _bsp_get_DMAMUX_base_address(channel);
    chan     = _bsp_get_DMACTRL_chan(channel);
    chan_mux = _bsp_get_DMAMUX_chan(channel);

    /* The burst size must be a multiple of the elementary transfer size,
       and each buffer length a multiple of the burst size */
    if (burst_size % edma_trans_size[trans_size])
        return MQX_INVALID_PARAMETER;
    for (i = 0; i < nents; i++)
    {
        if (scatter_list[i].length % burst_size)
            return MQX_INVALID_PARAMETER;
    }
    /* Allocate space for the eDMA scatter list; drop the existing buffer
       if it is too small */
    if ((g_edma->context[channel]->tcd_buf) && (g_edma->context[channel]->nents < (nents - 1)))
    {
        _mem_free(g_edma->context[channel]->tcd_buf);
        g_edma->context[channel]->tcd_buf = NULL;
    }
    if ((!g_edma->context[channel]->tcd_buf) && (nents > 1))
    {
        g_edma->context[channel]->tcd_buf =
#if MQX_USE_UNCACHED_MEM && PSP_HAS_DATA_CACHE
            _mem_alloc_align_uncached((nents - 1) * sizeof(MQX_EDMA_TCD), MQX_EDMA_TCD_ALIGNMENT);
#else
            _mem_alloc_align((nents - 1) * sizeof(MQX_EDMA_TCD), MQX_EDMA_TCD_ALIGNMENT);
#endif
    }
    g_edma->context[channel]->nents = nents - 1;
    /* Save the channel type in the context */
    g_edma->context[channel]->type = type;
    /* Configure the first descriptor directly in the channel registers */
    /* Set the initial source and destination addresses */
    DMA_SADDR_REG(base, chan) = scatter_list[0].src_addr;
    DMA_DADDR_REG(base, chan) = scatter_list[0].dst_addr;
    /* Configure the transfer size for source and destination */
    DMA_ATTR_REG(base, chan) = DMA_ATTR_SSIZE(trans_size) | DMA_ATTR_DSIZE(trans_size);

    /* Set the address offsets according to the transfer direction */
    switch (type)
    {
    case MQX_EDMA_MEM_TO_MEM:
        DMA_SOFF_REG(base, chan) = edma_trans_size[trans_size];
        DMA_DOFF_REG(base, chan) = edma_trans_size[trans_size];
        DMA_SLAST_REG(base, chan) = 0;
        break;
    case MQX_EDMA_PERI_TO_MEM:
        DMA_SOFF_REG(base, chan) = 0;
        DMA_DOFF_REG(base, chan) = edma_trans_size[trans_size];
        DMA_SLAST_REG(base, chan) = 0;
        break;
    case MQX_EDMA_MEM_TO_PERI:
        DMA_SOFF_REG(base, chan) = edma_trans_size[trans_size];
        DMA_DOFF_REG(base, chan) = 0;
        DMA_SLAST_REG(base, chan) = 0;
        break;
    default:
        /* Unsupported transfer type: release the scatter list and fail */
        goto err_exit;
    }
    /* Configure NBYTES; only the minor-loop-disabled-offset mode is supported */
    DMA_NBYTES_MLNO_REG(base, chan) = burst_size;
    test_reg = DMA_NBYTES_MLNO_REG(base, chan);
    DMA_CITER_ELINKNO_REG(base, chan) = DMA_CITER_ELINKNO_CITER(scatter_list[0].length / burst_size);
    DMA_BITER_ELINKNO_REG(base, chan) = DMA_BITER_ELINKNO_BITER(scatter_list[0].length / burst_size);

    if (nents > 1)
    {
        /* Not the only entry: enable scatter-gather and leave the interrupt off */
        DMA_CSR_REG(base, chan) = DMA_CSR_ESG_MASK;
        DMA_DLAST_SGA_REG(base, chan) = DMA_DLAST_SGA_DLASTSGA(g_edma->context[channel]->tcd_buf);
    }
    else
    {
        /* The only entry: stop the request and interrupt on completion */
        DMA_CSR_REG(base, chan) = DMA_CSR_DREQ_MASK | DMA_CSR_INTMAJOR_MASK;
        DMA_DLAST_SGA_REG(base, chan) = 0;
    }
    /* Configure the remaining descriptors */
    for (i = 1; i < nents; i++)
    {
        /* tcd_buf is addressed in uint_32 units, so 8 units per 32-byte TCD */
        p_tcd = (MQX_EDMA_TCD_PTR)(g_edma->context[channel]->tcd_buf + (i - 1) * 8);
        p_tcd->ATTR          = DMA_ATTR_REG(base, chan);
        p_tcd->BITER_ELINKNO = DMA_BITER_ELINKNO_BITER(scatter_list[i].length / burst_size);
        p_tcd->CITER_ELINKNO = DMA_CITER_ELINKNO_CITER(scatter_list[i].length / burst_size);
        p_tcd->CSR           = DMA_CSR_REG(base, chan);
        p_tcd->SADDR         = scatter_list[i].src_addr;
        p_tcd->DADDR         = scatter_list[i].dst_addr;
        p_tcd->NBYTES_MLNO   = DMA_NBYTES_MLNO_REG(base, chan);
        p_tcd->SLAST         = DMA_SLAST_REG(base, chan);
        p_tcd->DOFF          = DMA_DOFF_REG(base, chan);
        p_tcd->SOFF          = DMA_SOFF_REG(base, chan);
        /* Configure DLAST_SGA and the interrupt setting */
        if (i != (nents - 1))
        {
            /* Chain to the next TCD (entry i + 1 sits at byte offset i * 32) */
            p_tcd->DLAST_SGA = DMA_DLAST_SGA_DLASTSGA((uint_32)g_edma->context[channel]->tcd_buf + i * 32);
        }
        else
        {
            /* Last entry: end the chain and interrupt on completion */
            p_tcd->DLAST_SGA = 0;
            p_tcd->CSR = DMA_CSR_DREQ_MASK | DMA_CSR_INTMAJOR_MASK;
        }
    }
    /* Configure the DMA request source and enable the DMAMUX routing */
    DMAMUX_CHCFG_REG(base_mux, chan_mux) = 0;
    DMAMUX_CHCFG_REG(base_mux, chan_mux) = DMAMUX_CHCFG_ENBL_MASK | request;

    return MQX_OK;
err_exit:
    /* Invalid transfer type: release the scatter list and fail */
    _mem_free(g_edma->context[channel]->tcd_buf);
    g_edma->context[channel]->tcd_buf = NULL;
    return MQX_INVALID_PARAMETER;
}
#endif
/*
 * Circular scatter-gather configuration: every TCD enables scatter-gather
 * and interrupts on major-loop completion, and the last TCD links back to
 * the first so the list repeats until the channel is stopped.
 */
_mqx_uint edma_config_scatter(
    uint_8 channel,
    MQX_EDMA_TYPE type, MQX_EDMA_HARDWARE_REQUEST request,
    MQX_EDMA_SCATTER_STRUCT_PTR scatter_list,
    MQX_EDMA_TRANFER_SIZE trans_size, /* [2] => 4 */
    uint_8 nents /* 8 */, uint_32 burst_size /* 16 */)
/*
 * Example call from an SSI audio driver:
 * (SSI_TX_DMA_CHN, MQX_EDMA_MEM_TO_PERI, iprtd->dma_data.peripheral_type,
 *  scatter_list, MQX_EDMA_TRANS_SIZE_32_BITS, iprtd->periods, TX_BURST_SIZE_OF_BYTES)
 */
{
    MQX_EDMA_TCD_PTR p_tcd;
    DMA_MemMapPtr base;
    DMAMUX_MemMapPtr base_mux;
    uint_32 chan, chan_mux;
    uint_32 i;
    /* Check that the channel has been allocated */
    if (edma_verify_chan(channel) != MQX_OK)
        return MQX_INVALID_PARAMETER;

    base     = _bsp_get_DMACTRL_base_address(channel);
    base_mux = _bsp_get_DMAMUX_base_address(channel);
    chan     = _bsp_get_DMACTRL_chan(channel);
    chan_mux = _bsp_get_DMAMUX_chan(channel);

    /* The burst size must be a multiple of the elementary transfer size,
       and each buffer length a multiple of the burst size */
    if (burst_size % edma_trans_size[trans_size])
        return MQX_INVALID_PARAMETER;
    for (i = 0; i < nents; i++)
    {
        if (scatter_list[i].length % burst_size)
            return MQX_INVALID_PARAMETER;
    }
    /* Allocate space for the eDMA scatter list; drop the existing buffer
       if it is too small */
    if ((g_edma->context[channel]->tcd_buf) && (g_edma->context[channel]->nents < nents))
    {
        _mem_free(g_edma->context[channel]->tcd_buf);
        g_edma->context[channel]->tcd_buf = NULL;
    }
    if ((!g_edma->context[channel]->tcd_buf) && (nents > 1))
    {
        g_edma->context[channel]->tcd_buf =
#if MQX_USE_UNCACHED_MEM && PSP_HAS_DATA_CACHE
            _mem_alloc_align_uncached(nents * sizeof(MQX_EDMA_TCD), MQX_EDMA_TCD_ALIGNMENT);
#else
            _mem_alloc_align(nents * sizeof(MQX_EDMA_TCD), MQX_EDMA_TCD_ALIGNMENT);
#endif
    }
    g_edma->context[channel]->nents = nents;
    /* Save the channel type in the context */
    g_edma->context[channel]->type = type;
    /* Disabled direct setup; the channel registers are loaded from TCD[0] below:
    DMA_NBYTES_MLNO_REG(base, chan) = burst_size;
    DMA_ATTR_REG(base, chan) = DMA_ATTR_SSIZE(trans_size) | DMA_ATTR_DSIZE(trans_size);
    DMA_CSR_REG(base, chan) = DMA_CSR_DREQ_MASK | DMA_CSR_INTMAJOR_MASK; */

    /* Configure the transfer size for source and destination */
    DMA_ATTR_REG(base, chan) = DMA_ATTR_SSIZE(trans_size) | DMA_ATTR_DSIZE(trans_size);

    /* Set the address offsets according to the transfer direction */
    switch (type)
    {
    case MQX_EDMA_MEM_TO_MEM:
        DMA_SOFF_REG(base, chan) = edma_trans_size[trans_size];
        DMA_DOFF_REG(base, chan) = edma_trans_size[trans_size];
        DMA_SLAST_REG(base, chan) = 0;
        break;
    case MQX_EDMA_PERI_TO_MEM:
        DMA_SOFF_REG(base, chan) = 0;
        DMA_DOFF_REG(base, chan) = edma_trans_size[trans_size];
        DMA_SLAST_REG(base, chan) = 0;
        break;
    case MQX_EDMA_MEM_TO_PERI:
        DMA_SOFF_REG(base, chan) = edma_trans_size[trans_size]; /* 4 */
        DMA_DOFF_REG(base, chan) = 0;
        DMA_SLAST_REG(base, chan) = 0;
        break;
    default:
        /* Unsupported transfer type: release the scatter list and fail */
        goto err_exit;
    }
    /* Fill in every descriptor of the circular list */
    for (i = 0; i < nents; i++)
    {
        p_tcd = (MQX_EDMA_TCD_PTR)(g_edma->context[channel]->tcd_buf + i * 8);
        p_tcd->ATTR          = DMA_ATTR_REG(base, chan) /* DMA_ATTR_SSIZE(trans_size) | DMA_ATTR_DSIZE(trans_size) */;
        p_tcd->BITER_ELINKNO = DMA_BITER_ELINKNO_BITER(scatter_list[i].length / burst_size);
        p_tcd->CITER_ELINKNO = DMA_CITER_ELINKNO_CITER(scatter_list[i].length / burst_size);
        p_tcd->CSR           = /* DMA_CSR_REG(base, chan) */ (DMA_CSR_ESG_MASK | DMA_CSR_INTMAJOR_MASK);
        p_tcd->SADDR         = scatter_list[i].src_addr;
        p_tcd->DADDR         = scatter_list[i].dst_addr;
        p_tcd->NBYTES_MLNO   = /* DMA_NBYTES_MLNO_REG(base, chan) */ burst_size;
        p_tcd->SLAST         = DMA_SLAST_REG(base, chan);
        p_tcd->DOFF          = DMA_DOFF_REG(base, chan);
        p_tcd->SOFF          = DMA_SOFF_REG(base, chan);
        /* Configure DLAST_SGA: chain to the next TCD, wrapping the last
           entry back to the first to keep the list circular */
        if (i != (nents - 1))
        {
            p_tcd->DLAST_SGA = DMA_DLAST_SGA_DLASTSGA((uint_32)g_edma->context[channel]->tcd_buf + (i + 1) * 32);
        }
        else
        {
            p_tcd->DLAST_SGA = DMA_DLAST_SGA_DLASTSGA((uint_32)g_edma->context[channel]->tcd_buf + (0) * 32);
        }
    }
    /* Load the first TCD into the channel's hardware registers */
    p_tcd = (MQX_EDMA_TCD_PTR)(g_edma->context[channel]->tcd_buf);
    DMA_ATTR_REG(base, chan)          = p_tcd->ATTR;
    DMA_BITER_ELINKNO_REG(base, chan) = p_tcd->BITER_ELINKNO;
    DMA_CITER_ELINKNO_REG(base, chan) = p_tcd->CITER_ELINKNO;
    DMA_CSR_REG(base, chan)           = p_tcd->CSR;
    DMA_SADDR_REG(base, chan)         = p_tcd->SADDR;
    DMA_DADDR_REG(base, chan)         = p_tcd->DADDR;
    DMA_NBYTES_MLNO_REG(base, chan)   = p_tcd->NBYTES_MLNO;
    DMA_SLAST_REG(base, chan)         = p_tcd->SLAST;
    DMA_DOFF_REG(base, chan)          = p_tcd->DOFF;
    DMA_SOFF_REG(base, chan)          = p_tcd->SOFF;
    DMA_DLAST_SGA_REG(base, chan)     = p_tcd->DLAST_SGA;
    /* Configure the DMA request source and enable the DMAMUX routing */
    DMAMUX_CHCFG_REG(base_mux, chan_mux) = 0;
    DMAMUX_CHCFG_REG(base_mux, chan_mux) = DMAMUX_CHCFG_ENBL_MASK | request;

    return MQX_OK;
err_exit:
    /* Invalid transfer type: release the scatter list and fail */
    _mem_free(g_edma->context[channel]->tcd_buf);
    g_edma->context[channel]->tcd_buf = NULL;
    return MQX_INVALID_PARAMETER;
}
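/*
 * Sketch (not part of the driver): building a scatter list that streams an
 * audio ring of 'periods' equal chunks to a peripheral FIFO, mirroring the
 * SSI example above. The channel and request values are placeholders, and
 * MQX_EDMA_SCATTER_STRUCT is assumed to expose exactly the src_addr,
 * dst_addr, and length fields used by edma_config_scatter().
 */
#if 0
static _mqx_uint example_scatter(uint_32 buf, uint_32 fifo,
                                 uint_32 period_bytes, uint_8 periods,
                                 MQX_EDMA_HARDWARE_REQUEST req)
{
    MQX_EDMA_SCATTER_STRUCT scatter_list[8]; /* assumes periods <= 8 */
    uint_8 i;

    for (i = 0; i < periods; i++)
    {
        scatter_list[i].src_addr = buf + i * period_bytes; /* next chunk */
        scatter_list[i].dst_addr = fifo;                   /* fixed FIFO address */
        scatter_list[i].length   = period_bytes;           /* multiple of the burst */
    }
    return edma_config_scatter(0, MQX_EDMA_MEM_TO_PERI, req, scatter_list,
                               MQX_EDMA_TRANS_SIZE_32_BITS, periods, 16);
}
#endif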
_mqx_uint edma_config_circle(
    uint_8 channel, MQX_EDMA_TYPE type,
    MQX_EDMA_HARDWARE_REQUEST request,
    uint_32 src_addr, uint_32 dst_addr,
    MQX_EDMA_TRANFER_SIZE trans_size, uint_32 burst_size,
    uint_32 size)
{
    DMA_MemMapPtr base;
    DMAMUX_MemMapPtr base_mux;
    uint_32 chan, chan_mux;
    /* Check that the channel has been allocated */
    if (edma_verify_chan(channel) != MQX_OK)
        return MQX_INVALID_PARAMETER;

    base     = _bsp_get_DMACTRL_base_address(channel);
    base_mux = _bsp_get_DMAMUX_base_address(channel);
    chan     = _bsp_get_DMACTRL_chan(channel);
    chan_mux = _bsp_get_DMAMUX_chan(channel);
    /* The burst size must be a multiple of the elementary transfer size,
       and the buffer size a multiple of the burst size */
    if (burst_size % edma_trans_size[trans_size])
        return MQX_INVALID_PARAMETER;
    if (size % burst_size)
        return MQX_INVALID_PARAMETER;
    /* A circular transfer does not use a scatter list */
    g_edma->context[channel]->tcd_buf = NULL;
    g_edma->context[channel]->nents = 0;
    /* Save the channel type in the context */
    g_edma->context[channel]->type = type;
    /* Set the initial source and destination addresses */
    DMA_SADDR_REG(base, chan) = src_addr;
    DMA_DADDR_REG(base, chan) = dst_addr;
    /* Configure the transfer size for source and destination */
    DMA_ATTR_REG(base, chan) = DMA_ATTR_SSIZE(trans_size) | DMA_ATTR_DSIZE(trans_size);

    /* Set the offsets and wrap-around adjustments for the direction */
    switch (type)
    {
    case MQX_EDMA_MEM_TO_MEM:
        DMA_SOFF_REG(base, chan) = edma_trans_size[trans_size];
        DMA_DOFF_REG(base, chan) = edma_trans_size[trans_size];
        /* Wind both addresses back to the start after each major loop */
        DMA_SLAST_REG(base, chan) = -(size);
        DMA_DLAST_SGA_REG(base, chan) = -(size);
        break;
    case MQX_EDMA_PERI_TO_MEM:
        DMA_SOFF_REG(base, chan) = 0;
        DMA_DOFF_REG(base, chan) = edma_trans_size[trans_size];
        DMA_SLAST_REG(base, chan) = 0;
        DMA_DLAST_SGA_REG(base, chan) = -(size);
        break;
    case MQX_EDMA_MEM_TO_PERI:
        DMA_SOFF_REG(base, chan) = edma_trans_size[trans_size];
        DMA_DOFF_REG(base, chan) = 0;
        DMA_SLAST_REG(base, chan) = -(size);
        DMA_DLAST_SGA_REG(base, chan) = 0;
        break;
    default:
        return MQX_INVALID_PARAMETER;
    }
    /* Configure NBYTES; only the minor-loop-disabled-offset mode is supported */
    DMA_NBYTES_MLNO_REG(base, chan) = burst_size;
    DMA_CITER_ELINKNO_REG(base, chan) = DMA_CITER_ELINKNO_CITER(size / burst_size);
    DMA_BITER_ELINKNO_REG(base, chan) = DMA_BITER_ELINKNO_BITER(size / burst_size);
    /* Interrupt at the half-way point and again at major-loop completion */
    DMA_CSR_REG(base, chan) = DMA_CSR_INTHALF_MASK | DMA_CSR_INTMAJOR_MASK;
    /* Configure the DMA request source and enable the DMAMUX routing */
    DMAMUX_CHCFG_REG(base_mux, chan_mux) = 0;
    DMAMUX_CHCFG_REG(base_mux, chan_mux) = DMAMUX_CHCFG_ENBL_MASK | request;

    return MQX_OK;
}
_mqx_uint edma_start(uint_8 channel)
{
    DMA_MemMapPtr base;
    uint_32 chan;

    /* Check that the channel has been allocated */
    if (edma_verify_chan(channel) != MQX_OK)
        return MQX_INVALID_PARAMETER;

    base = _bsp_get_DMACTRL_base_address(channel);
    chan = _bsp_get_DMACTRL_chan(channel);

    /* Enable the hardware request for the channel */
    DMA_SERQ_REG(base) = chan;
    return MQX_OK;
}
_mqx_uint edma_close_chan(uint_8 channel)
{
    DMA_MemMapPtr base;
    DMAMUX_MemMapPtr base_mux;
    uint_32 chan, chan_mux;

    if (edma_verify_chan(channel))
        return MQX_INVALID_PARAMETER;

    base     = _bsp_get_DMACTRL_base_address(channel);
    base_mux = _bsp_get_DMAMUX_base_address(channel);
    chan     = _bsp_get_DMACTRL_chan(channel);
    chan_mux = _bsp_get_DMAMUX_chan(channel);

    /* Disable the channel, its done interrupt, and its DMAMUX routing */
    edma_force_stop(channel);
#if PSP_MQX_CPU_IS_ARM_CORTEX_M4 || PSP_MQX_CPU_IS_ARM_CORTEX_M0P
    _bsp_int_disable(_bsp_get_edma_done_vector(channel));
#endif
    DMAMUX_CHCFG_REG(base_mux, chan_mux) = 0;

    /* Release the per-channel resources */
    if (g_edma->context[channel]->tcd_buf)
        _mem_free(g_edma->context[channel]->tcd_buf);
    _mem_free(g_edma->context[channel]);
    g_edma->context[channel] = NULL;

    _lwsem_wait((LWSEM_STRUCT_PTR)&g_edma->chan_sem);
    EDMA_CLR_CHANNEL(channel);
    _lwsem_post((LWSEM_STRUCT_PTR)&g_edma->chan_sem);
    return MQX_OK;
}
uint_32 edma_force_stop(uint_8 channel)
{
    DMA_MemMapPtr base;
    uint_32 chan, size;

    base = _bsp_get_DMACTRL_base_address(channel);
    chan = _bsp_get_DMACTRL_chan(channel);

    /* Forcibly disable the channel's request and wait for it to go idle */
    do {
        DMA_CERQ_REG(base) = chan;
    } while (DMA_CSR_REG(base, chan) & DMA_CSR_ACTIVE_MASK);

    /* Return the number of bytes transferred before the channel stopped */
    edma_get_status(channel, &size);
    return size;
}
uint_32 edma_get_status(uint_8 channel, uint_32 *num)
{
    DMA_MemMapPtr base;
    uint_32 chan;

    base = _bsp_get_DMACTRL_base_address(channel);
    chan = _bsp_get_DMACTRL_chan(channel);

    /* Wait until the channel is no longer actively transferring */
    while (DMA_CSR_REG(base, chan) & DMA_CSR_ACTIVE_MASK) {};
    if (DMA_CSR_REG(base, chan) & DMA_CSR_DONE_MASK)
    {
        /* Major loop complete: the full programmed length was moved */
        *num = DMA_NBYTES_MLNO_REG(base, chan) * DMA_GET_BITTER(base, chan);
    }
    else
    {
        /* Stopped early: count only the minor loops already finished */
        *num = DMA_NBYTES_MLNO_REG(base, chan) * (DMA_GET_BITTER(base, chan) - DMA_GET_CITTER(base, chan));
    }
    return MQX_OK;
}
void edma_software_trigger(uint_8 channel)
{
    DMA_MemMapPtr base;
    uint_32 chan;

    base = _bsp_get_DMACTRL_base_address(channel);
    chan = _bsp_get_DMACTRL_chan(channel);

    DMA_SSRT_REG(base) = chan;
}
#if PSP_MQX_CPU_IS_ARM_CORTEX_A5
/* On Cortex-A5 parts one done interrupt is shared, so scan every channel */
static void edma_interrupt_handler(pointer parameter)
{
    MQX_EDMA_CHAN_CONTEXT_PTR chan_info;
    DMA_MemMapPtr base;
    uint_32 interrupt, i, j;

    for (i = 0; i < EDMA_MODULES; i++)
    {
        base = _bsp_get_DMACTRL_base_address(i * EDMA_CHANNELS);
        interrupt = DMA_INT_REG(base);

        for (j = 0; j < EDMA_CHANNELS; j++)
        {
            if (interrupt & (1 << j))
            {
                chan_info = g_edma->context[j + i * EDMA_CHANNELS];
                if (chan_info && chan_info->callback)
                    chan_info->callback(chan_info->parameter, chan_info->channel);
                /* Acknowledge the interrupt and clear the done flag */
                DMA_CINT_REG(base) = j;
                DMA_CDNE_REG(base) = j;
            }
        }
    }
}
#elif (MQX_CPU == PSP_CPU_MK70F120M) || (MQX_CPU == PSP_CPU_MK70F150M) || (MQX_CPU == PSP_CPU_MK70DP256)
/* On K70 parts one done vector is shared by the channels that alias through
   the DMAMUX, so service every aliased channel whose interrupt flag is set */
static void edma_interrupt_handler(pointer parameter)
{
    MQX_EDMA_CHAN_CONTEXT_PTR chan_info = (MQX_EDMA_CHAN_CONTEXT_PTR)parameter;
    DMA_MemMapPtr base;
    uint_32 interrupt, channel, base_channel;

    base = _bsp_get_DMACTRL_base_address(chan_info->channel);
    interrupt = DMA_INT_REG(base);

    base_channel = chan_info->channel - chan_info->channel % EDMA_CHANNELS;

    for (channel = chan_info->channel % DMAMUX_CHANNLES; channel < EDMA_CHANNELS; channel += DMAMUX_CHANNLES)
    {
        chan_info = g_edma->context[channel + base_channel];

        if (interrupt & (1 << channel))
        {
            /* Acknowledge the interrupt and clear the done flag */
            DMA_CINT_REG(base) = channel;
            DMA_CDNE_REG(base) = channel;

            /* Skip channels that are no longer allocated */
            if (edma_verify_chan(chan_info->channel) != MQX_OK)
                continue;
            if (chan_info->callback)
                chan_info->callback(chan_info->parameter, chan_info->channel);
        }
    }
}
#else
static void edma_interrupt_handler(pointer parameter)
{
    MQX_EDMA_CHAN_CONTEXT_PTR chan_info = (MQX_EDMA_CHAN_CONTEXT_PTR)parameter;
    DMA_MemMapPtr base;

    base = _bsp_get_DMACTRL_base_address(chan_info->channel);

    /* Acknowledge the interrupt and clear the done flag */
    DMA_CINT_REG(base) = chan_info->channel;
    DMA_CDNE_REG(base) = chan_info->channel;

    if (edma_verify_chan(chan_info->channel) != MQX_OK)
        return;
    if (chan_info->callback)
        chan_info->callback(chan_info->parameter, chan_info->channel);
}
#endif
static void edma_err_interrupt_handler(pointer parameter)
{
    /* Error handling is currently disabled */
    // uint_8 chan = (DMA_ES >> 8) & 0xf;
    // uint_32 err = DMA_ES;

    // edma_force_stop(chan);
    // DMA_CR &= DMA_CR & (~(DMA_CR_HALT_MASK));

    // if (g_edma->context[chan].err_callback)
    //     g_edma->context[chan].err_callback(g_edma->context[chan].err_parameter, err);
}
DMAMUX_MemMapPtr _bsp_get_DMAMUX_base_address(uint_8 channel)
{
    uint_8 m;

    m = channel / DMAMUX_CHANNLES;

    return dmamux_base[m];
}
DMA_MemMapPtr _bsp_get_DMACTRL_base_address(uint_8 channel)
{
    uint_8 m;

    m = channel / EDMA_CHANNELS;

    return edma_base[m];
}
uint_8 _bsp_get_DMAMUX_chan(uint_8 channel)
{
    return channel % DMAMUX_CHANNLES;
}
uint_8 _bsp_get_DMACTRL_chan(uint_8 channel)
{
    return channel % EDMA_CHANNELS;
}