/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright © 2006, Intel Corporation.
 */
#include <linux/types.h>
#include <linux/io.h>
#include <linux/platform_data/dma-iop32x.h>

/* Memory copy units */
#define DMA_CCR(chan) (chan->mmr_base + 0x0)
#define DMA_CSR(chan) (chan->mmr_base + 0x4)
#define DMA_DAR(chan) (chan->mmr_base + 0xc)
#define DMA_NDAR(chan) (chan->mmr_base + 0x10)
#define DMA_PADR(chan) (chan->mmr_base + 0x14)
#define DMA_PUADR(chan) (chan->mmr_base + 0x18)
#define DMA_LADR(chan) (chan->mmr_base + 0x1c)
#define DMA_BCR(chan) (chan->mmr_base + 0x20)
#define DMA_DCR(chan) (chan->mmr_base + 0x24)

/* Application accelerator unit */
#define AAU_ACR(chan) (chan->mmr_base + 0x0)
#define AAU_ASR(chan) (chan->mmr_base + 0x4)
#define AAU_ADAR(chan) (chan->mmr_base + 0x8)
#define AAU_ANDAR(chan) (chan->mmr_base + 0xc)
#define AAU_SAR(src, chan) (chan->mmr_base + (0x10 + ((src) << 2)))
#define AAU_DAR(chan) (chan->mmr_base + 0x20)
#define AAU_ABCR(chan) (chan->mmr_base + 0x24)
#define AAU_ADCR(chan) (chan->mmr_base + 0x28)
#define AAU_SAR_EDCR(src_edc) (chan->mmr_base + (0x02c + ((src_edc-4) << 2)))
#define AAU_EDCR0_IDX 8
#define AAU_EDCR1_IDX 17
#define AAU_EDCR2_IDX 26
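
/*
 * AAU_EDCR[0-2]_IDX locate the extended descriptor control words inside
 * the AAU descriptor's source/EDC array (src_edc[] below).  The EDCRs
 * carry the per-block command bits for sources 9-16, 17-24 and 25-32
 * respectively; see iop3xx_desc_init_xor().
 */
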
struct iop3xx_aau_desc_ctrl {
	unsigned int int_en:1;
	unsigned int blk1_cmd_ctrl:3;
	unsigned int blk2_cmd_ctrl:3;
	unsigned int blk3_cmd_ctrl:3;
	unsigned int blk4_cmd_ctrl:3;
	unsigned int blk5_cmd_ctrl:3;
	unsigned int blk6_cmd_ctrl:3;
	unsigned int blk7_cmd_ctrl:3;
	unsigned int blk8_cmd_ctrl:3;
	unsigned int blk_ctrl:2;
	unsigned int dual_xor_en:1;
	unsigned int tx_complete:1;
	unsigned int zero_result_err:1;
	unsigned int zero_result_en:1;
	unsigned int dest_write_en:1;
};

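/*
 * The init helpers below program this control word by overlaying the
 * bitfield struct with a plain u32.  Illustrative sketch only (it mirrors
 * iop_desc_init_memset() further down, it is not additional driver code):
 *
 *	union { u32 value; struct iop3xx_aau_desc_ctrl field; } u_desc_ctrl;
 *
 *	u_desc_ctrl.value = 0;
 *	u_desc_ctrl.field.dest_write_en = 1;
 *	u_desc_ctrl.field.int_en = flags & DMA_PREP_INTERRUPT;
 *	hw_desc->desc_ctrl = u_desc_ctrl.value;
 */
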
struct iop3xx_aau_e_desc_ctrl {
	unsigned int reserved:1;
	unsigned int blk1_cmd_ctrl:3;
	unsigned int blk2_cmd_ctrl:3;
	unsigned int blk3_cmd_ctrl:3;
	unsigned int blk4_cmd_ctrl:3;
	unsigned int blk5_cmd_ctrl:3;
	unsigned int blk6_cmd_ctrl:3;
	unsigned int blk7_cmd_ctrl:3;
	unsigned int blk8_cmd_ctrl:3;
	unsigned int reserved2:7;
};

struct iop3xx_dma_desc_ctrl {
	unsigned int pci_transaction:4;
	unsigned int int_en:1;
	unsigned int dac_cycle_en:1;
	unsigned int mem_to_mem_en:1;
	unsigned int crc_data_tx_en:1;
	unsigned int crc_gen_en:1;
	unsigned int crc_seed_dis:1;
	unsigned int reserved:21;
	unsigned int crc_tx_complete:1;
};

struct iop3xx_desc_dma {
	u32 next_desc;
	union {
		u32 pci_src_addr;
		u32 pci_dest_addr;
		u32 src_addr;
	};
	union {
		u32 upper_pci_src_addr;
		u32 upper_pci_dest_addr;
	};
	union {
		u32 local_pci_src_addr;
		u32 local_pci_dest_addr;
		u32 dest_addr;
	};
	u32 byte_count;
	union {
		u32 desc_ctrl;
		struct iop3xx_dma_desc_ctrl desc_ctrl_field;
	};
	u32 crc_addr;
};

struct iop3xx_desc_aau {
	u32 next_desc;
	u32 src[4];
	u32 dest_addr;
	u32 byte_count;
	union {
		u32 desc_ctrl;
		struct iop3xx_aau_desc_ctrl desc_ctrl_field;
	};
	union {
		u32 src_addr;
		u32 e_desc_ctrl;
		struct iop3xx_aau_e_desc_ctrl e_desc_ctrl_field;
	} src_edc[31];
};

struct iop3xx_aau_gfmr {
	unsigned int gfmr1:8;
	unsigned int gfmr2:8;
	unsigned int gfmr3:8;
	unsigned int gfmr4:8;
};

struct iop3xx_desc_pq_xor {
	u32 next_desc;
	u32 src[3];
	union {
		u32 data_mult1;
		struct iop3xx_aau_gfmr data_mult1_field;
	};
	u32 dest_addr;
	u32 byte_count;
	union {
		u32 desc_ctrl;
		struct iop3xx_aau_desc_ctrl desc_ctrl_field;
	};
	union {
		u32 src_addr;
		u32 e_desc_ctrl;
		struct iop3xx_aau_e_desc_ctrl e_desc_ctrl_field;
		u32 data_multiplier;
		struct iop3xx_aau_gfmr data_mult_field;
		u32 reserved;
	} src_edc_gfmr[19];
};

struct iop3xx_desc_dual_xor {
	u32 next_desc;
	u32 src0_addr;
	u32 src1_addr;
	u32 h_src_addr;
	u32 d_src_addr;
	u32 h_dest_addr;
	u32 byte_count;
	union {
		u32 desc_ctrl;
		struct iop3xx_aau_desc_ctrl desc_ctrl_field;
	};
	u32 d_dest_addr;
};

union iop3xx_desc {
	struct iop3xx_desc_aau *aau;
	struct iop3xx_desc_dma *dma;
	struct iop3xx_desc_pq_xor *pq_xor;
	struct iop3xx_desc_dual_xor *dual_xor;
	void *ptr;
};

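/*
 * The channel-generic helpers below use this union to view desc->hw_desc
 * as whichever hardware layout the channel programs.  Illustrative sketch
 * only (it mirrors iop_desc_get_byte_count(), it is not additional driver
 * code):
 *
 *	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };
 *	u32 count;
 *
 *	if (chan->device->id == AAU_ID)
 *		count = hw_desc.aau->byte_count;
 *	else
 *		count = hw_desc.dma->byte_count;
 */
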
/* No support for p+q operations */
static inline int
iop_chan_pq_slot_count(size_t len, int src_cnt, int *slots_per_op)
{
	BUG();
	return 0;
}

static inline void
iop_desc_init_pq(struct iop_adma_desc_slot *desc, int src_cnt,
		 unsigned long flags)
{
	BUG();
}

static inline void
iop_desc_set_pq_addr(struct iop_adma_desc_slot *desc, dma_addr_t *addr)
{
	BUG();
}

static inline void
iop_desc_set_pq_src_addr(struct iop_adma_desc_slot *desc, int src_idx,
			 dma_addr_t addr, unsigned char coef)
{
	BUG();
}

static inline int
iop_chan_pq_zero_sum_slot_count(size_t len, int src_cnt, int *slots_per_op)
{
	BUG();
	return 0;
}

static inline void
iop_desc_init_pq_zero_sum(struct iop_adma_desc_slot *desc, int src_cnt,
			  unsigned long flags)
{
	BUG();
}

static inline void
iop_desc_set_pq_zero_sum_byte_count(struct iop_adma_desc_slot *desc, u32 len)
{
	BUG();
}

#define iop_desc_set_pq_zero_sum_src_addr iop_desc_set_pq_src_addr

static inline void
iop_desc_set_pq_zero_sum_addr(struct iop_adma_desc_slot *desc, int pq_idx,
			      dma_addr_t *src)
{
	BUG();
}

static inline int iop_adma_get_max_xor(void)
{
	return 32;
}

static inline int iop_adma_get_max_pq(void)
{
	BUG();
	return 0;
}

static inline u32 iop_chan_get_current_descriptor(struct iop_adma_chan *chan)
{
	int id = chan->device->id;

	switch (id) {
	case DMA0_ID:
	case DMA1_ID:
		return __raw_readl(DMA_DAR(chan));
	case AAU_ID:
		return __raw_readl(AAU_ADAR(chan));
	default:
		BUG();
	}
	return 0;
}

static inline void iop_chan_set_next_descriptor(struct iop_adma_chan *chan,
						u32 next_desc_addr)
{
	int id = chan->device->id;

	switch (id) {
	case DMA0_ID:
	case DMA1_ID:
		__raw_writel(next_desc_addr, DMA_NDAR(chan));
		break;
	case AAU_ID:
		__raw_writel(next_desc_addr, AAU_ANDAR(chan));
		break;
	}
}

#define IOP_ADMA_STATUS_BUSY (1 << 10)
#define IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT (1024)
#define IOP_ADMA_XOR_MAX_BYTE_COUNT (16 * 1024 * 1024)
#define IOP_ADMA_MAX_BYTE_COUNT (16 * 1024 * 1024)
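
/*
 * These limits drive the slot-splitting helpers below: a single hardware
 * descriptor covers at most IOP_ADMA_XOR_MAX_BYTE_COUNT bytes (1KB for a
 * zero-sum check), so longer requests are broken into a chain of
 * descriptors, one group of slots per maximum-sized chunk.
 */
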
static inline int iop_chan_is_busy(struct iop_adma_chan *chan)
{
	u32 status = __raw_readl(DMA_CSR(chan));
	return (status & IOP_ADMA_STATUS_BUSY) ? 1 : 0;
}

static inline int iop_desc_is_aligned(struct iop_adma_desc_slot *desc,
					int num_slots)
{
	/* num_slots will only ever be 1, 2, 4, or 8 */
	return (desc->idx & (num_slots - 1)) ? 0 : 1;
}

/* to do: support large (i.e. > hw max) buffer sizes */
static inline int iop_chan_memcpy_slot_count(size_t len, int *slots_per_op)
{
	*slots_per_op = 1;
	return 1;
}

/* to do: support large (i.e. > hw max) buffer sizes */
static inline int iop_chan_memset_slot_count(size_t len, int *slots_per_op)
{
	*slots_per_op = 1;
	return 1;
}

static inline int iop3xx_aau_xor_slot_count(size_t len, int src_cnt,
					int *slots_per_op)
{
	static const char slot_count_table[] = {
						1, 1, 1, 1, /* 01 - 04 */
						2, 2, 2, 2, /* 05 - 08 */
						4, 4, 4, 4, /* 09 - 12 */
						4, 4, 4, 4, /* 13 - 16 */
						8, 8, 8, 8, /* 17 - 20 */
						8, 8, 8, 8, /* 21 - 24 */
						8, 8, 8, 8, /* 25 - 28 */
						8, 8, 8, 8, /* 29 - 32 */
					      };
	*slots_per_op = slot_count_table[src_cnt - 1];
	return *slots_per_op;
}
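
/*
 * Example: an 8-source XOR needs slot_count_table[7] = 2 descriptor slots
 * per operation, a 32-source XOR needs 8.  Wider source lists need a
 * larger hardware descriptor and therefore more of the 32-byte pool slots
 * (note the (i << 5) stride used when chaining in iop_desc_init_zero_sum()).
 */
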
static inline int
iop_chan_interrupt_slot_count(int *slots_per_op, struct iop_adma_chan *chan)
{
	switch (chan->device->id) {
	case DMA0_ID:
	case DMA1_ID:
		return iop_chan_memcpy_slot_count(0, slots_per_op);
	case AAU_ID:
		return iop3xx_aau_xor_slot_count(0, 2, slots_per_op);
	default:
		BUG();
	}
	return 0;
}

static inline int iop_chan_xor_slot_count(size_t len, int src_cnt,
						int *slots_per_op)
{
	int slot_cnt = iop3xx_aau_xor_slot_count(len, src_cnt, slots_per_op);

	if (len <= IOP_ADMA_XOR_MAX_BYTE_COUNT)
		return slot_cnt;

	len -= IOP_ADMA_XOR_MAX_BYTE_COUNT;
	while (len > IOP_ADMA_XOR_MAX_BYTE_COUNT) {
		len -= IOP_ADMA_XOR_MAX_BYTE_COUNT;
		slot_cnt += *slots_per_op;
	}

	slot_cnt += *slots_per_op;

	return slot_cnt;
}

/* zero sum on iop3xx is limited to 1k at a time so it requires multiple
 * descriptors
 */
static inline int iop_chan_zero_sum_slot_count(size_t len, int src_cnt,
						int *slots_per_op)
{
	int slot_cnt = iop3xx_aau_xor_slot_count(len, src_cnt, slots_per_op);

	if (len <= IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT)
		return slot_cnt;

	len -= IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT;
	while (len > IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT) {
		len -= IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT;
		slot_cnt += *slots_per_op;
	}

	slot_cnt += *slots_per_op;

	return slot_cnt;
}
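
/*
 * Worked example, assuming src_cnt <= 4 so *slots_per_op == 1: for
 * len = 3000 the first descriptor covers 1024 bytes, the loop peels off
 * one more 1024-byte descriptor, and the final descriptor covers the
 * remaining 952 bytes, so slot_cnt == 3.
 */
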
static inline u32 iop_desc_get_byte_count(struct iop_adma_desc_slot *desc,
					struct iop_adma_chan *chan)
{
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };

	switch (chan->device->id) {
	case DMA0_ID:
	case DMA1_ID:
		return hw_desc.dma->byte_count;
	case AAU_ID:
		return hw_desc.aau->byte_count;
	default:
		BUG();
	}
	return 0;
}

/* translate the src_idx to a descriptor word index */
static inline int __desc_idx(int src_idx)
{
	static const int desc_idx_table[] = { 0, 0, 0, 0,
					      0, 1, 2, 3,
					      4, 5, 6, 7,
					      9, 10, 11, 12,
					      13, 14, 15, 16,
					      18, 19, 20, 21,
					      22, 23, 24, 25,
					      27, 28, 29, 30,
					    };

	return desc_idx_table[src_idx];
}
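
/*
 * Sources 0-3 live in iop3xx_desc_aau.src[]; higher-numbered sources are
 * reached through src_edc[__desc_idx(src_idx)], the table stepping around
 * the words reserved for the extended descriptor control entries (see
 * iop3xx_aau_desc_set_src_addr() below).
 */
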
static inline u32 iop_desc_get_src_addr(struct iop_adma_desc_slot *desc,
					struct iop_adma_chan *chan,
					int src_idx)
{
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };

	switch (chan->device->id) {
	case DMA0_ID:
	case DMA1_ID:
		return hw_desc.dma->src_addr;
	case AAU_ID:
		break;
	default:
		BUG();
	}

	if (src_idx < 4)
		return hw_desc.aau->src[src_idx];
	else
		return hw_desc.aau->src_edc[__desc_idx(src_idx)].src_addr;
}

static inline void iop3xx_aau_desc_set_src_addr(struct iop3xx_desc_aau *hw_desc,
					int src_idx, dma_addr_t addr)
{
	if (src_idx < 4)
		hw_desc->src[src_idx] = addr;
	else
		hw_desc->src_edc[__desc_idx(src_idx)].src_addr = addr;
}

static inline void
iop_desc_init_memcpy(struct iop_adma_desc_slot *desc, unsigned long flags)
{
	struct iop3xx_desc_dma *hw_desc = desc->hw_desc;
	union {
		u32 value;
		struct iop3xx_dma_desc_ctrl field;
	} u_desc_ctrl;

	u_desc_ctrl.value = 0;
	u_desc_ctrl.field.mem_to_mem_en = 1;
	u_desc_ctrl.field.pci_transaction = 0xe; /* memory read block */
	u_desc_ctrl.field.int_en = flags & DMA_PREP_INTERRUPT;
	hw_desc->desc_ctrl = u_desc_ctrl.value;
	hw_desc->upper_pci_src_addr = 0;
	hw_desc->crc_addr = 0;
}

static inline void
iop_desc_init_memset(struct iop_adma_desc_slot *desc, unsigned long flags)
{
	struct iop3xx_desc_aau *hw_desc = desc->hw_desc;
	union {
		u32 value;
		struct iop3xx_aau_desc_ctrl field;
	} u_desc_ctrl;

	u_desc_ctrl.value = 0;
	u_desc_ctrl.field.blk1_cmd_ctrl = 0x2; /* memory block fill */
	u_desc_ctrl.field.dest_write_en = 1;
	u_desc_ctrl.field.int_en = flags & DMA_PREP_INTERRUPT;
	hw_desc->desc_ctrl = u_desc_ctrl.value;
}

static inline u32
iop3xx_desc_init_xor(struct iop3xx_desc_aau *hw_desc, int src_cnt,
		     unsigned long flags)
{
	int i, shift;
	u32 edcr;
	union {
		u32 value;
		struct iop3xx_aau_desc_ctrl field;
	} u_desc_ctrl;

	u_desc_ctrl.value = 0;
	switch (src_cnt) {
	case 25 ... 32:
		u_desc_ctrl.field.blk_ctrl = 0x3; /* use EDCR[2:0] */
		edcr = 0;
		shift = 1;
		for (i = 24; i < src_cnt; i++) {
			edcr |= (1 << shift);
			shift += 3;
		}
		hw_desc->src_edc[AAU_EDCR2_IDX].e_desc_ctrl = edcr;
		src_cnt = 24;
		fallthrough;
	case 17 ... 24:
		if (!u_desc_ctrl.field.blk_ctrl) {
			hw_desc->src_edc[AAU_EDCR2_IDX].e_desc_ctrl = 0;
			u_desc_ctrl.field.blk_ctrl = 0x3; /* use EDCR[2:0] */
		}
		edcr = 0;
		shift = 1;
		for (i = 16; i < src_cnt; i++) {
			edcr |= (1 << shift);
			shift += 3;
		}
		hw_desc->src_edc[AAU_EDCR1_IDX].e_desc_ctrl = edcr;
		src_cnt = 16;
		fallthrough;
	case 9 ... 16:
		if (!u_desc_ctrl.field.blk_ctrl)
			u_desc_ctrl.field.blk_ctrl = 0x2; /* use EDCR0 */
		edcr = 0;
		shift = 1;
		for (i = 8; i < src_cnt; i++) {
			edcr |= (1 << shift);
			shift += 3;
		}
		hw_desc->src_edc[AAU_EDCR0_IDX].e_desc_ctrl = edcr;
		src_cnt = 8;
		fallthrough;
	case 2 ... 8:
		shift = 1;
		for (i = 0; i < src_cnt; i++) {
			u_desc_ctrl.value |= (1 << shift);
			shift += 3;
		}

		if (!u_desc_ctrl.field.blk_ctrl && src_cnt > 4)
			u_desc_ctrl.field.blk_ctrl = 0x1; /* use mini-desc */
	}

	u_desc_ctrl.field.dest_write_en = 1;
	u_desc_ctrl.field.blk1_cmd_ctrl = 0x7; /* direct fill */
	u_desc_ctrl.field.int_en = flags & DMA_PREP_INTERRUPT;
	hw_desc->desc_ctrl = u_desc_ctrl.value;

	return u_desc_ctrl.value;
}
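
/*
 * The (1 << shift), shift += 3 pattern above sets the low bit of each
 * 3-bit blkN_cmd_ctrl field (bit 0 is int_en, bits 1-3 block 1, bits 4-6
 * block 2, and so on), i.e. it programs the XOR command (0x1) for every
 * participating source block; block 1 is then overridden to 0x7, the
 * "direct fill" command, so the first block is loaded rather than XOR-ed.
 */
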
static inline void
iop_desc_init_xor(struct iop_adma_desc_slot *desc, int src_cnt,
		  unsigned long flags)
{
	iop3xx_desc_init_xor(desc->hw_desc, src_cnt, flags);
}

/* return the number of operations */
static inline int
iop_desc_init_zero_sum(struct iop_adma_desc_slot *desc, int src_cnt,
		       unsigned long flags)
{
	int slot_cnt = desc->slot_cnt, slots_per_op = desc->slots_per_op;
	struct iop3xx_desc_aau *hw_desc, *prev_hw_desc, *iter;
	union {
		u32 value;
		struct iop3xx_aau_desc_ctrl field;
	} u_desc_ctrl;
	int i, j;

	hw_desc = desc->hw_desc;

	for (i = 0, j = 0; (slot_cnt -= slots_per_op) >= 0;
		i += slots_per_op, j++) {
		iter = iop_hw_desc_slot_idx(hw_desc, i);
		u_desc_ctrl.value = iop3xx_desc_init_xor(iter, src_cnt, flags);
		u_desc_ctrl.field.dest_write_en = 0;
		u_desc_ctrl.field.zero_result_en = 1;
		u_desc_ctrl.field.int_en = flags & DMA_PREP_INTERRUPT;
		iter->desc_ctrl = u_desc_ctrl.value;

		/* for the subsequent descriptors preserve the store queue
		 * and chain them together
		 */
		if (i) {
			prev_hw_desc =
				iop_hw_desc_slot_idx(hw_desc, i - slots_per_op);
			prev_hw_desc->next_desc =
				(u32) (desc->async_tx.phys + (i << 5));
		}
	}

	return j;
}

static inline void
iop_desc_init_null_xor(struct iop_adma_desc_slot *desc, int src_cnt,
		       unsigned long flags)
{
	struct iop3xx_desc_aau *hw_desc = desc->hw_desc;
	union {
		u32 value;
		struct iop3xx_aau_desc_ctrl field;
	} u_desc_ctrl;

	u_desc_ctrl.value = 0;
	switch (src_cnt) {
	case 25 ... 32:
		u_desc_ctrl.field.blk_ctrl = 0x3; /* use EDCR[2:0] */
		hw_desc->src_edc[AAU_EDCR2_IDX].e_desc_ctrl = 0;
		fallthrough;
	case 17 ... 24:
		if (!u_desc_ctrl.field.blk_ctrl) {
			hw_desc->src_edc[AAU_EDCR2_IDX].e_desc_ctrl = 0;
			u_desc_ctrl.field.blk_ctrl = 0x3; /* use EDCR[2:0] */
		}
		hw_desc->src_edc[AAU_EDCR1_IDX].e_desc_ctrl = 0;
		fallthrough;
	case 9 ... 16:
		if (!u_desc_ctrl.field.blk_ctrl)
			u_desc_ctrl.field.blk_ctrl = 0x2; /* use EDCR0 */
		hw_desc->src_edc[AAU_EDCR0_IDX].e_desc_ctrl = 0;
		fallthrough;
	case 1 ... 8:
		if (!u_desc_ctrl.field.blk_ctrl && src_cnt > 4)
			u_desc_ctrl.field.blk_ctrl = 0x1; /* use mini-desc */
	}

	u_desc_ctrl.field.dest_write_en = 0;
	u_desc_ctrl.field.int_en = flags & DMA_PREP_INTERRUPT;
	hw_desc->desc_ctrl = u_desc_ctrl.value;
}

static inline void iop_desc_set_byte_count(struct iop_adma_desc_slot *desc,
					struct iop_adma_chan *chan,
					u32 byte_count)
{
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };

	switch (chan->device->id) {
	case DMA0_ID:
	case DMA1_ID:
		hw_desc.dma->byte_count = byte_count;
		break;
	case AAU_ID:
		hw_desc.aau->byte_count = byte_count;
		break;
	default:
		BUG();
	}
}

static inline void
iop_desc_init_interrupt(struct iop_adma_desc_slot *desc,
			struct iop_adma_chan *chan)
{
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };

	switch (chan->device->id) {
	case DMA0_ID:
	case DMA1_ID:
		iop_desc_init_memcpy(desc, 1);
		hw_desc.dma->byte_count = 0;
		hw_desc.dma->dest_addr = 0;
		hw_desc.dma->src_addr = 0;
		break;
	case AAU_ID:
		iop_desc_init_null_xor(desc, 2, 1);
		hw_desc.aau->byte_count = 0;
		hw_desc.aau->dest_addr = 0;
		hw_desc.aau->src[0] = 0;
		hw_desc.aau->src[1] = 0;
		break;
	default:
		BUG();
	}
}

static inline void
iop_desc_set_zero_sum_byte_count(struct iop_adma_desc_slot *desc, u32 len)
{
	int slots_per_op = desc->slots_per_op;
	struct iop3xx_desc_aau *hw_desc = desc->hw_desc, *iter;
	int i = 0;

	if (len <= IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT) {
		hw_desc->byte_count = len;
	} else {
		do {
			iter = iop_hw_desc_slot_idx(hw_desc, i);
			iter->byte_count = IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT;
			len -= IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT;
			i += slots_per_op;
		} while (len > IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT);

		iter = iop_hw_desc_slot_idx(hw_desc, i);
		iter->byte_count = len;
	}
}

static inline void iop_desc_set_dest_addr(struct iop_adma_desc_slot *desc,
					struct iop_adma_chan *chan,
					dma_addr_t addr)
{
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };

	switch (chan->device->id) {
	case DMA0_ID:
	case DMA1_ID:
		hw_desc.dma->dest_addr = addr;
		break;
	case AAU_ID:
		hw_desc.aau->dest_addr = addr;
		break;
	default:
		BUG();
	}
}

static inline void iop_desc_set_memcpy_src_addr(struct iop_adma_desc_slot *desc,
					dma_addr_t addr)
{
	struct iop3xx_desc_dma *hw_desc = desc->hw_desc;
	hw_desc->src_addr = addr;
}

static inline void
iop_desc_set_zero_sum_src_addr(struct iop_adma_desc_slot *desc, int src_idx,
				dma_addr_t addr)
{
	struct iop3xx_desc_aau *hw_desc = desc->hw_desc, *iter;
	int slot_cnt = desc->slot_cnt, slots_per_op = desc->slots_per_op;
	int i;

	for (i = 0; (slot_cnt -= slots_per_op) >= 0;
		i += slots_per_op, addr += IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT) {
		iter = iop_hw_desc_slot_idx(hw_desc, i);
		iop3xx_aau_desc_set_src_addr(iter, src_idx, addr);
	}
}

static inline void iop_desc_set_xor_src_addr(struct iop_adma_desc_slot *desc,
					int src_idx, dma_addr_t addr)
{
	struct iop3xx_desc_aau *hw_desc = desc->hw_desc, *iter;
	int slot_cnt = desc->slot_cnt, slots_per_op = desc->slots_per_op;
	int i;

	for (i = 0; (slot_cnt -= slots_per_op) >= 0;
		i += slots_per_op, addr += IOP_ADMA_XOR_MAX_BYTE_COUNT) {
		iter = iop_hw_desc_slot_idx(hw_desc, i);
		iop3xx_aau_desc_set_src_addr(iter, src_idx, addr);
	}
}
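
/*
 * Both source-address helpers above walk every hardware descriptor in the
 * slot group, advancing the source address by the per-descriptor maximum
 * byte count, so one logical operation larger than the hardware limit is
 * expressed as a chain of smaller descriptors.
 */
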
static inline void iop_desc_set_next_desc(struct iop_adma_desc_slot *desc,
					u32 next_desc_addr)
{
	/* hw_desc->next_desc is the same location for all channels */
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };

	iop_paranoia(hw_desc.dma->next_desc);
	hw_desc.dma->next_desc = next_desc_addr;
}

static inline u32 iop_desc_get_next_desc(struct iop_adma_desc_slot *desc)
{
	/* hw_desc->next_desc is the same location for all channels */
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };
	return hw_desc.dma->next_desc;
}

static inline void iop_desc_clear_next_desc(struct iop_adma_desc_slot *desc)
{
	/* hw_desc->next_desc is the same location for all channels */
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };
	hw_desc.dma->next_desc = 0;
}

static inline void iop_desc_set_block_fill_val(struct iop_adma_desc_slot *desc,
						u32 val)
{
	struct iop3xx_desc_aau *hw_desc = desc->hw_desc;
	hw_desc->src[0] = val;
}

static inline enum sum_check_flags
iop_desc_get_zero_result(struct iop_adma_desc_slot *desc)
{
	struct iop3xx_desc_aau *hw_desc = desc->hw_desc;
	struct iop3xx_aau_desc_ctrl desc_ctrl = hw_desc->desc_ctrl_field;

	iop_paranoia(!(desc_ctrl.tx_complete && desc_ctrl.zero_result_en));
	return desc_ctrl.zero_result_err << SUM_CHECK_P;
}

static inline void iop_chan_append(struct iop_adma_chan *chan)
{
	u32 dma_chan_ctrl;

	dma_chan_ctrl = __raw_readl(DMA_CCR(chan));
	dma_chan_ctrl |= 0x2;
	__raw_writel(dma_chan_ctrl, DMA_CCR(chan));
}

static inline u32 iop_chan_get_status(struct iop_adma_chan *chan)
{
	return __raw_readl(DMA_CSR(chan));
}

static inline void iop_chan_disable(struct iop_adma_chan *chan)
{
	u32 dma_chan_ctrl = __raw_readl(DMA_CCR(chan));
	dma_chan_ctrl &= ~1;
	__raw_writel(dma_chan_ctrl, DMA_CCR(chan));
}

static inline void iop_chan_enable(struct iop_adma_chan *chan)
{
	u32 dma_chan_ctrl = __raw_readl(DMA_CCR(chan));

	dma_chan_ctrl |= 1;
	__raw_writel(dma_chan_ctrl, DMA_CCR(chan));
}

static inline void iop_adma_device_clear_eot_status(struct iop_adma_chan *chan)
{
	u32 status = __raw_readl(DMA_CSR(chan));
	status &= (1 << 9);
	__raw_writel(status, DMA_CSR(chan));
}

static inline void iop_adma_device_clear_eoc_status(struct iop_adma_chan *chan)
{
	u32 status = __raw_readl(DMA_CSR(chan));
	status &= (1 << 8);
	__raw_writel(status, DMA_CSR(chan));
}

static inline void iop_adma_device_clear_err_status(struct iop_adma_chan *chan)
{
	u32 status = __raw_readl(DMA_CSR(chan));

	switch (chan->device->id) {
	case DMA0_ID:
	case DMA1_ID:
		status &= (1 << 5) | (1 << 3) | (1 << 2) | (1 << 1);
		break;
	case AAU_ID:
		status &= (1 << 5);
		break;
	default:
		BUG();
	}

	__raw_writel(status, DMA_CSR(chan));
}
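
/*
 * The bits cleared above are the ones the iop_is_err_* helpers below
 * report: bit 1 (split transaction error), bit 2 (PCI target abort) and
 * bit 3 (PCI master abort) on the DMA channels, plus bit 5 (internal bus
 * master abort) on all channels.
 */
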
static inline int
iop_is_err_int_parity(unsigned long status, struct iop_adma_chan *chan)
{
	return 0;
}

static inline int
iop_is_err_mcu_abort(unsigned long status, struct iop_adma_chan *chan)
{
	return 0;
}

static inline int
iop_is_err_int_tabort(unsigned long status, struct iop_adma_chan *chan)
{
	return 0;
}

static inline int
iop_is_err_int_mabort(unsigned long status, struct iop_adma_chan *chan)
{
	return test_bit(5, &status);
}

static inline int
iop_is_err_pci_tabort(unsigned long status, struct iop_adma_chan *chan)
{
	switch (chan->device->id) {
	case DMA0_ID:
	case DMA1_ID:
		return test_bit(2, &status);
	default:
		return 0;
	}
}

static inline int
iop_is_err_pci_mabort(unsigned long status, struct iop_adma_chan *chan)
{
	switch (chan->device->id) {
	case DMA0_ID:
	case DMA1_ID:
		return test_bit(3, &status);
	default:
		return 0;
	}
}

static inline int
iop_is_err_split_tx(unsigned long status, struct iop_adma_chan *chan)
{
	switch (chan->device->id) {
	case DMA0_ID:
	case DMA1_ID:
		return test_bit(1, &status);
	default:
		return 0;
	}
}