/*
 * Copyright © 2006, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
 *
 */
#include <linux/types.h>
#include <linux/io.h>
#include <mach/hardware.h>
#include <asm/hardware/iop_adma.h>
/* Memory copy units */
#define DMA_CCR(chan) (chan->mmr_base + 0x0)
#define DMA_CSR(chan) (chan->mmr_base + 0x4)
#define DMA_DAR(chan) (chan->mmr_base + 0xc)
#define DMA_NDAR(chan) (chan->mmr_base + 0x10)
#define DMA_PADR(chan) (chan->mmr_base + 0x14)
#define DMA_PUADR(chan) (chan->mmr_base + 0x18)
#define DMA_LADR(chan) (chan->mmr_base + 0x1c)
#define DMA_BCR(chan) (chan->mmr_base + 0x20)
#define DMA_DCR(chan) (chan->mmr_base + 0x24)
/* Application accelerator unit */
#define AAU_ACR(chan) (chan->mmr_base + 0x0)
#define AAU_ASR(chan) (chan->mmr_base + 0x4)
#define AAU_ADAR(chan) (chan->mmr_base + 0x8)
#define AAU_ANDAR(chan) (chan->mmr_base + 0xc)
#define AAU_SAR(src, chan) (chan->mmr_base + (0x10 + ((src) << 2)))
#define AAU_DAR(chan) (chan->mmr_base + 0x20)
#define AAU_ABCR(chan) (chan->mmr_base + 0x24)
#define AAU_ADCR(chan) (chan->mmr_base + 0x28)
#define AAU_SAR_EDCR(src_edc) (chan->mmr_base + (0x02c + ((src_edc-4) << 2)))
#define AAU_EDCR0_IDX 8
#define AAU_EDCR1_IDX 17
#define AAU_EDCR2_IDX 26
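
/*
 * These macros compute memory-mapped register addresses relative to
 * chan->mmr_base (the per-channel register window of struct iop_adma_chan
 * from <asm/hardware/iop_adma.h>) and are accessed with
 * __raw_readl()/__raw_writel() by the helpers below.  A minimal sketch,
 * assuming a valid struct iop_adma_chan *chan:
 *
 *	u32 csr = __raw_readl(DMA_CSR(chan));
 *	busy = !!(csr & IOP_ADMA_STATUS_BUSY);
 *
 * which is essentially what iop_chan_is_busy() below does.
 */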
struct iop3xx_aau_desc_ctrl {
	unsigned int int_en:1;
	unsigned int blk1_cmd_ctrl:3;
	unsigned int blk2_cmd_ctrl:3;
	unsigned int blk3_cmd_ctrl:3;
	unsigned int blk4_cmd_ctrl:3;
	unsigned int blk5_cmd_ctrl:3;
	unsigned int blk6_cmd_ctrl:3;
	unsigned int blk7_cmd_ctrl:3;
	unsigned int blk8_cmd_ctrl:3;
	unsigned int blk_ctrl:2;
	unsigned int dual_xor_en:1;
	unsigned int tx_complete:1;
	unsigned int zero_result_err:1;
	unsigned int zero_result_en:1;
	unsigned int dest_write_en:1;
};
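
/*
 * The bit-field structs here give named access to the 32-bit descriptor
 * control words.  The helpers later in this file overlay them on a plain
 * u32 through an anonymous union, e.g.:
 *
 *	union {
 *		u32 value;
 *		struct iop3xx_aau_desc_ctrl field;
 *	} u_desc_ctrl;
 *
 *	u_desc_ctrl.value = 0;
 *	u_desc_ctrl.field.dest_write_en = 1;
 *	hw_desc->desc_ctrl = u_desc_ctrl.value;
 *
 * so a control word can be built field by field and written out in one go.
 */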
struct iop3xx_aau_e_desc_ctrl {
	unsigned int reserved:1;
	unsigned int blk1_cmd_ctrl:3;
	unsigned int blk2_cmd_ctrl:3;
	unsigned int blk3_cmd_ctrl:3;
	unsigned int blk4_cmd_ctrl:3;
	unsigned int blk5_cmd_ctrl:3;
	unsigned int blk6_cmd_ctrl:3;
	unsigned int blk7_cmd_ctrl:3;
	unsigned int blk8_cmd_ctrl:3;
	unsigned int reserved2:7;
};
struct iop3xx_dma_desc_ctrl {
	unsigned int pci_transaction:4;
	unsigned int int_en:1;
	unsigned int dac_cycle_en:1;
	unsigned int mem_to_mem_en:1;
	unsigned int crc_data_tx_en:1;
	unsigned int crc_gen_en:1;
	unsigned int crc_seed_dis:1;
	unsigned int reserved:21;
	unsigned int crc_tx_complete:1;
};
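
/*
 * For plain memory-to-memory copies, iop_desc_init_memcpy() below sets
 * mem_to_mem_en and programs pci_transaction with 0xe ("memory read
 * block"), while int_en is driven from the DMA_PREP_INTERRUPT flag; the
 * CRC-related control bits stay cleared and crc_addr is zeroed on that
 * path.
 */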
struct iop3xx_desc_dma {
	u32 upper_pci_src_addr;
	u32 upper_pci_dest_addr;
	u32 local_pci_src_addr;
	u32 local_pci_dest_addr;
	struct iop3xx_dma_desc_ctrl desc_ctrl_field;
};
struct iop3xx_desc_aau {
	struct iop3xx_aau_desc_ctrl desc_ctrl_field;
	struct iop3xx_aau_e_desc_ctrl e_desc_ctrl_field;
};
struct iop3xx_aau_gfmr {
	unsigned int gfmr1:8;
	unsigned int gfmr2:8;
	unsigned int gfmr3:8;
	unsigned int gfmr4:8;
};
struct iop3xx_desc_pq_xor {
	struct iop3xx_aau_gfmr data_mult1_field;
	struct iop3xx_aau_desc_ctrl desc_ctrl_field;
	struct iop3xx_aau_e_desc_ctrl e_desc_ctrl_field;
	struct iop3xx_aau_gfmr data_mult_field;
};
struct iop3xx_desc_dual_xor {
	struct iop3xx_aau_desc_ctrl desc_ctrl_field;
};
union iop3xx_desc {
	struct iop3xx_desc_aau *aau;
	struct iop3xx_desc_dma *dma;
	struct iop3xx_desc_pq_xor *pq_xor;
	struct iop3xx_desc_dual_xor *dual_xor;
	void *ptr;
};
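
/*
 * Channel-generic helpers below view a slot's raw hw_desc through this
 * union, picking the member that matches the unit the channel belongs to,
 * e.g.:
 *
 *	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };
 *
 *	... hw_desc.dma->byte_count ...	(memory copy units)
 *	... hw_desc.aau->byte_count ...	(application accelerator)
 */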
/* No support for p+q operations */
static inline int
iop_chan_pq_slot_count(size_t len, int src_cnt, int *slots_per_op)

static inline void
iop_desc_init_pq(struct iop_adma_desc_slot *desc, int src_cnt,

static inline void
iop_desc_set_pq_addr(struct iop_adma_desc_slot *desc, dma_addr_t *addr)

static inline void
iop_desc_set_pq_src_addr(struct iop_adma_desc_slot *desc, int src_idx,
			 dma_addr_t addr, unsigned char coef)

static inline int
iop_chan_pq_zero_sum_slot_count(size_t len, int src_cnt, int *slots_per_op)

static inline void
iop_desc_init_pq_zero_sum(struct iop_adma_desc_slot *desc, int src_cnt,

static inline void
iop_desc_set_pq_zero_sum_byte_count(struct iop_adma_desc_slot *desc, u32 len)

#define iop_desc_set_pq_zero_sum_src_addr iop_desc_set_pq_src_addr

static inline void
iop_desc_set_pq_zero_sum_addr(struct iop_adma_desc_slot *desc, int pq_idx,
static inline int iop_adma_get_max_xor(void)

static inline int iop_adma_get_max_pq(void)
static inline u32 iop_chan_get_current_descriptor(struct iop_adma_chan *chan)
	int id = chan->device->id;
		return __raw_readl(DMA_DAR(chan));
		return __raw_readl(AAU_ADAR(chan));
static inline void iop_chan_set_next_descriptor(struct iop_adma_chan *chan,
	int id = chan->device->id;
		__raw_writel(next_desc_addr, DMA_NDAR(chan));
		__raw_writel(next_desc_addr, AAU_ANDAR(chan));
#define IOP_ADMA_STATUS_BUSY (1 << 10)
#define IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT (1024)
#define IOP_ADMA_XOR_MAX_BYTE_COUNT (16 * 1024 * 1024)
#define IOP_ADMA_MAX_BYTE_COUNT (16 * 1024 * 1024)
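
/*
 * Per-descriptor hardware limits: XOR and plain copy descriptors can cover
 * up to 16 MB, while zero-sum (result check) descriptors are capped at
 * 1 KB each.  The slot-count helpers below size an operation accordingly;
 * for example, a 40 MB XOR needs ceil(40 MB / 16 MB) = 3 descriptors, so
 * iop_chan_xor_slot_count() returns 3 * (*slots_per_op).
 */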
static inline int iop_chan_is_busy(struct iop_adma_chan *chan)
{
	u32 status = __raw_readl(DMA_CSR(chan));
	return (status & IOP_ADMA_STATUS_BUSY) ? 1 : 0;
}
static inline int iop_desc_is_aligned(struct iop_adma_desc_slot *desc,
					int num_slots)
{
	/* num_slots will only ever be 1, 2, 4, or 8 */
	return (desc->idx & (num_slots - 1)) ? 0 : 1;
}
/* to do: support large (i.e. > hw max) buffer sizes */
static inline int iop_chan_memcpy_slot_count(size_t len, int *slots_per_op)

/* to do: support large (i.e. > hw max) buffer sizes */
static inline int iop_chan_memset_slot_count(size_t len, int *slots_per_op)
static inline int iop3xx_aau_xor_slot_count(size_t len, int src_cnt,
					int *slots_per_op)
{
	static const char slot_count_table[] = {
					1, 1, 1, 1, /* 01 - 04 */
					2, 2, 2, 2, /* 05 - 08 */
					4, 4, 4, 4, /* 09 - 12 */
					4, 4, 4, 4, /* 13 - 16 */
					8, 8, 8, 8, /* 17 - 20 */
					8, 8, 8, 8, /* 21 - 24 */
					8, 8, 8, 8, /* 25 - 28 */
					8, 8, 8, 8, /* 29 - 32 */
					};
	*slots_per_op = slot_count_table[src_cnt - 1];
	return *slots_per_op;
}
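
/*
 * The table above maps an XOR source count to the number of adjacent
 * descriptor slots a single hardware descriptor occupies; e.g. a 5-source
 * XOR reads slot_count_table[4] and therefore consumes 2 slots per
 * operation.
 */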
static inline int
iop_chan_interrupt_slot_count(int *slots_per_op, struct iop_adma_chan *chan)
	switch (chan->device->id) {
		return iop_chan_memcpy_slot_count(0, slots_per_op);
		return iop3xx_aau_xor_slot_count(0, 2, slots_per_op);
static inline int iop_chan_xor_slot_count(size_t len, int src_cnt,
						int *slots_per_op)
	int slot_cnt = iop3xx_aau_xor_slot_count(len, src_cnt, slots_per_op);

	if (len <= IOP_ADMA_XOR_MAX_BYTE_COUNT)

	len -= IOP_ADMA_XOR_MAX_BYTE_COUNT;
	while (len > IOP_ADMA_XOR_MAX_BYTE_COUNT) {
		len -= IOP_ADMA_XOR_MAX_BYTE_COUNT;
		slot_cnt += *slots_per_op;

	slot_cnt += *slots_per_op;
/* zero sum on iop3xx is limited to 1k at a time so it requires multiple
 * descriptors
 */
static inline int iop_chan_zero_sum_slot_count(size_t len, int src_cnt,
						int *slots_per_op)
	int slot_cnt = iop3xx_aau_xor_slot_count(len, src_cnt, slots_per_op);

	if (len <= IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT)

	len -= IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT;
	while (len > IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT) {
		len -= IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT;
		slot_cnt += *slots_per_op;

	slot_cnt += *slots_per_op;
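
/*
 * Example: checking 4 KB across the same sources therefore needs four
 * 1 KB descriptors, i.e. iop_chan_zero_sum_slot_count() returns
 * 4 * (*slots_per_op), while a 4 KB XOR still fits in a single
 * descriptor.
 */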
static inline int iop_desc_is_pq(struct iop_adma_desc_slot *desc)

static inline u32 iop_desc_get_dest_addr(struct iop_adma_desc_slot *desc,
					struct iop_adma_chan *chan)
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };

	switch (chan->device->id) {
		return hw_desc.dma->dest_addr;
		return hw_desc.aau->dest_addr;
static inline u32 iop_desc_get_qdest_addr(struct iop_adma_desc_slot *desc,
					  struct iop_adma_chan *chan)

static inline u32 iop_desc_get_byte_count(struct iop_adma_desc_slot *desc,
					struct iop_adma_chan *chan)
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };

	switch (chan->device->id) {
		return hw_desc.dma->byte_count;
		return hw_desc.aau->byte_count;
/* translate the src_idx to a descriptor word index */
static inline int __desc_idx(int src_idx)
	static const int desc_idx_table[] = { 0, 0, 0, 0,
	return desc_idx_table[src_idx];
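
/*
 * The entries elided above presumably route source indices 4 and up
 * around the src_edc[] words that hold extended descriptor control
 * (AAU_EDCR0_IDX, AAU_EDCR1_IDX and AAU_EDCR2_IDX), so a source address
 * is never written over an EDCR word.
 */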
static inline u32 iop_desc_get_src_addr(struct iop_adma_desc_slot *desc,
					struct iop_adma_chan *chan,
					int src_idx)
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };

	switch (chan->device->id) {
		return hw_desc.dma->src_addr;

		return hw_desc.aau->src[src_idx];
		return hw_desc.aau->src_edc[__desc_idx(src_idx)].src_addr;
static inline void iop3xx_aau_desc_set_src_addr(struct iop3xx_desc_aau *hw_desc,
					int src_idx, dma_addr_t addr)
		hw_desc->src[src_idx] = addr;
		hw_desc->src_edc[__desc_idx(src_idx)].src_addr = addr;
static inline void
iop_desc_init_memcpy(struct iop_adma_desc_slot *desc, unsigned long flags)
{
	struct iop3xx_desc_dma *hw_desc = desc->hw_desc;
	union {
		u32 value;
		struct iop3xx_dma_desc_ctrl field;
	} u_desc_ctrl;

	u_desc_ctrl.value = 0;
	u_desc_ctrl.field.mem_to_mem_en = 1;
	u_desc_ctrl.field.pci_transaction = 0xe; /* memory read block */
	u_desc_ctrl.field.int_en = flags & DMA_PREP_INTERRUPT;
	hw_desc->desc_ctrl = u_desc_ctrl.value;
	hw_desc->upper_pci_src_addr = 0;
	hw_desc->crc_addr = 0;
}
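
/*
 * A rough sketch of how the memcpy helpers above and below combine when a
 * caller (hypothetical here) fills in a copy descriptor slot:
 *
 *	iop_desc_init_memcpy(slot, flags);
 *	iop_desc_set_byte_count(slot, chan, len);
 *	iop_desc_set_dest_addr(slot, chan, dest);
 *	iop_desc_set_memcpy_src_addr(slot, src);
 *
 * after which the slot is chained via iop_desc_set_next_desc() and the
 * channel kicked with iop_chan_append().
 */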
static inline void
iop_desc_init_memset(struct iop_adma_desc_slot *desc, unsigned long flags)
{
	struct iop3xx_desc_aau *hw_desc = desc->hw_desc;
	union {
		u32 value;
		struct iop3xx_aau_desc_ctrl field;
	} u_desc_ctrl;

	u_desc_ctrl.value = 0;
	u_desc_ctrl.field.blk1_cmd_ctrl = 0x2; /* memory block fill */
	u_desc_ctrl.field.dest_write_en = 1;
	u_desc_ctrl.field.int_en = flags & DMA_PREP_INTERRUPT;
	hw_desc->desc_ctrl = u_desc_ctrl.value;
}
static inline u32
iop3xx_desc_init_xor(struct iop3xx_desc_aau *hw_desc, int src_cnt,
		     unsigned long flags)
	int i, shift;
	u32 edcr;
	union {
		u32 value;
		struct iop3xx_aau_desc_ctrl field;
	} u_desc_ctrl;

	u_desc_ctrl.value = 0;
		u_desc_ctrl.field.blk_ctrl = 0x3; /* use EDCR[2:0] */
		for (i = 24; i < src_cnt; i++) {
			edcr |= (1 << shift);
		hw_desc->src_edc[AAU_EDCR2_IDX].e_desc_ctrl = edcr;
		if (!u_desc_ctrl.field.blk_ctrl) {
			hw_desc->src_edc[AAU_EDCR2_IDX].e_desc_ctrl = 0;
			u_desc_ctrl.field.blk_ctrl = 0x3; /* use EDCR[2:0] */
		for (i = 16; i < src_cnt; i++) {
			edcr |= (1 << shift);
		hw_desc->src_edc[AAU_EDCR1_IDX].e_desc_ctrl = edcr;
		if (!u_desc_ctrl.field.blk_ctrl)
			u_desc_ctrl.field.blk_ctrl = 0x2; /* use EDCR0 */
		for (i = 8; i < src_cnt; i++) {
			edcr |= (1 << shift);
		hw_desc->src_edc[AAU_EDCR0_IDX].e_desc_ctrl = edcr;
		for (i = 0; i < src_cnt; i++) {
			u_desc_ctrl.value |= (1 << shift);
		if (!u_desc_ctrl.field.blk_ctrl && src_cnt > 4)
			u_desc_ctrl.field.blk_ctrl = 0x1; /* use mini-desc */

	u_desc_ctrl.field.dest_write_en = 1;
	u_desc_ctrl.field.blk1_cmd_ctrl = 0x7; /* direct fill */
	u_desc_ctrl.field.int_en = flags & DMA_PREP_INTERRUPT;
	hw_desc->desc_ctrl = u_desc_ctrl.value;

	return u_desc_ctrl.value;
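
/*
 * Each XOR source corresponds to a 3-bit block command field (blk1..blk8
 * in the control structs above), which is why the enable bits above are
 * built with (1 << shift); the elided loop bodies presumably advance
 * shift in 3-bit steps.  Sources beyond the first eight are enabled
 * through the extended control words at AAU_EDCR0/1/2_IDX.
 */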
static inline void
iop_desc_init_xor(struct iop_adma_desc_slot *desc, int src_cnt,
		  unsigned long flags)
{
	iop3xx_desc_init_xor(desc->hw_desc, src_cnt, flags);
}
/* return the number of operations */
static inline int
iop_desc_init_zero_sum(struct iop_adma_desc_slot *desc, int src_cnt,
		       unsigned long flags)
	int slot_cnt = desc->slot_cnt, slots_per_op = desc->slots_per_op;
	struct iop3xx_desc_aau *hw_desc, *prev_hw_desc, *iter;
	union {
		u32 value;
		struct iop3xx_aau_desc_ctrl field;
	} u_desc_ctrl;
	int i, j;

	hw_desc = desc->hw_desc;

	for (i = 0, j = 0; (slot_cnt -= slots_per_op) >= 0;
		i += slots_per_op, j++) {
		iter = iop_hw_desc_slot_idx(hw_desc, i);
		u_desc_ctrl.value = iop3xx_desc_init_xor(iter, src_cnt, flags);
		u_desc_ctrl.field.dest_write_en = 0;
		u_desc_ctrl.field.zero_result_en = 1;
		u_desc_ctrl.field.int_en = flags & DMA_PREP_INTERRUPT;
		iter->desc_ctrl = u_desc_ctrl.value;

		/* for the subsequent descriptors preserve the store queue
		 * and chain them together
		 */
			iop_hw_desc_slot_idx(hw_desc, i - slots_per_op);
		prev_hw_desc->next_desc =
			(u32) (desc->async_tx.phys + (i << 5));
static inline void
iop_desc_init_null_xor(struct iop_adma_desc_slot *desc, int src_cnt,
		       unsigned long flags)
	struct iop3xx_desc_aau *hw_desc = desc->hw_desc;
	union {
		u32 value;
		struct iop3xx_aau_desc_ctrl field;
	} u_desc_ctrl;

	u_desc_ctrl.value = 0;
		u_desc_ctrl.field.blk_ctrl = 0x3; /* use EDCR[2:0] */
		hw_desc->src_edc[AAU_EDCR2_IDX].e_desc_ctrl = 0;
		if (!u_desc_ctrl.field.blk_ctrl) {
			hw_desc->src_edc[AAU_EDCR2_IDX].e_desc_ctrl = 0;
			u_desc_ctrl.field.blk_ctrl = 0x3; /* use EDCR[2:0] */
		hw_desc->src_edc[AAU_EDCR1_IDX].e_desc_ctrl = 0;
		if (!u_desc_ctrl.field.blk_ctrl)
			u_desc_ctrl.field.blk_ctrl = 0x2; /* use EDCR0 */
		hw_desc->src_edc[AAU_EDCR0_IDX].e_desc_ctrl = 0;
		if (!u_desc_ctrl.field.blk_ctrl && src_cnt > 4)
			u_desc_ctrl.field.blk_ctrl = 0x1; /* use mini-desc */

	u_desc_ctrl.field.dest_write_en = 0;
	u_desc_ctrl.field.int_en = flags & DMA_PREP_INTERRUPT;
	hw_desc->desc_ctrl = u_desc_ctrl.value;
static inline void iop_desc_set_byte_count(struct iop_adma_desc_slot *desc,
					struct iop_adma_chan *chan,
					u32 byte_count)
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };

	switch (chan->device->id) {
		hw_desc.dma->byte_count = byte_count;
		hw_desc.aau->byte_count = byte_count;
static inline void
iop_desc_init_interrupt(struct iop_adma_desc_slot *desc,
			struct iop_adma_chan *chan)
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };

	switch (chan->device->id) {
		iop_desc_init_memcpy(desc, 1);
		hw_desc.dma->byte_count = 0;
		hw_desc.dma->dest_addr = 0;
		hw_desc.dma->src_addr = 0;
		iop_desc_init_null_xor(desc, 2, 1);
		hw_desc.aau->byte_count = 0;
		hw_desc.aau->dest_addr = 0;
		hw_desc.aau->src[0] = 0;
		hw_desc.aau->src[1] = 0;
static inline void
iop_desc_set_zero_sum_byte_count(struct iop_adma_desc_slot *desc, u32 len)
	int slots_per_op = desc->slots_per_op;
	struct iop3xx_desc_aau *hw_desc = desc->hw_desc, *iter;

	if (len <= IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT) {
		hw_desc->byte_count = len;
			iter = iop_hw_desc_slot_idx(hw_desc, i);
			iter->byte_count = IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT;
			len -= IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT;
		} while (len > IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT);

		iter = iop_hw_desc_slot_idx(hw_desc, i);
		iter->byte_count = len;
static inline void iop_desc_set_dest_addr(struct iop_adma_desc_slot *desc,
					struct iop_adma_chan *chan,
					dma_addr_t addr)
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };

	switch (chan->device->id) {
		hw_desc.dma->dest_addr = addr;
		hw_desc.aau->dest_addr = addr;
static inline void iop_desc_set_memcpy_src_addr(struct iop_adma_desc_slot *desc,
					dma_addr_t addr)
{
	struct iop3xx_desc_dma *hw_desc = desc->hw_desc;
	hw_desc->src_addr = addr;
}
static inline void
iop_desc_set_zero_sum_src_addr(struct iop_adma_desc_slot *desc, int src_idx,
				dma_addr_t addr)
	struct iop3xx_desc_aau *hw_desc = desc->hw_desc, *iter;
	int slot_cnt = desc->slot_cnt, slots_per_op = desc->slots_per_op;
	int i;

	for (i = 0; (slot_cnt -= slots_per_op) >= 0;
		i += slots_per_op, addr += IOP_ADMA_ZERO_SUM_MAX_BYTE_COUNT) {
		iter = iop_hw_desc_slot_idx(hw_desc, i);
		iop3xx_aau_desc_set_src_addr(iter, src_idx, addr);
static inline void iop_desc_set_xor_src_addr(struct iop_adma_desc_slot *desc,
					int src_idx, dma_addr_t addr)
	struct iop3xx_desc_aau *hw_desc = desc->hw_desc, *iter;
	int slot_cnt = desc->slot_cnt, slots_per_op = desc->slots_per_op;
	int i;

	for (i = 0; (slot_cnt -= slots_per_op) >= 0;
		i += slots_per_op, addr += IOP_ADMA_XOR_MAX_BYTE_COUNT) {
		iter = iop_hw_desc_slot_idx(hw_desc, i);
		iop3xx_aau_desc_set_src_addr(iter, src_idx, addr);
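
/*
 * For multi-descriptor XOR and zero-sum operations the two helpers above
 * program the same source index into every constituent descriptor, with
 * the source address advanced by that descriptor type's maximum byte
 * count (16 MB for XOR, 1 KB for zero-sum) on each step.
 */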
static inline void iop_desc_set_next_desc(struct iop_adma_desc_slot *desc,
					u32 next_desc_addr)
{
	/* hw_desc->next_desc is the same location for all channels */
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };

	iop_paranoia(hw_desc.dma->next_desc);
	hw_desc.dma->next_desc = next_desc_addr;
}
static inline u32 iop_desc_get_next_desc(struct iop_adma_desc_slot *desc)
{
	/* hw_desc->next_desc is the same location for all channels */
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };
	return hw_desc.dma->next_desc;
}
static inline void iop_desc_clear_next_desc(struct iop_adma_desc_slot *desc)
{
	/* hw_desc->next_desc is the same location for all channels */
	union iop3xx_desc hw_desc = { .ptr = desc->hw_desc, };
	hw_desc.dma->next_desc = 0;
}
static inline void iop_desc_set_block_fill_val(struct iop_adma_desc_slot *desc,
						u32 val)
{
	struct iop3xx_desc_aau *hw_desc = desc->hw_desc;
	hw_desc->src[0] = val;
}
static inline enum sum_check_flags
iop_desc_get_zero_result(struct iop_adma_desc_slot *desc)
{
	struct iop3xx_desc_aau *hw_desc = desc->hw_desc;
	struct iop3xx_aau_desc_ctrl desc_ctrl = hw_desc->desc_ctrl_field;

	iop_paranoia(!(desc_ctrl.tx_complete && desc_ctrl.zero_result_en));
	return desc_ctrl.zero_result_err << SUM_CHECK_P;
}
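
/*
 * zero_result_err is only meaningful once the descriptor has completed
 * with zero-sum checking enabled (hence the iop_paranoia() check); the
 * shift by SUM_CHECK_P reports it in the P-result position of the
 * dmaengine sum_check_flags bitmask.
 */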
static inline void iop_chan_append(struct iop_adma_chan *chan)
{
	u32 dma_chan_ctrl;

	dma_chan_ctrl = __raw_readl(DMA_CCR(chan));
	dma_chan_ctrl |= 0x2;
	__raw_writel(dma_chan_ctrl, DMA_CCR(chan));
}
static inline u32 iop_chan_get_status(struct iop_adma_chan *chan)
{
	return __raw_readl(DMA_CSR(chan));
}
static inline void iop_chan_disable(struct iop_adma_chan *chan)
	u32 dma_chan_ctrl = __raw_readl(DMA_CCR(chan));

	__raw_writel(dma_chan_ctrl, DMA_CCR(chan));
static inline void iop_chan_enable(struct iop_adma_chan *chan)
	u32 dma_chan_ctrl = __raw_readl(DMA_CCR(chan));

	__raw_writel(dma_chan_ctrl, DMA_CCR(chan));
static inline void iop_adma_device_clear_eot_status(struct iop_adma_chan *chan)
	u32 status = __raw_readl(DMA_CSR(chan));
	__raw_writel(status, DMA_CSR(chan));

static inline void iop_adma_device_clear_eoc_status(struct iop_adma_chan *chan)
	u32 status = __raw_readl(DMA_CSR(chan));
	__raw_writel(status, DMA_CSR(chan));
static inline void iop_adma_device_clear_err_status(struct iop_adma_chan *chan)
	u32 status = __raw_readl(DMA_CSR(chan));

	switch (chan->device->id) {
		status &= (1 << 5) | (1 << 3) | (1 << 2) | (1 << 1);

	__raw_writel(status, DMA_CSR(chan));
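
/*
 * The mask above keeps exactly the error bits that the iop_is_err_*()
 * helpers below test on the memory copy units: bit 5 (internal master
 * abort), bit 3 (PCI master abort), bit 2 (PCI target abort) and bit 1
 * (split transaction error).  Only those conditions are written back to
 * the status register, presumably clearing them if the register is
 * write-one-to-clear.
 */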
static inline int
iop_is_err_int_parity(unsigned long status, struct iop_adma_chan *chan)

static inline int
iop_is_err_mcu_abort(unsigned long status, struct iop_adma_chan *chan)

static inline int
iop_is_err_int_tabort(unsigned long status, struct iop_adma_chan *chan)

static inline int
iop_is_err_int_mabort(unsigned long status, struct iop_adma_chan *chan)
{
	return test_bit(5, &status);
}

static inline int
iop_is_err_pci_tabort(unsigned long status, struct iop_adma_chan *chan)
	switch (chan->device->id) {
		return test_bit(2, &status);

static inline int
iop_is_err_pci_mabort(unsigned long status, struct iop_adma_chan *chan)
	switch (chan->device->id) {
		return test_bit(3, &status);

static inline int
iop_is_err_split_tx(unsigned long status, struct iop_adma_chan *chan)
	switch (chan->device->id) {
		return test_bit(1, &status);