/*
 *  Omnitek Scatter-Gather DMA Controller
 *
 *  Copyright 2012-2015 Cisco Systems, Inc. and/or its affiliates.
 *  All rights reserved.
 *
 *  This program is free software; you may redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; version 2 of the License.
 *
 *  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 *  EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 *  MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 *  NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 *  BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 *  ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 *  CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 *  SOFTWARE.
 */
#include <linux/string.h>
#include <linux/io.h>
#include <linux/pci_regs.h>
#include <linux/spinlock.h>

#include "cobalt-driver.h"
#include "cobalt-omnitek.h"
/* descriptor control bits (low bits of the next-descriptor address word) */
#define END_OF_CHAIN		(1 << 1)
#define INTERRUPT_ENABLE	(1 << 2)
#define WRITE_TO_PCI		(1 << 3)
#define READ_FROM_PCI		(0 << 3)
#define DESCRIPTOR_FLAG_MSK	(END_OF_CHAIN | INTERRUPT_ENABLE | WRITE_TO_PCI)
/* descriptors are 32-byte aligned; the low 5 address bits carry flags */
#define NEXT_ADRS_MSK		0xffffffe0

/* control/status register */
#define ENABLE			(1 << 0)
#define START			(1 << 1)
#define ABORT			(1 << 2)
/* DONE was dropped from this copy of the file although is_dma_done()
 * tests it; bit 4 of the control/status register signals completion. */
#define DONE			(1 << 4)
#define SG_INTERRUPT		(1 << 5)
#define EVENT_INTERRUPT		(1 << 6)
#define SCATTER_GATHER_MODE	(1 << 8)
#define DISABLE_VIDEO_RESYNC	(1 << 9)
#define EVENT_INTERRUPT_ENABLE	(1 << 10)
#define DIRECTIONAL_MSK		(3 << 16)
#define INPUT_ONLY		(0 << 16)
#define OUTPUT_ONLY		(1 << 16)
#define BIDIRECTIONAL		(2 << 16)
#define DMA_TYPE_MEMORY		(0 << 18)
#define DMA_TYPE_FIFO		(1 << 18)

/* register map relative to BAR0; per-channel blocks are 0x40 apart */
#define BASE			(cobalt->bar0)
#define CAPABILITY_HEADER	(BASE)
#define CAPABILITY_REGISTER	(BASE + 0x04)
#define PCI_64BIT		(1 << 8)
#define LOCAL_64BIT		(1 << 9)
#define INTERRUPT_STATUS	(BASE + 0x08)
#define PCI(c)			(BASE + 0x40 + ((c) * 0x40))
#define SIZE(c)			(BASE + 0x58 + ((c) * 0x40))
#define DESCRIPTOR(c)		(BASE + 0x50 + ((c) * 0x40))
#define CS_REG(c)		(BASE + 0x60 + ((c) * 0x40))
#define BYTES_TRANSFERRED(c)	(BASE + 0x64 + ((c) * 0x40))
67 static char *get_dma_direction(u32 status
)
69 switch (status
& DIRECTIONAL_MSK
) {
70 case INPUT_ONLY
: return "Input";
71 case OUTPUT_ONLY
: return "Output";
72 case BIDIRECTIONAL
: return "Bidirectional";
77 static void show_dma_capability(struct cobalt
*cobalt
)
79 u32 header
= ioread32(CAPABILITY_HEADER
);
80 u32 capa
= ioread32(CAPABILITY_REGISTER
);
83 cobalt_info("Omnitek DMA capability: ID 0x%02x Version 0x%02x Next 0x%x Size 0x%x\n",
84 header
& 0xff, (header
>> 8) & 0xff,
85 (header
>> 16) & 0xffff, (capa
>> 24) & 0xff);
87 switch ((capa
>> 8) & 0x3) {
89 cobalt_info("Omnitek DMA: 32 bits PCIe and Local\n");
92 cobalt_info("Omnitek DMA: 64 bits PCIe, 32 bits Local\n");
95 cobalt_info("Omnitek DMA: 64 bits PCIe and Local\n");
99 for (i
= 0; i
< (capa
& 0xf); i
++) {
100 u32 status
= ioread32(CS_REG(i
));
102 cobalt_info("Omnitek DMA channel #%d: %s %s\n", i
,
103 status
& DMA_TYPE_FIFO
? "FIFO" : "MEMORY",
104 get_dma_direction(status
));
/*
 * Program the channel's descriptor-chain base address and kick off the
 * scatter-gather transfer described by @desc.  The write order matters:
 * both address halves must be in place before START is set.
 */
void omni_sg_dma_start(struct cobalt_stream *s, struct sg_dma_desc_info *desc)
{
	struct cobalt *cobalt = s->cobalt;

	/* Upper 32 bits of the first descriptor's bus address */
	iowrite32((u32)((u64)desc->bus >> 32), DESCRIPTOR(s->dma_channel) + 4);
	/* Lower 32 bits; the low 5 bits are control/alignment bits, mask them */
	iowrite32((u32)desc->bus & NEXT_ADRS_MSK, DESCRIPTOR(s->dma_channel));
	/* Enable the channel in scatter-gather mode and start it */
	iowrite32(ENABLE | SCATTER_GATHER_MODE | START, CS_REG(s->dma_channel));
}
117 bool is_dma_done(struct cobalt_stream
*s
)
119 struct cobalt
*cobalt
= s
->cobalt
;
121 if (ioread32(CS_REG(s
->dma_channel
)) & DONE
)
127 void omni_sg_dma_abort_channel(struct cobalt_stream
*s
)
129 struct cobalt
*cobalt
= s
->cobalt
;
131 if (is_dma_done(s
) == false)
132 iowrite32(ABORT
, CS_REG(s
->dma_channel
));
135 int omni_sg_dma_init(struct cobalt
*cobalt
)
137 u32 capa
= ioread32(CAPABILITY_REGISTER
);
140 cobalt
->first_fifo_channel
= 0;
141 cobalt
->dma_channels
= capa
& 0xf;
142 if (capa
& PCI_64BIT
)
143 cobalt
->pci_32_bit
= false;
145 cobalt
->pci_32_bit
= true;
147 for (i
= 0; i
< cobalt
->dma_channels
; i
++) {
148 u32 status
= ioread32(CS_REG(i
));
149 u32 ctrl
= ioread32(CS_REG(i
));
152 iowrite32(ABORT
, CS_REG(i
));
154 if (!(status
& DMA_TYPE_FIFO
))
155 cobalt
->first_fifo_channel
++;
157 show_dma_capability(cobalt
);
/*
 * Build a chain of hardware DMA descriptors in desc->virt covering @size
 * bytes of @scatter_list.  When @width < @stride, only @width bytes of
 * every @stride-byte line are transferred (padding is skipped); when
 * @width >= @stride the buffer is treated as one contiguous run.  The
 * chain is closed as a loop back to the first descriptor and the final
 * descriptor is recorded in desc->last_desc_virt so the list can later
 * be chained, looped or terminated.
 *
 * Returns 0 on success, -EFAULT on a zero bus address or an address
 * that does not fit a 32-bit-only PCIe configuration.
 *
 * NOTE(review): parts of this function were lost in extraction and have
 * been reconstructed to match the upstream driver — verify against the
 * canonical source before relying on exact statement order.
 */
int descriptor_list_create(struct cobalt *cobalt,
		struct scatterlist *scatter_list, bool to_pci, unsigned sglen,
		unsigned size, unsigned width, unsigned stride,
		struct sg_dma_desc_info *desc)
{
	struct sg_dma_descriptor *d = (struct sg_dma_descriptor *)desc->virt;
	dma_addr_t next = desc->bus;
	unsigned offset = 0;
	unsigned copy_bytes = width;
	unsigned copied = 0;
	bool first = true;

	/* Must be 4-byte aligned */
	WARN_ON(sg_dma_address(scatter_list) & 3);
	WARN_ON(size & 3);
	WARN_ON(next & 3);
	WARN_ON(stride & 3);
	WARN_ON(stride < width);
	/* No padding per line: copy the whole buffer in one stretch */
	if (width >= stride)
		copy_bytes = stride = size;

	while (size) {
		dma_addr_t addr = sg_dma_address(scatter_list) + offset;
		unsigned bytes;

		if (addr == 0)
			return -EFAULT;
		if (cobalt->pci_32_bit) {
			WARN_ON((u64)addr >> 32);
			if ((u64)addr >> 32)
				return -EFAULT;
		}

		/* PCIe address */
		d->pci_l = addr & 0xffffffff;
		/* If dma_addr_t is 32 bits, then addr >> 32 is actually the
		   equivalent of addr >> 0 in gcc. So must cast to u64. */
		d->pci_h = (u64)addr >> 32;

		/* Sync to start of streaming frame */
		d->local = 0;
		d->reserved0 = 0;

		/* Transfer bytes: bounded by both the current sg entry and
		 * the bytes still to copy in this line */
		bytes = min(sg_dma_len(scatter_list) - offset,
			    copy_bytes - copied);

		if (first) {
			if (to_pci)
				d->local = 0x11111111;
			first = false;
			if (sglen == 1) {
				/* Make sure there are always at least two
				 * descriptors */
				d->bytes = (bytes / 2) & ~3;
				d->reserved1 = 0;
				size -= d->bytes;
				copied += d->bytes;
				offset += d->bytes;
				addr += d->bytes;
				next += sizeof(struct sg_dma_descriptor);
				d->next_h = (u32)((u64)next >> 32);
				d->next_l = (u32)next |
					(to_pci ? WRITE_TO_PCI : 0);
				bytes -= d->bytes;
				d++;
				/* PCIe address */
				d->pci_l = addr & 0xffffffff;
				/* If dma_addr_t is 32 bits, then addr >> 32
				 * is actually the equivalent of addr >> 0 in
				 * gcc. So must cast to u64. */
				d->pci_h = (u64)addr >> 32;

				/* Sync to start of streaming frame */
				d->local = 0;
				d->reserved0 = 0;
			}
		}

		d->bytes = bytes;
		d->reserved1 = 0;
		size -= bytes;
		copied += bytes;
		offset += bytes;

		if (copied == copy_bytes) {
			/* End of a line: consume the padding up to the next
			 * stride boundary without describing it */
			while (copied < stride) {
				bytes = min(sg_dma_len(scatter_list) - offset,
					    stride - copied);
				copied += bytes;
				offset += bytes;
				size -= bytes;
				if (sg_dma_len(scatter_list) == offset) {
					offset = 0;
					scatter_list = sg_next(scatter_list);
				}
			}
			copied = 0;
		} else {
			offset = 0;
			scatter_list = sg_next(scatter_list);
		}

		/* Next descriptor + control bits */
		next += sizeof(struct sg_dma_descriptor);
		if (size == 0) {
			/* Loopback to the first descriptor */
			d->next_h = (u32)((u64)desc->bus >> 32);
			d->next_l = (u32)desc->bus |
				(to_pci ? WRITE_TO_PCI : 0) | INTERRUPT_ENABLE;
			if (!to_pci)
				d->local = 0x22222222;
			desc->last_desc_virt = d;
		} else {
			d->next_h = (u32)((u64)next >> 32);
			d->next_l = (u32)next | (to_pci ? WRITE_TO_PCI : 0);
		}
		d++;
	}
	return 0;
}
/*
 * Chain @this onto @next: point the last descriptor of @this at the
 * first descriptor of @next, preserving the transfer direction already
 * programmed into it.  A NULL @next terminates the chain instead.
 */
void descriptor_list_chain(struct sg_dma_desc_info *this,
			   struct sg_dma_desc_info *next)
{
	struct sg_dma_descriptor *d = this->last_desc_virt;
	/* keep only the direction bit from the existing control word */
	u32 direction = d->next_l & WRITE_TO_PCI;

	if (next == NULL) {
		/* no successor: end the chain, interrupt on completion */
		d->next_l = direction | INTERRUPT_ENABLE | END_OF_CHAIN;
	} else {
		d->next_h = (u32)((u64)next->bus >> 32);
		d->next_l = (u32)next->bus | direction | INTERRUPT_ENABLE;
	}
}
298 void *descriptor_list_allocate(struct sg_dma_desc_info
*desc
, size_t bytes
)
301 desc
->virt
= dma_alloc_coherent(desc
->dev
, bytes
,
302 &desc
->bus
, GFP_KERNEL
);
306 void descriptor_list_free(struct sg_dma_desc_info
*desc
)
309 dma_free_coherent(desc
->dev
, desc
->size
,
310 desc
->virt
, desc
->bus
);
314 void descriptor_list_interrupt_enable(struct sg_dma_desc_info
*desc
)
316 struct sg_dma_descriptor
*d
= desc
->last_desc_virt
;
318 d
->next_l
|= INTERRUPT_ENABLE
;
321 void descriptor_list_interrupt_disable(struct sg_dma_desc_info
*desc
)
323 struct sg_dma_descriptor
*d
= desc
->last_desc_virt
;
325 d
->next_l
&= ~INTERRUPT_ENABLE
;
328 void descriptor_list_loopback(struct sg_dma_desc_info
*desc
)
330 struct sg_dma_descriptor
*d
= desc
->last_desc_virt
;
332 d
->next_h
= (u32
)((u64
)desc
->bus
>> 32);
333 d
->next_l
= (u32
)desc
->bus
| (d
->next_l
& DESCRIPTOR_FLAG_MSK
);
336 void descriptor_list_end_of_chain(struct sg_dma_desc_info
*desc
)
338 struct sg_dma_descriptor
*d
= desc
->last_desc_virt
;
340 d
->next_l
|= END_OF_CHAIN
;