// SPDX-License-Identifier: GPL-2.0-only
/*
 *  Omnitek Scatter-Gather DMA Controller
 *
 *  Copyright 2012-2015 Cisco Systems, Inc. and/or its affiliates.
 *  All rights reserved.
 */

#include <linux/string.h>
#include <linux/io.h>
#include <linux/pci_regs.h>
#include <linux/spinlock.h>

#include "cobalt-driver.h"
#include "cobalt-omnitek.h"
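
/* Descriptor link flags, OR'ed into the low bits of the next-pointer word */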
#define END_OF_CHAIN		(1 << 1)
#define INTERRUPT_ENABLE	(1 << 2)
#define WRITE_TO_PCI		(1 << 3)
#define READ_FROM_PCI		(0 << 3)
#define DESCRIPTOR_FLAG_MSK	(END_OF_CHAIN | INTERRUPT_ENABLE | WRITE_TO_PCI)
#define NEXT_ADRS_MSK		0xffffffe0

/* control/status register */
#define ENABLE			(1 << 0)
#define START			(1 << 1)
#define ABORT			(1 << 2)
#define DONE			(1 << 4)
#define SG_INTERRUPT		(1 << 5)
#define EVENT_INTERRUPT		(1 << 6)
#define SCATTER_GATHER_MODE	(1 << 8)
#define DISABLE_VIDEO_RESYNC	(1 << 9)
#define EVENT_INTERRUPT_ENABLE	(1 << 10)
#define DIRECTIONAL_MSK		(3 << 16)
#define INPUT_ONLY		(0 << 16)
#define OUTPUT_ONLY		(1 << 16)
#define BIDIRECTIONAL		(2 << 16)
#define DMA_TYPE_MEMORY		(0 << 18)
#define DMA_TYPE_FIFO		(1 << 18)

#define BASE			(cobalt->bar0)
#define CAPABILITY_HEADER	(BASE)
#define CAPABILITY_REGISTER	(BASE + 0x04)
#define PCI_64BIT		(1 << 8)
#define LOCAL_64BIT		(1 << 9)
#define INTERRUPT_STATUS	(BASE + 0x08)
#define PCI(c)			(BASE + 0x40 + ((c) * 0x40))
#define SIZE(c)			(BASE + 0x58 + ((c) * 0x40))
#define DESCRIPTOR(c)		(BASE + 0x50 + ((c) * 0x40))
#define CS_REG(c)		(BASE + 0x60 + ((c) * 0x40))
#define BYTES_TRANSFERRED(c)	(BASE + 0x64 + ((c) * 0x40))
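
/* Map the direction bits of a control/status value to a printable name */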
static char *get_dma_direction(u32 status)
{
	switch (status & DIRECTIONAL_MSK) {
	case INPUT_ONLY: return "Input";
	case OUTPUT_ONLY: return "Output";
	case BIDIRECTIONAL: return "Bidirectional";
	}
	return "";
}
static void show_dma_capability(struct cobalt *cobalt)
{
	u32 header = ioread32(CAPABILITY_HEADER);
	u32 capa = ioread32(CAPABILITY_REGISTER);
	u32 i;

	cobalt_info("Omnitek DMA capability: ID 0x%02x Version 0x%02x Next 0x%x Size 0x%x\n",
		    header & 0xff, (header >> 8) & 0xff,
		    (header >> 16) & 0xffff, (capa >> 24) & 0xff);

	switch ((capa >> 8) & 0x3) {
	case 0:
		cobalt_info("Omnitek DMA: 32 bits PCIe and Local\n");
		break;
	case 1:
		cobalt_info("Omnitek DMA: 64 bits PCIe, 32 bits Local\n");
		break;
	case 3:
		cobalt_info("Omnitek DMA: 64 bits PCIe and Local\n");
		break;
	}

	for (i = 0; i < (capa & 0xf); i++) {
		u32 status = ioread32(CS_REG(i));

		cobalt_info("Omnitek DMA channel #%d: %s %s\n", i,
			    status & DMA_TYPE_FIFO ? "FIFO" : "MEMORY",
			    get_dma_direction(status));
	}
}
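
/*
 * Load the channel's descriptor pointer with the bus address of the first
 * descriptor and start the channel in scatter-gather mode.
 */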
void omni_sg_dma_start(struct cobalt_stream *s, struct sg_dma_desc_info *desc)
{
	struct cobalt *cobalt = s->cobalt;

	iowrite32((u32)((u64)desc->bus >> 32), DESCRIPTOR(s->dma_channel) + 4);
	iowrite32((u32)desc->bus & NEXT_ADRS_MSK, DESCRIPTOR(s->dma_channel));
	iowrite32(ENABLE | SCATTER_GATHER_MODE | START, CS_REG(s->dma_channel));
}
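
/* Test the DONE bit in the channel's control/status register */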
bool is_dma_done(struct cobalt_stream *s)
{
	struct cobalt *cobalt = s->cobalt;

	if (ioread32(CS_REG(s->dma_channel)) & DONE)
		return true;

	return false;
}
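
/* Abort the stream's DMA channel unless its transfer already completed */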
void omni_sg_dma_abort_channel(struct cobalt_stream *s)
{
	struct cobalt *cobalt = s->cobalt;

	if (!is_dma_done(s))
		iowrite32(ABORT, CS_REG(s->dma_channel));
}
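
/*
 * Discover the DMA engine: read the channel count and PCIe addressing
 * width from the capability register, abort any channel still running,
 * and count the memory-type channels to locate the first FIFO channel.
 */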
int omni_sg_dma_init(struct cobalt *cobalt)
{
	u32 capa = ioread32(CAPABILITY_REGISTER);
	int i;

	cobalt->first_fifo_channel = 0;
	cobalt->dma_channels = capa & 0xf;
	if (capa & PCI_64BIT)
		cobalt->pci_32_bit = false;
	else
		cobalt->pci_32_bit = true;

	for (i = 0; i < cobalt->dma_channels; i++) {
		u32 status = ioread32(CS_REG(i));
		u32 ctrl = ioread32(CS_REG(i));

		if (!(ctrl & DONE))
			iowrite32(ABORT, CS_REG(i));

		if (!(status & DMA_TYPE_FIFO))
			cobalt->first_fifo_channel++;
	}
	show_dma_capability(cobalt);
	return 0;
}
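
/*
 * Build the hardware descriptor chain for a transfer of @size bytes.
 * @width bytes are copied per line and @stride is the line pitch; when
 * width >= stride the buffer is treated as one contiguous block. The
 * final descriptor loops back to the first one with its interrupt
 * flag set.
 */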
int descriptor_list_create(struct cobalt *cobalt,
		struct scatterlist *scatter_list, bool to_pci, unsigned sglen,
		unsigned size, unsigned width, unsigned stride,
		struct sg_dma_desc_info *desc)
{
	struct sg_dma_descriptor *d = (struct sg_dma_descriptor *)desc->virt;
	dma_addr_t next = desc->bus;
	unsigned offset = 0;
	unsigned copy_bytes = width;
	unsigned copied = 0;
	bool first = true;

	/* Must be 4-byte aligned */
	WARN_ON(sg_dma_address(scatter_list) & 3);
	WARN_ON(size & 3);
	WARN_ON(next & 3);
	WARN_ON(stride & 3);
	WARN_ON(stride < width);
	if (width >= stride)
		copy_bytes = stride = size;

	while (size) {
		dma_addr_t addr = sg_dma_address(scatter_list) + offset;
		unsigned bytes;

		if (addr == 0)
			return -EFAULT;
		if (cobalt->pci_32_bit) {
			WARN_ON((u64)addr >> 32);
			if ((u64)addr >> 32)
				return -EFAULT;
		}

		/* PCIe address */
		d->pci_l = addr & 0xffffffff;
		/* If dma_addr_t is 32 bits, then addr >> 32 is actually the
		   equivalent of addr >> 0 in gcc. So must cast to u64. */
		d->pci_h = (u64)addr >> 32;

		/* Sync to start of streaming frame */
		d->local = 0;
		d->reserved0 = 0;

		/* Transfer bytes */
		bytes = min(sg_dma_len(scatter_list) - offset,
			    copy_bytes - copied);

		if (first) {
			if (to_pci)
				d->local = 0x11111111;
			first = false;
			if (sglen == 1) {
				/* Make sure there are always at least two
				 * descriptors */
				d->bytes = (bytes / 2) & ~3;
				d->reserved1 = 0;
				size -= d->bytes;
				copied += d->bytes;
				offset += d->bytes;
				addr += d->bytes;
				next += sizeof(struct sg_dma_descriptor);
				d->next_h = (u32)((u64)next >> 32);
				d->next_l = (u32)next |
					(to_pci ? WRITE_TO_PCI : 0);
				bytes -= d->bytes;
				d++;
				/* PCIe address */
				d->pci_l = addr & 0xffffffff;
				/* If dma_addr_t is 32 bits, then addr >> 32
				 * is actually the equivalent of addr >> 0 in
				 * gcc. So must cast to u64. */
				d->pci_h = (u64)addr >> 32;

				/* Sync to start of streaming frame */
				d->local = 0;
				d->reserved0 = 0;
			}
		}

		d->bytes = bytes;
		d->reserved1 = 0;
		size -= bytes;
		copied += bytes;
		offset += bytes;

		if (copied == copy_bytes) {
			while (copied < stride) {
				bytes = min(sg_dma_len(scatter_list) - offset,
					    stride - copied);
				copied += bytes;
				offset += bytes;
				size -= bytes;
				if (sg_dma_len(scatter_list) == offset) {
					offset = 0;
					scatter_list = sg_next(scatter_list);
				}
			}
			copied = 0;
		} else {
			offset = 0;
			scatter_list = sg_next(scatter_list);
		}

		/* Next descriptor + control bits */
		next += sizeof(struct sg_dma_descriptor);
		if (size == 0) {
			/* Loopback to the first descriptor */
			d->next_h = (u32)((u64)desc->bus >> 32);
			d->next_l = (u32)desc->bus |
				(to_pci ? WRITE_TO_PCI : 0) | INTERRUPT_ENABLE;
			if (!to_pci)
				d->local = 0x22222222;
			desc->last_desc_virt = d;
		} else {
			d->next_h = (u32)((u64)next >> 32);
			d->next_l = (u32)next | (to_pci ? WRITE_TO_PCI : 0);
		}
		d++;
	}
	return 0;
}
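
/*
 * Link the last descriptor of @this to the first descriptor of @next,
 * or mark the end of the chain when @next is NULL.
 */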
void descriptor_list_chain(struct sg_dma_desc_info *this,
			   struct sg_dma_desc_info *next)
{
	struct sg_dma_descriptor *d = this->last_desc_virt;
	u32 direction = d->next_l & WRITE_TO_PCI;

	if (next == NULL) {
		d->next_h = 0;
		d->next_l = direction | INTERRUPT_ENABLE | END_OF_CHAIN;
	} else {
		d->next_h = (u32)((u64)next->bus >> 32);
		d->next_l = (u32)next->bus | direction | INTERRUPT_ENABLE;
	}
}
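
/* Allocate coherent DMA memory for a descriptor list and record its size */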
void *descriptor_list_allocate(struct sg_dma_desc_info *desc, size_t bytes)
{
	desc->size = bytes;
	desc->virt = dma_alloc_coherent(desc->dev, bytes,
					&desc->bus, GFP_KERNEL);
	return desc->virt;
}
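
/* Release the coherent DMA memory of a descriptor list, if allocated */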
void descriptor_list_free(struct sg_dma_desc_info *desc)
{
	if (desc->virt)
		dma_free_coherent(desc->dev, desc->size,
				  desc->virt, desc->bus);
	desc->virt = NULL;
}
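
/* Request an interrupt on completion of the last descriptor */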
void descriptor_list_interrupt_enable(struct sg_dma_desc_info *desc)
{
	struct sg_dma_descriptor *d = desc->last_desc_virt;

	d->next_l |= INTERRUPT_ENABLE;
}
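
/* Do not raise an interrupt when the last descriptor completes */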
void descriptor_list_interrupt_disable(struct sg_dma_desc_info *desc)
{
	struct sg_dma_descriptor *d = desc->last_desc_virt;

	d->next_l &= ~INTERRUPT_ENABLE;
}
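
/* Point the last descriptor back at the first, preserving its flag bits */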
void descriptor_list_loopback(struct sg_dma_desc_info *desc)
{
	struct sg_dma_descriptor *d = desc->last_desc_virt;

	d->next_h = (u32)((u64)desc->bus >> 32);
	d->next_l = (u32)desc->bus | (d->next_l & DESCRIPTOR_FLAG_MSK);
}
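
/* Flag the last descriptor as the end of the chain */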
void descriptor_list_end_of_chain(struct sg_dma_desc_info *desc)
{
	struct sg_dma_descriptor *d = desc->last_desc_virt;

	d->next_l |= END_OF_CHAIN;
}