// SPDX-License-Identifier: GPL-2.0
/* ebus.c: EBUS DMA library code.
 *
 * Copyright (C) 1997  Eddie C. Dost  (ecd@skynet.be)
 * Copyright (C) 1999  David S. Miller (davem@redhat.com)
 */
#include <linux/export.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/interrupt.h>
#include <linux/delay.h>

#include <asm/ebus_dma.h>
#include <asm/io.h>
17 #define EBDMA_CSR 0x00UL /* Control/Status */
18 #define EBDMA_ADDR 0x04UL /* DMA Address */
19 #define EBDMA_COUNT 0x08UL /* DMA Count */
21 #define EBDMA_CSR_INT_PEND 0x00000001
22 #define EBDMA_CSR_ERR_PEND 0x00000002
23 #define EBDMA_CSR_DRAIN 0x00000004
24 #define EBDMA_CSR_INT_EN 0x00000010
25 #define EBDMA_CSR_RESET 0x00000080
26 #define EBDMA_CSR_WRITE 0x00000100
27 #define EBDMA_CSR_EN_DMA 0x00000200
28 #define EBDMA_CSR_CYC_PEND 0x00000400
29 #define EBDMA_CSR_DIAG_RD_DONE 0x00000800
30 #define EBDMA_CSR_DIAG_WR_DONE 0x00001000
31 #define EBDMA_CSR_EN_CNT 0x00002000
32 #define EBDMA_CSR_TC 0x00004000
33 #define EBDMA_CSR_DIS_CSR_DRN 0x00010000
34 #define EBDMA_CSR_BURST_SZ_MASK 0x000c0000
35 #define EBDMA_CSR_BURST_SZ_1 0x00080000
36 #define EBDMA_CSR_BURST_SZ_4 0x00000000
37 #define EBDMA_CSR_BURST_SZ_8 0x00040000
38 #define EBDMA_CSR_BURST_SZ_16 0x000c0000
39 #define EBDMA_CSR_DIAG_EN 0x00100000
40 #define EBDMA_CSR_DIS_ERR_PEND 0x00400000
41 #define EBDMA_CSR_TCI_DIS 0x00800000
42 #define EBDMA_CSR_EN_NEXT 0x01000000
43 #define EBDMA_CSR_DMA_ON 0x02000000
44 #define EBDMA_CSR_A_LOADED 0x04000000
45 #define EBDMA_CSR_NA_LOADED 0x08000000
46 #define EBDMA_CSR_DEV_ID_MASK 0xf0000000
48 #define EBUS_DMA_RESET_TIMEOUT 10000
50 static void __ebus_dma_reset(struct ebus_dma_info
*p
, int no_drain
)
55 writel(EBDMA_CSR_RESET
, p
->regs
+ EBDMA_CSR
);
61 for (i
= EBUS_DMA_RESET_TIMEOUT
; i
> 0; i
--) {
62 val
= readl(p
->regs
+ EBDMA_CSR
);
64 if (!(val
& (EBDMA_CSR_DRAIN
| EBDMA_CSR_CYC_PEND
)))
70 static irqreturn_t
ebus_dma_irq(int irq
, void *dev_id
)
72 struct ebus_dma_info
*p
= dev_id
;
76 spin_lock_irqsave(&p
->lock
, flags
);
77 csr
= readl(p
->regs
+ EBDMA_CSR
);
78 writel(csr
, p
->regs
+ EBDMA_CSR
);
79 spin_unlock_irqrestore(&p
->lock
, flags
);
81 if (csr
& EBDMA_CSR_ERR_PEND
) {
82 printk(KERN_CRIT
"ebus_dma(%s): DMA error!\n", p
->name
);
83 p
->callback(p
, EBUS_DMA_EVENT_ERROR
, p
->client_cookie
);
85 } else if (csr
& EBDMA_CSR_INT_PEND
) {
87 (csr
& EBDMA_CSR_TC
) ?
88 EBUS_DMA_EVENT_DMA
: EBUS_DMA_EVENT_DEVICE
,
97 int ebus_dma_register(struct ebus_dma_info
*p
)
103 if (p
->flags
& ~(EBUS_DMA_FLAG_USE_EBDMA_HANDLER
|
104 EBUS_DMA_FLAG_TCI_DISABLE
))
106 if ((p
->flags
& EBUS_DMA_FLAG_USE_EBDMA_HANDLER
) && !p
->callback
)
108 if (!strlen(p
->name
))
111 __ebus_dma_reset(p
, 1);
113 csr
= EBDMA_CSR_BURST_SZ_16
| EBDMA_CSR_EN_CNT
;
115 if (p
->flags
& EBUS_DMA_FLAG_TCI_DISABLE
)
116 csr
|= EBDMA_CSR_TCI_DIS
;
118 writel(csr
, p
->regs
+ EBDMA_CSR
);
122 EXPORT_SYMBOL(ebus_dma_register
);
124 int ebus_dma_irq_enable(struct ebus_dma_info
*p
, int on
)
130 if (p
->flags
& EBUS_DMA_FLAG_USE_EBDMA_HANDLER
) {
131 if (request_irq(p
->irq
, ebus_dma_irq
, IRQF_SHARED
, p
->name
, p
))
135 spin_lock_irqsave(&p
->lock
, flags
);
136 csr
= readl(p
->regs
+ EBDMA_CSR
);
137 csr
|= EBDMA_CSR_INT_EN
;
138 writel(csr
, p
->regs
+ EBDMA_CSR
);
139 spin_unlock_irqrestore(&p
->lock
, flags
);
141 spin_lock_irqsave(&p
->lock
, flags
);
142 csr
= readl(p
->regs
+ EBDMA_CSR
);
143 csr
&= ~EBDMA_CSR_INT_EN
;
144 writel(csr
, p
->regs
+ EBDMA_CSR
);
145 spin_unlock_irqrestore(&p
->lock
, flags
);
147 if (p
->flags
& EBUS_DMA_FLAG_USE_EBDMA_HANDLER
) {
154 EXPORT_SYMBOL(ebus_dma_irq_enable
);
156 void ebus_dma_unregister(struct ebus_dma_info
*p
)
162 spin_lock_irqsave(&p
->lock
, flags
);
163 csr
= readl(p
->regs
+ EBDMA_CSR
);
164 if (csr
& EBDMA_CSR_INT_EN
) {
165 csr
&= ~EBDMA_CSR_INT_EN
;
166 writel(csr
, p
->regs
+ EBDMA_CSR
);
169 spin_unlock_irqrestore(&p
->lock
, flags
);
174 EXPORT_SYMBOL(ebus_dma_unregister
);
176 int ebus_dma_request(struct ebus_dma_info
*p
, dma_addr_t bus_addr
, size_t len
)
182 if (len
>= (1 << 24))
185 spin_lock_irqsave(&p
->lock
, flags
);
186 csr
= readl(p
->regs
+ EBDMA_CSR
);
188 if (!(csr
& EBDMA_CSR_EN_DMA
))
191 if (csr
& EBDMA_CSR_NA_LOADED
)
194 writel(len
, p
->regs
+ EBDMA_COUNT
);
195 writel(bus_addr
, p
->regs
+ EBDMA_ADDR
);
199 spin_unlock_irqrestore(&p
->lock
, flags
);
203 EXPORT_SYMBOL(ebus_dma_request
);
205 void ebus_dma_prepare(struct ebus_dma_info
*p
, int write
)
210 spin_lock_irqsave(&p
->lock
, flags
);
211 __ebus_dma_reset(p
, 0);
213 csr
= (EBDMA_CSR_INT_EN
|
215 EBDMA_CSR_BURST_SZ_16
|
219 csr
|= EBDMA_CSR_WRITE
;
220 if (p
->flags
& EBUS_DMA_FLAG_TCI_DISABLE
)
221 csr
|= EBDMA_CSR_TCI_DIS
;
223 writel(csr
, p
->regs
+ EBDMA_CSR
);
225 spin_unlock_irqrestore(&p
->lock
, flags
);
227 EXPORT_SYMBOL(ebus_dma_prepare
);
229 unsigned int ebus_dma_residue(struct ebus_dma_info
*p
)
231 return readl(p
->regs
+ EBDMA_COUNT
);
233 EXPORT_SYMBOL(ebus_dma_residue
);
235 unsigned int ebus_dma_addr(struct ebus_dma_info
*p
)
237 return readl(p
->regs
+ EBDMA_ADDR
);
239 EXPORT_SYMBOL(ebus_dma_addr
);
241 void ebus_dma_enable(struct ebus_dma_info
*p
, int on
)
246 spin_lock_irqsave(&p
->lock
, flags
);
247 orig_csr
= csr
= readl(p
->regs
+ EBDMA_CSR
);
249 csr
|= EBDMA_CSR_EN_DMA
;
251 csr
&= ~EBDMA_CSR_EN_DMA
;
252 if ((orig_csr
& EBDMA_CSR_EN_DMA
) !=
253 (csr
& EBDMA_CSR_EN_DMA
))
254 writel(csr
, p
->regs
+ EBDMA_CSR
);
255 spin_unlock_irqrestore(&p
->lock
, flags
);
257 EXPORT_SYMBOL(ebus_dma_enable
);