Linux 3.12.28: arch/blackfin/include/asm/dma.h
/*
 * dma.h - Blackfin DMA defines/structures/etc...
 *
 * Copyright 2004-2008 Analog Devices Inc.
 * Licensed under the GPL-2 or later.
 */
#ifndef _BLACKFIN_DMA_H_
#define _BLACKFIN_DMA_H_

#include <linux/interrupt.h>
#include <mach/dma.h>
#include <linux/atomic.h>
#include <asm/blackfin.h>
#include <asm/page.h>
#include <asm-generic/dma.h>
#include <asm/bfin_dma.h>
/*-------------------------
 * config reg bits value
 *-------------------------*/

/* mem_width (transfer word size) values for set_bfin_dma_config() */
#define DATA_SIZE_8			0
#define DATA_SIZE_16			1
#define DATA_SIZE_32			2
#ifdef CONFIG_BF60x
#define DATA_SIZE_64			3
#endif

/* flow_mode values for set_bfin_dma_config() */
#define DMA_FLOW_STOP			0
#define DMA_FLOW_AUTO			1
#ifdef CONFIG_BF60x
#define DMA_FLOW_LIST			4
#define DMA_FLOW_ARRAY			5
#define DMA_FLOW_LIST_DEMAND		6
#define DMA_FLOW_ARRAY_DEMAND		7
#else
#define DMA_FLOW_ARRAY			4
#define DMA_FLOW_SMALL			6
#define DMA_FLOW_LARGE			7
#endif

/* dma_mode (dimension) values for set_bfin_dma_config() */
#define DIMENSION_LINEAR		0
#define DIMENSION_2D			1

/* direction values for set_bfin_dma_config() */
#define DIR_READ			0
#define DIR_WRITE			1

/* intr_mode values for set_bfin_dma_config() */
#define INTR_DISABLE			0
#ifdef CONFIG_BF60x
#define INTR_ON_PERI			1
#endif
#define INTR_ON_BUF			2
#define INTR_ON_ROW			3

/* syncmode values for set_bfin_dma_config() */
#define DMA_NOSYNC_KEEP_DMA_BUF		0
#define DMA_SYNC_RESTART		1

/* DMA MMR access width, selected by the machine header */
#ifdef DMA_MMR_SIZE_32
#define DMA_MMR_SIZE_TYPE	long
#define DMA_MMR_READ		bfin_read32
#define DMA_MMR_WRITE		bfin_write32
#else
#define DMA_MMR_SIZE_TYPE	short
#define DMA_MMR_READ		bfin_read16
#define DMA_MMR_WRITE		bfin_write16
#endif
/* descriptor-array element (no next-descriptor pointer) */
struct dma_desc_array {
	unsigned long start_addr;
	unsigned DMA_MMR_SIZE_TYPE cfg;
	unsigned DMA_MMR_SIZE_TYPE x_count;
	DMA_MMR_SIZE_TYPE x_modify;
} __attribute__((packed));

/* descriptor-list element (linked via next_desc_addr) */
struct dmasg {
	void *next_desc_addr;
	unsigned long start_addr;
	unsigned DMA_MMR_SIZE_TYPE cfg;
	unsigned DMA_MMR_SIZE_TYPE x_count;
	DMA_MMR_SIZE_TYPE x_modify;
	unsigned DMA_MMR_SIZE_TYPE y_count;
	DMA_MMR_SIZE_TYPE y_modify;
} __attribute__((packed));
struct dma_register {
	void *next_desc_ptr;		/* DMA Next Descriptor Pointer register */
	unsigned long start_addr;	/* DMA Start address register */
#ifdef CONFIG_BF60x
	unsigned long cfg;		/* DMA Configuration register */

	unsigned long x_count;		/* DMA x_count register */

	long x_modify;			/* DMA x_modify register */

	unsigned long y_count;		/* DMA y_count register */

	long y_modify;			/* DMA y_modify register */

	unsigned long reserved;
	unsigned long reserved2;

	void *curr_desc_ptr;		/* DMA Current Descriptor Pointer register */
	void *prev_desc_ptr;		/* DMA previous initial Descriptor Pointer register */
	unsigned long curr_addr_ptr;	/* DMA Current Address Pointer register */
	unsigned long irq_status;	/* DMA irq status register */

	unsigned long curr_x_count;	/* DMA Current x-count register */

	unsigned long curr_y_count;	/* DMA Current y-count register */

	unsigned long reserved3;

	unsigned long bw_limit_count;		/* DMA bandwidth limit count register */
	unsigned long curr_bw_limit_count;	/* DMA Current bandwidth limit count register */
	unsigned long bw_monitor_count;		/* DMA bandwidth monitor count register */
	unsigned long curr_bw_monitor_count;	/* DMA Current bandwidth monitor count register */
#else
	unsigned short cfg;		/* DMA Configuration register */
	unsigned short dummy1;		/* padding up to the next 32-bit boundary */

	unsigned long reserved;

	unsigned short x_count;		/* DMA x_count register */
	unsigned short dummy2;

	short x_modify;			/* DMA x_modify register */
	unsigned short dummy3;

	unsigned short y_count;		/* DMA y_count register */
	unsigned short dummy4;

	short y_modify;			/* DMA y_modify register */
	unsigned short dummy5;

	void *curr_desc_ptr;		/* DMA Current Descriptor Pointer register */
	unsigned long curr_addr_ptr;	/* DMA Current Address Pointer register */
	unsigned short irq_status;	/* DMA irq status register */
	unsigned short dummy6;

	unsigned short peripheral_map;	/* DMA peripheral map register */
	unsigned short dummy7;

	unsigned short curr_x_count;	/* DMA Current x-count register */
	unsigned short dummy8;

	unsigned long reserved2;

	unsigned short curr_y_count;	/* DMA Current y-count register */
	unsigned short dummy9;

	unsigned long reserved3;
#endif
};
struct dma_channel {
	const char *device_id;
	atomic_t chan_status;
	volatile struct dma_register *regs;
	struct dmasg *sg;		/* large mode descriptor */
	unsigned int irq;
	void *data;
#ifdef CONFIG_PM
	unsigned short saved_peripheral_map;
#endif
};
#ifdef CONFIG_PM
int blackfin_dma_suspend(void);
void blackfin_dma_resume(void);
#endif
/*******************************************************************************
 *	DMA APIs
 *******************************************************************************/
extern struct dma_channel dma_ch[MAX_DMA_CHANNELS];
extern struct dma_register * const dma_io_base_addr[MAX_DMA_CHANNELS];
extern int channel2irq(unsigned int channel);
static inline void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
	dma_ch[channel].regs->start_addr = addr;
}

static inline void set_dma_next_desc_addr(unsigned int channel, void *addr)
{
	dma_ch[channel].regs->next_desc_ptr = addr;
}

static inline void set_dma_curr_desc_addr(unsigned int channel, void *addr)
{
	dma_ch[channel].regs->curr_desc_ptr = addr;
}

static inline void set_dma_x_count(unsigned int channel, unsigned DMA_MMR_SIZE_TYPE x_count)
{
	dma_ch[channel].regs->x_count = x_count;
}

static inline void set_dma_y_count(unsigned int channel, unsigned DMA_MMR_SIZE_TYPE y_count)
{
	dma_ch[channel].regs->y_count = y_count;
}

static inline void set_dma_x_modify(unsigned int channel, DMA_MMR_SIZE_TYPE x_modify)
{
	dma_ch[channel].regs->x_modify = x_modify;
}

static inline void set_dma_y_modify(unsigned int channel, DMA_MMR_SIZE_TYPE y_modify)
{
	dma_ch[channel].regs->y_modify = y_modify;
}

static inline void set_dma_config(unsigned int channel, unsigned DMA_MMR_SIZE_TYPE config)
{
	dma_ch[channel].regs->cfg = config;
}

static inline void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
	dma_ch[channel].regs->curr_addr_ptr = addr;
}
#ifdef CONFIG_BF60x
static inline unsigned long
set_bfin_dma_config2(char direction, char flow_mode, char intr_mode,
		     char dma_mode, char mem_width, char syncmode, char peri_width)
{
	unsigned long config = 0;

	switch (intr_mode) {
	case INTR_ON_BUF:
		if (dma_mode == DIMENSION_2D)
			config = DI_EN_Y;
		else
			config = DI_EN_X;
		break;
	case INTR_ON_ROW:
		config = DI_EN_X;
		break;
	case INTR_ON_PERI:
		config = DI_EN_P;
		break;
	}

	return config | (direction << 1) | (mem_width << 8) | (dma_mode << 26) |
		(flow_mode << 12) | (syncmode << 2) | (peri_width << 4);
}
#endif
static inline unsigned DMA_MMR_SIZE_TYPE
set_bfin_dma_config(char direction, char flow_mode,
		    char intr_mode, char dma_mode, char mem_width, char syncmode)
{
#ifdef CONFIG_BF60x
	return set_bfin_dma_config2(direction, flow_mode, intr_mode, dma_mode,
				    mem_width, syncmode, mem_width);
#else
	return (direction << 1) | (mem_width << 2) | (dma_mode << 4) |
		(intr_mode << 6) | (flow_mode << 12) | (syncmode << 5);
#endif
}
static inline unsigned DMA_MMR_SIZE_TYPE get_dma_curr_irqstat(unsigned int channel)
{
	return dma_ch[channel].regs->irq_status;
}

static inline unsigned DMA_MMR_SIZE_TYPE get_dma_curr_xcount(unsigned int channel)
{
	return dma_ch[channel].regs->curr_x_count;
}

static inline unsigned DMA_MMR_SIZE_TYPE get_dma_curr_ycount(unsigned int channel)
{
	return dma_ch[channel].regs->curr_y_count;
}

static inline void *get_dma_next_desc_ptr(unsigned int channel)
{
	return dma_ch[channel].regs->next_desc_ptr;
}

static inline void *get_dma_curr_desc_ptr(unsigned int channel)
{
	return dma_ch[channel].regs->curr_desc_ptr;
}

static inline unsigned DMA_MMR_SIZE_TYPE get_dma_config(unsigned int channel)
{
	return dma_ch[channel].regs->cfg;
}

static inline unsigned long get_dma_curr_addr(unsigned int channel)
{
	return dma_ch[channel].regs->curr_addr_ptr;
}
static inline void set_dma_sg(unsigned int channel, struct dmasg *sg, int ndsize)
{
	/* Make sure the internal data buffers in the core are drained
	 * so that the DMA descriptors are completely written when the
	 * DMA engine goes to fetch them below.
	 */
	SSYNC();

	dma_ch[channel].regs->next_desc_ptr = sg;
	dma_ch[channel].regs->cfg =
		(dma_ch[channel].regs->cfg & ~NDSIZE) |
		((ndsize << NDSIZE_OFFSET) & NDSIZE);
}
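
/*
 * Illustrative sketch (not part of the original header): linking two
 * struct dmasg descriptors into a chain and handing it to set_dma_sg().
 * The channel number, buffers, cfg values and ndsize below are hypothetical
 * placeholders; the real DMA_CONFIG and NDSIZE encodings are part-specific.
 */
#if 0
static struct dmasg example_chain[2];

static void example_submit_chain(unsigned int channel,
				 void *buf0, void *buf1, size_t len)
{
	/* descriptor 0: first buffer, then fetch descriptor 1 */
	example_chain[0].start_addr = (unsigned long)buf0;
	example_chain[0].x_count = len;
	example_chain[0].x_modify = 1;
	example_chain[0].cfg = 0;	/* placeholder DMA_CONFIG value */
	example_chain[0].next_desc_addr = &example_chain[1];

	/* descriptor 1: second buffer, last work unit in the chain */
	example_chain[1].start_addr = (unsigned long)buf1;
	example_chain[1].x_count = len;
	example_chain[1].x_modify = 1;
	example_chain[1].cfg = 0;	/* placeholder DMA_CONFIG value */
	example_chain[1].next_desc_addr = NULL;

	/* set_dma_sg() drains core write buffers (SSYNC) before pointing the
	 * channel at the chain, so the descriptors above are visible to the
	 * DMA engine when it fetches them. */
	set_dma_sg(channel, example_chain, 0 /* placeholder ndsize */);
	enable_dma(channel);
}
#endif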
static inline int dma_channel_active(unsigned int channel)
{
	return atomic_read(&dma_ch[channel].chan_status);
}

static inline void disable_dma(unsigned int channel)
{
	dma_ch[channel].regs->cfg &= ~DMAEN;
	SSYNC();
}

static inline void enable_dma(unsigned int channel)
{
	dma_ch[channel].regs->curr_x_count = 0;
	dma_ch[channel].regs->curr_y_count = 0;
	dma_ch[channel].regs->cfg |= DMAEN;
}
int set_dma_callback(unsigned int channel, irq_handler_t callback, void *data);

static inline void dma_disable_irq(unsigned int channel)
{
	disable_irq(dma_ch[channel].irq);
}

static inline void dma_disable_irq_nosync(unsigned int channel)
{
	disable_irq_nosync(dma_ch[channel].irq);
}

static inline void dma_enable_irq(unsigned int channel)
{
	enable_irq(dma_ch[channel].irq);
}

static inline void clear_dma_irqstat(unsigned int channel)
{
	dma_ch[channel].regs->irq_status = DMA_DONE | DMA_ERR | DMA_PIRQ;
}
void *dma_memcpy(void *dest, const void *src, size_t count);
void *dma_memcpy_nocache(void *dest, const void *src, size_t count);
void *safe_dma_memcpy(void *dest, const void *src, size_t count);
void blackfin_dma_early_init(void);
void early_dma_memcpy(void *dest, const void *src, size_t count);
void early_dma_memcpy_done(void);
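
/*
 * Illustrative usage sketch (not part of the original header): programming a
 * simple 1D stop-mode transfer with the helpers above.  The channel number,
 * buffer, chosen config bits and callback are hypothetical; a real driver
 * would also have reserved the channel beforehand.
 */
#if 0
static irqreturn_t example_dma_done(int irq, void *dev_id)
{
	unsigned int channel = (unsigned int)(unsigned long)dev_id;

	clear_dma_irqstat(channel);	/* ack the channel's status bits */
	return IRQ_HANDLED;
}

static int example_start_1d(unsigned int channel, void *buf, size_t len)
{
	int ret;

	ret = set_dma_callback(channel, example_dma_done,
			       (void *)(unsigned long)channel);
	if (ret)
		return ret;

	/* 1D, 8-bit, stop-mode transfer that interrupts when the buffer is done */
	set_dma_config(channel,
		       set_bfin_dma_config(DIR_READ, DMA_FLOW_STOP, INTR_ON_BUF,
					   DIMENSION_LINEAR, DATA_SIZE_8,
					   DMA_SYNC_RESTART));
	set_dma_start_addr(channel, (unsigned long)buf);
	set_dma_x_count(channel, len);
	set_dma_x_modify(channel, 1);

	enable_dma(channel);
	return 0;
}
#endif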
#endif