/*
 * Header file for the Atmel AHB DMA Controller driver
 *
 * Copyright (C) 2008 Atmel Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#ifndef AT_HDMAC_REGS_H
#define AT_HDMAC_REGS_H

#include <linux/platform_data/dma-atmel.h>

#define AT_DMA_MAX_NR_CHANNELS 8
#define AT_DMA_GCFG 0x00 /* Global Configuration Register */
#define AT_DMA_IF_BIGEND(i) (0x1 << (i)) /* AHB-Lite Interface i in Big-endian mode */
#define AT_DMA_ARB_CFG (0x1 << 4) /* Arbiter mode. */
#define AT_DMA_ARB_CFG_FIXED (0x0 << 4)
#define AT_DMA_ARB_CFG_ROUND_ROBIN (0x1 << 4)

#define AT_DMA_EN 0x04 /* Controller Enable Register */
#define AT_DMA_ENABLE (0x1 << 0)

#define AT_DMA_SREQ 0x08 /* Software Single Request Register */
#define AT_DMA_SSREQ(x) (0x1 << ((x) << 1)) /* Request a source single transfer on channel x */
#define AT_DMA_DSREQ(x) (0x1 << (1 + ((x) << 1))) /* Request a destination single transfer on channel x */
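/*
 * Note: in the software request/flag registers (AT_DMA_SREQ, AT_DMA_CREQ,
 * AT_DMA_LAST) each channel x owns a pair of adjacent bits, as the helper
 * macros encode: bit (2 * x) acts on the source side and bit (2 * x + 1)
 * on the destination side.
 */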
#define AT_DMA_CREQ 0x0C /* Software Chunk Transfer Request Register */
#define AT_DMA_SCREQ(x) (0x1 << ((x) << 1)) /* Request a source chunk transfer on channel x */
#define AT_DMA_DCREQ(x) (0x1 << (1 + ((x) << 1))) /* Request a destination chunk transfer on channel x */

#define AT_DMA_LAST 0x10 /* Software Last Transfer Flag Register */
#define AT_DMA_SLAST(x) (0x1 << ((x) << 1)) /* This src rq is last tx of buffer on channel x */
#define AT_DMA_DLAST(x) (0x1 << (1 + ((x) << 1))) /* This dst rq is last tx of buffer on channel x */

#define AT_DMA_SYNC 0x14 /* Request Synchronization Register */
#define AT_DMA_SYR(h) (0x1 << (h)) /* Synchronize handshake line h */

/* Error, Chained Buffer transfer completed and Buffer transfer completed Interrupt registers */
#define AT_DMA_EBCIER 0x18 /* Enable register */
#define AT_DMA_EBCIDR 0x1C /* Disable register */
#define AT_DMA_EBCIMR 0x20 /* Mask Register */
#define AT_DMA_EBCISR 0x24 /* Status Register */
#define AT_DMA_CBTC_OFFSET 8
#define AT_DMA_ERR_OFFSET 16
#define AT_DMA_BTC(x) (0x1 << (x))
#define AT_DMA_CBTC(x) (0x1 << (AT_DMA_CBTC_OFFSET + (x)))
#define AT_DMA_ERR(x) (0x1 << (AT_DMA_ERR_OFFSET + (x)))
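/*
 * Illustrative use of the EBCIxR bit helpers (a sketch, not a definition
 * from this header): after reading the status register, a driver could
 * test a channel's completion and error bits like this:
 *
 *	u32 status = dma_readl(atdma, EBCISR);
 *	if (status & AT_DMA_BTC(chan_id))
 *		... buffer transfer completed on this channel ...
 *	if (status & AT_DMA_ERR(chan_id))
 *		... transfer error reported on this channel ...
 */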
#define AT_DMA_CHER 0x28 /* Channel Handler Enable Register */
#define AT_DMA_ENA(x) (0x1 << (x))
#define AT_DMA_SUSP(x) (0x1 << ( 8 + (x)))
#define AT_DMA_KEEP(x) (0x1 << (24 + (x)))

#define AT_DMA_CHDR 0x2C /* Channel Handler Disable Register */
#define AT_DMA_DIS(x) (0x1 << (x))
#define AT_DMA_RES(x) (0x1 << ( 8 + (x)))

#define AT_DMA_CHSR 0x30 /* Channel Handler Status Register */
#define AT_DMA_EMPT(x) (0x1 << (16 + (x)))
#define AT_DMA_STAL(x) (0x1 << (24 + (x)))

#define AT_DMA_CH_REGS_BASE 0x3C /* Channel registers base address */
#define ch_regs(x) (AT_DMA_CH_REGS_BASE + (x) * 0x28) /* Channel x base addr */
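/*
 * Each channel owns a 0x28-byte register window starting at
 * AT_DMA_CH_REGS_BASE. For example, channel 2's window starts at
 * 0x3C + 2 * 0x28 = 0x8C, and its source address register sits at that
 * base plus ATC_SADDR_OFFSET (defined below).
 */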
/* Hardware register offset for each channel */
#define ATC_SADDR_OFFSET 0x00 /* Source Address Register */
#define ATC_DADDR_OFFSET 0x04 /* Destination Address Register */
#define ATC_DSCR_OFFSET 0x08 /* Descriptor Address Register */
#define ATC_CTRLA_OFFSET 0x0C /* Control A Register */
#define ATC_CTRLB_OFFSET 0x10 /* Control B Register */
#define ATC_CFG_OFFSET 0x14 /* Configuration Register */
#define ATC_SPIP_OFFSET 0x18 /* Src PIP Configuration Register */
#define ATC_DPIP_OFFSET 0x1C /* Dst PIP Configuration Register */

/* Bitfield definitions */

/* Bitfields in DSCR */
#define ATC_DSCR_IF(i) (0x3 & (i)) /* Dsc fetched via AHB-Lite Interface i */
/* Bitfields in CTRLA */
#define ATC_BTSIZE_MAX 0xFFFFUL /* Maximum Buffer Transfer Size */
#define ATC_BTSIZE(x) (ATC_BTSIZE_MAX & (x)) /* Buffer Transfer Size */
#define ATC_SCSIZE_MASK (0x7 << 16) /* Source Chunk Transfer Size */
#define ATC_SCSIZE(x) (ATC_SCSIZE_MASK & ((x) << 16))
#define ATC_SCSIZE_1 (0x0 << 16)
#define ATC_SCSIZE_4 (0x1 << 16)
#define ATC_SCSIZE_8 (0x2 << 16)
#define ATC_SCSIZE_16 (0x3 << 16)
#define ATC_SCSIZE_32 (0x4 << 16)
#define ATC_SCSIZE_64 (0x5 << 16)
#define ATC_SCSIZE_128 (0x6 << 16)
#define ATC_SCSIZE_256 (0x7 << 16)
#define ATC_DCSIZE_MASK (0x7 << 20) /* Destination Chunk Transfer Size */
#define ATC_DCSIZE(x) (ATC_DCSIZE_MASK & ((x) << 20))
#define ATC_DCSIZE_1 (0x0 << 20)
#define ATC_DCSIZE_4 (0x1 << 20)
#define ATC_DCSIZE_8 (0x2 << 20)
#define ATC_DCSIZE_16 (0x3 << 20)
#define ATC_DCSIZE_32 (0x4 << 20)
#define ATC_DCSIZE_64 (0x5 << 20)
#define ATC_DCSIZE_128 (0x6 << 20)
#define ATC_DCSIZE_256 (0x7 << 20)
#define ATC_SRC_WIDTH_MASK (0x3 << 24) /* Source Single Transfer Size */
#define ATC_SRC_WIDTH(x) ((x) << 24)
#define ATC_SRC_WIDTH_BYTE (0x0 << 24)
#define ATC_SRC_WIDTH_HALFWORD (0x1 << 24)
#define ATC_SRC_WIDTH_WORD (0x2 << 24)
#define ATC_DST_WIDTH_MASK (0x3 << 28) /* Destination Single Transfer Size */
#define ATC_DST_WIDTH(x) ((x) << 28)
#define ATC_DST_WIDTH_BYTE (0x0 << 28)
#define ATC_DST_WIDTH_HALFWORD (0x1 << 28)
#define ATC_DST_WIDTH_WORD (0x2 << 28)
#define ATC_DONE (0x1 << 31) /* Tx Done (only written back in descriptor) */
/* Bitfields in CTRLB */
#define ATC_SIF(i) (0x3 & (i)) /* Src tx done via AHB-Lite Interface i */
#define ATC_DIF(i) ((0x3 & (i)) << 4) /* Dst tx done via AHB-Lite Interface i */
/* Specify AHB interfaces */
#define AT_DMA_MEM_IF 0 /* interface 0 as memory interface */
#define AT_DMA_PER_IF 1 /* interface 1 as peripheral interface */

#define ATC_SRC_PIP (0x1 << 8) /* Source Picture-in-Picture enabled */
#define ATC_DST_PIP (0x1 << 12) /* Destination Picture-in-Picture enabled */
#define ATC_SRC_DSCR_DIS (0x1 << 16) /* Src Descriptor fetch disable */
#define ATC_DST_DSCR_DIS (0x1 << 20) /* Dst Descriptor fetch disable */
#define ATC_FC_MASK (0x7 << 21) /* Choose Flow Controller */
#define ATC_FC_MEM2MEM (0x0 << 21) /* Mem-to-Mem (DMA) */
#define ATC_FC_MEM2PER (0x1 << 21) /* Mem-to-Periph (DMA) */
#define ATC_FC_PER2MEM (0x2 << 21) /* Periph-to-Mem (DMA) */
#define ATC_FC_PER2PER (0x3 << 21) /* Periph-to-Periph (DMA) */
#define ATC_FC_PER2MEM_PER (0x4 << 21) /* Periph-to-Mem (Peripheral) */
#define ATC_FC_MEM2PER_PER (0x5 << 21) /* Mem-to-Periph (Peripheral) */
#define ATC_FC_PER2PER_SRCPER (0x6 << 21) /* Periph-to-Periph (Src Peripheral) */
#define ATC_FC_PER2PER_DSTPER (0x7 << 21) /* Periph-to-Periph (Dst Peripheral) */
#define ATC_SRC_ADDR_MODE_MASK (0x3 << 24)
#define ATC_SRC_ADDR_MODE_INCR (0x0 << 24) /* Incrementing Mode */
#define ATC_SRC_ADDR_MODE_DECR (0x1 << 24) /* Decrementing Mode */
#define ATC_SRC_ADDR_MODE_FIXED (0x2 << 24) /* Fixed Mode */
#define ATC_DST_ADDR_MODE_MASK (0x3 << 28)
#define ATC_DST_ADDR_MODE_INCR (0x0 << 28) /* Incrementing Mode */
#define ATC_DST_ADDR_MODE_DECR (0x1 << 28) /* Decrementing Mode */
#define ATC_DST_ADDR_MODE_FIXED (0x2 << 28) /* Fixed Mode */
#define ATC_IEN (0x1 << 30) /* BTC interrupt enable (active low) */
#define ATC_AUTO (0x1 << 31) /* Auto multiple buffer tx enable */
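/*
 * Illustrative CTRLB composition (a sketch only, not taken from this
 * header): a memory-to-peripheral slave transfer with the DMA controller
 * acting as flow controller could combine the fields above as:
 *
 *	u32 ctrlb = ATC_SIF(AT_DMA_MEM_IF) | ATC_DIF(AT_DMA_PER_IF)
 *		  | ATC_FC_MEM2PER
 *		  | ATC_SRC_ADDR_MODE_INCR
 *		  | ATC_DST_ADDR_MODE_FIXED;
 */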
/* Bitfields in CFG */
/* are in linux/platform_data/dma-atmel.h, included above */

/* Bitfields in SPIP */
#define ATC_SPIP_HOLE(x) (0xFFFFU & (x))
#define ATC_SPIP_BOUNDARY(x) ((0x3FF & (x)) << 16)

/* Bitfields in DPIP */
#define ATC_DPIP_HOLE(x) (0xFFFFU & (x))
#define ATC_DPIP_BOUNDARY(x) ((0x3FF & (x)) << 16)

/*--  descriptors  -----------------------------------------------------*/

/* LLI == Linked List Item; aka DMA buffer descriptor */
struct at_lli {
	/* values that are not changed by hardware */
	dma_addr_t saddr;
	dma_addr_t daddr;
	/* value that may get written back: */
	u32 ctrla;
	/* more values that are not changed by hardware */
	u32 ctrlb;
	dma_addr_t dscr; /* chain to next lli */
};
/**
 * struct at_desc - software descriptor
 * @lli: hardware lli structure
 * @txd: support for the async_tx api
 * @desc_node: node on the channel descriptors list
 * @len: descriptor byte count
 * @tx_width: transfer width
 * @total_len: total transaction byte count
 */
struct at_desc {
	/* FIRST values the hardware uses */
	struct at_lli lli;

	/* THEN values for driver housekeeping */
	struct list_head tx_list;
	struct dma_async_tx_descriptor txd;
	struct list_head desc_node;
	size_t len;
	u32 tx_width;
	size_t total_len;

	/* Interleaved data */
	size_t boundary;
	size_t dst_hole;
	size_t src_hole;
};

static inline struct at_desc *
txd_to_at_desc(struct dma_async_tx_descriptor *txd)
{
	return container_of(txd, struct at_desc, txd);
}

/*--  Channels  --------------------------------------------------------*/

/**
 * atc_status - information bits stored in channel status flag
 *
 * Manipulated with atomic operations.
 */
enum atc_status {
	ATC_IS_ERROR = 0,
	ATC_IS_PAUSED = 1,
	ATC_IS_CYCLIC = 24,
};
/**
 * struct at_dma_chan - internal representation of an Atmel HDMAC channel
 * @chan_common: common dmaengine channel object members
 * @device: parent device
 * @ch_regs: memory mapped register base
 * @mask: channel index in a mask
 * @per_if: peripheral interface
 * @mem_if: memory interface
 * @status: transmit status information from irq/prep* functions
 *	to tasklet (use atomic operations)
 * @tasklet: bottom half to finish transaction work
 * @save_cfg: configuration register that is saved on suspend/resume cycle
 * @save_dscr: for cyclic operations, preserve next descriptor address in
 *	the cyclic list on suspend/resume cycle
 * @dma_sconfig: configuration for slave transfers, passed via
 *	.device_config
 * @lock: serializes enqueue/dequeue operations to descriptors lists
 * @active_list: list of descriptors the dmaengine is running on
 * @queue: list of descriptors ready to be submitted to engine
 * @free_list: list of descriptors usable by the channel
 * @descs_allocated: records the actual size of the descriptor pool
 */
struct at_dma_chan {
	struct dma_chan chan_common;
	struct at_dma *device;
	void __iomem *ch_regs;
	u8 mask;
	u8 per_if;
	u8 mem_if;
	unsigned long status;
	struct tasklet_struct tasklet;
	u32 save_cfg;
	u32 save_dscr;
	struct dma_slave_config dma_sconfig;

	spinlock_t lock;

	/* these other elements are all protected by lock */
	struct list_head active_list;
	struct list_head queue;
	struct list_head free_list;
	unsigned int descs_allocated;
};

#define channel_readl(atchan, name) \
	__raw_readl((atchan)->ch_regs + ATC_##name##_OFFSET)

#define channel_writel(atchan, name, val) \
	__raw_writel((val), (atchan)->ch_regs + ATC_##name##_OFFSET)
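/*
 * Illustrative use of the channel register accessors (sketch only):
 * "name" is the register suffix, so e.g.
 *
 *	channel_writel(atchan, SADDR, desc->lli.saddr);
 *	u32 ctrla = channel_readl(atchan, CTRLA);
 *
 * expand to accesses at ch_regs + ATC_SADDR_OFFSET and
 * ch_regs + ATC_CTRLA_OFFSET respectively.
 */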

static inline struct at_dma_chan *to_at_dma_chan(struct dma_chan *dchan)
{
	return container_of(dchan, struct at_dma_chan, chan_common);
}

/*
 * Fix sconfig's burst size according to at_hdmac. We need to convert them as:
 * 1 -> 0, 4 -> 1, 8 -> 2, 16 -> 3, 32 -> 4, 64 -> 5, 128 -> 6, 256 -> 7.
 *
 * This can be done by finding most significant bit set.
 */
static inline void convert_burst(u32 *maxburst)
{
	if (*maxburst > 1)
		*maxburst = fls(*maxburst) - 2;
	else
		*maxburst = 0;
}
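/*
 * Worked example (illustrative): for *maxburst == 16, fls(16) == 5, so the
 * encoded value becomes 5 - 2 = 3, which matches the ATC_SCSIZE_16 /
 * ATC_DCSIZE_16 chunk size encoding above.
 */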

/*
 * Fix sconfig's bus width according to at_hdmac.
 * 1 byte -> 0, 2 bytes -> 1, 4 bytes -> 2.
 */
static inline u8 convert_buswidth(enum dma_slave_buswidth addr_width)
{
	switch (addr_width) {
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
		return 1;
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
		return 2;
	default:
		/* For 1 byte width or fallback */
		return 0;
	}
}
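/*
 * Note (illustrative): the value returned by convert_buswidth() is the raw
 * width encoding used by the CTRLA fields, so it can for example be fed to
 * ATC_SRC_WIDTH(x) or ATC_DST_WIDTH(x) defined above.
 */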

/*--  Controller  ------------------------------------------------------*/

/**
 * struct at_dma - internal representation of an Atmel HDMA Controller
 * @dma_common: common dmaengine dma_device object members
 * @regs: memory mapped register base
 * @clk: dma controller clock
 * @save_imr: interrupt mask register that is saved on suspend/resume cycle
 * @all_chan_mask: all channels available in a mask
 * @dma_desc_pool: base of DMA descriptor region (DMA address)
 * @chan: channels table to store at_dma_chan structures
 */
struct at_dma {
	struct dma_device dma_common;
	void __iomem *regs;
	struct clk *clk;
	u32 save_imr;

	u8 all_chan_mask;

	struct dma_pool *dma_desc_pool;
	/* AT THE END channels table */
	struct at_dma_chan chan[0];
};

#define dma_readl(atdma, name) \
	__raw_readl((atdma)->regs + AT_DMA_##name)
#define dma_writel(atdma, name, val) \
	__raw_writel((val), (atdma)->regs + AT_DMA_##name)
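/*
 * Illustrative use of the global register accessors (sketch only):
 *
 *	dma_writel(atdma, EN, AT_DMA_ENABLE);	(enable the controller)
 *	u32 chsr = dma_readl(atdma, CHSR);	(read channel status)
 *
 * "name" is concatenated to AT_DMA_, so EN and CHSR resolve to the
 * AT_DMA_EN and AT_DMA_CHSR offsets defined above.
 */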

static inline struct at_dma *to_at_dma(struct dma_device *ddev)
{
	return container_of(ddev, struct at_dma, dma_common);
}

/*--  Helper functions  ------------------------------------------------*/

static struct device *chan2dev(struct dma_chan *chan)
{
	return &chan->dev->device;
}

#if defined(VERBOSE_DEBUG)
static void vdbg_dump_regs(struct at_dma_chan *atchan)
{
	struct at_dma *atdma = to_at_dma(atchan->chan_common.device);

	dev_err(chan2dev(&atchan->chan_common),
		"  channel %d : imr = 0x%x, chsr = 0x%x\n",
		atchan->chan_common.chan_id,
		dma_readl(atdma, EBCIMR),
		dma_readl(atdma, CHSR));

	dev_err(chan2dev(&atchan->chan_common),
		"  channel: s0x%x d0x%x ctrl0x%x:0x%x cfg0x%x l0x%x\n",
		channel_readl(atchan, SADDR),
		channel_readl(atchan, DADDR),
		channel_readl(atchan, CTRLA),
		channel_readl(atchan, CTRLB),
		channel_readl(atchan, CFG),
		channel_readl(atchan, DSCR));
}
#else
static void vdbg_dump_regs(struct at_dma_chan *atchan) {}
#endif

static void atc_dump_lli(struct at_dma_chan *atchan, struct at_lli *lli)
{
	dev_crit(chan2dev(&atchan->chan_common),
		 "  desc: s0x%x d0x%x ctrl0x%x:0x%x l0x%x\n",
		 lli->saddr, lli->daddr,
		 lli->ctrla, lli->ctrlb, lli->dscr);
}

static void atc_setup_irq(struct at_dma *atdma, int chan_id, int on)
{
	u32 ebci;

	/* enable interrupts on buffer transfer completion & error */
	ebci = AT_DMA_BTC(chan_id)
	     | AT_DMA_ERR(chan_id);
	if (on)
		dma_writel(atdma, EBCIER, ebci);
	else
		dma_writel(atdma, EBCIDR, ebci);
}

static void atc_enable_chan_irq(struct at_dma *atdma, int chan_id)
{
	atc_setup_irq(atdma, chan_id, 1);
}

static void atc_disable_chan_irq(struct at_dma *atdma, int chan_id)
{
	atc_setup_irq(atdma, chan_id, 0);
}

/**
 * atc_chan_is_enabled - test if given channel is enabled
 * @atchan: channel we want to test status
 */
static inline int atc_chan_is_enabled(struct at_dma_chan *atchan)
{
	struct at_dma *atdma = to_at_dma(atchan->chan_common.device);

	return !!(dma_readl(atdma, CHSR) & atchan->mask);
}

/**
 * atc_chan_is_paused - test channel pause/resume status
 * @atchan: channel we want to test status
 */
static inline int atc_chan_is_paused(struct at_dma_chan *atchan)
{
	return test_bit(ATC_IS_PAUSED, &atchan->status);
}

/**
 * atc_chan_is_cyclic - test if given channel has cyclic property set
 * @atchan: channel we want to test status
 */
static inline int atc_chan_is_cyclic(struct at_dma_chan *atchan)
{
	return test_bit(ATC_IS_CYCLIC, &atchan->status);
}

/**
 * set_desc_eol - set end-of-link to descriptor so it will end transfer
 * @desc: descriptor, single or at the end of a chain, to end chain on
 */
static void set_desc_eol(struct at_desc *desc)
{
	u32 ctrlb = desc->lli.ctrlb;

	ctrlb &= ~ATC_IEN;
	ctrlb |= ATC_SRC_DSCR_DIS | ATC_DST_DSCR_DIS;

	desc->lli.ctrlb = ctrlb;
	desc->lli.dscr = 0;
}

#endif /* AT_HDMAC_REGS_H */