/*
 * drivers/usb/musb/ux500_dma.c
 *
 * U8500 and U5500 DMA support code
 *
 * Copyright (C) 2009 STMicroelectronics
 * Copyright (C) 2011 ST-Ericsson SA
 * Authors:
 *	Mian Yousaf Kaukab <mian.yousaf.kaukab@stericsson.com>
 *	Praveena Nadahally <praveen.nadahally@stericsson.com>
 *	Rajaram Regupathy <ragupathy.rajaram@stericsson.com>
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/pfn.h>

#include "musb_core.h"
struct ux500_dma_channel {
	struct dma_channel channel;
	struct ux500_dma_controller *controller;
	struct musb_hw_ep *hw_ep;
	struct dma_chan *dma_chan;
	unsigned int cur_len;
	dma_cookie_t cookie;
	u8 ch_num;
	u8 is_tx;
	u8 is_allocated;
};

struct ux500_dma_controller {
	struct dma_controller controller;
	struct ux500_dma_channel rx_channel[UX500_MUSB_DMA_NUM_RX_CHANNELS];
	struct ux500_dma_channel tx_channel[UX500_MUSB_DMA_NUM_TX_CHANNELS];
	u32 num_rx_channels;
	u32 num_tx_channels;
	void *private_data;
	dma_addr_t phy_base;
};
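
/*
 * Overview: dma_controller_create() fills in the generic dma_controller
 * ops, ux500_dma_controller_start() requests one dmaengine slave channel
 * per ux500_dma_channel, ux500_configure_channel() programs that slave
 * channel for each individual USB transfer, and ux500_dma_callback()
 * reports completion back to the MUSB core.
 */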
/* Work function invoked from DMA callback to handle rx transfers. */
void ux500_dma_callback(void *private_data)
{
	struct dma_channel *channel = private_data;
	struct ux500_dma_channel *ux500_channel = channel->private_data;
	struct musb_hw_ep *hw_ep = ux500_channel->hw_ep;
	struct musb *musb = hw_ep->musb;
	unsigned long flags;

	dev_dbg(musb->controller, "DMA rx transfer done on hw_ep=%d\n",
		hw_ep->epnum);

	spin_lock_irqsave(&musb->lock, flags);
	ux500_channel->channel.actual_len = ux500_channel->cur_len;
	ux500_channel->channel.status = MUSB_DMA_STATUS_FREE;
	musb_dma_completion(musb, hw_ep->epnum, ux500_channel->is_tx);
	spin_unlock_irqrestore(&musb->lock, flags);
}
static bool ux500_configure_channel(struct dma_channel *channel,
				u16 packet_sz, u8 mode,
				dma_addr_t dma_addr, u32 len)
{
	struct ux500_dma_channel *ux500_channel = channel->private_data;
	struct musb_hw_ep *hw_ep = ux500_channel->hw_ep;
	struct dma_chan *dma_chan = ux500_channel->dma_chan;
	struct dma_async_tx_descriptor *dma_desc;
	enum dma_transfer_direction direction;
	struct scatterlist sg;
	struct dma_slave_config slave_conf;
	enum dma_slave_buswidth addr_width;
	dma_addr_t usb_fifo_addr = (MUSB_FIFO_OFFSET(hw_ep->epnum) +
					ux500_channel->controller->phy_base);
	struct musb *musb = ux500_channel->controller->private_data;

	dev_dbg(musb->controller,
		"packet_sz=%d, mode=%d, dma_addr=0x%x, len=%d is_tx=%d\n",
		packet_sz, mode, dma_addr, len, ux500_channel->is_tx);

	ux500_channel->cur_len = len;

	sg_init_table(&sg, 1);
	sg_set_page(&sg, pfn_to_page(PFN_DOWN(dma_addr)), len,
		    offset_in_page(dma_addr));
	sg_dma_address(&sg) = dma_addr;
	sg_dma_len(&sg) = len;

	direction = ux500_channel->is_tx ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
	addr_width = (len & 0x3) ? DMA_SLAVE_BUSWIDTH_1_BYTE :
					DMA_SLAVE_BUSWIDTH_4_BYTES;

	slave_conf.direction = direction;
	slave_conf.src_addr = usb_fifo_addr;
	slave_conf.src_addr_width = addr_width;
	slave_conf.src_maxburst = 16;
	slave_conf.dst_addr = usb_fifo_addr;
	slave_conf.dst_addr_width = addr_width;
	slave_conf.dst_maxburst = 16;

	dma_chan->device->device_control(dma_chan, DMA_SLAVE_CONFIG,
					 (unsigned long) &slave_conf);

	dma_desc = dma_chan->device->
			device_prep_slave_sg(dma_chan, &sg, 1, direction,
					     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!dma_desc)
		return false;

	dma_desc->callback = ux500_dma_callback;
	dma_desc->callback_param = channel;
	ux500_channel->cookie = dma_desc->tx_submit(dma_desc);

	dma_async_issue_pending(dma_chan);

	return true;
}
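
/*
 * Note: ux500_configure_channel() sets both src_addr and dst_addr to the
 * endpoint FIFO address; slave_conf.direction decides which side the DMA
 * engine treats as the device, so the same configuration covers RX and TX.
 * The bus width drops to single bytes whenever the length is not 32-bit
 * aligned.
 */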
static struct dma_channel *ux500_dma_channel_allocate(struct dma_controller *c,
				struct musb_hw_ep *hw_ep, u8 is_tx)
{
	struct ux500_dma_controller *controller = container_of(c,
			struct ux500_dma_controller, controller);
	struct ux500_dma_channel *ux500_channel = NULL;
	struct musb *musb = controller->private_data;
	u8 ch_num = hw_ep->epnum - 1;
	u32 max_ch;

	/* Max 8 DMA channels (0 - 7). Each DMA channel can only be allocated
	 * to specified hw_ep. For example DMA channel 0 can only be allocated
	 * to hw_ep 1 and 9.
	 */
	if (ch_num > 7)
		ch_num -= 8;

	max_ch = is_tx ? controller->num_tx_channels :
			controller->num_rx_channels;

	if (ch_num >= max_ch)
		return NULL;

	ux500_channel = is_tx ? &(controller->tx_channel[ch_num]) :
				&(controller->rx_channel[ch_num]);

	/* Check if channel is already used. */
	if (ux500_channel->is_allocated)
		return NULL;

	ux500_channel->hw_ep = hw_ep;
	ux500_channel->is_allocated = 1;

	dev_dbg(musb->controller, "hw_ep=%d, is_tx=0x%x, channel=%d\n",
		hw_ep->epnum, is_tx, ch_num);

	return &(ux500_channel->channel);
}
static void ux500_dma_channel_release(struct dma_channel *channel)
{
	struct ux500_dma_channel *ux500_channel = channel->private_data;
	struct musb *musb = ux500_channel->controller->private_data;

	dev_dbg(musb->controller, "channel=%d\n", ux500_channel->ch_num);

	if (ux500_channel->is_allocated) {
		ux500_channel->is_allocated = 0;
		channel->status = MUSB_DMA_STATUS_FREE;
		channel->actual_len = 0;
	}
}
static int ux500_dma_is_compatible(struct dma_channel *channel,
		u16 maxpacket, void *buf, u32 length)
{
	/* The DMA engine only handles 32-bit aligned, sufficiently large
	 * transfers; anything else falls back to PIO.
	 */
	if ((maxpacket & 0x3)		||
		((int)buf & 0x3)	||
		(length < 512)		||
		(length & 0x3))
		return false;
	else
		return true;
}
static int ux500_dma_channel_program(struct dma_channel *channel,
				u16 packet_sz, u8 mode,
				dma_addr_t dma_addr, u32 len)
{
	int ret;

	BUG_ON(channel->status == MUSB_DMA_STATUS_UNKNOWN ||
		channel->status == MUSB_DMA_STATUS_BUSY);

	if (!ux500_dma_is_compatible(channel, packet_sz, (void *)dma_addr, len))
		return false;

	channel->status = MUSB_DMA_STATUS_BUSY;
	channel->actual_len = 0;
	ret = ux500_configure_channel(channel, packet_sz, mode, dma_addr, len);
	if (!ret)
		channel->status = MUSB_DMA_STATUS_FREE;

	return ret;
}
static int ux500_dma_channel_abort(struct dma_channel *channel)
{
	struct ux500_dma_channel *ux500_channel = channel->private_data;
	struct ux500_dma_controller *controller = ux500_channel->controller;
	struct musb *musb = controller->private_data;
	void __iomem *epio = musb->endpoints[ux500_channel->hw_ep->epnum].regs;
	u16 csr;

	dev_dbg(musb->controller, "channel=%d, is_tx=%d\n",
		ux500_channel->ch_num, ux500_channel->is_tx);

	if (channel->status == MUSB_DMA_STATUS_BUSY) {
		if (ux500_channel->is_tx) {
			csr = musb_readw(epio, MUSB_TXCSR);
			csr &= ~(MUSB_TXCSR_AUTOSET |
				 MUSB_TXCSR_DMAENAB |
				 MUSB_TXCSR_DMAMODE);
			musb_writew(epio, MUSB_TXCSR, csr);
		} else {
			csr = musb_readw(epio, MUSB_RXCSR);
			csr &= ~(MUSB_RXCSR_AUTOCLEAR |
				 MUSB_RXCSR_DMAENAB |
				 MUSB_RXCSR_DMAMODE);
			musb_writew(epio, MUSB_RXCSR, csr);
		}

		ux500_channel->dma_chan->device->
				device_control(ux500_channel->dma_chan,
					       DMA_TERMINATE_ALL, 0);
		channel->status = MUSB_DMA_STATUS_FREE;
	}

	return 0;
}
static int ux500_dma_controller_stop(struct dma_controller *c)
{
	struct ux500_dma_controller *controller = container_of(c,
			struct ux500_dma_controller, controller);
	struct ux500_dma_channel *ux500_channel;
	struct dma_channel *channel;
	u8 ch_num;

	for (ch_num = 0; ch_num < controller->num_rx_channels; ch_num++) {
		channel = &controller->rx_channel[ch_num].channel;
		ux500_channel = channel->private_data;

		ux500_dma_channel_release(channel);

		if (ux500_channel->dma_chan)
			dma_release_channel(ux500_channel->dma_chan);
	}

	for (ch_num = 0; ch_num < controller->num_tx_channels; ch_num++) {
		channel = &controller->tx_channel[ch_num].channel;
		ux500_channel = channel->private_data;

		ux500_dma_channel_release(channel);

		if (ux500_channel->dma_chan)
			dma_release_channel(ux500_channel->dma_chan);
	}

	return 0;
}
static int ux500_dma_controller_start(struct dma_controller *c)
{
	struct ux500_dma_controller *controller = container_of(c,
			struct ux500_dma_controller, controller);
	struct ux500_dma_channel *ux500_channel = NULL;
	struct musb *musb = controller->private_data;
	struct device *dev = musb->controller;
	struct musb_hdrc_platform_data *plat = dev->platform_data;
	struct ux500_musb_board_data *data = plat->board_data;
	struct dma_channel *dma_channel = NULL;
	u32 ch_num;
	u8 dir;
	u8 is_tx = 0;

	void **param_array;
	struct ux500_dma_channel *channel_array;
	u32 ch_count;
	dma_cap_mask_t mask;

	if ((data->num_rx_channels > UX500_MUSB_DMA_NUM_RX_CHANNELS) ||
		(data->num_tx_channels > UX500_MUSB_DMA_NUM_TX_CHANNELS))
		return -EINVAL;

	controller->num_rx_channels = data->num_rx_channels;
	controller->num_tx_channels = data->num_tx_channels;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/* Prepare the loop for RX channels */
	channel_array = controller->rx_channel;
	ch_count = data->num_rx_channels;
	param_array = data->dma_rx_param_array;

	for (dir = 0; dir < 2; dir++) {
		for (ch_num = 0; ch_num < ch_count; ch_num++) {
			ux500_channel = &channel_array[ch_num];
			ux500_channel->controller = controller;
			ux500_channel->ch_num = ch_num;
			ux500_channel->is_tx = is_tx;

			dma_channel = &(ux500_channel->channel);
			dma_channel->private_data = ux500_channel;
			dma_channel->status = MUSB_DMA_STATUS_FREE;
			dma_channel->max_len = SZ_16M;

			ux500_channel->dma_chan = dma_request_channel(mask,
							data->dma_filter,
							param_array[ch_num]);
			if (!ux500_channel->dma_chan) {
				ERR("Dma pipe allocation error dir=%d ch=%d\n",
					dir, ch_num);

				/* Release already allocated channels */
				ux500_dma_controller_stop(c);

				return -EBUSY;
			}
		}

		/* Prepare the loop for TX channels */
		channel_array = controller->tx_channel;
		ch_count = data->num_tx_channels;
		param_array = data->dma_tx_param_array;
		is_tx = 1;
	}

	return 0;
}
void dma_controller_destroy(struct dma_controller *c)
{
	struct ux500_dma_controller *controller = container_of(c,
			struct ux500_dma_controller, controller);

	kfree(controller);
}
struct dma_controller *__init
dma_controller_create(struct musb *musb, void __iomem *base)
{
	struct ux500_dma_controller *controller;
	struct platform_device *pdev = to_platform_device(musb->controller);
	struct resource *iomem;

	controller = kzalloc(sizeof(*controller), GFP_KERNEL);
	if (!controller)
		return NULL;

	controller->private_data = musb;

	/* Save physical address for DMA controller. */
	iomem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	controller->phy_base = (dma_addr_t) iomem->start;

	controller->controller.start = ux500_dma_controller_start;
	controller->controller.stop = ux500_dma_controller_stop;
	controller->controller.channel_alloc = ux500_dma_channel_allocate;
	controller->controller.channel_release = ux500_dma_channel_release;
	controller->controller.channel_program = ux500_dma_channel_program;
	controller->controller.channel_abort = ux500_dma_channel_abort;
	controller->controller.is_compatible = ux500_dma_is_compatible;

	return &controller->controller;
}