2 * drivers/usb/musb/ux500_dma.c
4 * U8500 and U5500 DMA support code
6 * Copyright (C) 2009 STMicroelectronics
7 * Copyright (C) 2011 ST-Ericsson SA
9 * Mian Yousaf Kaukab <mian.yousaf.kaukab@stericsson.com>
10 * Praveena Nadahally <praveen.nadahally@stericsson.com>
11 * Rajaram Regupathy <ragupathy.rajaram@stericsson.com>
13 * This program is free software: you can redistribute it and/or modify
14 * it under the terms of the GNU General Public License as published by
15 * the Free Software Foundation, either version 2 of the License, or
16 * (at your option) any later version.
18 * This program is distributed in the hope that it will be useful,
19 * but WITHOUT ANY WARRANTY; without even the implied warranty of
20 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 * GNU General Public License for more details.
23 * You should have received a copy of the GNU General Public License
24 * along with this program. If not, see <http://www.gnu.org/licenses/>.
27 #include <linux/device.h>
28 #include <linux/interrupt.h>
29 #include <linux/platform_device.h>
30 #include <linux/dma-mapping.h>
31 #include <linux/dmaengine.h>
32 #include <linux/pfn.h>
34 #include "musb_core.h"
/*
 * Per-endpoint DMA channel state.  Wraps the generic musb dma_channel and
 * ties it to its owning controller, the hardware endpoint it serves, the
 * dmaengine channel doing the actual transfers, and the work item used to
 * complete transfers in process context.
 * NOTE(review): further members (orig. lines 42-48; is_tx, cur_len, ch_num,
 * cookie, is_allocated are referenced elsewhere in this file) are not
 * visible in this excerpt.
 */
36 struct ux500_dma_channel
{
	/* generic musb view of this DMA channel */
37 struct dma_channel channel
;
	/* back-pointer to the owning ux500 DMA controller */
38 struct ux500_dma_controller
*controller
;
	/* musb hardware endpoint currently bound to this channel */
39 struct musb_hw_ep
*hw_ep
;
	/* deferred completion handler (ux500_tx_work / ux500_rx_work) */
40 struct work_struct channel_work
;
	/* underlying dmaengine channel */
41 struct dma_chan
*dma_chan
;
/*
 * Controller-wide state: the generic musb dma_controller plus fixed-size
 * arrays of RX and TX channel slots.
 * NOTE(review): further members (orig. lines 53-57; private_data, phy_base,
 * num_rx_channels and num_tx_channels are used elsewhere in this file) are
 * not visible in this excerpt.
 */
49 struct ux500_dma_controller
{
	/* generic musb controller interface; ops are filled in at create time */
50 struct dma_controller controller
;
	/* one slot per possible RX DMA channel */
51 struct ux500_dma_channel rx_channel
[UX500_MUSB_DMA_NUM_RX_CHANNELS
];
	/* one slot per possible TX DMA channel */
52 struct ux500_dma_channel tx_channel
[UX500_MUSB_DMA_NUM_TX_CHANNELS
];
59 /* Work function invoked from DMA callback to handle tx transfers. */
60 static void ux500_tx_work(struct work_struct
*data
)
/* Recover our channel state from the embedded work_struct. */
62 struct ux500_dma_channel
*ux500_channel
= container_of(data
,
63 struct ux500_dma_channel
, channel_work
);
64 struct musb_hw_ep
*hw_ep
= ux500_channel
->hw_ep
;
65 struct musb
*musb
= hw_ep
->musb
;
/* NOTE(review): the declaration of 'flags' (orig. line 66) is elided here. */
68 DBG(4, "DMA tx transfer done on hw_ep=%d\n", hw_ep
->epnum
);
/* musb->lock protects the channel/endpoint state updated below. */
70 spin_lock_irqsave(&musb
->lock
, flags
);
/* Report the length programmed in ux500_configure_channel as completed. */
71 ux500_channel
->channel
.actual_len
= ux500_channel
->cur_len
;
72 ux500_channel
->channel
.status
= MUSB_DMA_STATUS_FREE
;
/* Hand completion back to the musb core for this endpoint. */
73 musb_dma_completion(musb
, hw_ep
->epnum
,
74 ux500_channel
->is_tx
);
75 spin_unlock_irqrestore(&musb
->lock
, flags
);
78 /* Work function invoked from DMA callback to handle rx transfers. */
79 static void ux500_rx_work(struct work_struct
*data
)
/* Recover our channel state from the embedded work_struct. */
81 struct ux500_dma_channel
*ux500_channel
= container_of(data
,
82 struct ux500_dma_channel
, channel_work
);
83 struct musb_hw_ep
*hw_ep
= ux500_channel
->hw_ep
;
84 struct musb
*musb
= hw_ep
->musb
;
/* NOTE(review): the declaration of 'flags' (orig. line 85) is elided here. */
87 DBG(4, "DMA rx transfer done on hw_ep=%d\n", hw_ep
->epnum
);
/* musb->lock protects the channel/endpoint state updated below. */
89 spin_lock_irqsave(&musb
->lock
, flags
);
/* Report the length programmed in ux500_configure_channel as completed. */
90 ux500_channel
->channel
.actual_len
= ux500_channel
->cur_len
;
91 ux500_channel
->channel
.status
= MUSB_DMA_STATUS_FREE
;
/* Hand completion back to the musb core for this endpoint. */
92 musb_dma_completion(musb
, hw_ep
->epnum
,
93 ux500_channel
->is_tx
);
94 spin_unlock_irqrestore(&musb
->lock
, flags
);
97 void ux500_dma_callback(void *private_data
)
99 struct dma_channel
*channel
= (struct dma_channel
*)private_data
;
100 struct ux500_dma_channel
*ux500_channel
= channel
->private_data
;
102 schedule_work(&ux500_channel
->channel_work
);
105 static bool ux500_configure_channel(struct dma_channel
*channel
,
106 u16 packet_sz
, u8 mode
,
107 dma_addr_t dma_addr
, u32 len
)
109 struct ux500_dma_channel
*ux500_channel
= channel
->private_data
;
110 struct musb_hw_ep
*hw_ep
= ux500_channel
->hw_ep
;
111 struct dma_chan
*dma_chan
= ux500_channel
->dma_chan
;
112 struct dma_async_tx_descriptor
*dma_desc
;
113 enum dma_data_direction direction
;
114 struct scatterlist sg
;
115 struct dma_slave_config slave_conf
;
116 enum dma_slave_buswidth addr_width
;
117 dma_addr_t usb_fifo_addr
= (MUSB_FIFO_OFFSET(hw_ep
->epnum
) +
118 ux500_channel
->controller
->phy_base
);
120 DBG(4, "packet_sz=%d, mode=%d, dma_addr=0x%x, len=%d is_tx=%d\n",
121 packet_sz
, mode
, dma_addr
, len
, ux500_channel
->is_tx
);
123 ux500_channel
->cur_len
= len
;
125 sg_init_table(&sg
, 1);
126 sg_set_page(&sg
, pfn_to_page(PFN_DOWN(dma_addr
)), len
,
127 offset_in_page(dma_addr
));
128 sg_dma_address(&sg
) = dma_addr
;
129 sg_dma_len(&sg
) = len
;
131 direction
= ux500_channel
->is_tx
? DMA_TO_DEVICE
: DMA_FROM_DEVICE
;
132 addr_width
= (len
& 0x3) ? DMA_SLAVE_BUSWIDTH_1_BYTE
:
133 DMA_SLAVE_BUSWIDTH_4_BYTES
;
135 slave_conf
.direction
= direction
;
136 if (direction
== DMA_FROM_DEVICE
) {
137 slave_conf
.src_addr
= usb_fifo_addr
;
138 slave_conf
.src_addr_width
= addr_width
;
139 slave_conf
.src_maxburst
= 16;
141 slave_conf
.dst_addr
= usb_fifo_addr
;
142 slave_conf
.dst_addr_width
= addr_width
;
143 slave_conf
.dst_maxburst
= 16;
145 dma_chan
->device
->device_control(dma_chan
, DMA_SLAVE_CONFIG
,
146 (unsigned long) &slave_conf
);
148 dma_desc
= dma_chan
->device
->
149 device_prep_slave_sg(dma_chan
, &sg
, 1, direction
,
150 DMA_PREP_INTERRUPT
| DMA_CTRL_ACK
);
154 dma_desc
->callback
= ux500_dma_callback
;
155 dma_desc
->callback_param
= channel
;
156 ux500_channel
->cookie
= dma_desc
->tx_submit(dma_desc
);
158 dma_async_issue_pending(dma_chan
);
/*
 * musb channel_alloc hook: hand out the DMA channel statically mapped to
 * this hardware endpoint (channel index = epnum - 1), or fail if the index
 * is out of range or the channel is already in use.
 * NOTE(review): the declaration of 'max_ch' and the bodies of the two
 * early-exit ifs (presumably 'return NULL;', orig. lines 183/190) are
 * elided in this excerpt.
 */
163 static struct dma_channel
*ux500_dma_channel_allocate(struct dma_controller
*c
,
164 struct musb_hw_ep
*hw_ep
, u8 is_tx
)
166 struct ux500_dma_controller
*controller
= container_of(c
,
167 struct ux500_dma_controller
, controller
);
168 struct ux500_dma_channel
*ux500_channel
= NULL
;
	/* Endpoint number is 1-based; channel arrays are 0-based. */
169 u8 ch_num
= hw_ep
->epnum
- 1;
172 /* Max 8 DMA channels (0 - 7). Each DMA channel can only be allocated
173 * to specified hw_ep. For example DMA channel 0 can only be allocated
	/* Direction picks which (board-configured) channel count applies. */
179 max_ch
= is_tx
? controller
->num_tx_channels
:
180 controller
->num_rx_channels
;
182 if (ch_num
>= max_ch
)
185 ux500_channel
= is_tx
? &(controller
->tx_channel
[ch_num
]) :
186 &(controller
->rx_channel
[ch_num
]) ;
188 /* Check if channel is already used. */
189 if (ux500_channel
->is_allocated
)
	/* Claim the channel for this endpoint. */
192 ux500_channel
->hw_ep
= hw_ep
;
193 ux500_channel
->is_allocated
= 1;
195 DBG(7, "hw_ep=%d, is_tx=0x%x, channel=%d\n",
196 hw_ep
->epnum
, is_tx
, ch_num
);
198 return &(ux500_channel
->channel
);
/*
 * musb channel_release hook: mark the channel free again and reset its
 * bookkeeping.  The dmaengine channel itself is kept; it is only returned
 * to the dmaengine core in ux500_dma_controller_stop().
 */
201 static void ux500_dma_channel_release(struct dma_channel
*channel
)
203 struct ux500_dma_channel
*ux500_channel
= channel
->private_data
;
205 DBG(7, "channel=%d\n", ux500_channel
->ch_num
);
207 if (ux500_channel
->is_allocated
) {
208 ux500_channel
->is_allocated
= 0;
209 channel
->status
= MUSB_DMA_STATUS_FREE
;
210 channel
->actual_len
= 0;
/*
 * musb is_compatible hook: decide whether this buffer / packet-size
 * combination can be handled by DMA.  The visible condition rejects
 * packet sizes that are not a multiple of 4 bytes.
 * NOTE(review): the remaining conditions and the return statements
 * (orig. lines 218-223) are elided in this excerpt.
 */
214 static int ux500_dma_is_compatible(struct dma_channel
*channel
,
215 u16 maxpacket
, void *buf
, u32 length
)
217 if ((maxpacket
& 0x3) ||
/*
 * musb channel_program hook: validate the request, mark the channel busy
 * and hand the transfer to ux500_configure_channel().
 * NOTE(review): the declaration of 'ret', the early return after the
 * compatibility check, the condition guarding the status reset on failure
 * and the final 'return ret;' (orig. lines 229-245) are elided here.
 */
226 static int ux500_dma_channel_program(struct dma_channel
*channel
,
227 u16 packet_sz
, u8 mode
,
228 dma_addr_t dma_addr
, u32 len
)
	/* Programming a channel in an unknown or busy state is a driver bug. */
232 BUG_ON(channel
->status
== MUSB_DMA_STATUS_UNKNOWN
||
233 channel
->status
== MUSB_DMA_STATUS_BUSY
);
235 if (!ux500_dma_is_compatible(channel
, packet_sz
, (void *)dma_addr
, len
))
238 channel
->status
= MUSB_DMA_STATUS_BUSY
;
239 channel
->actual_len
= 0;
240 ret
= ux500_configure_channel(channel
, packet_sz
, mode
, dma_addr
, len
);
	/* On configure failure the channel is returned to the free state. */
242 channel
->status
= MUSB_DMA_STATUS_FREE
;
/*
 * musb channel_abort hook: if a transfer is in flight, disable DMA in the
 * endpoint's TX/RX CSR register, terminate the dmaengine transfer and mark
 * the channel free.
 * NOTE(review): the declaration of 'csr', the remaining CSR mask bits on
 * the continuation lines (orig. 262-263 / 268-269), the else keyword and
 * the final 'return 0;' are elided in this excerpt.
 */
247 static int ux500_dma_channel_abort(struct dma_channel
*channel
)
249 struct ux500_dma_channel
*ux500_channel
= channel
->private_data
;
250 struct ux500_dma_controller
*controller
= ux500_channel
->controller
;
251 struct musb
*musb
= controller
->private_data
;
	/* Register window of the endpoint bound to this channel. */
252 void __iomem
*epio
= musb
->endpoints
[ux500_channel
->hw_ep
->epnum
].regs
;
255 DBG(4, "channel=%d, is_tx=%d\n", ux500_channel
->ch_num
,
256 ux500_channel
->is_tx
);
258 if (channel
->status
== MUSB_DMA_STATUS_BUSY
) {
259 if (ux500_channel
->is_tx
) {
	/* Clear the DMA-related TX CSR bits before terminating. */
260 csr
= musb_readw(epio
, MUSB_TXCSR
);
261 csr
&= ~(MUSB_TXCSR_AUTOSET
|
264 musb_writew(epio
, MUSB_TXCSR
, csr
);
	/* RX path: clear the DMA-related RX CSR bits. */
266 csr
= musb_readw(epio
, MUSB_RXCSR
);
267 csr
&= ~(MUSB_RXCSR_AUTOCLEAR
|
270 musb_writew(epio
, MUSB_RXCSR
, csr
);
	/* Cancel the in-flight dmaengine transfer. */
273 ux500_channel
->dma_chan
->device
->
274 device_control(ux500_channel
->dma_chan
,
275 DMA_TERMINATE_ALL
, 0);
276 channel
->status
= MUSB_DMA_STATUS_FREE
;
/*
 * musb controller stop hook: release every RX and TX channel back to the
 * musb core, then return each underlying dmaengine channel to the
 * dmaengine core.  Safe to call on a partially initialised controller
 * (each dma_chan pointer is checked before release), which is why
 * ux500_dma_controller_start() uses it for error unwinding.
 * NOTE(review): the declaration of 'ch_num', loop closing braces and the
 * final 'return 0;' are elided in this excerpt.
 */
281 static int ux500_dma_controller_stop(struct dma_controller
*c
)
283 struct ux500_dma_controller
*controller
= container_of(c
,
284 struct ux500_dma_controller
, controller
);
285 struct ux500_dma_channel
*ux500_channel
;
286 struct dma_channel
*channel
;
	/* Tear down all RX channels first. */
289 for (ch_num
= 0; ch_num
< controller
->num_rx_channels
; ch_num
++) {
290 channel
= &controller
->rx_channel
[ch_num
].channel
;
291 ux500_channel
= channel
->private_data
;
293 ux500_dma_channel_release(channel
);
295 if (ux500_channel
->dma_chan
)
296 dma_release_channel(ux500_channel
->dma_chan
);
	/* Then the TX channels, symmetrically. */
299 for (ch_num
= 0; ch_num
< controller
->num_tx_channels
; ch_num
++) {
300 channel
= &controller
->tx_channel
[ch_num
].channel
;
301 ux500_channel
= channel
->private_data
;
303 ux500_dma_channel_release(channel
);
305 if (ux500_channel
->dma_chan
)
306 dma_release_channel(ux500_channel
->dma_chan
);
/*
 * musb controller start hook: validate the board-supplied channel counts,
 * then initialise every RX and TX channel slot, request a matching
 * dmaengine channel for each (using the board's per-channel filter
 * parameters) and wire up its completion work item.
 * NOTE(review): several local declarations (ch_num, dir, is_tx, ch_count,
 * param_array, the dma_cap mask), the dma_cap_zero() call, loop braces,
 * error-path returns and the final 'return 0;' are elided in this excerpt.
 */
312 static int ux500_dma_controller_start(struct dma_controller
*c
)
314 struct ux500_dma_controller
*controller
= container_of(c
,
315 struct ux500_dma_controller
, controller
);
316 struct ux500_dma_channel
*ux500_channel
= NULL
;
317 struct musb
*musb
= controller
->private_data
;
318 struct device
*dev
= musb
->controller
;
	/* Board data supplies channel counts and dmaengine filter params. */
319 struct musb_hdrc_platform_data
*plat
= dev
->platform_data
;
320 struct ux500_musb_board_data
*data
= plat
->board_data
;
321 struct dma_channel
*dma_channel
= NULL
;
	/* Points at rx_channel[] or tx_channel[] depending on the pass. */
327 struct ux500_dma_channel
*channel_array
;
	/* Per-direction completion work handler. */
329 void (*musb_channel_work
)(struct work_struct
*);
	/* Reject board configs exceeding the statically sized arrays. */
332 if ((data
->num_rx_channels
> UX500_MUSB_DMA_NUM_RX_CHANNELS
) ||
333 (data
->num_tx_channels
> UX500_MUSB_DMA_NUM_TX_CHANNELS
))
336 controller
->num_rx_channels
= data
->num_rx_channels
;
337 controller
->num_tx_channels
= data
->num_tx_channels
;
	/* We only need slave-capable dmaengine channels. */
340 dma_cap_set(DMA_SLAVE
, mask
);
342 /* Prepare the loop for RX channels */
343 channel_array
= controller
->rx_channel
;
344 ch_count
= data
->num_rx_channels
;
345 param_array
= data
->dma_rx_param_array
;
346 musb_channel_work
= ux500_rx_work
;
	/* Two passes over the same loop: dir 0 = RX, dir 1 = TX. */
348 for (dir
= 0; dir
< 2; dir
++) {
349 for (ch_num
= 0; ch_num
< ch_count
; ch_num
++) {
350 ux500_channel
= &channel_array
[ch_num
];
351 ux500_channel
->controller
= controller
;
352 ux500_channel
->ch_num
= ch_num
;
353 ux500_channel
->is_tx
= is_tx
;
355 dma_channel
= &(ux500_channel
->channel
);
356 dma_channel
->private_data
= ux500_channel
;
357 dma_channel
->status
= MUSB_DMA_STATUS_FREE
;
358 dma_channel
->max_len
= SZ_16M
;
	/* Board-specific filter parameter selects the right DMA channel. */
360 ux500_channel
->dma_chan
= dma_request_channel(mask
,
362 param_array
[ch_num
]);
363 if (!ux500_channel
->dma_chan
) {
364 ERR("Dma pipe allocation error dir=%d ch=%d\n",
367 /* Release already allocated channels */
368 ux500_dma_controller_stop(c
);
373 INIT_WORK(&ux500_channel
->channel_work
,
377 /* Prepare the loop for TX channels */
	/* Presumably executed at the end of the dir loop's first pass so the
	 * second pass covers TX — braces elided here, so verify against the
	 * full source. */
378 channel_array
= controller
->tx_channel
;
379 ch_count
= data
->num_tx_channels
;
380 param_array
= data
->dma_tx_param_array
;
381 musb_channel_work
= ux500_tx_work
;
/*
 * Tear-down counterpart of dma_controller_create(): recover our wrapper
 * structure from the generic controller pointer.
 * NOTE(review): the actual release of the controller allocation
 * (orig. line 393, presumably kfree) is elided in this excerpt.
 */
388 void dma_controller_destroy(struct dma_controller
*c
)
390 struct ux500_dma_controller
*controller
= container_of(c
,
391 struct ux500_dma_controller
, controller
);
/*
 * Allocate and initialise the ux500 DMA controller wrapper for this musb
 * instance: record the musb pointer, save the controller's physical base
 * address (needed to compute per-endpoint FIFO DMA addresses) and fill in
 * the generic dma_controller operations.  Returns the embedded generic
 * controller.
 * NOTE(review): the NULL check after kzalloc (orig. lines 404-406) is
 * elided in this excerpt; iomem is used without a visible NULL check —
 * verify against the full source.
 */
396 struct dma_controller
*__init
397 dma_controller_create(struct musb
*musb
, void __iomem
*base
)
399 struct ux500_dma_controller
*controller
;
400 struct platform_device
*pdev
= to_platform_device(musb
->controller
);
401 struct resource
*iomem
;
403 controller
= kzalloc(sizeof(*controller
), GFP_KERNEL
);
407 controller
->private_data
= musb
;
409 /* Save physical address for DMA controller. */
410 iomem
= platform_get_resource(pdev
, IORESOURCE_MEM
, 0);
411 controller
->phy_base
= (dma_addr_t
) iomem
->start
;
	/* Wire up the generic musb DMA controller operations. */
413 controller
->controller
.start
= ux500_dma_controller_start
;
414 controller
->controller
.stop
= ux500_dma_controller_stop
;
415 controller
->controller
.channel_alloc
= ux500_dma_channel_allocate
;
416 controller
->controller
.channel_release
= ux500_dma_channel_release
;
417 controller
->controller
.channel_program
= ux500_dma_channel_program
;
418 controller
->controller
.channel_abort
= ux500_dma_channel_abort
;
419 controller
->controller
.is_compatible
= ux500_dma_is_compatible
;
421 return &controller
->controller
;