// SPDX-License-Identifier: GPL-2.0+
/*
 * 8250_dma.c - DMA Engine API support for 8250.c
 *
 * Copyright (C) 2013 Intel Corporation
 */
#include <linux/tty.h>
#include <linux/tty_flip.h>
#include <linux/serial_reg.h>
#include <linux/dma-mapping.h>

#include "8250.h"
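
/*
 * TX DMA completion callback.  Runs when the dmaengine has finished
 * pushing the current chunk of the transmit circular buffer to the UART:
 * it advances the circ_buf tail, accounts the transmitted characters,
 * wakes up writers once the pending data drops below WAKEUP_CHARS, and
 * immediately tries to queue the next chunk.  If that fails, THRI is
 * re-enabled so the regular THRE interrupt path can take over
 * transmission.
 */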
static void __dma_tx_complete(void *param)
{
        struct uart_8250_port *p = param;
        struct uart_8250_dma *dma = p->dma;
        struct circ_buf *xmit = &p->port.state->xmit;
        unsigned long flags;
        int ret;

        dma_sync_single_for_cpu(dma->txchan->device->dev, dma->tx_addr,
                                UART_XMIT_SIZE, DMA_TO_DEVICE);

        spin_lock_irqsave(&p->port.lock, flags);

        dma->tx_running = 0;

        xmit->tail += dma->tx_size;
        xmit->tail &= UART_XMIT_SIZE - 1;
        p->port.icount.tx += dma->tx_size;

        if (uart_circ_chars_pending(xmit) < WAKEUP_CHARS)
                uart_write_wakeup(&p->port);

        ret = serial8250_tx_dma(p);
        if (ret) {
                p->ier |= UART_IER_THRI;
                serial_port_out(&p->port, UART_IER, p->ier);
        }

        spin_unlock_irqrestore(&p->port.lock, flags);
}
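
/*
 * RX DMA completion callback.  Queries the dmaengine for the residue of
 * the finished transfer, computes how many bytes actually landed in
 * rx_buf, pushes them into the tty flip buffer and updates the RX
 * character count.
 */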
static void __dma_rx_complete(void *param)
{
        struct uart_8250_port *p = param;
        struct uart_8250_dma *dma = p->dma;
        struct tty_port *tty_port = &p->port.state->port;
        struct dma_tx_state state;
        int count;

        dma->rx_running = 0;
        dmaengine_tx_status(dma->rxchan, dma->rx_cookie, &state);

        count = dma->rx_size - state.residue;

        tty_insert_flip_string(tty_port, dma->rx_buf, count);
        p->port.icount.rx += count;

        tty_flip_buffer_push(tty_port);
}
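
/*
 * Start a TX DMA transfer for the data currently in the transmit circular
 * buffer.  At most one descriptor is in flight at a time (tx_running),
 * and each transfer covers the contiguous run up to the end of the buffer
 * (CIRC_CNT_TO_END); wrap-around is handled by the next call from
 * __dma_tx_complete().  Returns 0 on success or when there is nothing to
 * do, and a negative error if no descriptor could be prepared, in which
 * case tx_err is set so the caller can fall back to interrupt-driven TX.
 * The 8250 core presumably calls this with the port lock held.
 */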
int serial8250_tx_dma(struct uart_8250_port *p)
{
        struct uart_8250_dma *dma = p->dma;
        struct circ_buf *xmit = &p->port.state->xmit;
        struct dma_async_tx_descriptor *desc;
        int ret;

        if (dma->tx_running)
                return 0;

        if (uart_tx_stopped(&p->port) || uart_circ_empty(xmit)) {
                /* We have been called from __dma_tx_complete() */
                serial8250_rpm_put_tx(p);
                return 0;
        }

        dma->tx_size = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE);

        desc = dmaengine_prep_slave_single(dma->txchan,
                                           dma->tx_addr + xmit->tail,
                                           dma->tx_size, DMA_MEM_TO_DEV,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc) {
                ret = -EBUSY;
                goto err;
        }

        dma->tx_running = 1;
        desc->callback = __dma_tx_complete;
        desc->callback_param = p;

        dma->tx_cookie = dmaengine_submit(desc);

        dma_sync_single_for_device(dma->txchan->device->dev, dma->tx_addr,
                                   UART_XMIT_SIZE, DMA_TO_DEVICE);

        dma_async_issue_pending(dma->txchan);
        if (dma->tx_err) {
                dma->tx_err = 0;
                if (p->ier & UART_IER_THRI) {
                        p->ier &= ~UART_IER_THRI;
                        serial_out(p, UART_IER, p->ier);
                }
        }
        return 0;
err:
        dma->tx_err = 1;
        return ret;
}
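
/*
 * Start a single RX DMA transfer of rx_size bytes into rx_buf.  Only one
 * RX descriptor is kept in flight (rx_running); the transfer is completed
 * either by the dmaengine callback or explicitly through
 * serial8250_rx_dma_flush().
 */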
int serial8250_rx_dma(struct uart_8250_port *p)
{
        struct uart_8250_dma *dma = p->dma;
        struct dma_async_tx_descriptor *desc;

        if (dma->rx_running)
                return 0;

        desc = dmaengine_prep_slave_single(dma->rxchan, dma->rx_addr,
                                           dma->rx_size, DMA_DEV_TO_MEM,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
                return -EBUSY;

        dma->rx_running = 1;
        desc->callback = __dma_rx_complete;
        desc->callback_param = p;

        dma->rx_cookie = dmaengine_submit(desc);

        dma_async_issue_pending(dma->rxchan);

        return 0;
}
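
/*
 * Flush a running RX DMA transfer: pause the channel so the residue is
 * stable, push whatever has been received so far to the tty layer via
 * __dma_rx_complete(), then terminate the transfer.  Presumably used by
 * the 8250 core (and 8250-based drivers) when the UART reports a receive
 * timeout or similar event while RX DMA is still in flight.
 */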
void serial8250_rx_dma_flush(struct uart_8250_port *p)
{
        struct uart_8250_dma *dma = p->dma;

        if (dma->rx_running) {
                dmaengine_pause(dma->rxchan);
                __dma_rx_complete(p);
                dmaengine_terminate_async(dma->rxchan);
        }
}
EXPORT_SYMBOL_GPL(serial8250_rx_dma_flush);
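
/*
 * Acquire and configure DMA resources for a port: request RX and TX slave
 * channels (by the "rx"/"tx" names or via the legacy filter function in
 * dma->fn), verify that the dmaengine driver supports the pause/terminate
 * and residue reporting this code relies on, program the default
 * single-byte slave configuration targeting the UART RX/TX registers,
 * allocate the coherent RX bounce buffer and map the tty xmit buffer for
 * TX.  Presumably called by the 8250 core during port startup when a
 * driver has attached a struct uart_8250_dma to the port.
 */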
int serial8250_request_dma(struct uart_8250_port *p)
{
        struct uart_8250_dma *dma = p->dma;
        phys_addr_t rx_dma_addr = dma->rx_dma_addr ?
                                  dma->rx_dma_addr : p->port.mapbase;
        phys_addr_t tx_dma_addr = dma->tx_dma_addr ?
                                  dma->tx_dma_addr : p->port.mapbase;
        dma_cap_mask_t mask;
        struct dma_slave_caps caps;
        int ret;

        /* Default slave configuration parameters */
        dma->rxconf.direction = DMA_DEV_TO_MEM;
        dma->rxconf.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        dma->rxconf.src_addr = rx_dma_addr + UART_RX;

        dma->txconf.direction = DMA_MEM_TO_DEV;
        dma->txconf.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        dma->txconf.dst_addr = tx_dma_addr + UART_TX;

        dma_cap_zero(mask);
        dma_cap_set(DMA_SLAVE, mask);

        /* Get a channel for RX */
        dma->rxchan = dma_request_slave_channel_compat(mask,
                                                       dma->fn, dma->rx_param,
                                                       p->port.dev, "rx");
        if (!dma->rxchan)
                return -ENODEV;

        /* 8250 rx dma requires dmaengine driver to support pause/terminate */
        ret = dma_get_slave_caps(dma->rxchan, &caps);
        if (ret)
                goto release_rx;
        if (!caps.cmd_pause || !caps.cmd_terminate ||
            caps.residue_granularity == DMA_RESIDUE_GRANULARITY_DESCRIPTOR) {
                ret = -EINVAL;
                goto release_rx;
        }

        dmaengine_slave_config(dma->rxchan, &dma->rxconf);

        /* Get a channel for TX */
        dma->txchan = dma_request_slave_channel_compat(mask,
                                                       dma->fn, dma->tx_param,
                                                       p->port.dev, "tx");
        if (!dma->txchan) {
                ret = -ENODEV;
                goto release_rx;
        }

        /* 8250 tx dma requires dmaengine driver to support terminate */
        ret = dma_get_slave_caps(dma->txchan, &caps);
        if (ret)
                goto err;
        if (!caps.cmd_terminate) {
                ret = -EINVAL;
                goto err;
        }

        dmaengine_slave_config(dma->txchan, &dma->txconf);

        /* RX buffer */
        if (!dma->rx_size)
                dma->rx_size = PAGE_SIZE;

        dma->rx_buf = dma_alloc_coherent(dma->rxchan->device->dev, dma->rx_size,
                                         &dma->rx_addr, GFP_KERNEL);
        if (!dma->rx_buf) {
                ret = -ENOMEM;
                goto err;
        }

        /* TX buffer */
        dma->tx_addr = dma_map_single(dma->txchan->device->dev,
                                      p->port.state->xmit.buf,
                                      UART_XMIT_SIZE,
                                      DMA_TO_DEVICE);
        if (dma_mapping_error(dma->txchan->device->dev, dma->tx_addr)) {
                dma_free_coherent(dma->rxchan->device->dev, dma->rx_size,
                                  dma->rx_buf, dma->rx_addr);
                ret = -ENOMEM;
                goto err;
        }

        dev_dbg_ratelimited(p->port.dev, "got both dma channels\n");

        return 0;
err:
        dma_release_channel(dma->txchan);
release_rx:
        dma_release_channel(dma->rxchan);
        return ret;
}
EXPORT_SYMBOL_GPL(serial8250_request_dma);
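
/*
 * Undo serial8250_request_dma(): terminate any in-flight transfers, free
 * the RX buffer, unmap the TX buffer and release both channels.  Safe to
 * call when no DMA was set up, since it returns early if p->dma is NULL.
 */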
void serial8250_release_dma(struct uart_8250_port *p)
{
        struct uart_8250_dma *dma = p->dma;

        if (!dma)
                return;

        /* Release RX resources */
        dmaengine_terminate_sync(dma->rxchan);
        dma_free_coherent(dma->rxchan->device->dev, dma->rx_size, dma->rx_buf,
                          dma->rx_addr);
        dma_release_channel(dma->rxchan);
        dma->rxchan = NULL;

        /* Release TX resources */
        dmaengine_terminate_sync(dma->txchan);
        dma_unmap_single(dma->txchan->device->dev, dma->tx_addr,
                         UART_XMIT_SIZE, DMA_TO_DEVICE);
        dma_release_channel(dma->txchan);
        dma->txchan = NULL;
        dma->tx_running = 0;

        dev_dbg_ratelimited(p->port.dev, "dma channels released\n");
}
EXPORT_SYMBOL_GPL(serial8250_release_dma);