/*
 * Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */
#include <linux/dmaengine.h>
#include <crypto/scatterwalk.h>

#include "dma.h"
19 int qce_dma_request(struct device
*dev
, struct qce_dma_data
*dma
)
23 dma
->txchan
= dma_request_slave_channel_reason(dev
, "tx");
24 if (IS_ERR(dma
->txchan
))
25 return PTR_ERR(dma
->txchan
);
27 dma
->rxchan
= dma_request_slave_channel_reason(dev
, "rx");
28 if (IS_ERR(dma
->rxchan
)) {
29 ret
= PTR_ERR(dma
->rxchan
);
33 dma
->result_buf
= kmalloc(QCE_RESULT_BUF_SZ
+ QCE_IGNORE_BUF_SZ
,
35 if (!dma
->result_buf
) {
40 dma
->ignore_buf
= dma
->result_buf
+ QCE_RESULT_BUF_SZ
;
44 dma_release_channel(dma
->rxchan
);
46 dma_release_channel(dma
->txchan
);
50 void qce_dma_release(struct qce_dma_data
*dma
)
52 dma_release_channel(dma
->txchan
);
53 dma_release_channel(dma
->rxchan
);
54 kfree(dma
->result_buf
);
58 qce_sgtable_add(struct sg_table
*sgt
, struct scatterlist
*new_sgl
)
60 struct scatterlist
*sg
= sgt
->sgl
, *sg_last
= NULL
;
69 return ERR_PTR(-EINVAL
);
71 while (new_sgl
&& sg
) {
72 sg_set_page(sg
, sg_page(new_sgl
), new_sgl
->length
,
76 new_sgl
= sg_next(new_sgl
);
82 static int qce_dma_prep_sg(struct dma_chan
*chan
, struct scatterlist
*sg
,
83 int nents
, unsigned long flags
,
84 enum dma_transfer_direction dir
,
85 dma_async_tx_callback cb
, void *cb_param
)
87 struct dma_async_tx_descriptor
*desc
;
93 desc
= dmaengine_prep_slave_sg(chan
, sg
, nents
, dir
, flags
);
98 desc
->callback_param
= cb_param
;
99 cookie
= dmaengine_submit(desc
);
101 return dma_submit_error(cookie
);
104 int qce_dma_prep_sgs(struct qce_dma_data
*dma
, struct scatterlist
*rx_sg
,
105 int rx_nents
, struct scatterlist
*tx_sg
, int tx_nents
,
106 dma_async_tx_callback cb
, void *cb_param
)
108 struct dma_chan
*rxchan
= dma
->rxchan
;
109 struct dma_chan
*txchan
= dma
->txchan
;
110 unsigned long flags
= DMA_PREP_INTERRUPT
| DMA_CTRL_ACK
;
113 ret
= qce_dma_prep_sg(rxchan
, rx_sg
, rx_nents
, flags
, DMA_MEM_TO_DEV
,
118 return qce_dma_prep_sg(txchan
, tx_sg
, tx_nents
, flags
, DMA_DEV_TO_MEM
,
122 void qce_dma_issue_pending(struct qce_dma_data
*dma
)
124 dma_async_issue_pending(dma
->rxchan
);
125 dma_async_issue_pending(dma
->txchan
);
128 int qce_dma_terminate_all(struct qce_dma_data
*dma
)
132 ret
= dmaengine_terminate_all(dma
->rxchan
);
133 return ret
?: dmaengine_terminate_all(dma
->txchan
);