// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
 */

#include <linux/dmaengine.h>
#include <crypto/scatterwalk.h>

#include "dma.h"
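
/*
 * Acquire the "tx" and "rx" DMA slave channels and allocate a single
 * buffer holding the result area followed by the scratch area for
 * ignored engine output. On failure, release whatever was already
 * acquired before returning the error.
 */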
int qce_dma_request(struct device *dev, struct qce_dma_data *dma)
{
	int ret;

	dma->txchan = dma_request_chan(dev, "tx");
	if (IS_ERR(dma->txchan))
		return PTR_ERR(dma->txchan);

	dma->rxchan = dma_request_chan(dev, "rx");
	if (IS_ERR(dma->rxchan)) {
		ret = PTR_ERR(dma->rxchan);
		goto error_rx;
	}

	dma->result_buf = kmalloc(QCE_RESULT_BUF_SZ + QCE_IGNORE_BUF_SZ,
				  GFP_KERNEL);
	if (!dma->result_buf) {
		ret = -ENOMEM;
		goto error_nomem;
	}

	dma->ignore_buf = dma->result_buf + QCE_RESULT_BUF_SZ;

	return 0;
error_nomem:
	dma_release_channel(dma->rxchan);
error_rx:
	dma_release_channel(dma->txchan);
	return ret;
}
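
/* Release both DMA channels and free the combined result/ignore buffer. */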
void qce_dma_release(struct qce_dma_data *dma)
{
	dma_release_channel(dma->txchan);
	dma_release_channel(dma->rxchan);
	kfree(dma->result_buf);
}
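
/*
 * Append entries from @new_sgl into the first unused slots of @sgt,
 * copying at most @max_len bytes in total. Returns the last entry
 * written, or ERR_PTR(-EINVAL) if the table has no free slot.
 */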
struct scatterlist *
qce_sgtable_add(struct sg_table *sgt, struct scatterlist *new_sgl,
		unsigned int max_len)
{
	struct scatterlist *sg = sgt->sgl, *sg_last = NULL;
	unsigned int new_len;

	/* Skip past the entries that are already populated. */
	while (sg) {
		if (!sg_page(sg))
			break;
		sg = sg_next(sg);
	}

	if (!sg)
		return ERR_PTR(-EINVAL);

	while (new_sgl && sg && max_len) {
		new_len = new_sgl->length > max_len ? max_len : new_sgl->length;
		sg_set_page(sg, sg_page(new_sgl), new_len, new_sgl->offset);
		sg_last = sg;
		sg = sg_next(sg);
		new_sgl = sg_next(new_sgl);
		max_len -= new_len;
	}

	return sg_last;
}
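
/*
 * Prepare a slave-sg descriptor for one channel, attach the completion
 * callback and submit it to the dmaengine framework.
 */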
static int qce_dma_prep_sg(struct dma_chan *chan, struct scatterlist *sg,
			   int nents, unsigned long flags,
			   enum dma_transfer_direction dir,
			   dma_async_tx_callback cb, void *cb_param)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;

	if (!sg || !nents)
		return -EINVAL;

	desc = dmaengine_prep_slave_sg(chan, sg, nents, dir, flags);
	if (!desc)
		return -EINVAL;

	desc->callback = cb;
	desc->callback_param = cb_param;
	cookie = dmaengine_submit(desc);

	return dma_submit_error(cookie);
}
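
/*
 * Queue both halves of a request: the rx scatterlist carries data into
 * the crypto engine (memory to device) and the tx scatterlist collects
 * its output (device to memory); the callback fires on tx completion.
 */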
int qce_dma_prep_sgs(struct qce_dma_data *dma, struct scatterlist *rx_sg,
		     int rx_nents, struct scatterlist *tx_sg, int tx_nents,
		     dma_async_tx_callback cb, void *cb_param)
{
	struct dma_chan *rxchan = dma->rxchan;
	struct dma_chan *txchan = dma->txchan;
	unsigned long flags = DMA_PREP_INTERRUPT | DMA_CTRL_ACK;
	int ret;

	ret = qce_dma_prep_sg(rxchan, rx_sg, rx_nents, flags, DMA_MEM_TO_DEV,
			      NULL, NULL);
	if (ret)
		return ret;

	return qce_dma_prep_sg(txchan, tx_sg, tx_nents, flags, DMA_DEV_TO_MEM,
			       cb, cb_param);
}
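
/* Kick off all transactions queued on both channels. */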
void qce_dma_issue_pending(struct qce_dma_data *dma)
{
	dma_async_issue_pending(dma->rxchan);
	dma_async_issue_pending(dma->txchan);
}
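
/*
 * Abort all in-flight transfers on both channels; report the rx error
 * if there was one, otherwise the tx result.
 */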
int qce_dma_terminate_all(struct qce_dma_data *dma)
{
	int ret;

	ret = dmaengine_terminate_all(dma->rxchan);
	return ret ?: dmaengine_terminate_all(dma->txchan);
}