// SPDX-License-Identifier: GPL-2.0-only
/*
 * i2c-stm32.c
 *
 * Copyright (C) M'boumba Cedric Madianga 2017
 * Author: M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 */

#include "i2c-stm32.h"

/* Functions for DMA support */
struct stm32_i2c_dma *stm32_i2c_dma_request(struct device *dev,
					    dma_addr_t phy_addr,
					    u32 txdr_offset,
					    u32 rxdr_offset)
{
	struct stm32_i2c_dma *dma;
	struct dma_slave_config dma_sconfig;
	int ret;

	dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
	if (!dma)
		return ERR_PTR(-ENOMEM);

	/* Request and configure I2C TX dma channel */
	dma->chan_tx = dma_request_chan(dev, "tx");
	if (IS_ERR(dma->chan_tx)) {
		dev_dbg(dev, "can't request DMA tx channel\n");
		ret = PTR_ERR(dma->chan_tx);
		goto fail_al;
	}

	memset(&dma_sconfig, 0, sizeof(dma_sconfig));
	dma_sconfig.dst_addr = phy_addr + txdr_offset;
	dma_sconfig.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	dma_sconfig.dst_maxburst = 1;
	dma_sconfig.direction = DMA_MEM_TO_DEV;
	ret = dmaengine_slave_config(dma->chan_tx, &dma_sconfig);
	if (ret < 0) {
		dev_err(dev, "can't configure tx channel\n");
		goto fail_tx;
	}

	/* Request and configure I2C RX dma channel */
	dma->chan_rx = dma_request_chan(dev, "rx");
	if (IS_ERR(dma->chan_rx)) {
		dev_err(dev, "can't request DMA rx channel\n");
		ret = PTR_ERR(dma->chan_rx);
		goto fail_tx;
	}

	memset(&dma_sconfig, 0, sizeof(dma_sconfig));
	dma_sconfig.src_addr = phy_addr + rxdr_offset;
	dma_sconfig.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	dma_sconfig.src_maxburst = 1;
	dma_sconfig.direction = DMA_DEV_TO_MEM;
	ret = dmaengine_slave_config(dma->chan_rx, &dma_sconfig);
	if (ret < 0) {
		dev_err(dev, "can't configure rx channel\n");
		goto fail_rx;
	}

	init_completion(&dma->dma_complete);

	dev_info(dev, "using %s (tx) and %s (rx) for DMA transfers\n",
		 dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx));

	return dma;

fail_rx:
	dma_release_channel(dma->chan_rx);
fail_tx:
	dma_release_channel(dma->chan_tx);
fail_al:
	devm_kfree(dev, dma);
	dev_info(dev, "can't use DMA\n");

	return ERR_PTR(ret);
}

void stm32_i2c_dma_free(struct stm32_i2c_dma *dma)
{
	dma->dma_buf = 0;
	dma->dma_len = 0;

	dma_release_channel(dma->chan_tx);
	dma->chan_tx = NULL;

	dma_release_channel(dma->chan_rx);
	dma->chan_rx = NULL;

	dma->chan_using = NULL;
}
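
/*
 * Illustrative sketch, an assumption rather than code from this file: the
 * bus driver's remove path is expected to quiesce the controller and any
 * in-flight transfer before dropping the channels, e.g.:
 *
 *	if (i2c_dev->dma) {
 *		stm32_i2c_dma_free(i2c_dev->dma);
 *		i2c_dev->dma = NULL;
 *	}
 */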

int stm32_i2c_prep_dma_xfer(struct device *dev, struct stm32_i2c_dma *dma,
			    bool rd_wr, u32 len, u8 *buf,
			    dma_async_tx_callback callback,
			    void *dma_async_param)
{
	struct dma_async_tx_descriptor *txdesc;
	struct device *chan_dev;
	int ret;

	/* rd_wr selects the RX channel (read) or the TX channel (write) */
	if (rd_wr) {
		dma->chan_using = dma->chan_rx;
		dma->dma_transfer_dir = DMA_DEV_TO_MEM;
		dma->dma_data_dir = DMA_FROM_DEVICE;
	} else {
		dma->chan_using = dma->chan_tx;
		dma->dma_transfer_dir = DMA_MEM_TO_DEV;
		dma->dma_data_dir = DMA_TO_DEVICE;
	}

	dma->dma_len = len;
	chan_dev = dma->chan_using->device->dev;

	dma->dma_buf = dma_map_single(chan_dev, buf, dma->dma_len,
				      dma->dma_data_dir);
	if (dma_mapping_error(chan_dev, dma->dma_buf)) {
		dev_err(dev, "DMA mapping failed\n");
		return -EINVAL;
	}

	txdesc = dmaengine_prep_slave_single(dma->chan_using, dma->dma_buf,
					     dma->dma_len,
					     dma->dma_transfer_dir,
					     DMA_PREP_INTERRUPT);
	if (!txdesc) {
		dev_err(dev, "Not able to get desc for DMA xfer\n");
		ret = -EINVAL;
		goto err;
	}

	reinit_completion(&dma->dma_complete);

	txdesc->callback = callback;
	txdesc->callback_param = dma_async_param;
	ret = dma_submit_error(dmaengine_submit(txdesc));
	if (ret < 0) {
		dev_err(dev, "DMA submit failed\n");
		goto err;
	}

	dma_async_issue_pending(dma->chan_using);

	return 0;

err:
	dma_unmap_single(chan_dev, dma->dma_buf, dma->dma_len,
			 dma->dma_data_dir);
	return ret;
}
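
/*
 * Illustrative usage sketch, not part of this file: the caller provides a
 * callback that unmaps the buffer and completes dma->dma_complete, then
 * waits on that completion with a timeout.  The stm32f7_* names, i2c_dev
 * and msg below are assumptions borrowed from the STM32F7 bus driver.
 *
 *	static void stm32f7_i2c_dma_callback(void *arg)
 *	{
 *		struct stm32f7_i2c_dev *i2c_dev = arg;
 *		struct stm32_i2c_dma *dma = i2c_dev->dma;
 *		struct device *dev = dma->chan_using->device->dev;
 *
 *		dma_unmap_single(dev, dma->dma_buf, dma->dma_len,
 *				 dma->dma_data_dir);
 *		complete(&dma->dma_complete);
 *	}
 *
 *	ret = stm32_i2c_prep_dma_xfer(i2c_dev->dev, i2c_dev->dma,
 *				      msg->flags & I2C_M_RD, msg->len,
 *				      msg->buf, stm32f7_i2c_dma_callback,
 *				      i2c_dev);
 *	if (!ret)
 *		wait_for_completion_timeout(&i2c_dev->dma->dma_complete, HZ);
 */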