/*
 * Atmel ADC driver for SAMA5D2 devices and compatible.
 *
 * Copyright (C) 2015 Atmel,
 *               2015 Ludovic Desroches <ludovic.desroches@atmel.com>
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <linux/bitops.h>
#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/sched.h>
#include <linux/wait.h>
#include <linux/iio/iio.h>
#include <linux/iio/sysfs.h>
#include <linux/iio/buffer.h>
#include <linux/iio/trigger.h>
#include <linux/iio/trigger_consumer.h>
#include <linux/iio/triggered_buffer.h>
#include <linux/pinctrl/consumer.h>
#include <linux/regulator/consumer.h>

/* Control Register */
#define AT91_SAMA5D2_CR 0x00
/* Software Reset */
#define AT91_SAMA5D2_CR_SWRST BIT(0)
/* Start Conversion */
#define AT91_SAMA5D2_CR_START BIT(1)
/* Touchscreen Calibration */
#define AT91_SAMA5D2_CR_TSCALIB BIT(2)
/* Comparison Restart */
#define AT91_SAMA5D2_CR_CMPRST BIT(4)

/* Mode Register */
#define AT91_SAMA5D2_MR 0x04
/* Trigger Selection */
#define AT91_SAMA5D2_MR_TRGSEL(v) ((v) << 1)
/* ADTRG */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG0 0
/* TIOA0 */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG1 1
/* TIOA1 */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG2 2
/* TIOA2 */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG3 3
/* PWM event line 0 */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG4 4
/* PWM event line 1 */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG5 5
/* TIOA3 */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG6 6
/* RTCOUT0 */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG7 7
/* Sleep Mode */
#define AT91_SAMA5D2_MR_SLEEP BIT(5)
/* Fast Wake Up */
#define AT91_SAMA5D2_MR_FWUP BIT(6)
/* Prescaler Rate Selection */
#define AT91_SAMA5D2_MR_PRESCAL(v) ((v) << AT91_SAMA5D2_MR_PRESCAL_OFFSET)
#define AT91_SAMA5D2_MR_PRESCAL_OFFSET 8
#define AT91_SAMA5D2_MR_PRESCAL_MAX 0xff
#define AT91_SAMA5D2_MR_PRESCAL_MASK GENMASK(15, 8)
/* Startup Time */
#define AT91_SAMA5D2_MR_STARTUP(v) ((v) << 16)
#define AT91_SAMA5D2_MR_STARTUP_MASK GENMASK(19, 16)
/* Analog Change */
#define AT91_SAMA5D2_MR_ANACH BIT(23)
/* Tracking Time */
#define AT91_SAMA5D2_MR_TRACKTIM(v) ((v) << 24)
#define AT91_SAMA5D2_MR_TRACKTIM_MAX 0xff
/* Transfer Time */
#define AT91_SAMA5D2_MR_TRANSFER(v) ((v) << 28)
#define AT91_SAMA5D2_MR_TRANSFER_MAX 0x3
/* Use Sequence Enable */
#define AT91_SAMA5D2_MR_USEQ BIT(31)

/* Channel Sequence Register 1 */
#define AT91_SAMA5D2_SEQR1 0x08
/* Channel Sequence Register 2 */
#define AT91_SAMA5D2_SEQR2 0x0c
/* Channel Enable Register */
#define AT91_SAMA5D2_CHER 0x10
/* Channel Disable Register */
#define AT91_SAMA5D2_CHDR 0x14
/* Channel Status Register */
#define AT91_SAMA5D2_CHSR 0x18
/* Last Converted Data Register */
#define AT91_SAMA5D2_LCDR 0x20
/* Interrupt Enable Register */
#define AT91_SAMA5D2_IER 0x24
/* Interrupt Enable Register - general overrun error */
#define AT91_SAMA5D2_IER_GOVRE BIT(25)
/* Interrupt Disable Register */
#define AT91_SAMA5D2_IDR 0x28
/* Interrupt Mask Register */
#define AT91_SAMA5D2_IMR 0x2c
/* Interrupt Status Register */
#define AT91_SAMA5D2_ISR 0x30
/* Last Channel Trigger Mode Register */
#define AT91_SAMA5D2_LCTMR 0x34
/* Last Channel Compare Window Register */
#define AT91_SAMA5D2_LCCWR 0x38
/* Overrun Status Register */
#define AT91_SAMA5D2_OVER 0x3c
/* Extended Mode Register */
#define AT91_SAMA5D2_EMR 0x40
/* Compare Window Register */
#define AT91_SAMA5D2_CWR 0x44
/* Channel Gain Register */
#define AT91_SAMA5D2_CGR 0x48

/* Channel Offset Register */
#define AT91_SAMA5D2_COR 0x4c
#define AT91_SAMA5D2_COR_DIFF_OFFSET 16

/* Channel Data Register 0 */
#define AT91_SAMA5D2_CDR0 0x50
/* Analog Control Register */
#define AT91_SAMA5D2_ACR 0x94
/* Touchscreen Mode Register */
#define AT91_SAMA5D2_TSMR 0xb0
/* Touchscreen X Position Register */
#define AT91_SAMA5D2_XPOSR 0xb4
/* Touchscreen Y Position Register */
#define AT91_SAMA5D2_YPOSR 0xb8
/* Touchscreen Pressure Register */
#define AT91_SAMA5D2_PRESSR 0xbc
/* Trigger Register */
#define AT91_SAMA5D2_TRGR 0xc0
/* Mask for TRGMOD field of TRGR register */
#define AT91_SAMA5D2_TRGR_TRGMOD_MASK GENMASK(2, 0)
/* No trigger, only software trigger can start conversions */
#define AT91_SAMA5D2_TRGR_TRGMOD_NO_TRIGGER 0
/* Trigger Mode external trigger rising edge */
#define AT91_SAMA5D2_TRGR_TRGMOD_EXT_TRIG_RISE 1
/* Trigger Mode external trigger falling edge */
#define AT91_SAMA5D2_TRGR_TRGMOD_EXT_TRIG_FALL 2
/* Trigger Mode external trigger any edge */
#define AT91_SAMA5D2_TRGR_TRGMOD_EXT_TRIG_ANY 3

/* Correction Select Register */
#define AT91_SAMA5D2_COSR 0xd0
/* Correction Value Register */
#define AT91_SAMA5D2_CVR 0xd4
/* Channel Error Correction Register */
#define AT91_SAMA5D2_CECR 0xd8
/* Write Protection Mode Register */
#define AT91_SAMA5D2_WPMR 0xe4
/* Write Protection Status Register */
#define AT91_SAMA5D2_WPSR 0xe8
/* Version Register */
#define AT91_SAMA5D2_VERSION 0xfc

#define AT91_SAMA5D2_HW_TRIG_CNT 3
#define AT91_SAMA5D2_SINGLE_CHAN_CNT 12
#define AT91_SAMA5D2_DIFF_CHAN_CNT 6

/*
 * Maximum number of bytes to hold conversion from all channels
 * without the timestamp.
 */
#define AT91_BUFFER_MAX_CONVERSION_BYTES ((AT91_SAMA5D2_SINGLE_CHAN_CNT + \
					  AT91_SAMA5D2_DIFF_CHAN_CNT) * 2)

/* This total must also include the timestamp */
#define AT91_BUFFER_MAX_BYTES (AT91_BUFFER_MAX_CONVERSION_BYTES + 8)

#define AT91_BUFFER_MAX_HWORDS (AT91_BUFFER_MAX_BYTES / 2)
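
/*
 * Worked example of the sizing above (added for clarity, not part of the
 * original source): (12 single + 6 differential) channels * 2 bytes each
 * gives 36 bytes of conversion data; adding the 8-byte timestamp yields
 * 44 bytes, i.e. 22 16-bit halfwords for the per-trigger buffer below.
 */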

#define AT91_HWFIFO_MAX_SIZE_STR "128"
#define AT91_HWFIFO_MAX_SIZE 128

#define AT91_SAMA5D2_CHAN_SINGLE(num, addr)				\
	{								\
		.type = IIO_VOLTAGE,					\
		.channel = num,						\
		.address = addr,					\
		.scan_index = num,					\
		.scan_type = {						\
			.sign = 'u',					\
			.realbits = 12,					\
			.storagebits = 16,				\
		},							\
		.info_mask_separate = BIT(IIO_CHAN_INFO_RAW),		\
		.info_mask_shared_by_type = BIT(IIO_CHAN_INFO_SCALE),	\
		.info_mask_shared_by_all = BIT(IIO_CHAN_INFO_SAMP_FREQ),\
		.datasheet_name = "CH"#num,				\
		.indexed = 1,						\
	}

#define AT91_SAMA5D2_CHAN_DIFF(num, num2, addr)				\
	{								\
		.type = IIO_VOLTAGE,					\
		.differential = 1,					\
		.channel = num,						\
		.channel2 = num2,					\
		.address = addr,					\
		.scan_index = num + AT91_SAMA5D2_SINGLE_CHAN_CNT,	\
		.scan_type = {						\
			.sign = 's',					\
			.realbits = 12,					\
			.storagebits = 16,				\
		},							\
		.info_mask_separate = BIT(IIO_CHAN_INFO_RAW),		\
		.info_mask_shared_by_type = BIT(IIO_CHAN_INFO_SCALE),	\
		.info_mask_shared_by_all = BIT(IIO_CHAN_INFO_SAMP_FREQ),\
		.datasheet_name = "CH"#num"-CH"#num2,			\
		.indexed = 1,						\
	}
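
/*
 * Note added for clarity (not in the original source): differential
 * channels report signed 12-bit data (.sign = 's', .realbits = 12), so
 * raw values span -2048..2047; at91_adc_read_raw() sign-extends them with
 * sign_extend32(val, 11) before returning them to user space.
 */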

#define at91_adc_readl(st, reg) readl_relaxed(st->base + reg)
#define at91_adc_writel(st, reg, val) writel_relaxed(val, st->base + reg)

struct at91_adc_soc_info {
	unsigned		startup_time;
	unsigned		min_sample_rate;
	unsigned		max_sample_rate;
};

struct at91_adc_trigger {
	char			*name;
	unsigned int		trgmod_value;
	unsigned int		edge_type;
	bool			hw_trig;
};

/**
 * struct at91_adc_dma - at91-sama5d2 DMA information struct
 * @dma_chan:		the DMA channel acquired
 * @rx_buf:		DMA coherent allocated area
 * @rx_dma_buf:		DMA handle (bus address) of the buffer
 * @phys_addr:		physical address of the ADC base register
 * @buf_idx:		index inside the DMA buffer where reading was last done
 * @rx_buf_sz:		size of buffer used by DMA operation
 * @watermark:		number of conversions to copy before DMA triggers irq
 * @dma_ts:		hold the start timestamp of the DMA operation
 */
struct at91_adc_dma {
	struct dma_chan		*dma_chan;
	u8			*rx_buf;
	dma_addr_t		rx_dma_buf;
	phys_addr_t		phys_addr;
	int			buf_idx;
	int			rx_buf_sz;
	int			watermark;
	s64			dma_ts;
};

struct at91_adc_state {
	void __iomem		*base;
	int			irq;
	struct clk		*per_clk;
	struct regulator	*reg;
	struct regulator	*vref;
	int			vref_uv;
	struct iio_trigger	*trig;
	const struct at91_adc_trigger	*selected_trig;
	const struct iio_chan_spec	*chan;
	bool			conversion_done;
	u32			conversion_value;
	struct at91_adc_soc_info	soc_info;
	wait_queue_head_t	wq_data_available;
	struct at91_adc_dma	dma_st;
	u16			buffer[AT91_BUFFER_MAX_HWORDS];
	/*
	 * lock to prevent concurrent 'single conversion' requests through
	 * sysfs.
	 */
	struct mutex		lock;
};

static const struct at91_adc_trigger at91_adc_trigger_list[] = {
	{
		.name = "external_rising",
		.trgmod_value = AT91_SAMA5D2_TRGR_TRGMOD_EXT_TRIG_RISE,
		.edge_type = IRQ_TYPE_EDGE_RISING,
		.hw_trig = true,
	},
	{
		.name = "external_falling",
		.trgmod_value = AT91_SAMA5D2_TRGR_TRGMOD_EXT_TRIG_FALL,
		.edge_type = IRQ_TYPE_EDGE_FALLING,
		.hw_trig = true,
	},
	{
		.name = "external_any",
		.trgmod_value = AT91_SAMA5D2_TRGR_TRGMOD_EXT_TRIG_ANY,
		.edge_type = IRQ_TYPE_EDGE_BOTH,
		.hw_trig = true,
	},
	{
		.name = "software",
		.trgmod_value = AT91_SAMA5D2_TRGR_TRGMOD_NO_TRIGGER,
		.edge_type = IRQ_TYPE_NONE,
		.hw_trig = false,
	},
};

static const struct iio_chan_spec at91_adc_channels[] = {
	AT91_SAMA5D2_CHAN_SINGLE(0, 0x50),
	AT91_SAMA5D2_CHAN_SINGLE(1, 0x54),
	AT91_SAMA5D2_CHAN_SINGLE(2, 0x58),
	AT91_SAMA5D2_CHAN_SINGLE(3, 0x5c),
	AT91_SAMA5D2_CHAN_SINGLE(4, 0x60),
	AT91_SAMA5D2_CHAN_SINGLE(5, 0x64),
	AT91_SAMA5D2_CHAN_SINGLE(6, 0x68),
	AT91_SAMA5D2_CHAN_SINGLE(7, 0x6c),
	AT91_SAMA5D2_CHAN_SINGLE(8, 0x70),
	AT91_SAMA5D2_CHAN_SINGLE(9, 0x74),
	AT91_SAMA5D2_CHAN_SINGLE(10, 0x78),
	AT91_SAMA5D2_CHAN_SINGLE(11, 0x7c),
	AT91_SAMA5D2_CHAN_DIFF(0, 1, 0x50),
	AT91_SAMA5D2_CHAN_DIFF(2, 3, 0x58),
	AT91_SAMA5D2_CHAN_DIFF(4, 5, 0x60),
	AT91_SAMA5D2_CHAN_DIFF(6, 7, 0x68),
	AT91_SAMA5D2_CHAN_DIFF(8, 9, 0x70),
	AT91_SAMA5D2_CHAN_DIFF(10, 11, 0x78),
	IIO_CHAN_SOFT_TIMESTAMP(AT91_SAMA5D2_SINGLE_CHAN_CNT
				+ AT91_SAMA5D2_DIFF_CHAN_CNT + 1),
};
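
/*
 * Note added for clarity (not in the original source): each channel's
 * .address above is its Channel Data Register, i.e. AT91_SAMA5D2_CDR0
 * (0x50) plus 4 bytes per channel number, so for example channel 11 is
 * read at 0x50 + 4 * 11 = 0x7c. Differential pairs share the CDR of
 * their even-numbered channel.
 */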

static int at91_adc_configure_trigger(struct iio_trigger *trig, bool state)
{
	struct iio_dev *indio = iio_trigger_get_drvdata(trig);
	struct at91_adc_state *st = iio_priv(indio);
	u32 status = at91_adc_readl(st, AT91_SAMA5D2_TRGR);
	u8 bit;

	/* clear TRGMOD */
	status &= ~AT91_SAMA5D2_TRGR_TRGMOD_MASK;

	if (state)
		status |= st->selected_trig->trgmod_value;

	/* set/unset hw trigger */
	at91_adc_writel(st, AT91_SAMA5D2_TRGR, status);

	for_each_set_bit(bit, indio->active_scan_mask, indio->num_channels) {
		struct iio_chan_spec const *chan = indio->channels + bit;

		if (state) {
			at91_adc_writel(st, AT91_SAMA5D2_CHER,
					BIT(chan->channel));
			/* enable irq only if not using DMA */
			if (!st->dma_st.dma_chan) {
				at91_adc_writel(st, AT91_SAMA5D2_IER,
						BIT(chan->channel));
			}
		} else {
			/* disable irq only if not using DMA */
			if (!st->dma_st.dma_chan) {
				at91_adc_writel(st, AT91_SAMA5D2_IDR,
						BIT(chan->channel));
			}
			at91_adc_writel(st, AT91_SAMA5D2_CHDR,
					BIT(chan->channel));
		}
	}

	return 0;
}

static int at91_adc_reenable_trigger(struct iio_trigger *trig)
{
	struct iio_dev *indio = iio_trigger_get_drvdata(trig);
	struct at91_adc_state *st = iio_priv(indio);

	/* if we are using DMA, we must not reenable irq after each trigger */
	if (st->dma_st.dma_chan)
		return 0;

	enable_irq(st->irq);

	/* Needed to ACK the DRDY interrupt */
	at91_adc_readl(st, AT91_SAMA5D2_LCDR);
	return 0;
}

static const struct iio_trigger_ops at91_adc_trigger_ops = {
	.set_trigger_state = &at91_adc_configure_trigger,
	.try_reenable = &at91_adc_reenable_trigger,
	.validate_device = iio_trigger_validate_own_device,
};

static int at91_adc_dma_size_done(struct at91_adc_state *st)
{
	struct dma_tx_state state;
	enum dma_status status;
	int i, size;

	status = dmaengine_tx_status(st->dma_st.dma_chan,
				     st->dma_st.dma_chan->cookie,
				     &state);
	if (status != DMA_IN_PROGRESS)
		return 0;

	/* Transferred length is size in bytes from end of buffer */
	i = st->dma_st.rx_buf_sz - state.residue;

	/* Return available bytes */
	if (i >= st->dma_st.buf_idx)
		size = i - st->dma_st.buf_idx;
	else
		size = st->dma_st.rx_buf_sz + i - st->dma_st.buf_idx;
	return size;
}
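
/*
 * Worked example of the wrap-around arithmetic above, with hypothetical
 * numbers (added for clarity, not part of the original source): with
 * rx_buf_sz = 200 and a reported residue of 40, the DMA write position is
 * i = 200 - 40 = 160; if the last read stopped at buf_idx = 120, then
 * 160 - 120 = 40 new bytes are available. Had the write position wrapped
 * around to i = 20, the result would be 200 + 20 - 120 = 100 bytes.
 */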

static void at91_dma_buffer_done(void *data)
{
	struct iio_dev *indio_dev = data;

	iio_trigger_poll_chained(indio_dev->trig);
}

static int at91_adc_dma_start(struct iio_dev *indio_dev)
{
	struct at91_adc_state *st = iio_priv(indio_dev);
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;
	int ret;
	u8 bit;

	if (!st->dma_st.dma_chan)
		return 0;

	/* we start a new DMA, so set buffer index to start */
	st->dma_st.buf_idx = 0;

	/*
	 * compute buffer size w.r.t. watermark and enabled channels.
	 * scan_bytes is aligned so we need an exact size for DMA
	 */
	st->dma_st.rx_buf_sz = 0;

	for_each_set_bit(bit, indio_dev->active_scan_mask,
			 indio_dev->num_channels) {
		struct iio_chan_spec const *chan = indio_dev->channels + bit;

		st->dma_st.rx_buf_sz += chan->scan_type.storagebits / 8;
	}
	st->dma_st.rx_buf_sz *= st->dma_st.watermark;
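
	/*
	 * Worked example with hypothetical numbers (added for clarity, not
	 * part of the original source): with two enabled 16-bit channels
	 * and a watermark of 50 conversions, rx_buf_sz = 2 * 2 * 50 = 200
	 * bytes; the cyclic transfer below uses a period of rx_buf_sz / 2,
	 * so its completion callback fires every 100 bytes.
	 */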

	/* Prepare a DMA cyclic transaction */
	desc = dmaengine_prep_dma_cyclic(st->dma_st.dma_chan,
					 st->dma_st.rx_dma_buf,
					 st->dma_st.rx_buf_sz,
					 st->dma_st.rx_buf_sz / 2,
					 DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);

	if (!desc) {
		dev_err(&indio_dev->dev, "cannot prepare DMA cyclic\n");
		return -EBUSY;
	}

	desc->callback = at91_dma_buffer_done;
	desc->callback_param = indio_dev;

	cookie = dmaengine_submit(desc);
	ret = dma_submit_error(cookie);
	if (ret) {
		dev_err(&indio_dev->dev, "cannot submit DMA cyclic\n");
		dmaengine_terminate_async(st->dma_st.dma_chan);
		return ret;
	}

	/* enable general overrun error signaling */
	at91_adc_writel(st, AT91_SAMA5D2_IER, AT91_SAMA5D2_IER_GOVRE);
	/* Issue pending DMA requests */
	dma_async_issue_pending(st->dma_st.dma_chan);

	/* consider current time as DMA start time for timestamps */
	st->dma_st.dma_ts = iio_get_time_ns(indio_dev);

	dev_dbg(&indio_dev->dev, "DMA cyclic started\n");

	return 0;
}

static int at91_adc_buffer_postenable(struct iio_dev *indio_dev)
{
	int ret;

	ret = at91_adc_dma_start(indio_dev);
	if (ret) {
		dev_err(&indio_dev->dev, "buffer postenable failed\n");
		return ret;
	}

	return iio_triggered_buffer_postenable(indio_dev);
}

static int at91_adc_buffer_predisable(struct iio_dev *indio_dev)
{
	struct at91_adc_state *st = iio_priv(indio_dev);
	int ret;
	u8 bit;

	ret = iio_triggered_buffer_predisable(indio_dev);
	if (ret < 0)
		dev_err(&indio_dev->dev, "buffer predisable failed\n");

	if (!st->dma_st.dma_chan)
		return ret;

	/* if we are using DMA we must clear registers and end DMA */
	dmaengine_terminate_sync(st->dma_st.dma_chan);

	/*
	 * For each enabled channel we must read the last converted value
	 * to clear EOC status and not get a possible interrupt later.
	 * This value is being read by DMA from LCDR anyway.
	 */
	for_each_set_bit(bit, indio_dev->active_scan_mask,
			 indio_dev->num_channels) {
		struct iio_chan_spec const *chan = indio_dev->channels + bit;

		if (st->dma_st.dma_chan)
			at91_adc_readl(st, chan->address);
	}

	/* read overflow register to clear possible overflow status */
	at91_adc_readl(st, AT91_SAMA5D2_OVER);
	return ret;
}

static const struct iio_buffer_setup_ops at91_buffer_setup_ops = {
	.postenable = &at91_adc_buffer_postenable,
	.predisable = &at91_adc_buffer_predisable,
};

static struct iio_trigger *at91_adc_allocate_trigger(struct iio_dev *indio,
						     char *trigger_name)
{
	struct iio_trigger *trig;
	int ret;

	trig = devm_iio_trigger_alloc(&indio->dev, "%s-dev%d-%s", indio->name,
				      indio->id, trigger_name);
	if (!trig)
		return NULL;

	trig->dev.parent = indio->dev.parent;
	iio_trigger_set_drvdata(trig, indio);
	trig->ops = &at91_adc_trigger_ops;

	ret = devm_iio_trigger_register(&indio->dev, trig);
	if (ret)
		return ERR_PTR(ret);

	return trig;
}

static int at91_adc_trigger_init(struct iio_dev *indio)
{
	struct at91_adc_state *st = iio_priv(indio);

	st->trig = at91_adc_allocate_trigger(indio, st->selected_trig->name);
	if (IS_ERR(st->trig)) {
		dev_err(&indio->dev, "could not allocate trigger\n");
		return PTR_ERR(st->trig);
	}

	return 0;
}

static void at91_adc_trigger_handler_nodma(struct iio_dev *indio_dev,
					   struct iio_poll_func *pf)
{
	struct at91_adc_state *st = iio_priv(indio_dev);
	int i = 0;
	u8 bit;

	for_each_set_bit(bit, indio_dev->active_scan_mask,
			 indio_dev->num_channels) {
		struct iio_chan_spec const *chan = indio_dev->channels + bit;

		st->buffer[i] = at91_adc_readl(st, chan->address);
		i++;
	}
	iio_push_to_buffers_with_timestamp(indio_dev, st->buffer,
					   pf->timestamp);
}

static void at91_adc_trigger_handler_dma(struct iio_dev *indio_dev)
{
	struct at91_adc_state *st = iio_priv(indio_dev);
	int transferred_len = at91_adc_dma_size_done(st);
	s64 ns = iio_get_time_ns(indio_dev);
	s64 interval;
	int sample_index = 0, sample_count, sample_size;

	u32 status = at91_adc_readl(st, AT91_SAMA5D2_ISR);
	/* if we reached this point, we cannot sample faster */
	if (status & AT91_SAMA5D2_IER_GOVRE)
		pr_info_ratelimited("%s: conversion overrun detected\n",
				    indio_dev->name);

	sample_size = div_s64(st->dma_st.rx_buf_sz, st->dma_st.watermark);

	sample_count = div_s64(transferred_len, sample_size);

	/*
	 * interval between samples is total time since last transfer handling
	 * divided by the number of samples (total size divided by sample size)
	 */
	interval = div_s64((ns - st->dma_st.dma_ts), sample_count);
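
	/*
	 * Worked example with hypothetical numbers (added for clarity, not
	 * part of the original source): if 100 us elapsed since dma_ts and
	 * 50 samples were transferred, each sample pushed below gets a
	 * timestamp of dma_ts + (100 us / 50) * sample_index, so the samples
	 * are spread evenly across the elapsed interval.
	 */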

	while (transferred_len >= sample_size) {
		iio_push_to_buffers_with_timestamp(indio_dev,
				(st->dma_st.rx_buf + st->dma_st.buf_idx),
				(st->dma_st.dma_ts + interval * sample_index));
		/* adjust remaining length */
		transferred_len -= sample_size;
		/* adjust buffer index */
		st->dma_st.buf_idx += sample_size;
		/* in case of reaching end of buffer, reset index */
		if (st->dma_st.buf_idx >= st->dma_st.rx_buf_sz)
			st->dma_st.buf_idx = 0;
		sample_index++;
	}
	/* adjust saved time for next transfer handling */
	st->dma_st.dma_ts = iio_get_time_ns(indio_dev);
}

static irqreturn_t at91_adc_trigger_handler(int irq, void *p)
{
	struct iio_poll_func *pf = p;
	struct iio_dev *indio_dev = pf->indio_dev;
	struct at91_adc_state *st = iio_priv(indio_dev);

	if (st->dma_st.dma_chan)
		at91_adc_trigger_handler_dma(indio_dev);
	else
		at91_adc_trigger_handler_nodma(indio_dev, pf);

	iio_trigger_notify_done(indio_dev->trig);

	return IRQ_HANDLED;
}

static int at91_adc_buffer_init(struct iio_dev *indio)
{
	return devm_iio_triggered_buffer_setup(&indio->dev, indio,
					       &iio_pollfunc_store_time,
					       &at91_adc_trigger_handler,
					       &at91_buffer_setup_ops);
}

static unsigned at91_adc_startup_time(unsigned startup_time_min,
				      unsigned adc_clk_khz)
{
	static const unsigned int startup_lookup[] = {
		  0,   8,  16,  24,
		 64,  80,  96, 112,
		512, 576, 640, 704,
		768, 832, 896, 960
	};
	unsigned ticks_min, i;

	/*
	 * Since the ADC frequency is checked before, there is no reason
	 * not to meet the startup time constraint.
	 */
	ticks_min = startup_time_min * adc_clk_khz / 1000;
	for (i = 0; i < ARRAY_SIZE(startup_lookup); i++)
		if (startup_lookup[i] > ticks_min)
			break;

	return i;
}
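
/*
 * Worked example with hypothetical numbers (added for clarity, not part of
 * the original source): a startup_time_min of 4 and an ADC clock of
 * 10000 kHz give ticks_min = 4 * 10000 / 1000 = 40 ticks; the first lookup
 * entry above 40 is 64 at index 4, so the STARTUP field is programmed to 4.
 */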

static void at91_adc_setup_samp_freq(struct at91_adc_state *st, unsigned freq)
{
	struct iio_dev *indio_dev = iio_priv_to_dev(st);
	unsigned f_per, prescal, startup, mr;

	f_per = clk_get_rate(st->per_clk);
	prescal = (f_per / (2 * freq)) - 1;

	startup = at91_adc_startup_time(st->soc_info.startup_time,
					freq / 1000);

	mr = at91_adc_readl(st, AT91_SAMA5D2_MR);
	mr &= ~(AT91_SAMA5D2_MR_STARTUP_MASK | AT91_SAMA5D2_MR_PRESCAL_MASK);
	mr |= AT91_SAMA5D2_MR_STARTUP(startup);
	mr |= AT91_SAMA5D2_MR_PRESCAL(prescal);
	at91_adc_writel(st, AT91_SAMA5D2_MR, mr);

	dev_dbg(&indio_dev->dev, "freq: %u, startup: %u, prescal: %u\n",
		freq, startup, prescal);
}
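
/*
 * Worked example with hypothetical numbers (added for clarity, not part of
 * the original source): with a 100 MHz peripheral clock and a requested
 * rate of 5 MHz, prescal = 100000000 / (2 * 5000000) - 1 = 9, and the
 * resulting ADC clock is f_per / (2 * (prescal + 1)) = 5 MHz, which is
 * exactly what at91_adc_get_sample_freq() below reads back.
 */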

static unsigned at91_adc_get_sample_freq(struct at91_adc_state *st)
{
	unsigned f_adc, f_per = clk_get_rate(st->per_clk);
	unsigned mr, prescal;

	mr = at91_adc_readl(st, AT91_SAMA5D2_MR);
	prescal = (mr >> AT91_SAMA5D2_MR_PRESCAL_OFFSET)
		  & AT91_SAMA5D2_MR_PRESCAL_MAX;
	f_adc = f_per / (2 * (prescal + 1));

	return f_adc;
}

static irqreturn_t at91_adc_interrupt(int irq, void *private)
{
	struct iio_dev *indio = private;
	struct at91_adc_state *st = iio_priv(indio);
	u32 status = at91_adc_readl(st, AT91_SAMA5D2_ISR);
	u32 imr = at91_adc_readl(st, AT91_SAMA5D2_IMR);

	if (!(status & imr))
		return IRQ_NONE;

	if (iio_buffer_enabled(indio) && !st->dma_st.dma_chan) {
		disable_irq_nosync(irq);
		iio_trigger_poll(indio->trig);
	} else if (iio_buffer_enabled(indio) && st->dma_st.dma_chan) {
		disable_irq_nosync(irq);
		WARN(true, "Unexpected irq occurred\n");
	} else if (!iio_buffer_enabled(indio)) {
		st->conversion_value = at91_adc_readl(st, st->chan->address);
		st->conversion_done = true;
		wake_up_interruptible(&st->wq_data_available);
	}
	return IRQ_HANDLED;
}

static int at91_adc_read_raw(struct iio_dev *indio_dev,
			     struct iio_chan_spec const *chan,
			     int *val, int *val2, long mask)
{
	struct at91_adc_state *st = iio_priv(indio_dev);
	u32 cor = 0;
	int ret;

	switch (mask) {
	case IIO_CHAN_INFO_RAW:
		/* we cannot use software trigger if hw trigger enabled */
		ret = iio_device_claim_direct_mode(indio_dev);
		if (ret)
			return ret;
		mutex_lock(&st->lock);

		st->chan = chan;

		if (chan->differential)
			cor = (BIT(chan->channel) | BIT(chan->channel2)) <<
			      AT91_SAMA5D2_COR_DIFF_OFFSET;

		at91_adc_writel(st, AT91_SAMA5D2_COR, cor);
		at91_adc_writel(st, AT91_SAMA5D2_CHER, BIT(chan->channel));
		at91_adc_writel(st, AT91_SAMA5D2_IER, BIT(chan->channel));
		at91_adc_writel(st, AT91_SAMA5D2_CR, AT91_SAMA5D2_CR_START);

		ret = wait_event_interruptible_timeout(st->wq_data_available,
						       st->conversion_done,
						       msecs_to_jiffies(1000));
		if (ret == 0)
			ret = -ETIMEDOUT;

		if (ret > 0) {
			*val = st->conversion_value;
			if (chan->scan_type.sign == 's')
				*val = sign_extend32(*val, 11);
			ret = IIO_VAL_INT;
			st->conversion_done = false;
		}

		at91_adc_writel(st, AT91_SAMA5D2_IDR, BIT(chan->channel));
		at91_adc_writel(st, AT91_SAMA5D2_CHDR, BIT(chan->channel));

		/* Needed to ACK the DRDY interrupt */
		at91_adc_readl(st, AT91_SAMA5D2_LCDR);

		mutex_unlock(&st->lock);

		iio_device_release_direct_mode(indio_dev);
		return ret;

	case IIO_CHAN_INFO_SCALE:
		*val = st->vref_uv / 1000;
		if (chan->differential)
			*val *= 2;
		*val2 = chan->scan_type.realbits;
		return IIO_VAL_FRACTIONAL_LOG2;
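		/*
		 * Worked example with hypothetical numbers (added for
		 * clarity, not part of the original source): with a 3.3 V
		 * reference, val = 3300 (mV) and val2 = 12, so user space
		 * sees a scale of 3300 / 2^12 ~= 0.8057 mV per LSB,
		 * doubled to ~1.611 mV per LSB for differential channels.
		 */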

	case IIO_CHAN_INFO_SAMP_FREQ:
		*val = at91_adc_get_sample_freq(st);
		return IIO_VAL_INT;

	default:
		return -EINVAL;
	}
}

static int at91_adc_write_raw(struct iio_dev *indio_dev,
			      struct iio_chan_spec const *chan,
			      int val, int val2, long mask)
{
	struct at91_adc_state *st = iio_priv(indio_dev);

	if (mask != IIO_CHAN_INFO_SAMP_FREQ)
		return -EINVAL;

	if (val < st->soc_info.min_sample_rate ||
	    val > st->soc_info.max_sample_rate)
		return -EINVAL;

	at91_adc_setup_samp_freq(st, val);

	return 0;
}

static void at91_adc_dma_init(struct platform_device *pdev)
{
	struct iio_dev *indio_dev = platform_get_drvdata(pdev);
	struct at91_adc_state *st = iio_priv(indio_dev);
	struct dma_slave_config config = {0};
	/*
	 * We make the buffer double the size of the fifo,
	 * such that DMA uses one half of the buffer (full fifo size)
	 * and the software uses the other half to read/write.
	 */
	unsigned int pages = DIV_ROUND_UP(AT91_HWFIFO_MAX_SIZE *
					  AT91_BUFFER_MAX_CONVERSION_BYTES * 2,
					  PAGE_SIZE);
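
	/*
	 * Worked example (added for clarity, not part of the original
	 * source): 128 FIFO entries * 36 conversion bytes * 2 = 9216 bytes,
	 * which rounds up to 3 pages on a system with 4 KiB pages.
	 */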

	if (st->dma_st.dma_chan)
		return;

	st->dma_st.dma_chan = dma_request_slave_channel(&pdev->dev, "rx");

	if (!st->dma_st.dma_chan) {
		dev_info(&pdev->dev, "can't get DMA channel\n");
		goto dma_exit;
	}

	st->dma_st.rx_buf = dma_alloc_coherent(st->dma_st.dma_chan->device->dev,
					       pages * PAGE_SIZE,
					       &st->dma_st.rx_dma_buf,
					       GFP_KERNEL);
	if (!st->dma_st.rx_buf) {
		dev_info(&pdev->dev, "can't allocate coherent DMA area\n");
		goto dma_chan_disable;
	}

	/* Configure DMA channel to read data register */
	config.direction = DMA_DEV_TO_MEM;
	config.src_addr = (phys_addr_t)(st->dma_st.phys_addr
			  + AT91_SAMA5D2_LCDR);
	config.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
	config.src_maxburst = 1;
	config.dst_maxburst = 1;

	if (dmaengine_slave_config(st->dma_st.dma_chan, &config)) {
		dev_info(&pdev->dev, "can't configure DMA slave\n");
		goto dma_free_area;
	}

	dev_info(&pdev->dev, "using %s for rx DMA transfers\n",
		 dma_chan_name(st->dma_st.dma_chan));

	return;

dma_free_area:
	dma_free_coherent(st->dma_st.dma_chan->device->dev, pages * PAGE_SIZE,
			  st->dma_st.rx_buf, st->dma_st.rx_dma_buf);
dma_chan_disable:
	dma_release_channel(st->dma_st.dma_chan);
	st->dma_st.dma_chan = NULL;
dma_exit:
	dev_info(&pdev->dev, "continuing without DMA support\n");
}

static void at91_adc_dma_disable(struct platform_device *pdev)
{
	struct iio_dev *indio_dev = platform_get_drvdata(pdev);
	struct at91_adc_state *st = iio_priv(indio_dev);
	unsigned int pages = DIV_ROUND_UP(AT91_HWFIFO_MAX_SIZE *
					  AT91_BUFFER_MAX_CONVERSION_BYTES * 2,
					  PAGE_SIZE);

	/* if we are not using DMA, just return */
	if (!st->dma_st.dma_chan)
		return;

	/* wait for all transactions to be terminated first */
	dmaengine_terminate_sync(st->dma_st.dma_chan);

	dma_free_coherent(st->dma_st.dma_chan->device->dev, pages * PAGE_SIZE,
			  st->dma_st.rx_buf, st->dma_st.rx_dma_buf);
	dma_release_channel(st->dma_st.dma_chan);
	st->dma_st.dma_chan = NULL;

	dev_info(&pdev->dev, "continuing without DMA support\n");
}

static int at91_adc_set_watermark(struct iio_dev *indio_dev, unsigned int val)
{
	struct at91_adc_state *st = iio_priv(indio_dev);

	if (val > AT91_HWFIFO_MAX_SIZE)
		return -EINVAL;

	if (!st->selected_trig->hw_trig) {
		dev_dbg(&indio_dev->dev, "we need hw trigger for DMA\n");
		return 0;
	}

	dev_dbg(&indio_dev->dev, "new watermark is %u\n", val);
	st->dma_st.watermark = val;

	/*
	 * The logic here is: if we have watermark 1, it means we do
	 * each conversion with its own IRQ, thus we don't need DMA.
	 * If the watermark is higher, we do DMA to do all the transfers
	 * in bulk.
	 */

	if (val == 1)
		at91_adc_dma_disable(to_platform_device(&indio_dev->dev));
	else if (val > 1)
		at91_adc_dma_init(to_platform_device(&indio_dev->dev));
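
	/*
	 * Usage note (added for clarity, not part of the original source):
	 * this callback is invoked when user space writes the IIO
	 * buffer watermark sysfs attribute, so e.g. setting a watermark of
	 * 10 there switches the driver from per-conversion interrupts to
	 * the DMA path set up above.
	 */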

	return 0;
}

static const struct iio_info at91_adc_info = {
	.read_raw = &at91_adc_read_raw,
	.write_raw = &at91_adc_write_raw,
	.hwfifo_set_watermark = &at91_adc_set_watermark,
};

static void at91_adc_hw_init(struct at91_adc_state *st)
{
	at91_adc_writel(st, AT91_SAMA5D2_CR, AT91_SAMA5D2_CR_SWRST);
	at91_adc_writel(st, AT91_SAMA5D2_IDR, 0xffffffff);
	/*
	 * Transfer field must be set to 2 according to the datasheet and
	 * allows different analog settings for each channel.
	 */
	at91_adc_writel(st, AT91_SAMA5D2_MR,
			AT91_SAMA5D2_MR_TRANSFER(2) | AT91_SAMA5D2_MR_ANACH);

	at91_adc_setup_samp_freq(st, st->soc_info.min_sample_rate);
}

static ssize_t at91_adc_get_fifo_state(struct device *dev,
				       struct device_attribute *attr, char *buf)
{
	struct iio_dev *indio_dev =
		platform_get_drvdata(to_platform_device(dev));
	struct at91_adc_state *st = iio_priv(indio_dev);

	return scnprintf(buf, PAGE_SIZE, "%d\n", !!st->dma_st.dma_chan);
}

static ssize_t at91_adc_get_watermark(struct device *dev,
				      struct device_attribute *attr, char *buf)
{
	struct iio_dev *indio_dev =
		platform_get_drvdata(to_platform_device(dev));
	struct at91_adc_state *st = iio_priv(indio_dev);

	return scnprintf(buf, PAGE_SIZE, "%d\n", st->dma_st.watermark);
}

static IIO_DEVICE_ATTR(hwfifo_enabled, 0444,
		       at91_adc_get_fifo_state, NULL, 0);
static IIO_DEVICE_ATTR(hwfifo_watermark, 0444,
		       at91_adc_get_watermark, NULL, 0);

static IIO_CONST_ATTR(hwfifo_watermark_min, "2");
static IIO_CONST_ATTR(hwfifo_watermark_max, AT91_HWFIFO_MAX_SIZE_STR);

static const struct attribute *at91_adc_fifo_attributes[] = {
	&iio_const_attr_hwfifo_watermark_min.dev_attr.attr,
	&iio_const_attr_hwfifo_watermark_max.dev_attr.attr,
	&iio_dev_attr_hwfifo_watermark.dev_attr.attr,
	&iio_dev_attr_hwfifo_enabled.dev_attr.attr,
	NULL,
};

static int at91_adc_probe(struct platform_device *pdev)
{
	struct iio_dev *indio_dev;
	struct at91_adc_state *st;
	struct resource *res;
	int ret, i;
	u32 edge_type = IRQ_TYPE_NONE;

	indio_dev = devm_iio_device_alloc(&pdev->dev, sizeof(*st));
	if (!indio_dev)
		return -ENOMEM;

	indio_dev->dev.parent = &pdev->dev;
	indio_dev->name = dev_name(&pdev->dev);
	indio_dev->modes = INDIO_DIRECT_MODE;
	indio_dev->info = &at91_adc_info;
	indio_dev->channels = at91_adc_channels;
	indio_dev->num_channels = ARRAY_SIZE(at91_adc_channels);

	st = iio_priv(indio_dev);

	ret = of_property_read_u32(pdev->dev.of_node,
				   "atmel,min-sample-rate-hz",
				   &st->soc_info.min_sample_rate);
	if (ret) {
		dev_err(&pdev->dev,
			"invalid or missing value for atmel,min-sample-rate-hz\n");
		return ret;
	}

	ret = of_property_read_u32(pdev->dev.of_node,
				   "atmel,max-sample-rate-hz",
				   &st->soc_info.max_sample_rate);
	if (ret) {
		dev_err(&pdev->dev,
			"invalid or missing value for atmel,max-sample-rate-hz\n");
		return ret;
	}

	ret = of_property_read_u32(pdev->dev.of_node, "atmel,startup-time-ms",
				   &st->soc_info.startup_time);
	if (ret) {
		dev_err(&pdev->dev,
			"invalid or missing value for atmel,startup-time-ms\n");
		return ret;
	}

	ret = of_property_read_u32(pdev->dev.of_node,
				   "atmel,trigger-edge-type", &edge_type);
	if (ret) {
		dev_dbg(&pdev->dev,
			"atmel,trigger-edge-type not specified, only software trigger available\n");
	}
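
	/*
	 * Illustrative device-tree fragment for the properties parsed above
	 * (added for clarity, not part of the original source; the node name
	 * and values are hypothetical and board-specific):
	 *
	 *	adc: adc@fc030000 {
	 *		compatible = "atmel,sama5d2-adc";
	 *		atmel,min-sample-rate-hz = <200000>;
	 *		atmel,max-sample-rate-hz = <20000000>;
	 *		atmel,startup-time-ms = <4>;
	 *		atmel,trigger-edge-type = <IRQ_TYPE_EDGE_RISING>;
	 *	};
	 */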

	st->selected_trig = NULL;

	/* find the right trigger, or no trigger at all */
	for (i = 0; i < AT91_SAMA5D2_HW_TRIG_CNT + 1; i++)
		if (at91_adc_trigger_list[i].edge_type == edge_type) {
			st->selected_trig = &at91_adc_trigger_list[i];
			break;
		}

	if (!st->selected_trig) {
		dev_err(&pdev->dev, "invalid external trigger edge value\n");
		return -EINVAL;
	}

	init_waitqueue_head(&st->wq_data_available);
	mutex_init(&st->lock);

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res)
		return -EINVAL;

	/* if we plan to use DMA, we need the physical address of the regs */
	st->dma_st.phys_addr = res->start;

	st->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(st->base))
		return PTR_ERR(st->base);

	st->irq = platform_get_irq(pdev, 0);
	if (st->irq <= 0) {
		if (!st->irq)
			st->irq = -ENXIO;

		return st->irq;
	}

	st->per_clk = devm_clk_get(&pdev->dev, "adc_clk");
	if (IS_ERR(st->per_clk))
		return PTR_ERR(st->per_clk);

	st->reg = devm_regulator_get(&pdev->dev, "vddana");
	if (IS_ERR(st->reg))
		return PTR_ERR(st->reg);

	st->vref = devm_regulator_get(&pdev->dev, "vref");
	if (IS_ERR(st->vref))
		return PTR_ERR(st->vref);

	ret = devm_request_irq(&pdev->dev, st->irq, at91_adc_interrupt, 0,
			       pdev->dev.driver->name, indio_dev);
	if (ret)
		return ret;

	ret = regulator_enable(st->reg);
	if (ret)
		return ret;

	ret = regulator_enable(st->vref);
	if (ret)
		goto reg_disable;

	st->vref_uv = regulator_get_voltage(st->vref);
	if (st->vref_uv <= 0) {
		ret = -EINVAL;
		goto vref_disable;
	}

	at91_adc_hw_init(st);

	ret = clk_prepare_enable(st->per_clk);
	if (ret)
		goto vref_disable;

	platform_set_drvdata(pdev, indio_dev);

	if (st->selected_trig->hw_trig) {
		ret = at91_adc_buffer_init(indio_dev);
		if (ret < 0) {
			dev_err(&pdev->dev, "couldn't initialize the buffer.\n");
			goto per_clk_disable_unprepare;
		}

		ret = at91_adc_trigger_init(indio_dev);
		if (ret < 0) {
			dev_err(&pdev->dev, "couldn't setup the triggers.\n");
			goto per_clk_disable_unprepare;
		}
		/*
		 * Initially the iio buffer has a length of 2 and
		 * a watermark of 1.
		 */
		st->dma_st.watermark = 1;

		iio_buffer_set_attrs(indio_dev->buffer,
				     at91_adc_fifo_attributes);
	}

	if (dma_coerce_mask_and_coherent(&indio_dev->dev, DMA_BIT_MASK(32)))
		dev_info(&pdev->dev, "cannot set DMA mask to 32-bit\n");

	ret = iio_device_register(indio_dev);
	if (ret < 0)
		goto dma_disable;

	if (st->selected_trig->hw_trig)
		dev_info(&pdev->dev, "setting up trigger as %s\n",
			 st->selected_trig->name);

	dev_info(&pdev->dev, "version: %x\n",
		 readl_relaxed(st->base + AT91_SAMA5D2_VERSION));

	return 0;

dma_disable:
	at91_adc_dma_disable(pdev);
per_clk_disable_unprepare:
	clk_disable_unprepare(st->per_clk);
vref_disable:
	regulator_disable(st->vref);
reg_disable:
	regulator_disable(st->reg);
	return ret;
}

static int at91_adc_remove(struct platform_device *pdev)
{
	struct iio_dev *indio_dev = platform_get_drvdata(pdev);
	struct at91_adc_state *st = iio_priv(indio_dev);

	iio_device_unregister(indio_dev);

	at91_adc_dma_disable(pdev);

	clk_disable_unprepare(st->per_clk);

	regulator_disable(st->vref);
	regulator_disable(st->reg);

	return 0;
}

static __maybe_unused int at91_adc_suspend(struct device *dev)
{
	struct iio_dev *indio_dev =
		platform_get_drvdata(to_platform_device(dev));
	struct at91_adc_state *st = iio_priv(indio_dev);

	/*
	 * Do a software reset of the ADC before we go to suspend.
	 * This will ensure that all pins are freed from being muxed by the
	 * ADC and can be used by other devices.
	 * Otherwise, the ADC will hog them and we can't enter suspend mode.
	 */
	at91_adc_writel(st, AT91_SAMA5D2_CR, AT91_SAMA5D2_CR_SWRST);

	clk_disable_unprepare(st->per_clk);
	regulator_disable(st->vref);
	regulator_disable(st->reg);

	return pinctrl_pm_select_sleep_state(dev);
}

static __maybe_unused int at91_adc_resume(struct device *dev)
{
	struct iio_dev *indio_dev =
		platform_get_drvdata(to_platform_device(dev));
	struct at91_adc_state *st = iio_priv(indio_dev);
	int ret;

	ret = pinctrl_pm_select_default_state(dev);
	if (ret)
		goto resume_failed;

	ret = regulator_enable(st->reg);
	if (ret)
		goto resume_failed;

	ret = regulator_enable(st->vref);
	if (ret)
		goto reg_disable_resume;

	ret = clk_prepare_enable(st->per_clk);
	if (ret)
		goto vref_disable_resume;

	at91_adc_hw_init(st);

	/* reconfiguring trigger hardware state */
	if (iio_buffer_enabled(indio_dev))
		at91_adc_configure_trigger(st->trig, true);

	return 0;

vref_disable_resume:
	regulator_disable(st->vref);
reg_disable_resume:
	regulator_disable(st->reg);
resume_failed:
	dev_err(&indio_dev->dev, "failed to resume\n");
	return ret;
}

static SIMPLE_DEV_PM_OPS(at91_adc_pm_ops, at91_adc_suspend, at91_adc_resume);

static const struct of_device_id at91_adc_dt_match[] = {
	{
		.compatible = "atmel,sama5d2-adc",
	}, {
		/* sentinel */
	}
};
MODULE_DEVICE_TABLE(of, at91_adc_dt_match);

static struct platform_driver at91_adc_driver = {
	.probe = at91_adc_probe,
	.remove = at91_adc_remove,
	.driver = {
		.name = "at91-sama5d2_adc",
		.of_match_table = at91_adc_dt_match,
		.pm = &at91_adc_pm_ops,
	},
};
module_platform_driver(at91_adc_driver)

MODULE_AUTHOR("Ludovic Desroches <ludovic.desroches@atmel.com>");
MODULE_DESCRIPTION("Atmel AT91 SAMA5D2 ADC");
MODULE_LICENSE("GPL v2");