// SPDX-License-Identifier: GPL-2.0
/*
 * Driver for STM32 Digital Camera Memory Interface
 *
 * Copyright (C) STMicroelectronics SA 2017
 * Authors: Yannick Fertre <yannick.fertre@st.com>
 *          Hugues Fruchet <hugues.fruchet@st.com>
 *          for STMicroelectronics.
 *
 * This driver is based on atmel_isi.c
 */
#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/of_graph.h>
#include <linux/pinctrl/consumer.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/videodev2.h>

#include <media/v4l2-ctrls.h>
#include <media/v4l2-dev.h>
#include <media/v4l2-device.h>
#include <media/v4l2-event.h>
#include <media/v4l2-fwnode.h>
#include <media/v4l2-image-sizes.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-rect.h>
#include <media/videobuf2-dma-contig.h>
#define DRV_NAME "stm32-dcmi"

/* Registers offset for DCMI */
#define DCMI_CR		0x00	/* Control Register */
#define DCMI_SR		0x04	/* Status Register */
#define DCMI_RIS	0x08	/* Raw Interrupt Status register */
#define DCMI_IER	0x0C	/* Interrupt Enable Register */
#define DCMI_MIS	0x10	/* Masked Interrupt Status register */
#define DCMI_ICR	0x14	/* Interrupt Clear Register */
#define DCMI_ESCR	0x18	/* Embedded Synchronization Code Register */
#define DCMI_ESUR	0x1C	/* Embedded Synchronization Unmask Register */
#define DCMI_CWSTRT	0x20	/* Crop Window STaRT */
#define DCMI_CWSIZE	0x24	/* Crop Window SIZE */
#define DCMI_DR		0x28	/* Data Register */
#define DCMI_IDR	0x2C	/* IDentifier Register */
/* Bits definition for control register (DCMI_CR) */
#define CR_CAPTURE	BIT(0)
#define CR_CM		BIT(1)
#define CR_CROP		BIT(2)
#define CR_JPEG		BIT(3)
#define CR_ESS		BIT(4)
#define CR_PCKPOL	BIT(5)
#define CR_HSPOL	BIT(6)
#define CR_VSPOL	BIT(7)
#define CR_FCRC_0	BIT(8)
#define CR_FCRC_1	BIT(9)
#define CR_EDM_0	BIT(10)
#define CR_EDM_1	BIT(11)
#define CR_ENABLE	BIT(14)
/* Bits definition for status register (DCMI_SR) */
#define SR_HSYNC	BIT(0)
#define SR_VSYNC	BIT(1)
/*
 * Bits definition for interrupt registers
 * (DCMI_RIS, DCMI_IER, DCMI_MIS, DCMI_ICR)
 */
#define IT_FRAME	BIT(0)
#define IT_OVR		BIT(1)
#define IT_ERR		BIT(2)
#define IT_VSYNC	BIT(3)
#define IT_LINE		BIT(4)
enum state {
	STOPPED = 0,
	WAIT_FOR_BUFFER,
	RUNNING,
};

#define MIN_WIDTH	16U
#define MAX_WIDTH	2592U
#define MIN_HEIGHT	16U
#define MAX_HEIGHT	2592U

#define TIMEOUT_MS	1000
struct dcmi_graph_entity {
	struct device_node		*node;

	struct v4l2_async_subdev	asd;
	struct v4l2_subdev		*subdev;
};

struct dcmi_format {
	u32	fourcc;
	u32	mbus_code;
	u8	bpp;
};

struct dcmi_framesize {
	u32	width;
	u32	height;
};

struct dcmi_buf {
	struct vb2_v4l2_buffer	vb;
	bool			prepared;
	dma_addr_t		paddr;
	size_t			size;
	struct list_head	list;
};

struct stm32_dcmi {
	/* Protects the access of variables shared within the interrupt */
	spinlock_t			irqlock;
	struct device			*dev;
	void __iomem			*regs;
	struct resource			*res;
	struct reset_control		*rstc;
	int				sequence;
	struct list_head		buffers;
	struct dcmi_buf			*active;

	struct v4l2_device		v4l2_dev;
	struct video_device		*vdev;
	struct v4l2_async_notifier	notifier;
	struct dcmi_graph_entity	entity;
	struct v4l2_format		fmt;
	struct v4l2_rect		crop;
	bool				do_crop;

	const struct dcmi_format	**sd_formats;
	unsigned int			num_of_sd_formats;
	const struct dcmi_format	*sd_format;
	struct dcmi_framesize		*sd_framesizes;
	unsigned int			num_of_sd_framesizes;
	struct dcmi_framesize		sd_framesize;
	struct v4l2_rect		sd_bounds;

	/* Protect this data structure */
	struct mutex			lock;
	struct vb2_queue		queue;

	struct v4l2_fwnode_bus_parallel	bus;
	struct completion		complete;
	struct clk			*mclk;
	enum state			state;

	struct dma_chan			*dma_chan;
	dma_cookie_t			dma_cookie;
	u32				misr;
	int				errors_count;
	int				overrun_count;
	int				buffers_count;

	/* Ensure DMA operations atomicity */
	struct mutex			dma_lock;
};
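
/*
 * Locking summary:
 * - irqlock protects the buffer list, the active buffer and the capture
 *   state shared with the interrupt handlers;
 * - lock serializes user-space accesses (file operations, format changes);
 * - dma_lock serializes the prepare/submit/terminate sequence on dma_chan.
 */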
static inline struct stm32_dcmi *notifier_to_dcmi(struct v4l2_async_notifier *n)
{
	return container_of(n, struct stm32_dcmi, notifier);
}
static inline u32 reg_read(void __iomem *base, u32 reg)
{
	return readl_relaxed(base + reg);
}

static inline void reg_write(void __iomem *base, u32 reg, u32 val)
{
	writel_relaxed(val, base + reg);
}

static inline void reg_set(void __iomem *base, u32 reg, u32 mask)
{
	reg_write(base, reg, reg_read(base, reg) | mask);
}

static inline void reg_clear(void __iomem *base, u32 reg, u32 mask)
{
	reg_write(base, reg, reg_read(base, reg) & ~mask);
}

static int dcmi_start_capture(struct stm32_dcmi *dcmi, struct dcmi_buf *buf);
static void dcmi_buffer_done(struct stm32_dcmi *dcmi,
			     struct dcmi_buf *buf,
			     size_t bytesused,
			     int err)
{
	struct vb2_v4l2_buffer *vbuf;

	if (!buf)
		return;

	list_del_init(&buf->list);

	vbuf = &buf->vb;

	vbuf->sequence = dcmi->sequence++;
	vbuf->field = V4L2_FIELD_NONE;
	vbuf->vb2_buf.timestamp = ktime_get_ns();
	vb2_set_plane_payload(&vbuf->vb2_buf, 0, bytesused);
	vb2_buffer_done(&vbuf->vb2_buf,
			err ? VB2_BUF_STATE_ERROR : VB2_BUF_STATE_DONE);
	dev_dbg(dcmi->dev, "buffer[%d] done seq=%d, bytesused=%zu\n",
		vbuf->vb2_buf.index, vbuf->sequence, bytesused);

	dcmi->buffers_count++;
}
static int dcmi_restart_capture(struct stm32_dcmi *dcmi)
{
	struct dcmi_buf *buf;

	spin_lock_irq(&dcmi->irqlock);

	if (dcmi->state != RUNNING) {
		spin_unlock_irq(&dcmi->irqlock);
		return -EINVAL;
	}

	/* Restart a new DMA transfer with next buffer */
	if (list_empty(&dcmi->buffers)) {
		dev_dbg(dcmi->dev, "Capture restart is deferred to next buffer queueing\n");
		dcmi->state = WAIT_FOR_BUFFER;
		spin_unlock_irq(&dcmi->irqlock);
		return 0;
	}

	buf = list_entry(dcmi->buffers.next, struct dcmi_buf, list);
	dcmi->active = buf;

	spin_unlock_irq(&dcmi->irqlock);

	return dcmi_start_capture(dcmi, buf);
}
static void dcmi_dma_callback(void *param)
{
	struct stm32_dcmi *dcmi = (struct stm32_dcmi *)param;
	struct dma_tx_state state;
	enum dma_status status;
	struct dcmi_buf *buf = dcmi->active;

	spin_lock_irq(&dcmi->irqlock);

	/* Check DMA status */
	status = dmaengine_tx_status(dcmi->dma_chan, dcmi->dma_cookie, &state);

	switch (status) {
	case DMA_IN_PROGRESS:
		dev_dbg(dcmi->dev, "%s: Received DMA_IN_PROGRESS\n", __func__);
		break;
	case DMA_PAUSED:
		dev_err(dcmi->dev, "%s: Received DMA_PAUSED\n", __func__);
		break;
	case DMA_ERROR:
		dev_err(dcmi->dev, "%s: Received DMA_ERROR\n", __func__);

		/* Return buffer to V4L2 in error state */
		dcmi_buffer_done(dcmi, buf, 0, -EIO);
		break;
	case DMA_COMPLETE:
		dev_dbg(dcmi->dev, "%s: Received DMA_COMPLETE\n", __func__);

		/* Return buffer to V4L2 */
		dcmi_buffer_done(dcmi, buf, buf->size, 0);

		spin_unlock_irq(&dcmi->irqlock);

		/* Restart capture */
		if (dcmi_restart_capture(dcmi))
			dev_err(dcmi->dev, "%s: Cannot restart capture on DMA complete\n",
				__func__);
		return;
	default:
		dev_err(dcmi->dev, "%s: Received unknown status\n", __func__);
		break;
	}

	spin_unlock_irq(&dcmi->irqlock);
}
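
/*
 * DMA is configured for device-to-memory transfers: the source is the DCMI
 * data register (DCMI_DR) in the peripheral address space, read as 32-bit
 * words, and the destination is the contiguous buffer allocated by
 * videobuf2-dma-contig (buf->paddr).
 */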
static int dcmi_start_dma(struct stm32_dcmi *dcmi,
			  struct dcmi_buf *buf)
{
	struct dma_async_tx_descriptor *desc = NULL;
	struct dma_slave_config config;
	int ret;

	memset(&config, 0, sizeof(config));

	config.src_addr = (dma_addr_t)dcmi->res->start + DCMI_DR;
	config.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	config.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	config.dst_maxburst = 4;

	/* Configure DMA channel */
	ret = dmaengine_slave_config(dcmi->dma_chan, &config);
	if (ret < 0) {
		dev_err(dcmi->dev, "%s: DMA channel config failed (%d)\n",
			__func__, ret);
		return ret;
	}

	/*
	 * Avoid call of dmaengine_terminate_all() between
	 * dmaengine_prep_slave_single() and dmaengine_submit()
	 * by locking the whole DMA submission sequence
	 */
	mutex_lock(&dcmi->dma_lock);

	/* Prepare a DMA transaction */
	desc = dmaengine_prep_slave_single(dcmi->dma_chan, buf->paddr,
					   buf->size,
					   DMA_DEV_TO_MEM,
					   DMA_PREP_INTERRUPT);
	if (!desc) {
		dev_err(dcmi->dev, "%s: DMA dmaengine_prep_slave_single failed for buffer phy=%pad size=%zu\n",
			__func__, &buf->paddr, buf->size);
		mutex_unlock(&dcmi->dma_lock);
		return -EINVAL;
	}

	/* Set completion callback routine for notification */
	desc->callback = dcmi_dma_callback;
	desc->callback_param = dcmi;

	/* Push current DMA transaction in the pending queue */
	dcmi->dma_cookie = dmaengine_submit(desc);
	if (dma_submit_error(dcmi->dma_cookie)) {
		dev_err(dcmi->dev, "%s: DMA submission failed\n", __func__);
		mutex_unlock(&dcmi->dma_lock);
		return -ENXIO;
	}

	mutex_unlock(&dcmi->dma_lock);

	dma_async_issue_pending(dcmi->dma_chan);

	return 0;
}
static int dcmi_start_capture(struct stm32_dcmi *dcmi, struct dcmi_buf *buf)
{
	int ret;

	if (!buf)
		return -EINVAL;

	ret = dcmi_start_dma(dcmi, buf);
	if (ret) {
		dcmi->errors_count++;
		return ret;
	}

	/* Enable capture */
	reg_set(dcmi->regs, DCMI_CR, CR_CAPTURE);

	return 0;
}
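
/*
 * The horizontal fields of DCMI_CWSTRT/DCMI_CWSIZE are expressed in pixel
 * clock cycles rather than pixels; the "<< 1" on crop width and left offset
 * below accounts for the two cycles needed per pixel with the 16-bit
 * formats handled by this driver.
 */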
static void dcmi_set_crop(struct stm32_dcmi *dcmi)
{
	u32 size, start;

	/* Crop resolution */
	size = ((dcmi->crop.height - 1) << 16) |
		((dcmi->crop.width << 1) - 1);
	reg_write(dcmi->regs, DCMI_CWSIZE, size);

	/* Crop start point */
	start = ((dcmi->crop.top) << 16) |
		((dcmi->crop.left << 1));
	reg_write(dcmi->regs, DCMI_CWSTRT, start);

	dev_dbg(dcmi->dev, "Cropping to %ux%u@%u:%u\n",
		dcmi->crop.width, dcmi->crop.height,
		dcmi->crop.left, dcmi->crop.top);

	/* Enable crop */
	reg_set(dcmi->regs, DCMI_CR, CR_CROP);
}
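
/*
 * JPEG frames have an unpredictable size, so the DMA transfer is sized for
 * the whole buffer and never completes by itself; the received byte count
 * is recovered from the DMA residue as (buf->size - state.residue).
 */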
static void dcmi_process_jpeg(struct stm32_dcmi *dcmi)
{
	struct dma_tx_state state;
	enum dma_status status;
	struct dcmi_buf *buf = dcmi->active;

	if (!buf)
		return;

	/*
	 * Because of variable JPEG buffer size sent by sensor,
	 * DMA transfer never completes due to transfer size never reached.
	 * In order to ensure that all the JPEG data are transferred
	 * in active buffer memory, DMA is drained.
	 * Then DMA tx status gives the amount of data transferred
	 * to memory, which is then returned to V4L2 through the active
	 * buffer payload.
	 */

	/* Drain DMA */
	dmaengine_synchronize(dcmi->dma_chan);

	/* Get DMA residue to get JPEG size */
	status = dmaengine_tx_status(dcmi->dma_chan, dcmi->dma_cookie, &state);
	if (status != DMA_ERROR && state.residue < buf->size) {
		/* Return JPEG buffer to V4L2 with received JPEG buffer size */
		dcmi_buffer_done(dcmi, buf, buf->size - state.residue, 0);
	} else {
		dcmi->errors_count++;
		dev_err(dcmi->dev, "%s: Cannot get JPEG size from DMA\n",
			__func__);
		/* Return JPEG buffer to V4L2 in ERROR state */
		dcmi_buffer_done(dcmi, buf, 0, -EIO);
	}

	/* Abort DMA operation */
	dmaengine_terminate_all(dcmi->dma_chan);

	/* Restart capture */
	if (dcmi_restart_capture(dcmi))
		dev_err(dcmi->dev, "%s: Cannot restart capture on JPEG received\n",
			__func__);
}
static irqreturn_t dcmi_irq_thread(int irq, void *arg)
{
	struct stm32_dcmi *dcmi = arg;

	spin_lock_irq(&dcmi->irqlock);

	if ((dcmi->misr & IT_OVR) || (dcmi->misr & IT_ERR)) {
		dcmi->errors_count++;
		if (dcmi->misr & IT_OVR)
			dcmi->overrun_count++;
	}

	if (dcmi->sd_format->fourcc == V4L2_PIX_FMT_JPEG &&
	    dcmi->misr & IT_FRAME) {
		/* JPEG received */
		spin_unlock_irq(&dcmi->irqlock);
		dcmi_process_jpeg(dcmi);
		return IRQ_HANDLED;
	}

	spin_unlock_irq(&dcmi->irqlock);
	return IRQ_HANDLED;
}
static irqreturn_t dcmi_irq_callback(int irq, void *arg)
{
	struct stm32_dcmi *dcmi = arg;
	unsigned long flags;

	spin_lock_irqsave(&dcmi->irqlock, flags);

	dcmi->misr = reg_read(dcmi->regs, DCMI_MIS);

	/* Clear interrupt */
	reg_set(dcmi->regs, DCMI_ICR, IT_FRAME | IT_OVR | IT_ERR);

	spin_unlock_irqrestore(&dcmi->irqlock, flags);

	return IRQ_WAKE_THREAD;
}
static int dcmi_queue_setup(struct vb2_queue *vq,
			    unsigned int *nbuffers,
			    unsigned int *nplanes,
			    unsigned int sizes[],
			    struct device *alloc_devs[])
{
	struct stm32_dcmi *dcmi = vb2_get_drv_priv(vq);
	unsigned int size;

	size = dcmi->fmt.fmt.pix.sizeimage;

	/* Make sure the image size is large enough */
	if (*nplanes)
		return sizes[0] < size ? -EINVAL : 0;

	*nplanes = 1;
	sizes[0] = size;

	dev_dbg(dcmi->dev, "Setup queue, count=%d, size=%d\n",
		*nbuffers, size);

	return 0;
}
static int dcmi_buf_init(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct dcmi_buf *buf = container_of(vbuf, struct dcmi_buf, vb);

	INIT_LIST_HEAD(&buf->list);

	return 0;
}
static int dcmi_buf_prepare(struct vb2_buffer *vb)
{
	struct stm32_dcmi *dcmi = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct dcmi_buf *buf = container_of(vbuf, struct dcmi_buf, vb);
	unsigned long size;

	size = dcmi->fmt.fmt.pix.sizeimage;

	if (vb2_plane_size(vb, 0) < size) {
		dev_err(dcmi->dev, "%s data will not fit into plane (%lu < %lu)\n",
			__func__, vb2_plane_size(vb, 0), size);
		return -EINVAL;
	}

	vb2_set_plane_payload(vb, 0, size);

	if (!buf->prepared) {
		/* Get memory addresses */
		buf->paddr =
			vb2_dma_contig_plane_dma_addr(&buf->vb.vb2_buf, 0);
		buf->size = vb2_plane_size(&buf->vb.vb2_buf, 0);
		buf->prepared = true;

		vb2_set_plane_payload(&buf->vb.vb2_buf, 0, buf->size);

		dev_dbg(dcmi->dev, "buffer[%d] phy=%pad size=%zu\n",
			vb->index, &buf->paddr, buf->size);
	}

	return 0;
}
static void dcmi_buf_queue(struct vb2_buffer *vb)
{
	struct stm32_dcmi *dcmi = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct dcmi_buf *buf = container_of(vbuf, struct dcmi_buf, vb);

	spin_lock_irq(&dcmi->irqlock);

	/* Enqueue to video buffers list */
	list_add_tail(&buf->list, &dcmi->buffers);

	if (dcmi->state == WAIT_FOR_BUFFER) {
		dcmi->state = RUNNING;
		dcmi->active = buf;

		dev_dbg(dcmi->dev, "Starting capture on buffer[%d] queued\n",
			buf->vb.vb2_buf.index);

		spin_unlock_irq(&dcmi->irqlock);
		if (dcmi_start_capture(dcmi, buf))
			dev_err(dcmi->dev, "%s: Cannot restart capture on overflow or error\n",
				__func__);
		return;
	}

	spin_unlock_irq(&dcmi->irqlock);
}
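
/*
 * At stream-on the DCMI control register is rebuilt from the parallel bus
 * description parsed from the device tree endpoint: the EDM bits select the
 * bus width, and HSPOL/VSPOL/PCKPOL follow the synchronization and pixel
 * clock polarities reported in dcmi->bus.flags.
 */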
static int dcmi_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct stm32_dcmi *dcmi = vb2_get_drv_priv(vq);
	struct dcmi_buf *buf, *node;
	u32 val = 0;
	int ret;

	ret = pm_runtime_get_sync(dcmi->dev);
	if (ret < 0) {
		dev_err(dcmi->dev, "%s: Failed to start streaming, cannot get sync (%d)\n",
			__func__, ret);
		goto err_release_buffers;
	}

	/* Enable stream on the sub device */
	ret = v4l2_subdev_call(dcmi->entity.subdev, video, s_stream, 1);
	if (ret && ret != -ENOIOCTLCMD) {
		dev_err(dcmi->dev, "%s: Failed to start streaming, subdev streamon error",
			__func__);
		goto err_pm_put;
	}

	spin_lock_irq(&dcmi->irqlock);

	/* Set bus width */
	switch (dcmi->bus.bus_width) {
	case 14:
		val |= CR_EDM_0 | CR_EDM_1;
		break;
	case 12:
		val |= CR_EDM_1;
		break;
	case 10:
		val |= CR_EDM_0;
		break;
	default:
		/* Set bus width to 8 bits by default */
		break;
	}

	/* Set vertical synchronization polarity */
	if (dcmi->bus.flags & V4L2_MBUS_VSYNC_ACTIVE_HIGH)
		val |= CR_VSPOL;

	/* Set horizontal synchronization polarity */
	if (dcmi->bus.flags & V4L2_MBUS_HSYNC_ACTIVE_HIGH)
		val |= CR_HSPOL;

	/* Set pixel clock polarity */
	if (dcmi->bus.flags & V4L2_MBUS_PCLK_SAMPLE_RISING)
		val |= CR_PCKPOL;

	reg_write(dcmi->regs, DCMI_CR, val);

	/* Set crop */
	if (dcmi->do_crop)
		dcmi_set_crop(dcmi);

	/* Enable jpeg capture */
	if (dcmi->sd_format->fourcc == V4L2_PIX_FMT_JPEG)
		reg_set(dcmi->regs, DCMI_CR, CR_CM);/* Snapshot mode */

	/* Enable dcmi */
	reg_set(dcmi->regs, DCMI_CR, CR_ENABLE);

	dcmi->sequence = 0;
	dcmi->errors_count = 0;
	dcmi->overrun_count = 0;
	dcmi->buffers_count = 0;

	/*
	 * Start transfer if at least one buffer has been queued,
	 * otherwise transfer is deferred at buffer queueing
	 */
	if (list_empty(&dcmi->buffers)) {
		dev_dbg(dcmi->dev, "Start streaming is deferred to next buffer queueing\n");
		dcmi->state = WAIT_FOR_BUFFER;
		spin_unlock_irq(&dcmi->irqlock);
		return 0;
	}

	buf = list_entry(dcmi->buffers.next, struct dcmi_buf, list);
	dcmi->active = buf;

	dcmi->state = RUNNING;

	dev_dbg(dcmi->dev, "Start streaming, starting capture\n");

	spin_unlock_irq(&dcmi->irqlock);
	ret = dcmi_start_capture(dcmi, buf);
	if (ret) {
		dev_err(dcmi->dev, "%s: Start streaming failed, cannot start capture\n",
			__func__);
		goto err_subdev_streamoff;
	}

	/* Enable interruptions */
	reg_set(dcmi->regs, DCMI_IER, IT_FRAME | IT_OVR | IT_ERR);

	return 0;

err_subdev_streamoff:
	v4l2_subdev_call(dcmi->entity.subdev, video, s_stream, 0);

err_pm_put:
	pm_runtime_put(dcmi->dev);

err_release_buffers:
	spin_lock_irq(&dcmi->irqlock);
	/*
	 * Return all buffers to vb2 in QUEUED state.
	 * This will give ownership back to userspace
	 */
	list_for_each_entry_safe(buf, node, &dcmi->buffers, list) {
		list_del_init(&buf->list);
		vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_QUEUED);
	}
	spin_unlock_irq(&dcmi->irqlock);

	return ret;
}
static void dcmi_stop_streaming(struct vb2_queue *vq)
{
	struct stm32_dcmi *dcmi = vb2_get_drv_priv(vq);
	struct dcmi_buf *buf, *node;
	int ret;

	/* Disable stream on the sub device */
	ret = v4l2_subdev_call(dcmi->entity.subdev, video, s_stream, 0);
	if (ret && ret != -ENOIOCTLCMD)
		dev_err(dcmi->dev, "%s: Failed to stop streaming, subdev streamoff error (%d)\n",
			__func__, ret);

	spin_lock_irq(&dcmi->irqlock);

	/* Disable interruptions */
	reg_clear(dcmi->regs, DCMI_IER, IT_FRAME | IT_OVR | IT_ERR);

	/* Disable DCMI */
	reg_clear(dcmi->regs, DCMI_CR, CR_ENABLE);

	/* Return all queued buffers to vb2 in ERROR state */
	list_for_each_entry_safe(buf, node, &dcmi->buffers, list) {
		list_del_init(&buf->list);
		vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
	}

	dcmi->active = NULL;
	dcmi->state = STOPPED;

	spin_unlock_irq(&dcmi->irqlock);

	/* Stop all pending DMA operations */
	mutex_lock(&dcmi->dma_lock);
	dmaengine_terminate_all(dcmi->dma_chan);
	mutex_unlock(&dcmi->dma_lock);

	pm_runtime_put(dcmi->dev);

	if (dcmi->errors_count)
		dev_warn(dcmi->dev, "Some errors found while streaming: errors=%d (overrun=%d), buffers=%d\n",
			 dcmi->errors_count, dcmi->overrun_count,
			 dcmi->buffers_count);
	dev_dbg(dcmi->dev, "Stop streaming, errors=%d (overrun=%d), buffers=%d\n",
		dcmi->errors_count, dcmi->overrun_count,
		dcmi->buffers_count);
}
static const struct vb2_ops dcmi_video_qops = {
	.queue_setup		= dcmi_queue_setup,
	.buf_init		= dcmi_buf_init,
	.buf_prepare		= dcmi_buf_prepare,
	.buf_queue		= dcmi_buf_queue,
	.start_streaming	= dcmi_start_streaming,
	.stop_streaming		= dcmi_stop_streaming,
	.wait_prepare		= vb2_ops_wait_prepare,
	.wait_finish		= vb2_ops_wait_finish,
};
static int dcmi_g_fmt_vid_cap(struct file *file, void *priv,
			      struct v4l2_format *fmt)
{
	struct stm32_dcmi *dcmi = video_drvdata(file);

	*fmt = dcmi->fmt;

	return 0;
}
static const struct dcmi_format *find_format_by_fourcc(struct stm32_dcmi *dcmi,
							unsigned int fourcc)
{
	unsigned int num_formats = dcmi->num_of_sd_formats;
	const struct dcmi_format *fmt;
	unsigned int i;

	for (i = 0; i < num_formats; i++) {
		fmt = dcmi->sd_formats[i];
		if (fmt->fourcc == fourcc)
			return fmt;
	}

	return NULL;
}
static void __find_outer_frame_size(struct stm32_dcmi *dcmi,
				    struct v4l2_pix_format *pix,
				    struct dcmi_framesize *framesize)
{
	struct dcmi_framesize *match = NULL;
	unsigned int i;
	unsigned int min_err = UINT_MAX;

	for (i = 0; i < dcmi->num_of_sd_framesizes; i++) {
		struct dcmi_framesize *fsize = &dcmi->sd_framesizes[i];
		int w_err = (fsize->width - pix->width);
		int h_err = (fsize->height - pix->height);
		int err = w_err + h_err;

		if (w_err >= 0 && h_err >= 0 && err < min_err) {
			min_err = err;
			match = fsize;
		}
	}
	if (!match)
		match = &dcmi->sd_framesizes[0];

	*framesize = *match;
}
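
/*
 * Format negotiation: the requested pixel format is mapped to a media bus
 * code, clamped to the DCMI limits, optionally enlarged to the closest
 * discrete sensor frame size when cropping is active, then proposed to the
 * sub-device with V4L2_SUBDEV_FORMAT_TRY so that the sensor state is not
 * modified by a pure "try" request.
 */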
static int dcmi_try_fmt(struct stm32_dcmi *dcmi, struct v4l2_format *f,
			const struct dcmi_format **sd_format,
			struct dcmi_framesize *sd_framesize)
{
	const struct dcmi_format *sd_fmt;
	struct dcmi_framesize sd_fsize;
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct v4l2_subdev_pad_config pad_cfg;
	struct v4l2_subdev_format format = {
		.which = V4L2_SUBDEV_FORMAT_TRY,
	};
	bool do_crop;
	int ret;

	sd_fmt = find_format_by_fourcc(dcmi, pix->pixelformat);
	if (!sd_fmt) {
		if (!dcmi->num_of_sd_formats)
			return -ENODATA;

		sd_fmt = dcmi->sd_formats[dcmi->num_of_sd_formats - 1];
		pix->pixelformat = sd_fmt->fourcc;
	}

	/* Limit to hardware capabilities */
	pix->width = clamp(pix->width, MIN_WIDTH, MAX_WIDTH);
	pix->height = clamp(pix->height, MIN_HEIGHT, MAX_HEIGHT);

	/* No crop if JPEG is requested */
	do_crop = dcmi->do_crop && (pix->pixelformat != V4L2_PIX_FMT_JPEG);

	if (do_crop && dcmi->num_of_sd_framesizes) {
		struct dcmi_framesize outer_sd_fsize;
		/*
		 * If crop is requested and the sensor has discrete frame
		 * sizes, select the frame size that is just larger than
		 * the request
		 */
		__find_outer_frame_size(dcmi, pix, &outer_sd_fsize);
		pix->width = outer_sd_fsize.width;
		pix->height = outer_sd_fsize.height;
	}

	v4l2_fill_mbus_format(&format.format, pix, sd_fmt->mbus_code);
	ret = v4l2_subdev_call(dcmi->entity.subdev, pad, set_fmt,
			       &pad_cfg, &format);
	if (ret < 0)
		return ret;

	/* Update pix with what the sensor can do */
	v4l2_fill_pix_format(pix, &format.format);

	/* Save resolution that sensor can actually do */
	sd_fsize.width = pix->width;
	sd_fsize.height = pix->height;

	if (do_crop) {
		struct v4l2_rect c = dcmi->crop;
		struct v4l2_rect max_rect;

		/*
		 * Adjust crop by making the intersection between
		 * format resolution request and crop request
		 */
		max_rect.top = 0;
		max_rect.left = 0;
		max_rect.width = pix->width;
		max_rect.height = pix->height;
		v4l2_rect_map_inside(&c, &max_rect);
		c.top  = clamp_t(s32, c.top, 0, pix->height - c.height);
		c.left = clamp_t(s32, c.left, 0, pix->width - c.width);
		dcmi->crop = c;

		/* Adjust format resolution request to crop */
		pix->width = dcmi->crop.width;
		pix->height = dcmi->crop.height;
	}

	pix->field = V4L2_FIELD_NONE;
	pix->bytesperline = pix->width * sd_fmt->bpp;
	pix->sizeimage = pix->bytesperline * pix->height;

	if (sd_format)
		*sd_format = sd_fmt;
	if (sd_framesize)
		*sd_framesize = sd_fsize;

	return 0;
}
static int dcmi_set_fmt(struct stm32_dcmi *dcmi, struct v4l2_format *f)
{
	struct v4l2_subdev_format format = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	const struct dcmi_format *sd_format;
	struct dcmi_framesize sd_framesize;
	struct v4l2_mbus_framefmt *mf = &format.format;
	struct v4l2_pix_format *pix = &f->fmt.pix;
	int ret;

	/*
	 * Try format, fmt.width/height could have been changed
	 * to match sensor capability or crop request
	 * sd_format & sd_framesize will contain what subdev
	 * can do for this request.
	 */
	ret = dcmi_try_fmt(dcmi, f, &sd_format, &sd_framesize);
	if (ret)
		return ret;

	/* Disable crop if JPEG is requested */
	if (pix->pixelformat == V4L2_PIX_FMT_JPEG)
		dcmi->do_crop = false;

	/* pix to mbus format */
	v4l2_fill_mbus_format(mf, pix,
			      sd_format->mbus_code);
	mf->width = sd_framesize.width;
	mf->height = sd_framesize.height;

	ret = v4l2_subdev_call(dcmi->entity.subdev, pad,
			       set_fmt, NULL, &format);
	if (ret < 0)
		return ret;

	dev_dbg(dcmi->dev, "Sensor format set to 0x%x %ux%u\n",
		mf->code, mf->width, mf->height);
	dev_dbg(dcmi->dev, "Buffer format set to %4.4s %ux%u\n",
		(char *)&pix->pixelformat,
		pix->width, pix->height);

	dcmi->fmt = *f;
	dcmi->sd_format = sd_format;
	dcmi->sd_framesize = sd_framesize;

	return 0;
}
static int dcmi_s_fmt_vid_cap(struct file *file, void *priv,
			      struct v4l2_format *f)
{
	struct stm32_dcmi *dcmi = video_drvdata(file);

	if (vb2_is_streaming(&dcmi->queue))
		return -EBUSY;

	return dcmi_set_fmt(dcmi, f);
}
static int dcmi_try_fmt_vid_cap(struct file *file, void *priv,
				struct v4l2_format *f)
{
	struct stm32_dcmi *dcmi = video_drvdata(file);

	return dcmi_try_fmt(dcmi, f, NULL, NULL);
}
static int dcmi_enum_fmt_vid_cap(struct file *file, void *priv,
				 struct v4l2_fmtdesc *f)
{
	struct stm32_dcmi *dcmi = video_drvdata(file);

	if (f->index >= dcmi->num_of_sd_formats)
		return -EINVAL;

	f->pixelformat = dcmi->sd_formats[f->index]->fourcc;
	return 0;
}
static int dcmi_get_sensor_format(struct stm32_dcmi *dcmi,
				  struct v4l2_pix_format *pix)
{
	struct v4l2_subdev_format fmt = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	int ret;

	ret = v4l2_subdev_call(dcmi->entity.subdev, pad, get_fmt, NULL, &fmt);
	if (ret)
		return ret;

	v4l2_fill_pix_format(pix, &fmt.format);

	return 0;
}
static int dcmi_set_sensor_format(struct stm32_dcmi *dcmi,
				  struct v4l2_pix_format *pix)
{
	const struct dcmi_format *sd_fmt;
	struct v4l2_subdev_format format = {
		.which = V4L2_SUBDEV_FORMAT_TRY,
	};
	struct v4l2_subdev_pad_config pad_cfg;
	int ret;

	sd_fmt = find_format_by_fourcc(dcmi, pix->pixelformat);
	if (!sd_fmt) {
		if (!dcmi->num_of_sd_formats)
			return -ENODATA;

		sd_fmt = dcmi->sd_formats[dcmi->num_of_sd_formats - 1];
		pix->pixelformat = sd_fmt->fourcc;
	}

	v4l2_fill_mbus_format(&format.format, pix, sd_fmt->mbus_code);
	ret = v4l2_subdev_call(dcmi->entity.subdev, pad, set_fmt,
			       &pad_cfg, &format);
	if (ret < 0)
		return ret;

	return 0;
}
static int dcmi_get_sensor_bounds(struct stm32_dcmi *dcmi,
				  struct v4l2_rect *r)
{
	struct v4l2_subdev_selection bounds = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
		.target = V4L2_SEL_TGT_CROP_BOUNDS,
	};
	unsigned int max_width, max_height, max_pixsize;
	struct v4l2_pix_format pix;
	unsigned int i;
	int ret;

	/*
	 * Get sensor bounds first
	 */
	ret = v4l2_subdev_call(dcmi->entity.subdev, pad, get_selection,
			       NULL, &bounds);
	if (!ret)
		*r = bounds.r;
	if (ret != -ENOIOCTLCMD)
		return ret;

	/*
	 * If selection is not implemented,
	 * fallback by enumerating sensor frame sizes
	 * and take the largest one
	 */
	max_width = 0;
	max_height = 0;
	max_pixsize = 0;
	for (i = 0; i < dcmi->num_of_sd_framesizes; i++) {
		struct dcmi_framesize *fsize = &dcmi->sd_framesizes[i];
		unsigned int pixsize = fsize->width * fsize->height;

		if (pixsize > max_pixsize) {
			max_pixsize = pixsize;
			max_width = fsize->width;
			max_height = fsize->height;
		}
	}
	if (max_pixsize > 0) {
		r->top = 0;
		r->left = 0;
		r->width = max_width;
		r->height = max_height;
		return 0;
	}

	/*
	 * If frame sizes enumeration is not implemented,
	 * fallback by getting current sensor frame size
	 */
	ret = dcmi_get_sensor_format(dcmi, &pix);
	if (ret)
		return ret;

	r->top = 0;
	r->left = 0;
	r->width = pix.width;
	r->height = pix.height;

	return 0;
}
static int dcmi_g_selection(struct file *file, void *fh,
			    struct v4l2_selection *s)
{
	struct stm32_dcmi *dcmi = video_drvdata(file);

	if (s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
		return -EINVAL;

	switch (s->target) {
	case V4L2_SEL_TGT_CROP_DEFAULT:
	case V4L2_SEL_TGT_CROP_BOUNDS:
		s->r = dcmi->sd_bounds;
		return 0;
	case V4L2_SEL_TGT_CROP:
		if (dcmi->do_crop) {
			s->r = dcmi->crop;
		} else {
			s->r.top = 0;
			s->r.left = 0;
			s->r.width = dcmi->fmt.fmt.pix.width;
			s->r.height = dcmi->fmt.fmt.pix.height;
		}
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
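
/*
 * S_SELECTION resets the sensor to its full resolution, clips the requested
 * rectangle against it, and only enables cropping when the result differs
 * from the sensor bounds.
 */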
static int dcmi_s_selection(struct file *file, void *priv,
			    struct v4l2_selection *s)
{
	struct stm32_dcmi *dcmi = video_drvdata(file);
	struct v4l2_rect r = s->r;
	struct v4l2_rect max_rect;
	struct v4l2_pix_format pix;

	if (s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE ||
	    s->target != V4L2_SEL_TGT_CROP)
		return -EINVAL;

	/* Reset sensor resolution to max resolution */
	pix.pixelformat = dcmi->fmt.fmt.pix.pixelformat;
	pix.width = dcmi->sd_bounds.width;
	pix.height = dcmi->sd_bounds.height;
	dcmi_set_sensor_format(dcmi, &pix);

	/*
	 * Make the intersection between
	 * sensor resolution
	 * and crop request
	 */
	max_rect.top = 0;
	max_rect.left = 0;
	max_rect.width = pix.width;
	max_rect.height = pix.height;
	v4l2_rect_map_inside(&r, &max_rect);
	r.top  = clamp_t(s32, r.top, 0, pix.height - r.height);
	r.left = clamp_t(s32, r.left, 0, pix.width - r.width);

	if (!(r.top == dcmi->sd_bounds.top &&
	      r.left == dcmi->sd_bounds.left &&
	      r.width == dcmi->sd_bounds.width &&
	      r.height == dcmi->sd_bounds.height)) {
		/* Crop if request is different than sensor resolution */
		dcmi->do_crop = true;
		dcmi->crop = r;
		dev_dbg(dcmi->dev, "s_selection: crop %ux%u@(%u,%u) from %ux%u\n",
			r.width, r.height, r.left, r.top,
			pix.width, pix.height);
	} else {
		/* Disable crop */
		dcmi->do_crop = false;
		dev_dbg(dcmi->dev, "s_selection: crop is disabled\n");
	}

	s->r = r;
	return 0;
}
static int dcmi_querycap(struct file *file, void *priv,
			 struct v4l2_capability *cap)
{
	strlcpy(cap->driver, DRV_NAME, sizeof(cap->driver));
	strlcpy(cap->card, "STM32 Camera Memory Interface",
		sizeof(cap->card));
	strlcpy(cap->bus_info, "platform:dcmi", sizeof(cap->bus_info));
	return 0;
}
static int dcmi_enum_input(struct file *file, void *priv,
			   struct v4l2_input *i)
{
	if (i->index != 0)
		return -EINVAL;

	i->type = V4L2_INPUT_TYPE_CAMERA;
	strlcpy(i->name, "Camera", sizeof(i->name));
	return 0;
}
static int dcmi_g_input(struct file *file, void *priv, unsigned int *i)
{
	*i = 0;
	return 0;
}

static int dcmi_s_input(struct file *file, void *priv, unsigned int i)
{
	if (i > 0)
		return -EINVAL;
	return 0;
}
static int dcmi_enum_framesizes(struct file *file, void *fh,
				struct v4l2_frmsizeenum *fsize)
{
	struct stm32_dcmi *dcmi = video_drvdata(file);
	const struct dcmi_format *sd_fmt;
	struct v4l2_subdev_frame_size_enum fse = {
		.index = fsize->index,
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	int ret;

	sd_fmt = find_format_by_fourcc(dcmi, fsize->pixel_format);
	if (!sd_fmt)
		return -EINVAL;

	fse.code = sd_fmt->mbus_code;

	ret = v4l2_subdev_call(dcmi->entity.subdev, pad, enum_frame_size,
			       NULL, &fse);
	if (ret)
		return ret;

	fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
	fsize->discrete.width = fse.max_width;
	fsize->discrete.height = fse.max_height;

	return 0;
}
static int dcmi_g_parm(struct file *file, void *priv,
		       struct v4l2_streamparm *p)
{
	struct stm32_dcmi *dcmi = video_drvdata(file);

	return v4l2_g_parm_cap(video_devdata(file), dcmi->entity.subdev, p);
}

static int dcmi_s_parm(struct file *file, void *priv,
		       struct v4l2_streamparm *p)
{
	struct stm32_dcmi *dcmi = video_drvdata(file);

	return v4l2_s_parm_cap(video_devdata(file), dcmi->entity.subdev, p);
}
static int dcmi_enum_frameintervals(struct file *file, void *fh,
				    struct v4l2_frmivalenum *fival)
{
	struct stm32_dcmi *dcmi = video_drvdata(file);
	const struct dcmi_format *sd_fmt;
	struct v4l2_subdev_frame_interval_enum fie = {
		.index = fival->index,
		.width = fival->width,
		.height = fival->height,
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	int ret;

	sd_fmt = find_format_by_fourcc(dcmi, fival->pixel_format);
	if (!sd_fmt)
		return -EINVAL;

	fie.code = sd_fmt->mbus_code;

	ret = v4l2_subdev_call(dcmi->entity.subdev, pad,
			       enum_frame_interval, NULL, &fie);
	if (ret)
		return ret;

	fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
	fival->discrete = fie.interval;

	return 0;
}
static const struct of_device_id stm32_dcmi_of_match[] = {
	{ .compatible = "st,stm32-dcmi"},
	{ /* end node */ },
};
MODULE_DEVICE_TABLE(of, stm32_dcmi_of_match);
static int dcmi_open(struct file *file)
{
	struct stm32_dcmi *dcmi = video_drvdata(file);
	struct v4l2_subdev *sd = dcmi->entity.subdev;
	int ret;

	if (mutex_lock_interruptible(&dcmi->lock))
		return -ERESTARTSYS;

	ret = v4l2_fh_open(file);
	if (ret < 0)
		goto unlock;

	if (!v4l2_fh_is_singular_file(file))
		goto unlock;

	ret = v4l2_subdev_call(sd, core, s_power, 1);
	if (ret < 0 && ret != -ENOIOCTLCMD)
		goto fh_rel;

	ret = dcmi_set_fmt(dcmi, &dcmi->fmt);
	if (ret)
		v4l2_subdev_call(sd, core, s_power, 0);
fh_rel:
	if (ret)
		v4l2_fh_release(file);
unlock:
	mutex_unlock(&dcmi->lock);
	return ret;
}
static int dcmi_release(struct file *file)
{
	struct stm32_dcmi *dcmi = video_drvdata(file);
	struct v4l2_subdev *sd = dcmi->entity.subdev;
	bool fh_singular;
	int ret;

	mutex_lock(&dcmi->lock);

	fh_singular = v4l2_fh_is_singular_file(file);

	ret = _vb2_fop_release(file, NULL);

	if (fh_singular)
		v4l2_subdev_call(sd, core, s_power, 0);

	mutex_unlock(&dcmi->lock);

	return ret;
}
static const struct v4l2_ioctl_ops dcmi_ioctl_ops = {
	.vidioc_querycap		= dcmi_querycap,

	.vidioc_try_fmt_vid_cap		= dcmi_try_fmt_vid_cap,
	.vidioc_g_fmt_vid_cap		= dcmi_g_fmt_vid_cap,
	.vidioc_s_fmt_vid_cap		= dcmi_s_fmt_vid_cap,
	.vidioc_enum_fmt_vid_cap	= dcmi_enum_fmt_vid_cap,
	.vidioc_g_selection		= dcmi_g_selection,
	.vidioc_s_selection		= dcmi_s_selection,

	.vidioc_enum_input		= dcmi_enum_input,
	.vidioc_g_input			= dcmi_g_input,
	.vidioc_s_input			= dcmi_s_input,

	.vidioc_g_parm			= dcmi_g_parm,
	.vidioc_s_parm			= dcmi_s_parm,

	.vidioc_enum_framesizes		= dcmi_enum_framesizes,
	.vidioc_enum_frameintervals	= dcmi_enum_frameintervals,

	.vidioc_reqbufs			= vb2_ioctl_reqbufs,
	.vidioc_create_bufs		= vb2_ioctl_create_bufs,
	.vidioc_querybuf		= vb2_ioctl_querybuf,
	.vidioc_qbuf			= vb2_ioctl_qbuf,
	.vidioc_dqbuf			= vb2_ioctl_dqbuf,
	.vidioc_expbuf			= vb2_ioctl_expbuf,
	.vidioc_prepare_buf		= vb2_ioctl_prepare_buf,
	.vidioc_streamon		= vb2_ioctl_streamon,
	.vidioc_streamoff		= vb2_ioctl_streamoff,

	.vidioc_log_status		= v4l2_ctrl_log_status,
	.vidioc_subscribe_event		= v4l2_ctrl_subscribe_event,
	.vidioc_unsubscribe_event	= v4l2_event_unsubscribe,
};
static const struct v4l2_file_operations dcmi_fops = {
	.owner		= THIS_MODULE,
	.unlocked_ioctl	= video_ioctl2,
	.open		= dcmi_open,
	.release	= dcmi_release,
	.poll		= vb2_fop_poll,
	.mmap		= vb2_fop_mmap,
#ifndef CONFIG_MMU
	.get_unmapped_area = vb2_fop_get_unmapped_area,
#endif
	.read		= vb2_fop_read,
};
static int dcmi_set_default_fmt(struct stm32_dcmi *dcmi)
{
	struct v4l2_format f = {
		.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
		.fmt.pix = {
			.width		= CIF_WIDTH,
			.height		= CIF_HEIGHT,
			.field		= V4L2_FIELD_NONE,
			.pixelformat	= dcmi->sd_formats[0]->fourcc,
		},
	};
	int ret;

	ret = dcmi_try_fmt(dcmi, &f, NULL, NULL);
	if (ret)
		return ret;

	dcmi->sd_format = dcmi->sd_formats[0];
	dcmi->fmt = f;
	return 0;
}
static const struct dcmi_format dcmi_formats[] = {
	{
		.fourcc = V4L2_PIX_FMT_RGB565,
		.mbus_code = MEDIA_BUS_FMT_RGB565_2X8_LE,
		.bpp = 2,
	}, {
		.fourcc = V4L2_PIX_FMT_YUYV,
		.mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
		.bpp = 2,
	}, {
		.fourcc = V4L2_PIX_FMT_UYVY,
		.mbus_code = MEDIA_BUS_FMT_UYVY8_2X8,
		.bpp = 2,
	}, {
		.fourcc = V4L2_PIX_FMT_JPEG,
		.mbus_code = MEDIA_BUS_FMT_JPEG_1X8,
		.bpp = 1,
	},
};
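
/*
 * Build the list of supported formats as the intersection between the
 * formats the DCMI can handle (dcmi_formats[]) and the media bus codes the
 * connected sub-device enumerates.
 */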
static int dcmi_formats_init(struct stm32_dcmi *dcmi)
{
	const struct dcmi_format *sd_fmts[ARRAY_SIZE(dcmi_formats)];
	unsigned int num_fmts = 0, i, j;
	struct v4l2_subdev *subdev = dcmi->entity.subdev;
	struct v4l2_subdev_mbus_code_enum mbus_code = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};

	while (!v4l2_subdev_call(subdev, pad, enum_mbus_code,
				 NULL, &mbus_code)) {
		for (i = 0; i < ARRAY_SIZE(dcmi_formats); i++) {
			if (dcmi_formats[i].mbus_code != mbus_code.code)
				continue;

			/* Code supported, have we got this fourcc yet? */
			for (j = 0; j < num_fmts; j++)
				if (sd_fmts[j]->fourcc ==
						dcmi_formats[i].fourcc)
					/* Already available */
					break;
			if (j == num_fmts)
				/* New */
				sd_fmts[num_fmts++] = dcmi_formats + i;
		}
		mbus_code.index++;
	}

	if (!num_fmts)
		return -ENXIO;

	dcmi->num_of_sd_formats = num_fmts;
	dcmi->sd_formats = devm_kcalloc(dcmi->dev,
					num_fmts, sizeof(struct dcmi_format *),
					GFP_KERNEL);
	if (!dcmi->sd_formats) {
		dev_err(dcmi->dev, "Could not allocate memory\n");
		return -ENOMEM;
	}

	memcpy(dcmi->sd_formats, sd_fmts,
	       num_fmts * sizeof(struct dcmi_format *));
	dcmi->sd_format = dcmi->sd_formats[0];

	return 0;
}
static int dcmi_framesizes_init(struct stm32_dcmi *dcmi)
{
	unsigned int num_fsize = 0;
	struct v4l2_subdev *subdev = dcmi->entity.subdev;
	struct v4l2_subdev_frame_size_enum fse = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
		.code = dcmi->sd_format->mbus_code,
	};
	unsigned int i;
	int ret;

	/* Allocate discrete framesizes array */
	while (!v4l2_subdev_call(subdev, pad, enum_frame_size,
				 NULL, &fse))
		fse.index++;

	num_fsize = fse.index;
	if (!num_fsize)
		return 0;

	dcmi->num_of_sd_framesizes = num_fsize;
	dcmi->sd_framesizes = devm_kcalloc(dcmi->dev, num_fsize,
					   sizeof(struct dcmi_framesize),
					   GFP_KERNEL);
	if (!dcmi->sd_framesizes) {
		dev_err(dcmi->dev, "Could not allocate memory\n");
		return -ENOMEM;
	}

	/* Fill array with sensor supported framesizes */
	dev_dbg(dcmi->dev, "Sensor supports %u frame sizes:\n", num_fsize);
	for (i = 0; i < dcmi->num_of_sd_framesizes; i++) {
		fse.index = i;
		ret = v4l2_subdev_call(subdev, pad, enum_frame_size,
				       NULL, &fse);
		if (ret)
			return ret;
		dcmi->sd_framesizes[fse.index].width = fse.max_width;
		dcmi->sd_framesizes[fse.index].height = fse.max_height;
		dev_dbg(dcmi->dev, "%ux%u\n", fse.max_width, fse.max_height);
	}

	return 0;
}
static int dcmi_graph_notify_complete(struct v4l2_async_notifier *notifier)
{
	struct stm32_dcmi *dcmi = notifier_to_dcmi(notifier);
	int ret;

	dcmi->vdev->ctrl_handler = dcmi->entity.subdev->ctrl_handler;
	ret = dcmi_formats_init(dcmi);
	if (ret) {
		dev_err(dcmi->dev, "No supported mediabus format found\n");
		return ret;
	}

	ret = dcmi_framesizes_init(dcmi);
	if (ret) {
		dev_err(dcmi->dev, "Could not initialize framesizes\n");
		return ret;
	}

	ret = dcmi_get_sensor_bounds(dcmi, &dcmi->sd_bounds);
	if (ret) {
		dev_err(dcmi->dev, "Could not get sensor bounds\n");
		return ret;
	}

	ret = dcmi_set_default_fmt(dcmi);
	if (ret) {
		dev_err(dcmi->dev, "Could not set default format\n");
		return ret;
	}

	ret = video_register_device(dcmi->vdev, VFL_TYPE_GRABBER, -1);
	if (ret) {
		dev_err(dcmi->dev, "Failed to register video device\n");
		return ret;
	}

	dev_dbg(dcmi->dev, "Device registered as %s\n",
		video_device_node_name(dcmi->vdev));
	return 0;
}
static void dcmi_graph_notify_unbind(struct v4l2_async_notifier *notifier,
				     struct v4l2_subdev *sd,
				     struct v4l2_async_subdev *asd)
{
	struct stm32_dcmi *dcmi = notifier_to_dcmi(notifier);

	dev_dbg(dcmi->dev, "Removing %s\n", video_device_node_name(dcmi->vdev));

	/* Checks internally if vdev has been initialized or not */
	video_unregister_device(dcmi->vdev);
}
static int dcmi_graph_notify_bound(struct v4l2_async_notifier *notifier,
				   struct v4l2_subdev *subdev,
				   struct v4l2_async_subdev *asd)
{
	struct stm32_dcmi *dcmi = notifier_to_dcmi(notifier);

	dev_dbg(dcmi->dev, "Subdev %s bound\n", subdev->name);

	dcmi->entity.subdev = subdev;

	return 0;
}

static const struct v4l2_async_notifier_operations dcmi_graph_notify_ops = {
	.bound = dcmi_graph_notify_bound,
	.unbind = dcmi_graph_notify_unbind,
	.complete = dcmi_graph_notify_complete,
};
static int dcmi_graph_parse(struct stm32_dcmi *dcmi, struct device_node *node)
{
	struct device_node *ep = NULL;
	struct device_node *remote;

	ep = of_graph_get_next_endpoint(node, ep);
	if (!ep)
		return -EINVAL;

	remote = of_graph_get_remote_port_parent(ep);
	of_node_put(ep);
	if (!remote)
		return -EINVAL;

	/* Remote node to connect */
	dcmi->entity.node = remote;
	dcmi->entity.asd.match_type = V4L2_ASYNC_MATCH_FWNODE;
	dcmi->entity.asd.match.fwnode = of_fwnode_handle(remote);
	return 0;
}
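
/*
 * A single async sub-device, described by the remote endpoint found in
 * dcmi_graph_parse(), is registered with the v4l2 async framework; the
 * video device itself is only registered once the notifier reports the
 * sub-device bound and probing complete (dcmi_graph_notify_complete()).
 */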
static int dcmi_graph_init(struct stm32_dcmi *dcmi)
{
	struct v4l2_async_subdev **subdevs = NULL;
	int ret;

	/* Parse the graph to extract a list of subdevice DT nodes. */
	ret = dcmi_graph_parse(dcmi, dcmi->dev->of_node);
	if (ret < 0) {
		dev_err(dcmi->dev, "Graph parsing failed\n");
		return ret;
	}

	/* Register the subdevices notifier. */
	subdevs = devm_kzalloc(dcmi->dev, sizeof(*subdevs), GFP_KERNEL);
	if (!subdevs) {
		of_node_put(dcmi->entity.node);
		return -ENOMEM;
	}

	subdevs[0] = &dcmi->entity.asd;

	dcmi->notifier.subdevs = subdevs;
	dcmi->notifier.num_subdevs = 1;
	dcmi->notifier.ops = &dcmi_graph_notify_ops;

	ret = v4l2_async_notifier_register(&dcmi->v4l2_dev, &dcmi->notifier);
	if (ret < 0) {
		dev_err(dcmi->dev, "Notifier registration failed\n");
		of_node_put(dcmi->entity.node);
		return ret;
	}

	return 0;
}
static int dcmi_probe(struct platform_device *pdev)
{
	struct device_node *np = pdev->dev.of_node;
	const struct of_device_id *match = NULL;
	struct v4l2_fwnode_endpoint ep;
	struct stm32_dcmi *dcmi;
	struct vb2_queue *q;
	struct dma_chan *chan;
	struct clk *mclk;
	int irq;
	int ret = 0;

	match = of_match_device(of_match_ptr(stm32_dcmi_of_match), &pdev->dev);
	if (!match) {
		dev_err(&pdev->dev, "Could not find a match in devicetree\n");
		return -ENODEV;
	}

	dcmi = devm_kzalloc(&pdev->dev, sizeof(struct stm32_dcmi), GFP_KERNEL);
	if (!dcmi)
		return -ENOMEM;

	dcmi->rstc = devm_reset_control_get_exclusive(&pdev->dev, NULL);
	if (IS_ERR(dcmi->rstc)) {
		dev_err(&pdev->dev, "Could not get reset control\n");
		return PTR_ERR(dcmi->rstc);
	}

	/* Get bus characteristics from devicetree */
	np = of_graph_get_next_endpoint(np, NULL);
	if (!np) {
		dev_err(&pdev->dev, "Could not find the endpoint\n");
		of_node_put(np);
		return -ENODEV;
	}

	ret = v4l2_fwnode_endpoint_parse(of_fwnode_handle(np), &ep);
	of_node_put(np);
	if (ret) {
		dev_err(&pdev->dev, "Could not parse the endpoint\n");
		return -ENODEV;
	}

	if (ep.bus_type == V4L2_MBUS_CSI2) {
		dev_err(&pdev->dev, "CSI bus not supported\n");
		return -ENODEV;
	}
	dcmi->bus.flags = ep.bus.parallel.flags;
	dcmi->bus.bus_width = ep.bus.parallel.bus_width;
	dcmi->bus.data_shift = ep.bus.parallel.data_shift;

	irq = platform_get_irq(pdev, 0);
	if (irq <= 0) {
		if (irq != -EPROBE_DEFER)
			dev_err(&pdev->dev, "Could not get irq\n");
		return irq ? irq : -ENXIO;
	}

	dcmi->res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!dcmi->res) {
		dev_err(&pdev->dev, "Could not get resource\n");
		return -ENODEV;
	}

	dcmi->regs = devm_ioremap_resource(&pdev->dev, dcmi->res);
	if (IS_ERR(dcmi->regs)) {
		dev_err(&pdev->dev, "Could not map registers\n");
		return PTR_ERR(dcmi->regs);
	}

	ret = devm_request_threaded_irq(&pdev->dev, irq, dcmi_irq_callback,
					dcmi_irq_thread, IRQF_ONESHOT,
					dev_name(&pdev->dev), dcmi);
	if (ret) {
		dev_err(&pdev->dev, "Unable to request irq %d\n", irq);
		return ret;
	}

	mclk = devm_clk_get(&pdev->dev, "mclk");
	if (IS_ERR(mclk)) {
		if (PTR_ERR(mclk) != -EPROBE_DEFER)
			dev_err(&pdev->dev, "Unable to get mclk\n");
		return PTR_ERR(mclk);
	}

	chan = dma_request_slave_channel(&pdev->dev, "tx");
	if (!chan) {
		dev_info(&pdev->dev, "Unable to request DMA channel, defer probing\n");
		return -EPROBE_DEFER;
	}

	spin_lock_init(&dcmi->irqlock);
	mutex_init(&dcmi->lock);
	mutex_init(&dcmi->dma_lock);
	init_completion(&dcmi->complete);
	INIT_LIST_HEAD(&dcmi->buffers);

	dcmi->dev = &pdev->dev;
	dcmi->mclk = mclk;
	dcmi->state = STOPPED;
	dcmi->dma_chan = chan;

	q = &dcmi->queue;

	/* Initialize the top-level structure */
	ret = v4l2_device_register(&pdev->dev, &dcmi->v4l2_dev);
	if (ret)
		goto err_dma_release;

	dcmi->vdev = video_device_alloc();
	if (!dcmi->vdev) {
		ret = -ENOMEM;
		goto err_device_unregister;
	}

	/* Video node */
	dcmi->vdev->fops = &dcmi_fops;
	dcmi->vdev->v4l2_dev = &dcmi->v4l2_dev;
	dcmi->vdev->queue = &dcmi->queue;
	strlcpy(dcmi->vdev->name, KBUILD_MODNAME, sizeof(dcmi->vdev->name));
	dcmi->vdev->release = video_device_release;
	dcmi->vdev->ioctl_ops = &dcmi_ioctl_ops;
	dcmi->vdev->lock = &dcmi->lock;
	dcmi->vdev->device_caps = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING |
				  V4L2_CAP_READWRITE;
	video_set_drvdata(dcmi->vdev, dcmi);

	/* Buffer queue */
	q->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	q->io_modes = VB2_MMAP | VB2_READ | VB2_DMABUF;
	q->lock = &dcmi->lock;
	q->drv_priv = dcmi;
	q->buf_struct_size = sizeof(struct dcmi_buf);
	q->ops = &dcmi_video_qops;
	q->mem_ops = &vb2_dma_contig_memops;
	q->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
	q->min_buffers_needed = 2;
	q->dev = &pdev->dev;

	ret = vb2_queue_init(q);
	if (ret < 0) {
		dev_err(&pdev->dev, "Failed to initialize vb2 queue\n");
		goto err_device_release;
	}

	ret = dcmi_graph_init(dcmi);
	if (ret < 0)
		goto err_device_release;

	/* Reset device */
	ret = reset_control_assert(dcmi->rstc);
	if (ret) {
		dev_err(&pdev->dev, "Failed to assert the reset line\n");
		goto err_device_release;
	}

	usleep_range(3000, 5000);

	ret = reset_control_deassert(dcmi->rstc);
	if (ret) {
		dev_err(&pdev->dev, "Failed to deassert the reset line\n");
		goto err_device_release;
	}

	dev_info(&pdev->dev, "Probe done\n");

	platform_set_drvdata(pdev, dcmi);

	pm_runtime_enable(&pdev->dev);

	return 0;

err_device_release:
	video_device_release(dcmi->vdev);
err_device_unregister:
	v4l2_device_unregister(&dcmi->v4l2_dev);
err_dma_release:
	dma_release_channel(dcmi->dma_chan);

	return ret;
}
static int dcmi_remove(struct platform_device *pdev)
{
	struct stm32_dcmi *dcmi = platform_get_drvdata(pdev);

	pm_runtime_disable(&pdev->dev);

	v4l2_async_notifier_unregister(&dcmi->notifier);
	v4l2_device_unregister(&dcmi->v4l2_dev);

	dma_release_channel(dcmi->dma_chan);

	return 0;
}
static __maybe_unused int dcmi_runtime_suspend(struct device *dev)
{
	struct stm32_dcmi *dcmi = dev_get_drvdata(dev);

	clk_disable_unprepare(dcmi->mclk);

	return 0;
}

static __maybe_unused int dcmi_runtime_resume(struct device *dev)
{
	struct stm32_dcmi *dcmi = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(dcmi->mclk);
	if (ret)
		dev_err(dev, "%s: Failed to prepare_enable clock\n", __func__);

	return ret;
}
static __maybe_unused int dcmi_suspend(struct device *dev)
{
	/* disable clock */
	pm_runtime_force_suspend(dev);

	/* change pinctrl state */
	pinctrl_pm_select_sleep_state(dev);

	return 0;
}

static __maybe_unused int dcmi_resume(struct device *dev)
{
	/* restore pinctrl default state */
	pinctrl_pm_select_default_state(dev);

	/* clock enable */
	pm_runtime_force_resume(dev);

	return 0;
}
static const struct dev_pm_ops dcmi_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(dcmi_suspend, dcmi_resume)
	SET_RUNTIME_PM_OPS(dcmi_runtime_suspend,
			   dcmi_runtime_resume, NULL)
};

static struct platform_driver stm32_dcmi_driver = {
	.probe		= dcmi_probe,
	.remove		= dcmi_remove,
	.driver		= {
		.name = DRV_NAME,
		.of_match_table = of_match_ptr(stm32_dcmi_of_match),
		.pm = &dcmi_pm_ops,
	},
};

module_platform_driver(stm32_dcmi_driver);

MODULE_AUTHOR("Yannick Fertre <yannick.fertre@st.com>");
MODULE_AUTHOR("Hugues Fruchet <hugues.fruchet@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 Digital Camera Memory Interface driver");
MODULE_LICENSE("GPL");
MODULE_SUPPORTED_DEVICE("video");