// SPDX-License-Identifier: GPL-2.0
/*
 * The Marvell camera core. This device appears in a number of settings,
 * so it needs platform-specific support outside of the core.
 *
 * Copyright 2011 Jonathan Corbet corbet@lwn.net
 * Copyright 2018 Lubomir Rintel <lkundrak@v3.sk>
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/i2c.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/slab.h>
#include <linux/device.h>
#include <linux/wait.h>
#include <linux/list.h>
#include <linux/dma-mapping.h>
#include <linux/delay.h>
#include <linux/vmalloc.h>
#include <linux/clk.h>
#include <linux/clk-provider.h>
#include <linux/videodev2.h>
#include <linux/pm_runtime.h>
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-ctrls.h>
#include <media/v4l2-event.h>
#include <media/videobuf2-vmalloc.h>
#include <media/videobuf2-dma-contig.h>
#include <media/videobuf2-dma-sg.h>

#include "mcam-core.h"

#ifdef MCAM_MODE_VMALLOC

/*
 * Internal DMA buffer management. Since the controller cannot do S/G I/O,
 * we must have physically contiguous buffers to bring frames into.
 * These parameters control how many buffers we use, whether we
 * allocate them at load time (better chance of success, but nails down
 * memory) or when somebody tries to use the camera (riskier), and,
 * for load-time allocation, how big they should be.
 *
 * The controller can cycle through three buffers. We could use
 * more by flipping pointers around, but it probably makes little
 * difference.
 */
static bool alloc_bufs_at_read;
module_param(alloc_bufs_at_read, bool, 0444);
MODULE_PARM_DESC(alloc_bufs_at_read,
		"Non-zero value causes DMA buffers to be allocated when the video capture device is read, rather than at module load time. This saves memory, but decreases the chances of successfully getting those buffers. This parameter is only used in the vmalloc buffer mode");

static int n_dma_bufs = 3;
module_param(n_dma_bufs, uint, 0644);
MODULE_PARM_DESC(n_dma_bufs,
		"The number of DMA buffers to allocate. Can be either two (saves memory, makes timing tighter) or three.");

static int dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2;  /* Worst case */
module_param(dma_buf_size, uint, 0444);
MODULE_PARM_DESC(dma_buf_size,
		"The size of the allocated DMA buffers. If actual operating parameters require larger buffers, an attempt to reallocate will be made.");

#else /* MCAM_MODE_VMALLOC */

static const bool alloc_bufs_at_read;
static const int n_dma_bufs = 3;	/* Used by S/G_PARM */

#endif /* MCAM_MODE_VMALLOC */

static bool flip;
module_param(flip, bool, 0444);
MODULE_PARM_DESC(flip,
		"If set, the sensor will be instructed to flip the image vertically.");

static int buffer_mode = -1;
module_param(buffer_mode, int, 0444);
MODULE_PARM_DESC(buffer_mode,
		"Set the buffer mode to be used; default is to go with what the platform driver asks for. Set to 0 for vmalloc, 1 for DMA contiguous.");

/*
 * Status flags. Always manipulated with bit operations.
 */
#define CF_BUF0_VALID	 0	/* Buffers valid - first three */
#define CF_BUF1_VALID	 1
#define CF_BUF2_VALID	 2
#define CF_DMA_ACTIVE	 3	/* A frame is incoming */
#define CF_CONFIG_NEEDED 4	/* Must configure hardware */
#define CF_SINGLE_BUFFER 5	/* Running with a single buffer */
#define CF_SG_RESTART	 6	/* SG restart needed */
#define CF_FRAME_SOF0	 7	/* Frame 0 started */
#define CF_FRAME_SOF1	 8
#define CF_FRAME_SOF2	 9

#define sensor_call(cam, o, f, args...) \
	v4l2_subdev_call(cam->sensor, o, f, ##args)

#define notifier_to_mcam(notifier) \
	container_of(notifier, struct mcam_camera, notifier)

static struct mcam_format_struct {
	__u32 pixelformat;
	int bpp;	/* Bytes per pixel */
	bool planar;
	u32 mbus_code;
} mcam_formats[] = {
	{
		.pixelformat	= V4L2_PIX_FMT_YUYV,
		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
		.bpp		= 2,
		.planar		= false,
	},
	{
		.pixelformat	= V4L2_PIX_FMT_YVYU,
		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
		.bpp		= 2,
		.planar		= false,
	},
	{
		.pixelformat	= V4L2_PIX_FMT_YUV420,
		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
		.bpp		= 1,
		.planar		= true,
	},
	{
		.pixelformat	= V4L2_PIX_FMT_YVU420,
		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
		.bpp		= 1,
		.planar		= true,
	},
	{
		.pixelformat	= V4L2_PIX_FMT_XRGB444,
		.mbus_code	= MEDIA_BUS_FMT_RGB444_2X8_PADHI_LE,
		.bpp		= 2,
		.planar		= false,
	},
	{
		.pixelformat	= V4L2_PIX_FMT_RGB565,
		.mbus_code	= MEDIA_BUS_FMT_RGB565_2X8_LE,
		.bpp		= 2,
		.planar		= false,
	},
	{
		.pixelformat	= V4L2_PIX_FMT_SBGGR8,
		.mbus_code	= MEDIA_BUS_FMT_SBGGR8_1X8,
		.bpp		= 1,
		.planar		= false,
	},
};
#define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)

static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
{
	unsigned i;

	for (i = 0; i < N_MCAM_FMTS; i++)
		if (mcam_formats[i].pixelformat == pixelformat)
			return mcam_formats + i;
	/* Not found? Then return the first format. */
	return mcam_formats;
}

/*
 * The default format we use until somebody says otherwise.
 */
static const struct v4l2_pix_format mcam_def_pix_format = {
	.width		= VGA_WIDTH,
	.height		= VGA_HEIGHT,
	.pixelformat	= V4L2_PIX_FMT_YUYV,
	.field		= V4L2_FIELD_NONE,
	.bytesperline	= VGA_WIDTH*2,
	.sizeimage	= VGA_WIDTH*VGA_HEIGHT*2,
	.colorspace	= V4L2_COLORSPACE_SRGB,
};

static const u32 mcam_def_mbus_code = MEDIA_BUS_FMT_YUYV8_2X8;

/*
 * The two-word DMA descriptor format used by the Armada 610 and like. There
 * is a three-word format as well (set C1_DESC_3WORD) where the third
 * word is a pointer to the next descriptor, but we don't use it. Two-word
 * descriptors have to be contiguous in memory.
 */
struct mcam_dma_desc {
	u32 dma_addr;
	u32 segment_len;
};
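
/*
 * To put a number on how many of these get used (illustrative figures only):
 * the S/G buffer code below sizes the array as ndesc = sizeimage/PAGE_SIZE + 1,
 * so a 614400-byte VGA YUYV frame with 4 KiB pages needs 151 two-word
 * descriptors, one per scatterlist segment in the fully fragmented worst case.
 */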

/*
 * Our buffer type for working with videobuf2. Note that the vb2
 * developers have decreed that struct vb2_v4l2_buffer must be at the
 * beginning of this structure.
 */
struct mcam_vb_buffer {
	struct vb2_v4l2_buffer vb_buf;
	struct list_head queue;
	struct mcam_dma_desc *dma_desc;	/* Descriptor virtual address */
	dma_addr_t dma_desc_pa;		/* Descriptor physical address */
};

static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_v4l2_buffer *vb)
{
	return container_of(vb, struct mcam_vb_buffer, vb_buf);
}

/*
 * Hand a completed buffer back to user space.
 */
static void mcam_buffer_done(struct mcam_camera *cam, int frame,
		struct vb2_v4l2_buffer *vbuf)
{
	vbuf->vb2_buf.planes[0].bytesused = cam->pix_format.sizeimage;
	vbuf->sequence = cam->buf_seq[frame];
	vbuf->field = V4L2_FIELD_NONE;
	vbuf->vb2_buf.timestamp = ktime_get_ns();
	vb2_set_plane_payload(&vbuf->vb2_buf, 0, cam->pix_format.sizeimage);
	vb2_buffer_done(&vbuf->vb2_buf, VB2_BUF_STATE_DONE);
}

/*
 * Debugging and related.
 */
#define cam_err(cam, fmt, arg...) \
	dev_err((cam)->dev, fmt, ##arg);
#define cam_warn(cam, fmt, arg...) \
	dev_warn((cam)->dev, fmt, ##arg);
#define cam_dbg(cam, fmt, arg...) \
	dev_dbg((cam)->dev, fmt, ##arg);

/*
 * Flag manipulation helpers
 */
static void mcam_reset_buffers(struct mcam_camera *cam)
{
	int i;

	cam->next_buf = -1;
	for (i = 0; i < cam->nbufs; i++) {
		clear_bit(i, &cam->flags);
		clear_bit(CF_FRAME_SOF0 + i, &cam->flags);
	}
}

static inline int mcam_needs_config(struct mcam_camera *cam)
{
	return test_bit(CF_CONFIG_NEEDED, &cam->flags);
}

static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
{
	if (needed)
		set_bit(CF_CONFIG_NEEDED, &cam->flags);
	else
		clear_bit(CF_CONFIG_NEEDED, &cam->flags);
}

/* ------------------------------------------------------------------- */
/*
 * Make the controller start grabbing images. Everything must
 * be set up before doing this.
 */
static void mcam_ctlr_start(struct mcam_camera *cam)
{
	/* set_bit performs a read, so no other barrier should be
	   needed here */
	mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
}

static void mcam_ctlr_stop(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
}

static void mcam_enable_mipi(struct mcam_camera *mcam)
{
	/* Using MIPI mode and enable MIPI */
	if (mcam->calc_dphy)
		mcam->calc_dphy(mcam);
	cam_dbg(mcam, "camera: DPHY3=0x%x, DPHY5=0x%x, DPHY6=0x%x\n",
			mcam->dphy[0], mcam->dphy[1], mcam->dphy[2]);
	mcam_reg_write(mcam, REG_CSI2_DPHY3, mcam->dphy[0]);
	mcam_reg_write(mcam, REG_CSI2_DPHY5, mcam->dphy[1]);
	mcam_reg_write(mcam, REG_CSI2_DPHY6, mcam->dphy[2]);

	if (!mcam->mipi_enabled) {
		if (mcam->lane > 4 || mcam->lane <= 0) {
			cam_warn(mcam, "lane number error\n");
			mcam->lane = 1;	/* set the default value */
		}
		/*
		 * 0x41 activates 1 lane
		 * 0x43 activates 2 lanes
		 * 0x45 activates 3 lanes (never happens)
		 * 0x47 activates 4 lanes
		 */
		mcam_reg_write(mcam, REG_CSI2_CTRL0,
			CSI2_C0_MIPI_EN | CSI2_C0_ACT_LANE(mcam->lane));
		mcam->mipi_enabled = true;
	}
}

static void mcam_disable_mipi(struct mcam_camera *mcam)
{
	/* Using Parallel mode or disable MIPI */
	mcam_reg_write(mcam, REG_CSI2_CTRL0, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY3, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY5, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY6, 0x0);
	mcam->mipi_enabled = false;
}

static bool mcam_fmt_is_planar(__u32 pfmt)
{
	struct mcam_format_struct *f;

	f = mcam_find_format(pfmt);
	return f->planar;
}

static void mcam_write_yuv_bases(struct mcam_camera *cam,
				 unsigned frame, dma_addr_t base)
{
	struct v4l2_pix_format *fmt = &cam->pix_format;
	u32 pixel_count = fmt->width * fmt->height;
	dma_addr_t y, u = 0, v = 0;

	y = base;

	switch (fmt->pixelformat) {
	case V4L2_PIX_FMT_YUV420:
		u = y + pixel_count;
		v = u + pixel_count / 4;
		break;
	case V4L2_PIX_FMT_YVU420:
		v = y + pixel_count;
		u = v + pixel_count / 4;
		break;
	default:
		break;
	}

	mcam_reg_write(cam, REG_Y0BAR + frame * 4, y);
	if (mcam_fmt_is_planar(fmt->pixelformat)) {
		mcam_reg_write(cam, REG_U0BAR + frame * 4, u);
		mcam_reg_write(cam, REG_V0BAR + frame * 4, v);
	}
}
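
/*
 * A concrete example of the planar layout programmed above (illustrative):
 * for a 640x480 V4L2_PIX_FMT_YUV420 frame, pixel_count = 307200, so a buffer
 * at DMA address B is split as
 *
 *	Y0BAR = B
 *	U0BAR = B + 307200
 *	V0BAR = B + 307200 + 76800
 *
 * i.e. a full-size Y plane followed by quarter-size U and V planes;
 * YVU420 simply swaps the U and V programming.
 */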

/* ------------------------------------------------------------------- */

#ifdef MCAM_MODE_VMALLOC

/*
 * Code specific to the vmalloc buffer mode.
 */

/*
 * Allocate in-kernel DMA buffers for vmalloc mode.
 */
static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	int i;

	mcam_set_config_needed(cam, 1);
	if (loadtime)
		cam->dma_buf_size = dma_buf_size;
	else
		cam->dma_buf_size = cam->pix_format.sizeimage;

	cam->nbufs = 0;
	for (i = 0; i < n_dma_bufs; i++) {
		cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
				cam->dma_buf_size, cam->dma_handles + i,
				GFP_KERNEL);
		if (cam->dma_bufs[i] == NULL) {
			cam_warn(cam, "Failed to allocate DMA buffer\n");
			break;
		}
		(cam->nbufs)++;
	}

	switch (cam->nbufs) {
	case 1:
		dma_free_coherent(cam->dev, cam->dma_buf_size,
				cam->dma_bufs[0], cam->dma_handles[0]);
		cam->nbufs = 0;
		fallthrough;
	case 0:
		cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
		return -ENOMEM;
	case 2:
		if (n_dma_bufs > 2)
			cam_warn(cam, "Will limp along with only 2 buffers\n");
		break;
	}
	return 0;
}

static void mcam_free_dma_bufs(struct mcam_camera *cam)
{
	int i;

	for (i = 0; i < cam->nbufs; i++) {
		dma_free_coherent(cam->dev, cam->dma_buf_size,
				cam->dma_bufs[i], cam->dma_handles[i]);
		cam->dma_bufs[i] = NULL;
	}
	cam->nbufs = 0;
}

/*
 * Set up DMA buffers when operating in vmalloc mode
 */
static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
{
	/*
	 * Store the first two YUV buffers. Then either
	 * set the third if it exists, or tell the controller
	 * to just use two.
	 */
	mcam_write_yuv_bases(cam, 0, cam->dma_handles[0]);
	mcam_write_yuv_bases(cam, 1, cam->dma_handles[1]);
	if (cam->nbufs > 2) {
		mcam_write_yuv_bases(cam, 2, cam->dma_handles[2]);
		mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
	} else
		mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	if (cam->chip_id == MCAM_CAFE)
		mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
}

/*
 * Copy data out to user space in the vmalloc case
 */
static void mcam_frame_tasklet(struct tasklet_struct *t)
{
	struct mcam_camera *cam = from_tasklet(cam, t, s_tasklet);
	int i;
	unsigned long flags;
	struct mcam_vb_buffer *buf;

	spin_lock_irqsave(&cam->dev_lock, flags);
	for (i = 0; i < cam->nbufs; i++) {
		int bufno = cam->next_buf;

		if (cam->state != S_STREAMING || bufno < 0)
			break;	/* I/O got stopped */
		if (++(cam->next_buf) >= cam->nbufs)
			cam->next_buf = 0;
		if (!test_bit(bufno, &cam->flags))
			continue;
		if (list_empty(&cam->buffers)) {
			cam->frame_state.singles++;
			break;	/* Leave it valid, hope for better later */
		}
		cam->frame_state.delivered++;
		clear_bit(bufno, &cam->flags);
		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
				queue);
		list_del_init(&buf->queue);
		/*
		 * Drop the lock during the big copy. This *should* be safe...
		 */
		spin_unlock_irqrestore(&cam->dev_lock, flags);
		memcpy(vb2_plane_vaddr(&buf->vb_buf.vb2_buf, 0),
				cam->dma_bufs[bufno],
				cam->pix_format.sizeimage);
		mcam_buffer_done(cam, bufno, &buf->vb_buf);
		spin_lock_irqsave(&cam->dev_lock, flags);
	}
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * Make sure our allocated buffers are up to the task.
 */
static int mcam_check_dma_buffers(struct mcam_camera *cam)
{
	if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
		mcam_free_dma_bufs(cam);
	if (cam->nbufs == 0)
		return mcam_alloc_dma_bufs(cam, 0);
	return 0;
}

static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
{
	tasklet_schedule(&cam->s_tasklet);
}

#else /* MCAM_MODE_VMALLOC */

static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	return 0;
}

static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
{
}

static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
{
	return 0;
}

#endif /* MCAM_MODE_VMALLOC */

#ifdef MCAM_MODE_DMA_CONTIG
/* ---------------------------------------------------------------------- */
/*
 * DMA-contiguous code.
 */

/*
 * Set up a contiguous buffer for the given frame. Here also is where
 * the underrun strategy is set: if there is no buffer available, reuse
 * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
 * keep the interrupt handler from giving that buffer back to user
 * space. In this way, we always have a buffer to DMA to and don't
 * have to try to play games stopping and restarting the controller.
 */
static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf;
	dma_addr_t dma_handle;
	struct vb2_v4l2_buffer *vb;

	/*
	 * If there are no available buffers, go into single mode
	 */
	if (list_empty(&cam->buffers)) {
		buf = cam->vb_bufs[frame ^ 0x1];
		set_bit(CF_SINGLE_BUFFER, &cam->flags);
		cam->frame_state.singles++;
	} else {
		/*
		 * OK, we have a buffer we can use.
		 */
		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
					queue);
		list_del_init(&buf->queue);
		clear_bit(CF_SINGLE_BUFFER, &cam->flags);
	}

	cam->vb_bufs[frame] = buf;
	vb = &buf->vb_buf;

	dma_handle = vb2_dma_contig_plane_dma_addr(&vb->vb2_buf, 0);
	mcam_write_yuv_bases(cam, frame, dma_handle);
}
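
/*
 * To make the underrun strategy concrete (purely illustrative): if frame 0
 * completes while the queue is empty, BAR 0 is simply re-pointed at the
 * buffer BAR 1 currently owns (frame ^ 0x1 == 1), CF_SINGLE_BUFFER is set,
 * and that one buffer keeps absorbing DMA until user space queues another;
 * the completion path sees the flag and does not hand the buffer back.
 */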

/*
 * Initial B_DMA_contig setup.
 */
static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
{
	mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	cam->nbufs = 2;
	mcam_set_contig_buffer(cam, 0);
	mcam_set_contig_buffer(cam, 1);
}

/*
 * Frame completion handling.
 */
static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[frame];

	if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
		cam->frame_state.delivered++;
		cam->vb_bufs[frame] = NULL;
		mcam_buffer_done(cam, frame, &buf->vb_buf);
	}
	mcam_set_contig_buffer(cam, frame);
}

#endif /* MCAM_MODE_DMA_CONTIG */

#ifdef MCAM_MODE_DMA_SG
/* ---------------------------------------------------------------------- */
/*
 * Scatter/gather-specific code.
 */

/*
 * Set up the next buffer for S/G I/O; caller should be sure that
 * the controller is stopped and a buffer is available.
 */
static void mcam_sg_next_buffer(struct mcam_camera *cam)
{
	struct mcam_vb_buffer *buf;
	struct sg_table *sg_table;

	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
	list_del_init(&buf->queue);
	sg_table = vb2_dma_sg_plane_desc(&buf->vb_buf.vb2_buf, 0);
	/*
	 * Very Bad Not Good Things happen if you don't clear
	 * C1_DESC_ENA before making any descriptor changes.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
	mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
	mcam_reg_write(cam, REG_DESC_LEN_Y,
			sg_table->nents * sizeof(struct mcam_dma_desc));
	mcam_reg_write(cam, REG_DESC_LEN_U, 0);
	mcam_reg_write(cam, REG_DESC_LEN_V, 0);
	mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
	cam->vb_bufs[0] = buf;
}

/*
 * Initial B_DMA_sg setup
 */
static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
{
	/*
	 * The list-empty condition can hit us at resume time
	 * if the buffer list was empty when the system was suspended.
	 */
	if (list_empty(&cam->buffers)) {
		set_bit(CF_SG_RESTART, &cam->flags);
		return;
	}

	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
	mcam_sg_next_buffer(cam);
	cam->nbufs = 3;
}

/*
 * Frame completion with S/G is trickier. We can't muck with
 * a descriptor chain on the fly, since the controller buffers it
 * internally. So we have to actually stop and restart; Marvell
 * says this is the way to do it.
 *
 * Of course, stopping is easier said than done; experience shows
 * that the controller can start a frame *after* C0_ENABLE has been
 * cleared. So when running in S/G mode, the controller is "stopped"
 * on receipt of the start-of-frame interrupt. That means we can
 * safely change the DMA descriptor array here and restart things
 * (assuming there's another buffer waiting to go).
 */
static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[0];

	/*
	 * If we're no longer supposed to be streaming, don't do anything.
	 */
	if (cam->state != S_STREAMING)
		return;
	/*
	 * If we have another buffer available, put it in and
	 * restart the engine.
	 */
	if (!list_empty(&cam->buffers)) {
		mcam_sg_next_buffer(cam);
		mcam_ctlr_start(cam);
	/*
	 * Otherwise set CF_SG_RESTART and the controller will
	 * be restarted once another buffer shows up.
	 */
	} else {
		set_bit(CF_SG_RESTART, &cam->flags);
		cam->frame_state.singles++;
		cam->vb_bufs[0] = NULL;
	}
	/*
	 * Now we can give the completed frame back to user space.
	 */
	cam->frame_state.delivered++;
	mcam_buffer_done(cam, frame, &buf->vb_buf);
}

/*
 * Scatter/gather mode requires stopping the controller between
 * frames so we can put in a new DMA descriptor array. If no new
 * buffer exists at frame completion, the controller is left stopped;
 * this function is charged with getting things going again.
 */
static void mcam_sg_restart(struct mcam_camera *cam)
{
	mcam_ctlr_dma_sg(cam);
	mcam_ctlr_start(cam);
	clear_bit(CF_SG_RESTART, &cam->flags);
}

#else /* MCAM_MODE_DMA_SG */

static inline void mcam_sg_restart(struct mcam_camera *cam)
{
}

#endif /* MCAM_MODE_DMA_SG */

/* ---------------------------------------------------------------------- */
/*
 * Buffer-mode-independent controller code.
 */

/*
 * Image format setup
 */
static void mcam_ctlr_image(struct mcam_camera *cam)
{
	struct v4l2_pix_format *fmt = &cam->pix_format;
	u32 widthy = 0, widthuv = 0, imgsz_h, imgsz_w;

	cam_dbg(cam, "camera: bytesperline = %d; height = %d\n",
		fmt->bytesperline, fmt->sizeimage / fmt->bytesperline);
	imgsz_h = (fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK;
	imgsz_w = (fmt->width * 2) & IMGSZ_H_MASK;

	switch (fmt->pixelformat) {
	case V4L2_PIX_FMT_YUYV:
	case V4L2_PIX_FMT_YVYU:
		widthy = fmt->width * 2;
		break;
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
		widthy = fmt->width;
		widthuv = fmt->width / 2;
		break;
	default:
		widthy = fmt->bytesperline;
		break;
	}

	mcam_reg_write_mask(cam, REG_IMGPITCH, widthuv << 16 | widthy,
			IMGP_YP_MASK | IMGP_UVP_MASK);
	mcam_reg_write(cam, REG_IMGSIZE, imgsz_h | imgsz_w);
	mcam_reg_write(cam, REG_IMGOFFSET, 0x0);
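
	/*
	 * Worked example of the two register writes above (illustrative, and
	 * assuming the usual IMGSZ_V_SHIFT of 16): for 640x480 YUYV we get
	 * widthy = 1280 and widthuv = 0, so REG_IMGPITCH is 0x00000500,
	 * while imgsz_h | imgsz_w = (480 << 16) | 1280 = 0x01E00500 goes
	 * into REG_IMGSIZE.
	 */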

	/*
	 * Tell the controller about the image format we are using.
	 */
	switch (fmt->pixelformat) {
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV | C0_YUV_420PL | C0_YUVE_VYUY, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_YUYV:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_NOSWAP, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_YVYU:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_SWAP24, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_XRGB444:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB | C0_RGBF_444 | C0_RGB4_XBGR, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_RGB565:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB | C0_RGBF_565 | C0_RGB5_BGGR, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_SBGGR8:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB | C0_RGB5_GRBG, C0_DF_MASK);
		break;
	default:
		cam_err(cam, "camera: unknown format: %#x\n", fmt->pixelformat);
		break;
	}

	/*
	 * Make sure it knows we want to use hsync/vsync.
	 */
	mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC, C0_SIFM_MASK);
}

/*
 * Configure the controller for operation; caller holds the
 * s_mutex.
 */
static int mcam_ctlr_configure(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	cam->dma_setup(cam);
	mcam_ctlr_image(cam);
	mcam_set_config_needed(cam, 0);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
{
	/*
	 * Clear any pending interrupts, since we do not
	 * expect to have I/O active prior to enabling.
	 */
	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
	mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

/*
 * Stop the controller, and don't return until we're really sure that no
 * further DMA is going on.
 */
static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
{
	unsigned long flags;

	/*
	 * Theory: stop the camera controller (whether it is operating
	 * or not). Delay briefly just in case we race with the SOF
	 * interrupt, then wait until no DMA is active.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	mcam_ctlr_stop(cam);
	cam->state = S_IDLE;
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	/*
	 * This is a brutally long sleep, but experience shows that
	 * it can take the controller a while to get the message that
	 * it needs to stop grabbing frames. In particular, we can
	 * sometimes (on mmp) get a frame at the end WITHOUT the
	 * start-of-frame indication.
	 */
	msleep(150);
	if (test_bit(CF_DMA_ACTIVE, &cam->flags))
		cam_err(cam, "Timeout waiting for DMA to end\n");
		/* This would be bad news - what now? */
	spin_lock_irqsave(&cam->dev_lock, flags);
	mcam_ctlr_irq_disable(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

static int mcam_ctlr_power_up(struct mcam_camera *cam)
{
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&cam->dev_lock, flags);
	if (cam->plat_power_up) {
		ret = cam->plat_power_up(cam);
		if (ret) {
			spin_unlock_irqrestore(&cam->dev_lock, flags);
			return ret;
		}
	}
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

static void mcam_ctlr_power_down(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/*
	 * School of hard knocks department: be sure we do any register
	 * twiddling on the controller *before* calling the platform
	 * power down routine.
	 */
	mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
	if (cam->plat_power_down)
		cam->plat_power_down(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/* ---------------------------------------------------------------------- */
/*
 * Master sensor clock.
 */
static int mclk_prepare(struct clk_hw *hw)
{
	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);

	clk_prepare(cam->clk[0]);
	return 0;
}

static void mclk_unprepare(struct clk_hw *hw)
{
	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);

	clk_unprepare(cam->clk[0]);
}

static int mclk_enable(struct clk_hw *hw)
{
	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);
	int mclk_src;
	int mclk_div;

	/*
	 * Clock the sensor appropriately. Controller clock should
	 * be 48MHz, sensor "typical" value is half that.
	 */
	if (cam->bus_type == V4L2_MBUS_CSI2_DPHY) {
		mclk_src = cam->mclk_src;
		mclk_div = cam->mclk_div;
	} else {
		mclk_src = 3;
		mclk_div = 2;
	}

	clk_enable(cam->clk[0]);
	mcam_reg_write(cam, REG_CLKCTRL, (mclk_src << 29) | mclk_div);
	mcam_ctlr_power_up(cam);

	return 0;
}
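
/*
 * Illustrative CLKCTRL arithmetic for the write above: with, say, mclk_src = 3
 * and mclk_div = 2 (the non-CSI defaults used here), the register gets
 * (3 << 29) | 2 = 0x60000002, i.e. clock source select in the top bits and
 * the divider in the low bits.
 */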

static void mclk_disable(struct clk_hw *hw)
{
	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);

	mcam_ctlr_power_down(cam);
	clk_disable(cam->clk[0]);
}

static unsigned long mclk_recalc_rate(struct clk_hw *hw,
				unsigned long parent_rate)
{
	return 48000000;
}

static const struct clk_ops mclk_ops = {
	.prepare	= mclk_prepare,
	.unprepare	= mclk_unprepare,
	.enable		= mclk_enable,
	.disable	= mclk_disable,
	.recalc_rate	= mclk_recalc_rate,
};

/* -------------------------------------------------------------------- */
/*
 * Communications with the sensor.
 */

static int __mcam_cam_reset(struct mcam_camera *cam)
{
	return sensor_call(cam, core, reset, 0);
}

/*
 * We have found the sensor on the i2c. Let's try to have a
 * conversation.
 */
static int mcam_cam_init(struct mcam_camera *cam)
{
	int ret;

	if (cam->state != S_NOTREADY)
		cam_warn(cam, "Cam init with device in funky state %d",
				cam->state);
	ret = __mcam_cam_reset(cam);
	/* Get/set parameters? */
	cam->state = S_IDLE;
	return ret;
}

/*
 * Configure the sensor to match the parameters we have. Caller should
 * hold s_mutex.
 */
static int mcam_cam_set_flip(struct mcam_camera *cam)
{
	struct v4l2_control ctrl;

	memset(&ctrl, 0, sizeof(ctrl));
	ctrl.id = V4L2_CID_VFLIP;
	ctrl.value = flip;
	return v4l2_s_ctrl(NULL, cam->sensor->ctrl_handler, &ctrl);
}

static int mcam_cam_configure(struct mcam_camera *cam)
{
	struct v4l2_subdev_format format = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	int ret;

	v4l2_fill_mbus_format(&format.format, &cam->pix_format, cam->mbus_code);
	ret = sensor_call(cam, core, init, 0);
	if (ret == 0)
		ret = sensor_call(cam, pad, set_fmt, NULL, &format);
	/*
	 * OV7670 does weird things if flip is set *before* format...
	 */
	ret += mcam_cam_set_flip(cam);
	return ret;
}

/*
 * Get everything ready, and start grabbing frames.
 */
static int mcam_read_setup(struct mcam_camera *cam)
{
	int ret;
	unsigned long flags;

	/*
	 * Configuration. If we still don't have DMA buffers,
	 * make one last, desperate attempt.
	 */
	if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
			mcam_alloc_dma_bufs(cam, 0))
		return -ENOMEM;

	if (mcam_needs_config(cam)) {
		mcam_cam_configure(cam);
		ret = mcam_ctlr_configure(cam);
		if (ret)
			return ret;
	}

	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_DMA_ACTIVE, &cam->flags);
	mcam_reset_buffers(cam);
	if (cam->bus_type == V4L2_MBUS_CSI2_DPHY)
		mcam_enable_mipi(cam);
	else
		mcam_disable_mipi(cam);
	mcam_ctlr_irq_enable(cam);
	cam->state = S_STREAMING;
	if (!test_bit(CF_SG_RESTART, &cam->flags))
		mcam_ctlr_start(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}
1066 * Videobuf2 interface code.
1069 static int mcam_vb_queue_setup(struct vb2_queue
*vq
,
1070 unsigned int *nbufs
,
1071 unsigned int *num_planes
, unsigned int sizes
[],
1072 struct device
*alloc_devs
[])
1074 struct mcam_camera
*cam
= vb2_get_drv_priv(vq
);
1075 int minbufs
= (cam
->buffer_mode
== B_DMA_contig
) ? 3 : 2;
1076 unsigned size
= cam
->pix_format
.sizeimage
;
1078 if (*nbufs
< minbufs
)
1082 return sizes
[0] < size
? -EINVAL
: 0;
1084 *num_planes
= 1; /* Someday we have to support planar formats... */

static void mcam_vb_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	unsigned long flags;
	int start;

	spin_lock_irqsave(&cam->dev_lock, flags);
	start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
	list_add(&mvb->queue, &cam->buffers);
	if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
		mcam_sg_restart(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	if (start)
		mcam_read_setup(cam);
}

static void mcam_vb_requeue_bufs(struct vb2_queue *vq,
				 enum vb2_buffer_state state)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);
	struct mcam_vb_buffer *buf, *node;
	unsigned long flags;
	unsigned i;

	spin_lock_irqsave(&cam->dev_lock, flags);
	list_for_each_entry_safe(buf, node, &cam->buffers, queue) {
		vb2_buffer_done(&buf->vb_buf.vb2_buf, state);
		list_del(&buf->queue);
	}
	for (i = 0; i < MAX_DMA_BUFS; i++) {
		buf = cam->vb_bufs[i];

		if (buf) {
			vb2_buffer_done(&buf->vb_buf.vb2_buf, state);
			cam->vb_bufs[i] = NULL;
		}
	}
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}
1134 static int mcam_vb_start_streaming(struct vb2_queue
*vq
, unsigned int count
)
1136 struct mcam_camera
*cam
= vb2_get_drv_priv(vq
);
1140 if (cam
->state
!= S_IDLE
) {
1141 mcam_vb_requeue_bufs(vq
, VB2_BUF_STATE_QUEUED
);
1144 cam
->frame_state
.frames
= 0;
1145 cam
->frame_state
.singles
= 0;
1146 cam
->frame_state
.delivered
= 0;
1149 * Videobuf2 sneakily hoards all the buffers and won't
1150 * give them to us until *after* streaming starts. But
1151 * we can't actually start streaming until we have a
1152 * destination. So go into a wait state and hope they
1153 * give us buffers soon.
1155 if (cam
->buffer_mode
!= B_vmalloc
&& list_empty(&cam
->buffers
)) {
1156 cam
->state
= S_BUFWAIT
;
1161 * Ensure clear the left over frame flags
1162 * before every really start streaming
1164 for (frame
= 0; frame
< cam
->nbufs
; frame
++)
1165 clear_bit(CF_FRAME_SOF0
+ frame
, &cam
->flags
);
1167 ret
= mcam_read_setup(cam
);
1169 mcam_vb_requeue_bufs(vq
, VB2_BUF_STATE_QUEUED
);
1173 static void mcam_vb_stop_streaming(struct vb2_queue
*vq
)
1175 struct mcam_camera
*cam
= vb2_get_drv_priv(vq
);
1177 cam_dbg(cam
, "stop_streaming: %d frames, %d singles, %d delivered\n",
1178 cam
->frame_state
.frames
, cam
->frame_state
.singles
,
1179 cam
->frame_state
.delivered
);
1180 if (cam
->state
== S_BUFWAIT
) {
1181 /* They never gave us buffers */
1182 cam
->state
= S_IDLE
;
1185 if (cam
->state
!= S_STREAMING
)
1187 mcam_ctlr_stop_dma(cam
);
1189 * VB2 reclaims the buffers, so we need to forget
1192 mcam_vb_requeue_bufs(vq
, VB2_BUF_STATE_ERROR
);

static const struct vb2_ops mcam_vb2_ops = {
	.queue_setup		= mcam_vb_queue_setup,
	.buf_queue		= mcam_vb_buf_queue,
	.start_streaming	= mcam_vb_start_streaming,
	.stop_streaming		= mcam_vb_stop_streaming,
	.wait_prepare		= vb2_ops_wait_prepare,
	.wait_finish		= vb2_ops_wait_finish,
};

#ifdef MCAM_MODE_DMA_SG
/*
 * Scatter/gather mode uses all of the above functions plus a
 * few extras to deal with DMA mapping.
 */
static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

	mvb->dma_desc = dma_alloc_coherent(cam->dev,
			ndesc * sizeof(struct mcam_dma_desc),
			&mvb->dma_desc_pa, GFP_KERNEL);
	if (mvb->dma_desc == NULL) {
		cam_err(cam, "Unable to get DMA descriptor array\n");
		return -ENOMEM;
	}
	return 0;
}

static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
	struct sg_table *sg_table = vb2_dma_sg_plane_desc(vb, 0);
	struct mcam_dma_desc *desc = mvb->dma_desc;
	struct scatterlist *sg;
	int i;

	for_each_sg(sg_table->sgl, sg, sg_table->nents, i) {
		desc->dma_addr = sg_dma_address(sg);
		desc->segment_len = sg_dma_len(sg);
		desc++;
	}
	return 0;
}

static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

	dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
			mvb->dma_desc, mvb->dma_desc_pa);
}

static const struct vb2_ops mcam_vb2_sg_ops = {
	.queue_setup		= mcam_vb_queue_setup,
	.buf_init		= mcam_vb_sg_buf_init,
	.buf_prepare		= mcam_vb_sg_buf_prepare,
	.buf_queue		= mcam_vb_buf_queue,
	.buf_cleanup		= mcam_vb_sg_buf_cleanup,
	.start_streaming	= mcam_vb_start_streaming,
	.stop_streaming		= mcam_vb_stop_streaming,
	.wait_prepare		= vb2_ops_wait_prepare,
	.wait_finish		= vb2_ops_wait_finish,
};

#endif /* MCAM_MODE_DMA_SG */

static int mcam_setup_vb2(struct mcam_camera *cam)
{
	struct vb2_queue *vq = &cam->vb_queue;

	memset(vq, 0, sizeof(*vq));
	vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	vq->drv_priv = cam;
	vq->lock = &cam->s_mutex;
	vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
	vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF | VB2_READ;
	vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
	vq->dev = cam->dev;
	INIT_LIST_HEAD(&cam->buffers);
	switch (cam->buffer_mode) {
	case B_DMA_contig:
#ifdef MCAM_MODE_DMA_CONTIG
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_dma_contig_memops;
		cam->dma_setup = mcam_ctlr_dma_contig;
		cam->frame_complete = mcam_dma_contig_done;
#endif
		break;
	case B_DMA_sg:
#ifdef MCAM_MODE_DMA_SG
		vq->ops = &mcam_vb2_sg_ops;
		vq->mem_ops = &vb2_dma_sg_memops;
		cam->dma_setup = mcam_ctlr_dma_sg;
		cam->frame_complete = mcam_dma_sg_done;
#endif
		break;
	case B_vmalloc:
#ifdef MCAM_MODE_VMALLOC
		tasklet_setup(&cam->s_tasklet, mcam_frame_tasklet);
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_vmalloc_memops;
		cam->dma_setup = mcam_ctlr_dma_vmalloc;
		cam->frame_complete = mcam_vmalloc_done;
#endif
		break;
	}
	return vb2_queue_init(vq);
}

/* ---------------------------------------------------------------------- */
/*
 * The long list of V4L2 ioctl() operations.
 */

static int mcam_vidioc_querycap(struct file *file, void *priv,
		struct v4l2_capability *cap)
{
	struct mcam_camera *cam = video_drvdata(file);

	strscpy(cap->driver, "marvell_ccic", sizeof(cap->driver));
	strscpy(cap->card, "marvell_ccic", sizeof(cap->card));
	strscpy(cap->bus_info, cam->bus_info, sizeof(cap->bus_info));
	return 0;
}

static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
		void *priv, struct v4l2_fmtdesc *fmt)
{
	if (fmt->index >= N_MCAM_FMTS)
		return -EINVAL;
	fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
	return 0;
}

static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = video_drvdata(filp);
	struct mcam_format_struct *f;
	struct v4l2_pix_format *pix = &fmt->fmt.pix;
	struct v4l2_subdev_pad_config pad_cfg;
	struct v4l2_subdev_format format = {
		.which = V4L2_SUBDEV_FORMAT_TRY,
	};
	int ret;

	f = mcam_find_format(pix->pixelformat);
	pix->pixelformat = f->pixelformat;
	v4l2_fill_mbus_format(&format.format, pix, f->mbus_code);
	ret = sensor_call(cam, pad, set_fmt, &pad_cfg, &format);
	v4l2_fill_pix_format(pix, &format.format);
	pix->bytesperline = pix->width * f->bpp;
	switch (f->pixelformat) {
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
		pix->sizeimage = pix->height * pix->bytesperline * 3 / 2;
		break;
	default:
		pix->sizeimage = pix->height * pix->bytesperline;
		break;
	}
	pix->colorspace = V4L2_COLORSPACE_SRGB;
	return ret;
}
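
/*
 * Concrete instance of the size math above (illustrative): at 640x480,
 * packed YUYV has bpp = 2, giving bytesperline = 1280 and sizeimage = 614400,
 * while planar YUV420 has bpp = 1, giving bytesperline = 640 and
 * sizeimage = 480 * 640 * 3 / 2 = 460800.
 */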

static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = video_drvdata(filp);
	struct mcam_format_struct *f;
	int ret;

	/*
	 * Can't do anything if the device is not idle.
	 * Also can't if there are streaming buffers in place.
	 */
	if (cam->state != S_IDLE || vb2_is_busy(&cam->vb_queue))
		return -EBUSY;

	f = mcam_find_format(fmt->fmt.pix.pixelformat);

	/*
	 * See if the formatting works in principle.
	 */
	ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
	if (ret)
		return ret;
	/*
	 * Now we start to change things for real, so let's do it.
	 */
	cam->pix_format = fmt->fmt.pix;
	cam->mbus_code = f->mbus_code;

	/*
	 * Make sure we have appropriate DMA buffers.
	 */
	if (cam->buffer_mode == B_vmalloc) {
		ret = mcam_check_dma_buffers(cam);
		if (ret)
			goto out;
	}
	mcam_set_config_needed(cam, 1);
out:
	return ret;
}

/*
 * Return our stored notion of how the camera is/should be configured.
 * The V4L2 spec wants us to be smarter, and actually get this from
 * the camera (and not mess with it at open time). Someday.
 */
static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *f)
{
	struct mcam_camera *cam = video_drvdata(filp);

	f->fmt.pix = cam->pix_format;
	return 0;
}

/*
 * We only have one input - the sensor - so minimize the nonsense here.
 */
static int mcam_vidioc_enum_input(struct file *filp, void *priv,
		struct v4l2_input *input)
{
	if (input->index != 0)
		return -EINVAL;

	input->type = V4L2_INPUT_TYPE_CAMERA;
	strscpy(input->name, "Camera", sizeof(input->name));
	return 0;
}

static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
{
	*i = 0;
	return 0;
}

static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
{
	if (i != 0)
		return -EINVAL;
	return 0;
}

/*
 * G/S_PARM. Most of this is done by the sensor, but we are
 * the level which controls the number of read buffers.
 */
static int mcam_vidioc_g_parm(struct file *filp, void *priv,
		struct v4l2_streamparm *a)
{
	struct mcam_camera *cam = video_drvdata(filp);
	int ret;

	ret = v4l2_g_parm_cap(video_devdata(filp), cam->sensor, a);
	a->parm.capture.readbuffers = n_dma_bufs;
	return ret;
}

static int mcam_vidioc_s_parm(struct file *filp, void *priv,
		struct v4l2_streamparm *a)
{
	struct mcam_camera *cam = video_drvdata(filp);
	int ret;

	ret = v4l2_s_parm_cap(video_devdata(filp), cam->sensor, a);
	a->parm.capture.readbuffers = n_dma_bufs;
	return ret;
}

static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
		struct v4l2_frmsizeenum *sizes)
{
	struct mcam_camera *cam = video_drvdata(filp);
	struct mcam_format_struct *f;
	struct v4l2_subdev_frame_size_enum fse = {
		.index = sizes->index,
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	int ret;

	f = mcam_find_format(sizes->pixel_format);
	if (f->pixelformat != sizes->pixel_format)
		return -EINVAL;
	fse.code = f->mbus_code;
	ret = sensor_call(cam, pad, enum_frame_size, NULL, &fse);
	if (ret)
		return ret;
	if (fse.min_width == fse.max_width &&
	    fse.min_height == fse.max_height) {
		sizes->type = V4L2_FRMSIZE_TYPE_DISCRETE;
		sizes->discrete.width = fse.min_width;
		sizes->discrete.height = fse.min_height;
	} else {
		sizes->type = V4L2_FRMSIZE_TYPE_CONTINUOUS;
		sizes->stepwise.min_width = fse.min_width;
		sizes->stepwise.max_width = fse.max_width;
		sizes->stepwise.min_height = fse.min_height;
		sizes->stepwise.max_height = fse.max_height;
		sizes->stepwise.step_width = 1;
		sizes->stepwise.step_height = 1;
	}
	return 0;
}

static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
		struct v4l2_frmivalenum *interval)
{
	struct mcam_camera *cam = video_drvdata(filp);
	struct mcam_format_struct *f;
	struct v4l2_subdev_frame_interval_enum fie = {
		.index = interval->index,
		.width = interval->width,
		.height = interval->height,
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	int ret;

	f = mcam_find_format(interval->pixel_format);
	if (f->pixelformat != interval->pixel_format)
		return -EINVAL;
	fie.code = f->mbus_code;
	ret = sensor_call(cam, pad, enum_frame_interval, NULL, &fie);
	if (ret)
		return ret;
	interval->type = V4L2_FRMIVAL_TYPE_DISCRETE;
	interval->discrete = fie.interval;
	return 0;
}

#ifdef CONFIG_VIDEO_ADV_DEBUG
static int mcam_vidioc_g_register(struct file *file, void *priv,
		struct v4l2_dbg_register *reg)
{
	struct mcam_camera *cam = video_drvdata(file);

	if (reg->reg > cam->regs_size - 4)
		return -EINVAL;
	reg->val = mcam_reg_read(cam, reg->reg);
	reg->size = 4;
	return 0;
}

static int mcam_vidioc_s_register(struct file *file, void *priv,
		const struct v4l2_dbg_register *reg)
{
	struct mcam_camera *cam = video_drvdata(file);

	if (reg->reg > cam->regs_size - 4)
		return -EINVAL;
	mcam_reg_write(cam, reg->reg, reg->val);
	return 0;
}
#endif
= {
1567 .vidioc_querycap
= mcam_vidioc_querycap
,
1568 .vidioc_enum_fmt_vid_cap
= mcam_vidioc_enum_fmt_vid_cap
,
1569 .vidioc_try_fmt_vid_cap
= mcam_vidioc_try_fmt_vid_cap
,
1570 .vidioc_s_fmt_vid_cap
= mcam_vidioc_s_fmt_vid_cap
,
1571 .vidioc_g_fmt_vid_cap
= mcam_vidioc_g_fmt_vid_cap
,
1572 .vidioc_enum_input
= mcam_vidioc_enum_input
,
1573 .vidioc_g_input
= mcam_vidioc_g_input
,
1574 .vidioc_s_input
= mcam_vidioc_s_input
,
1575 .vidioc_reqbufs
= vb2_ioctl_reqbufs
,
1576 .vidioc_create_bufs
= vb2_ioctl_create_bufs
,
1577 .vidioc_querybuf
= vb2_ioctl_querybuf
,
1578 .vidioc_qbuf
= vb2_ioctl_qbuf
,
1579 .vidioc_dqbuf
= vb2_ioctl_dqbuf
,
1580 .vidioc_expbuf
= vb2_ioctl_expbuf
,
1581 .vidioc_streamon
= vb2_ioctl_streamon
,
1582 .vidioc_streamoff
= vb2_ioctl_streamoff
,
1583 .vidioc_g_parm
= mcam_vidioc_g_parm
,
1584 .vidioc_s_parm
= mcam_vidioc_s_parm
,
1585 .vidioc_enum_framesizes
= mcam_vidioc_enum_framesizes
,
1586 .vidioc_enum_frameintervals
= mcam_vidioc_enum_frameintervals
,
1587 .vidioc_subscribe_event
= v4l2_ctrl_subscribe_event
,
1588 .vidioc_unsubscribe_event
= v4l2_event_unsubscribe
,
1589 #ifdef CONFIG_VIDEO_ADV_DEBUG
1590 .vidioc_g_register
= mcam_vidioc_g_register
,
1591 .vidioc_s_register
= mcam_vidioc_s_register
,

/* ---------------------------------------------------------------------- */
/*
 * Our various file operations.
 */
static int mcam_v4l_open(struct file *filp)
{
	struct mcam_camera *cam = video_drvdata(filp);
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = v4l2_fh_open(filp);
	if (ret)
		goto out;
	if (v4l2_fh_is_singular_file(filp)) {
		ret = sensor_call(cam, core, s_power, 1);
		if (ret)
			goto out;
		pm_runtime_get_sync(cam->dev);
		__mcam_cam_reset(cam);
		mcam_set_config_needed(cam, 1);
	}
out:
	mutex_unlock(&cam->s_mutex);
	if (ret)
		v4l2_fh_release(filp);
	return ret;
}

static int mcam_v4l_release(struct file *filp)
{
	struct mcam_camera *cam = video_drvdata(filp);
	bool last_open;

	mutex_lock(&cam->s_mutex);
	last_open = v4l2_fh_is_singular_file(filp);
	_vb2_fop_release(filp, NULL);
	if (last_open) {
		mcam_disable_mipi(cam);
		sensor_call(cam, core, s_power, 0);
		pm_runtime_put(cam->dev);
		if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
			mcam_free_dma_bufs(cam);
	}

	mutex_unlock(&cam->s_mutex);
	return 0;
}

static const struct v4l2_file_operations mcam_v4l_fops = {
	.owner = THIS_MODULE,
	.open = mcam_v4l_open,
	.release = mcam_v4l_release,
	.read = vb2_fop_read,
	.poll = vb2_fop_poll,
	.mmap = vb2_fop_mmap,
	.unlocked_ioctl = video_ioctl2,
};

/*
 * This template device holds all of those v4l2 methods; we
 * clone it for specific real devices.
 */
static const struct video_device mcam_v4l_template = {
	.name = "mcam",
	.fops = &mcam_v4l_fops,
	.ioctl_ops = &mcam_v4l_ioctl_ops,
	.release = video_device_release_empty,
	.device_caps = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_READWRITE |
		       V4L2_CAP_STREAMING,
};

/* ---------------------------------------------------------------------- */
/*
 * Interrupt handler stuff
 */
static void mcam_frame_complete(struct mcam_camera *cam, int frame)
{
	/*
	 * Basic frame housekeeping.
	 */
	set_bit(frame, &cam->flags);
	clear_bit(CF_DMA_ACTIVE, &cam->flags);
	cam->next_buf = frame;
	cam->buf_seq[frame] = cam->sequence++;
	cam->frame_state.frames++;
	/*
	 * "This should never happen"
	 */
	if (cam->state != S_STREAMING)
		return;
	/*
	 * Process the frame and set up the next one.
	 */
	cam->frame_complete(cam, frame);
}

/*
 * The interrupt handler; this needs to be called from the
 * platform irq handler with the lock held.
 */
int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
{
	unsigned int frame, handled = 0;

	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
	/*
	 * Handle any frame completions. There really should
	 * not be more than one of these, or we have fallen
	 * far behind.
	 *
	 * When running in S/G mode, the frame number lacks any
	 * real meaning - there's only one descriptor array - but
	 * the controller still picks a different one to signal
	 * each time.
	 */
	for (frame = 0; frame < cam->nbufs; frame++)
		if (irqs & (IRQ_EOF0 << frame) &&
			test_bit(CF_FRAME_SOF0 + frame, &cam->flags)) {
			mcam_frame_complete(cam, frame);
			handled = 1;
			clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
			if (cam->buffer_mode == B_DMA_sg)
				break;
		}
	/*
	 * If a frame starts, note that we have DMA active. This
	 * code assumes that we won't get multiple frame interrupts
	 * at once; may want to rethink that.
	 */
	for (frame = 0; frame < cam->nbufs; frame++) {
		if (irqs & (IRQ_SOF0 << frame)) {
			set_bit(CF_FRAME_SOF0 + frame, &cam->flags);
			handled = IRQ_HANDLED;
		}
	}
	if (handled == IRQ_HANDLED) {
		set_bit(CF_DMA_ACTIVE, &cam->flags);
		if (cam->buffer_mode == B_DMA_sg)
			mcam_ctlr_stop(cam);
	}
	return handled;
}
EXPORT_SYMBOL_GPL(mccic_irq);
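
/*
 * For reference, a minimal sketch (not part of this file; the platform names
 * are illustrative) of how a platform driver is expected to call mccic_irq()
 * from its own interrupt handler, with dev_lock held:
 *
 *	static irqreturn_t my_platform_camera_irq(int irq, void *data)
 *	{
 *		struct mcam_camera *mcam = data;
 *		unsigned int irqs;
 *		int handled;
 *
 *		spin_lock(&mcam->dev_lock);
 *		irqs = mcam_reg_read(mcam, REG_IRQSTAT);
 *		handled = mccic_irq(mcam, irqs);
 *		spin_unlock(&mcam->dev_lock);
 *		return IRQ_RETVAL(handled);
 *	}
 */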

/* ---------------------------------------------------------------------- */
/*
 * Registration and such.
 */

static int mccic_notify_bound(struct v4l2_async_notifier *notifier,
	struct v4l2_subdev *subdev, struct v4l2_async_subdev *asd)
{
	struct mcam_camera *cam = notifier_to_mcam(notifier);
	int ret;

	mutex_lock(&cam->s_mutex);
	if (cam->sensor) {
		cam_err(cam, "sensor already bound\n");
		ret = -EBUSY;
		goto out;
	}

	v4l2_set_subdev_hostdata(subdev, cam);
	cam->sensor = subdev;

	ret = mcam_cam_init(cam);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	ret = mcam_setup_vb2(cam);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	cam->vdev = mcam_v4l_template;
	cam->vdev.v4l2_dev = &cam->v4l2_dev;
	cam->vdev.lock = &cam->s_mutex;
	cam->vdev.queue = &cam->vb_queue;
	video_set_drvdata(&cam->vdev, cam);
	ret = video_register_device(&cam->vdev, VFL_TYPE_VIDEO, -1);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	cam_dbg(cam, "sensor %s bound\n", subdev->name);
out:
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static void mccic_notify_unbind(struct v4l2_async_notifier *notifier,
	struct v4l2_subdev *subdev, struct v4l2_async_subdev *asd)
{
	struct mcam_camera *cam = notifier_to_mcam(notifier);

	mutex_lock(&cam->s_mutex);
	if (cam->sensor != subdev) {
		cam_err(cam, "sensor %s not bound\n", subdev->name);
		goto out;
	}

	video_unregister_device(&cam->vdev);
	cam->sensor = NULL;
	cam_dbg(cam, "sensor %s unbound\n", subdev->name);

out:
	mutex_unlock(&cam->s_mutex);
}

static int mccic_notify_complete(struct v4l2_async_notifier *notifier)
{
	struct mcam_camera *cam = notifier_to_mcam(notifier);
	int ret;

	/*
	 * Get the v4l2 setup done.
	 */
	ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
	if (!ret)
		cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;

	return ret;
}

static const struct v4l2_async_notifier_operations mccic_notify_ops = {
	.bound = mccic_notify_bound,
	.unbind = mccic_notify_unbind,
	.complete = mccic_notify_complete,
};

int mccic_register(struct mcam_camera *cam)
{
	struct clk_init_data mclk_init = { };
	int ret;

	/*
	 * Validate the requested buffer mode.
	 */
	if (buffer_mode >= 0)
		cam->buffer_mode = buffer_mode;
	if (cam->buffer_mode == B_DMA_sg &&
			cam->chip_id == MCAM_CAFE) {
		printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, attempting vmalloc mode instead\n");
		cam->buffer_mode = B_vmalloc;
	}

	if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
		printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
				cam->buffer_mode);
		return -EINVAL;
	}

	ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
	if (ret)
		return ret;

	mutex_init(&cam->s_mutex);
	cam->state = S_NOTREADY;
	mcam_set_config_needed(cam, 1);
	cam->pix_format = mcam_def_pix_format;
	cam->mbus_code = mcam_def_mbus_code;

	/*
	 * Register sensor notifier.
	 */
	v4l2_async_notifier_init(&cam->notifier);
	ret = v4l2_async_notifier_add_subdev(&cam->notifier, &cam->asd);
	if (ret) {
		cam_warn(cam, "failed to add subdev to a notifier");
		goto out;
	}

	cam->notifier.ops = &mccic_notify_ops;
	ret = v4l2_async_notifier_register(&cam->v4l2_dev, &cam->notifier);
	if (ret < 0) {
		cam_warn(cam, "failed to register a sensor notifier");
		goto out;
	}

	/*
	 * Register sensor master clock.
	 */
	mclk_init.parent_names = NULL;
	mclk_init.num_parents = 0;
	mclk_init.ops = &mclk_ops;
	mclk_init.name = "mclk";

	of_property_read_string(cam->dev->of_node, "clock-output-names",
							&mclk_init.name);

	cam->mclk_hw.init = &mclk_init;

	cam->mclk = devm_clk_register(cam->dev, &cam->mclk_hw);
	if (IS_ERR(cam->mclk)) {
		ret = PTR_ERR(cam->mclk);
		dev_err(cam->dev, "can't register clock\n");
		goto out;
	}

	/*
	 * If so requested, try to get our DMA buffers now.
	 */
	if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
		if (mcam_alloc_dma_bufs(cam, 1))
			cam_warn(cam, "Unable to alloc DMA buffers at load will try again later.");
	}

	return 0;

out:
	v4l2_async_notifier_unregister(&cam->notifier);
	v4l2_device_unregister(&cam->v4l2_dev);
	v4l2_async_notifier_cleanup(&cam->notifier);
	return ret;
}
EXPORT_SYMBOL_GPL(mccic_register);

void mccic_shutdown(struct mcam_camera *cam)
{
	/*
	 * If we have no users (and we really, really should have no
	 * users) the device will already be powered down. Trying to
	 * take it down again will wedge the machine, which is frowned
	 * upon.
	 */
	if (!list_empty(&cam->vdev.fh_list)) {
		cam_warn(cam, "Removing a device with users!\n");
		sensor_call(cam, core, s_power, 0);
	}
	if (cam->buffer_mode == B_vmalloc)
		mcam_free_dma_bufs(cam);
	v4l2_ctrl_handler_free(&cam->ctrl_handler);
	v4l2_async_notifier_unregister(&cam->notifier);
	v4l2_device_unregister(&cam->v4l2_dev);
	v4l2_async_notifier_cleanup(&cam->notifier);
}
EXPORT_SYMBOL_GPL(mccic_shutdown);

void mccic_suspend(struct mcam_camera *cam)
{
	mutex_lock(&cam->s_mutex);
	if (!list_empty(&cam->vdev.fh_list)) {
		enum mcam_state cstate = cam->state;

		mcam_ctlr_stop_dma(cam);
		sensor_call(cam, core, s_power, 0);
		cam->state = cstate;
	}
	mutex_unlock(&cam->s_mutex);
}
EXPORT_SYMBOL_GPL(mccic_suspend);

int mccic_resume(struct mcam_camera *cam)
{
	int ret = 0;

	mutex_lock(&cam->s_mutex);
	if (!list_empty(&cam->vdev.fh_list)) {
		ret = sensor_call(cam, core, s_power, 1);
		if (ret) {
			mutex_unlock(&cam->s_mutex);
			return ret;
		}
		__mcam_cam_reset(cam);
	} else {
		sensor_call(cam, core, s_power, 0);
	}
	mutex_unlock(&cam->s_mutex);

	set_bit(CF_CONFIG_NEEDED, &cam->flags);
	if (cam->state == S_STREAMING) {
		/*
		 * If there was a buffer in the DMA engine at suspend
		 * time, put it back on the queue or we'll forget about it.
		 */
		if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
			list_add(&cam->vb_bufs[0]->queue, &cam->buffers);
		ret = mcam_read_setup(cam);
	}
	return ret;
}
EXPORT_SYMBOL_GPL(mccic_resume);

MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Jonathan Corbet <corbet@lwn.net>");