/*
 * vsp1_video.c  --  R-Car VSP1 Video Node
 *
 * Copyright (C) 2013-2015 Renesas Electronics Corporation
 *
 * Contact: Laurent Pinchart (laurent.pinchart@ideasonboard.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/list.h>
#include <linux/module.h>
#include <linux/mutex.h>
#include <linux/slab.h>
#include <linux/v4l2-mediabus.h>
#include <linux/videodev2.h>
#include <linux/wait.h>

#include <media/media-entity.h>
#include <media/v4l2-dev.h>
#include <media/v4l2-fh.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-subdev.h>
#include <media/videobuf2-v4l2.h>
#include <media/videobuf2-dma-contig.h>

#include "vsp1.h"
#include "vsp1_bru.h"
#include "vsp1_entity.h"
#include "vsp1_pipe.h"
#include "vsp1_rwpf.h"
#include "vsp1_uds.h"
#include "vsp1_video.h"
#define VSP1_VIDEO_DEF_FORMAT		V4L2_PIX_FMT_YUYV
#define VSP1_VIDEO_DEF_WIDTH		1024
#define VSP1_VIDEO_DEF_HEIGHT		768

#define VSP1_VIDEO_MIN_WIDTH		2U
#define VSP1_VIDEO_MAX_WIDTH		8190U
#define VSP1_VIDEO_MIN_HEIGHT		2U
#define VSP1_VIDEO_MAX_HEIGHT		8190U
/* -----------------------------------------------------------------------------
 * Helper functions
 */
static struct v4l2_subdev *
vsp1_video_remote_subdev(struct media_pad *local, u32 *pad)
{
	struct media_pad *remote;

	remote = media_entity_remote_pad(local);
	if (!remote || !is_media_entity_v4l2_subdev(remote->entity))
		return NULL;

	if (pad)
		*pad = remote->index;

	return media_entity_to_v4l2_subdev(remote->entity);
}
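
/*
 * Verify that the pixel format, width and height configured on the video node
 * match the active format on the connected subdev pad. Called at STREAMON
 * time before the pipeline is started.
 */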
static int vsp1_video_verify_format(struct vsp1_video *video)
{
	struct v4l2_subdev_format fmt;
	struct v4l2_subdev *subdev;
	int ret;

	subdev = vsp1_video_remote_subdev(&video->pad, &fmt.pad);
	if (subdev == NULL)
		return -EINVAL;

	fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
	ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt);
	if (ret < 0)
		return ret == -ENOIOCTLCMD ? -EINVAL : ret;

	if (video->rwpf->fmtinfo->mbus != fmt.format.code ||
	    video->rwpf->format.height != fmt.format.height ||
	    video->rwpf->format.width != fmt.format.width)
		return -EINVAL;

	return 0;
}
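
/*
 * Adjust the requested pixel format to what the hardware can handle: alias
 * deprecated RGB formats to their XRGB equivalent, fall back to the default
 * format when the fourcc is unknown, align and clamp the dimensions, and
 * compute 128-byte aligned strides and the resulting plane sizes.
 */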
static int __vsp1_video_try_format(struct vsp1_video *video,
				   struct v4l2_pix_format_mplane *pix,
				   const struct vsp1_format_info **fmtinfo)
{
	static const u32 xrgb_formats[][2] = {
		{ V4L2_PIX_FMT_RGB444, V4L2_PIX_FMT_XRGB444 },
		{ V4L2_PIX_FMT_RGB555, V4L2_PIX_FMT_XRGB555 },
		{ V4L2_PIX_FMT_BGR32, V4L2_PIX_FMT_XBGR32 },
		{ V4L2_PIX_FMT_RGB32, V4L2_PIX_FMT_XRGB32 },
	};

	const struct vsp1_format_info *info;
	unsigned int width = pix->width;
	unsigned int height = pix->height;
	unsigned int i;

	/* Backward compatibility: replace deprecated RGB formats by their XRGB
	 * equivalent. This selects the format older userspace applications want
	 * while still exposing the new format.
	 */
	for (i = 0; i < ARRAY_SIZE(xrgb_formats); ++i) {
		if (xrgb_formats[i][0] == pix->pixelformat) {
			pix->pixelformat = xrgb_formats[i][1];
			break;
		}
	}

	/* Retrieve format information and select the default format if the
	 * requested format isn't supported.
	 */
	info = vsp1_get_format_info(pix->pixelformat);
	if (info == NULL)
		info = vsp1_get_format_info(VSP1_VIDEO_DEF_FORMAT);

	pix->pixelformat = info->fourcc;
	pix->colorspace = V4L2_COLORSPACE_SRGB;
	pix->field = V4L2_FIELD_NONE;
	memset(pix->reserved, 0, sizeof(pix->reserved));

	/* Align the width and height for YUV 4:2:2 and 4:2:0 formats. */
	width = round_down(width, info->hsub);
	height = round_down(height, info->vsub);

	/* Clamp the width and height. */
	pix->width = clamp(width, VSP1_VIDEO_MIN_WIDTH, VSP1_VIDEO_MAX_WIDTH);
	pix->height = clamp(height, VSP1_VIDEO_MIN_HEIGHT,
			    VSP1_VIDEO_MAX_HEIGHT);

	/* Compute and clamp the stride and image size. While not documented in
	 * the datasheet, strides not aligned to a multiple of 128 bytes result
	 * in image corruption.
	 */
	for (i = 0; i < min(info->planes, 2U); ++i) {
		unsigned int hsub = i > 0 ? info->hsub : 1;
		unsigned int vsub = i > 0 ? info->vsub : 1;
		unsigned int align = 128;
		unsigned int bpl;

		bpl = clamp_t(unsigned int, pix->plane_fmt[i].bytesperline,
			      pix->width / hsub * info->bpp[i] / 8,
			      round_down(65535U, align));

		pix->plane_fmt[i].bytesperline = round_up(bpl, align);
		pix->plane_fmt[i].sizeimage = pix->plane_fmt[i].bytesperline
					    * pix->height / vsub;
	}

	if (info->planes == 3) {
		/* The second and third planes must have the same stride. */
		pix->plane_fmt[2].bytesperline = pix->plane_fmt[1].bytesperline;
		pix->plane_fmt[2].sizeimage = pix->plane_fmt[1].sizeimage;
	}

	pix->num_planes = info->planes;

	if (fmtinfo)
		*fmtinfo = info;

	return 0;
}
/* -----------------------------------------------------------------------------
 * Pipeline Management
 */
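
/*
 * Walk one pipeline branch from the input RPF to the output WPF, recording
 * the BRU compose rectangle and the UDS position on the way. The branch is
 * rejected if it loops, chains two UDS instances or doesn't terminate at the
 * output WPF.
 */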
static int vsp1_video_pipeline_validate_branch(struct vsp1_pipeline *pipe,
					       struct vsp1_rwpf *input,
					       struct vsp1_rwpf *output)
{
	struct vsp1_entity *entity;
	struct media_entity_enum ent_enum;
	struct media_pad *pad;
	int rval;
	bool bru_found = false;

	input->location.left = 0;
	input->location.top = 0;

	rval = media_entity_enum_init(
		&ent_enum, input->entity.pads[RWPF_PAD_SOURCE].graph_obj.mdev);
	if (rval)
		return rval;

	pad = media_entity_remote_pad(&input->entity.pads[RWPF_PAD_SOURCE]);

	while (1) {
		if (pad == NULL) {
			rval = -EPIPE;
			goto out;
		}

		/* We've reached a video node, that shouldn't have happened. */
		if (!is_media_entity_v4l2_subdev(pad->entity)) {
			rval = -EPIPE;
			goto out;
		}

		entity = to_vsp1_entity(
			media_entity_to_v4l2_subdev(pad->entity));

		/* A BRU is present in the pipeline, store the compose rectangle
		 * location in the input RPF for use when configuring the RPF.
		 */
		if (entity->type == VSP1_ENTITY_BRU) {
			struct vsp1_bru *bru = to_bru(&entity->subdev);
			struct v4l2_rect *rect =
				&bru->inputs[pad->index].compose;

			bru->inputs[pad->index].rpf = input;

			input->location.left = rect->left;
			input->location.top = rect->top;

			bru_found = true;
		}

		/* We've reached the WPF, we're done. */
		if (entity->type == VSP1_ENTITY_WPF)
			break;

		/* Ensure the branch has no loop. */
		if (media_entity_enum_test_and_set(&ent_enum,
						   &entity->subdev.entity)) {
			rval = -EPIPE;
			goto out;
		}

		/* UDS can't be chained. */
		if (entity->type == VSP1_ENTITY_UDS) {
			if (pipe->uds) {
				rval = -EPIPE;
				goto out;
			}

			pipe->uds = entity;
			pipe->uds_input = bru_found ? pipe->bru
					: &input->entity;
		}

		/* Follow the source link. The link setup operations ensure
		 * that the output fan-out can't be more than one, there is thus
		 * no need to verify here that only a single source link is
		 * activated.
		 */
		pad = &entity->pads[entity->source_pad];
		pad = media_entity_remote_pad(pad);
	}

	/* The last entity must be the output WPF. */
	if (entity != &output->entity)
		rval = -EPIPE;

out:
	media_entity_enum_cleanup(&ent_enum);

	return rval;
}
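
/*
 * Walk the media graph connected to the video node, record every VSP1 entity
 * that belongs to the pipeline, and then validate each branch from the input
 * RPFs to the output WPF.
 */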
static int vsp1_video_pipeline_validate(struct vsp1_pipeline *pipe,
					struct vsp1_video *video)
{
	struct media_entity_graph graph;
	struct media_entity *entity = &video->video.entity;
	struct media_device *mdev = entity->graph_obj.mdev;
	unsigned int i;
	int ret;

	mutex_lock(&mdev->graph_mutex);

	/* Walk the graph to locate the entities and video nodes. */
	ret = media_entity_graph_walk_init(&graph, mdev);
	if (ret) {
		mutex_unlock(&mdev->graph_mutex);
		return ret;
	}

	media_entity_graph_walk_start(&graph, entity);

	while ((entity = media_entity_graph_walk_next(&graph))) {
		struct v4l2_subdev *subdev;
		struct vsp1_rwpf *rwpf;
		struct vsp1_entity *e;

		if (!is_media_entity_v4l2_subdev(entity))
			continue;

		subdev = media_entity_to_v4l2_subdev(entity);
		e = to_vsp1_entity(subdev);
		list_add_tail(&e->list_pipe, &pipe->entities);

		if (e->type == VSP1_ENTITY_RPF) {
			rwpf = to_rwpf(subdev);
			pipe->inputs[rwpf->entity.index] = rwpf;
			rwpf->video->pipe_index = ++pipe->num_inputs;
		} else if (e->type == VSP1_ENTITY_WPF) {
			rwpf = to_rwpf(subdev);
			pipe->output = rwpf;
			rwpf->video->pipe_index = 0;
		} else if (e->type == VSP1_ENTITY_LIF) {
			pipe->lif = e;
		} else if (e->type == VSP1_ENTITY_BRU) {
			pipe->bru = e;
		}
	}

	mutex_unlock(&mdev->graph_mutex);

	media_entity_graph_walk_cleanup(&graph);

	/* We need one output and at least one input. */
	if (pipe->num_inputs == 0 || !pipe->output) {
		ret = -EPIPE;
		goto error;
	}

	/* Follow links downstream for each input and make sure the graph
	 * contains no loop and that all branches end at the output WPF.
	 */
	for (i = 0; i < video->vsp1->info->rpf_count; ++i) {
		if (!pipe->inputs[i])
			continue;

		ret = vsp1_video_pipeline_validate_branch(pipe, pipe->inputs[i],
							  pipe->output);
		if (ret < 0)
			goto error;
	}

	return 0;

error:
	vsp1_pipeline_reset(pipe);
	return ret;
}
static int vsp1_video_pipeline_init(struct vsp1_pipeline *pipe,
				    struct vsp1_video *video)
{
	int ret;

	mutex_lock(&pipe->lock);

	/* If we're the first user validate and initialize the pipeline. */
	if (pipe->use_count == 0) {
		ret = vsp1_video_pipeline_validate(pipe, video);
		if (ret < 0)
			goto done;
	}

	pipe->use_count++;
	ret = 0;

done:
	mutex_unlock(&pipe->lock);
	return ret;
}
static void vsp1_video_pipeline_cleanup(struct vsp1_pipeline *pipe)
{
	mutex_lock(&pipe->lock);

	/* If we're the last user clean up the pipeline. */
	if (--pipe->use_count == 0)
		vsp1_pipeline_reset(pipe);

	mutex_unlock(&pipe->lock);
}
/*
 * vsp1_video_complete_buffer - Complete the current buffer
 * @video: the video node
 *
 * This function completes the current buffer by filling its sequence number,
 * time stamp and payload size, and hands it back to the videobuf core.
 *
 * When operating in DU output mode (deep pipeline to the DU through the LIF),
 * the VSP1 needs to constantly supply frames to the display. In that case, if
 * no other buffer is queued, reuse the one that has just been processed instead
 * of handing it back to the videobuf core.
 *
 * Return the next queued buffer or NULL if the queue is empty.
 */
static struct vsp1_vb2_buffer *
vsp1_video_complete_buffer(struct vsp1_video *video)
{
	struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
	struct vsp1_vb2_buffer *next = NULL;
	struct vsp1_vb2_buffer *done;
	unsigned long flags;
	unsigned int i;

	spin_lock_irqsave(&video->irqlock, flags);

	if (list_empty(&video->irqqueue)) {
		spin_unlock_irqrestore(&video->irqlock, flags);
		return NULL;
	}

	done = list_first_entry(&video->irqqueue,
				struct vsp1_vb2_buffer, queue);

	/* In DU output mode reuse the buffer if the list is singular. */
	if (pipe->lif && list_is_singular(&video->irqqueue)) {
		spin_unlock_irqrestore(&video->irqlock, flags);
		return done;
	}

	list_del(&done->queue);

	if (!list_empty(&video->irqqueue))
		next = list_first_entry(&video->irqqueue,
					struct vsp1_vb2_buffer, queue);

	spin_unlock_irqrestore(&video->irqlock, flags);

	done->buf.sequence = video->sequence++;
	done->buf.vb2_buf.timestamp = ktime_get_ns();
	for (i = 0; i < done->buf.vb2_buf.num_planes; ++i)
		vb2_set_plane_payload(&done->buf.vb2_buf, i,
				      done->mem.length[i]);
	vb2_buffer_done(&done->buf.vb2_buf, VB2_BUF_STATE_DONE);

	return next;
}
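
/*
 * Complete the current buffer on the video node and hand the memory addresses
 * of the next one to the hardware. The buffers_ready bitmask records that this
 * video node has a buffer available for the next frame.
 */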
static void vsp1_video_frame_end(struct vsp1_pipeline *pipe,
				 struct vsp1_rwpf *rwpf)
{
	struct vsp1_video *video = rwpf->video;
	struct vsp1_vb2_buffer *buf;
	unsigned long flags;

	buf = vsp1_video_complete_buffer(video);
	if (buf == NULL)
		return;

	spin_lock_irqsave(&pipe->irqlock, flags);

	video->rwpf->ops->set_memory(video->rwpf, &buf->mem);
	pipe->buffers_ready |= 1 << video->pipe_index;

	spin_unlock_irqrestore(&pipe->irqlock, flags);
}
static void vsp1_video_pipeline_frame_end(struct vsp1_pipeline *pipe)
{
	struct vsp1_device *vsp1 = pipe->output->entity.vsp1;
	unsigned int i;

	/* Complete buffers on all video nodes. */
	for (i = 0; i < vsp1->info->rpf_count; ++i) {
		if (!pipe->inputs[i])
			continue;

		vsp1_video_frame_end(pipe, pipe->inputs[i]);
	}

	vsp1_video_frame_end(pipe, pipe->output);
}
/* -----------------------------------------------------------------------------
 * videobuf2 Queue Operations
 */
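
/*
 * Compute the number of planes and plane sizes from the active format, or
 * verify a caller-provided plane layout against it when *nplanes is non-zero
 * (the VIDIOC_CREATE_BUFS path).
 */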
static int
vsp1_video_queue_setup(struct vb2_queue *vq,
		       unsigned int *nbuffers, unsigned int *nplanes,
		       unsigned int sizes[], void *alloc_ctxs[])
{
	struct vsp1_video *video = vb2_get_drv_priv(vq);
	const struct v4l2_pix_format_mplane *format = &video->rwpf->format;
	unsigned int i;

	if (*nplanes) {
		if (*nplanes != format->num_planes)
			return -EINVAL;

		for (i = 0; i < *nplanes; i++) {
			if (sizes[i] < format->plane_fmt[i].sizeimage)
				return -EINVAL;
			alloc_ctxs[i] = video->alloc_ctx;
		}
		return 0;
	}

	*nplanes = format->num_planes;

	for (i = 0; i < format->num_planes; ++i) {
		sizes[i] = format->plane_fmt[i].sizeimage;
		alloc_ctxs[i] = video->alloc_ctx;
	}

	return 0;
}
static int vsp1_video_buffer_prepare(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct vsp1_video *video = vb2_get_drv_priv(vb->vb2_queue);
	struct vsp1_vb2_buffer *buf = to_vsp1_vb2_buffer(vbuf);
	const struct v4l2_pix_format_mplane *format = &video->rwpf->format;
	unsigned int i;

	if (vb->num_planes < format->num_planes)
		return -EINVAL;

	buf->mem.num_planes = vb->num_planes;

	for (i = 0; i < vb->num_planes; ++i) {
		buf->mem.addr[i] = vb2_dma_contig_plane_dma_addr(vb, i);
		buf->mem.length[i] = vb2_plane_size(vb, i);

		if (buf->mem.length[i] < format->plane_fmt[i].sizeimage)
			return -EINVAL;
	}

	return 0;
}
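
/*
 * Add the buffer to the IRQ queue. If the queue was empty the buffer is
 * handed to the hardware immediately, and the pipeline is run as soon as the
 * node is streaming and all video nodes involved have buffers ready.
 */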
static void vsp1_video_buffer_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct vsp1_video *video = vb2_get_drv_priv(vb->vb2_queue);
	struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
	struct vsp1_vb2_buffer *buf = to_vsp1_vb2_buffer(vbuf);
	unsigned long flags;
	bool empty;

	spin_lock_irqsave(&video->irqlock, flags);
	empty = list_empty(&video->irqqueue);
	list_add_tail(&buf->queue, &video->irqqueue);
	spin_unlock_irqrestore(&video->irqlock, flags);

	if (!empty)
		return;

	spin_lock_irqsave(&pipe->irqlock, flags);

	video->rwpf->ops->set_memory(video->rwpf, &buf->mem);
	pipe->buffers_ready |= 1 << video->pipe_index;

	if (vb2_is_streaming(&video->queue) &&
	    vsp1_pipeline_ready(pipe))
		vsp1_pipeline_run(pipe);

	spin_unlock_irqrestore(&pipe->irqlock, flags);
}
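
/*
 * Called by vb2 for each video node. The pipeline is configured (UDS alpha
 * scaling, entity routing, subdev s_stream) only when the last video node of
 * the pipeline starts streaming, and it is run as soon as all involved video
 * nodes have buffers ready.
 */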
static int vsp1_video_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct vsp1_video *video = vb2_get_drv_priv(vq);
	struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
	struct vsp1_entity *entity;
	unsigned long flags;
	int ret;

	mutex_lock(&pipe->lock);
	if (pipe->stream_count == pipe->num_inputs) {
		if (pipe->uds) {
			struct vsp1_uds *uds = to_uds(&pipe->uds->subdev);

			/* If a BRU is present in the pipeline before the UDS,
			 * the alpha component doesn't need to be scaled as the
			 * BRU output alpha value is fixed to 255. Otherwise we
			 * need to scale the alpha component only when available
			 * at the input RPF.
			 */
			if (pipe->uds_input->type == VSP1_ENTITY_BRU) {
				uds->scale_alpha = false;
			} else {
				struct vsp1_rwpf *rpf =
					to_rwpf(&pipe->uds_input->subdev);

				uds->scale_alpha = rpf->fmtinfo->alpha;
			}
		}

		list_for_each_entry(entity, &pipe->entities, list_pipe) {
			vsp1_entity_route_setup(entity);

			ret = v4l2_subdev_call(&entity->subdev, video,
					       s_stream, 1);
			if (ret < 0) {
				mutex_unlock(&pipe->lock);
				return ret;
			}
		}
	}

	pipe->stream_count++;
	mutex_unlock(&pipe->lock);

	spin_lock_irqsave(&pipe->irqlock, flags);
	if (vsp1_pipeline_ready(pipe))
		vsp1_pipeline_run(pipe);
	spin_unlock_irqrestore(&pipe->irqlock, flags);

	return 0;
}
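
/*
 * Called by vb2 for each video node. The hardware is stopped when the last
 * video node of the pipeline stops streaming. The buffers remaining on this
 * node's IRQ queue are returned to vb2 in the error state.
 */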
static void vsp1_video_stop_streaming(struct vb2_queue *vq)
{
	struct vsp1_video *video = vb2_get_drv_priv(vq);
	struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
	struct vsp1_vb2_buffer *buffer;
	unsigned long flags;
	int ret;

	mutex_lock(&pipe->lock);
	if (--pipe->stream_count == 0) {
		/* Stop the pipeline. */
		ret = vsp1_pipeline_stop(pipe);
		if (ret == -ETIMEDOUT)
			dev_err(video->vsp1->dev, "pipeline stop timeout\n");
	}
	mutex_unlock(&pipe->lock);

	vsp1_video_pipeline_cleanup(pipe);
	media_entity_pipeline_stop(&video->video.entity);

	/* Remove all buffers from the IRQ queue. */
	spin_lock_irqsave(&video->irqlock, flags);
	list_for_each_entry(buffer, &video->irqqueue, queue)
		vb2_buffer_done(&buffer->buf.vb2_buf, VB2_BUF_STATE_ERROR);
	INIT_LIST_HEAD(&video->irqqueue);
	spin_unlock_irqrestore(&video->irqlock, flags);
}
static struct vb2_ops vsp1_video_queue_qops = {
	.queue_setup = vsp1_video_queue_setup,
	.buf_prepare = vsp1_video_buffer_prepare,
	.buf_queue = vsp1_video_buffer_queue,
	.wait_prepare = vb2_ops_wait_prepare,
	.wait_finish = vb2_ops_wait_finish,
	.start_streaming = vsp1_video_start_streaming,
	.stop_streaming = vsp1_video_stop_streaming,
};
/* -----------------------------------------------------------------------------
 * V4L2 ioctls
 */
static int
vsp1_video_querycap(struct file *file, void *fh, struct v4l2_capability *cap)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);

	cap->capabilities = V4L2_CAP_DEVICE_CAPS | V4L2_CAP_STREAMING
			  | V4L2_CAP_VIDEO_CAPTURE_MPLANE
			  | V4L2_CAP_VIDEO_OUTPUT_MPLANE;

	if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		cap->device_caps = V4L2_CAP_VIDEO_CAPTURE_MPLANE
				 | V4L2_CAP_STREAMING;
	else
		cap->device_caps = V4L2_CAP_VIDEO_OUTPUT_MPLANE
				 | V4L2_CAP_STREAMING;

	strlcpy(cap->driver, "vsp1", sizeof(cap->driver));
	strlcpy(cap->card, video->video.name, sizeof(cap->card));
	snprintf(cap->bus_info, sizeof(cap->bus_info), "platform:%s",
		 dev_name(video->vsp1->dev));

	return 0;
}
static int
vsp1_video_get_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);

	if (format->type != video->queue.type)
		return -EINVAL;

	mutex_lock(&video->lock);
	format->fmt.pix_mp = video->rwpf->format;
	mutex_unlock(&video->lock);

	return 0;
}
static int
vsp1_video_try_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);

	if (format->type != video->queue.type)
		return -EINVAL;

	return __vsp1_video_try_format(video, &format->fmt.pix_mp, NULL);
}
static int
vsp1_video_set_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);
	const struct vsp1_format_info *info;
	int ret;

	if (format->type != video->queue.type)
		return -EINVAL;

	ret = __vsp1_video_try_format(video, &format->fmt.pix_mp, &info);
	if (ret < 0)
		return ret;

	mutex_lock(&video->lock);

	if (vb2_is_busy(&video->queue)) {
		ret = -EBUSY;
		goto done;
	}

	video->rwpf->format = format->fmt.pix_mp;
	video->rwpf->fmtinfo = info;

done:
	mutex_unlock(&video->lock);
	return ret;
}
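
/*
 * VIDIOC_STREAMON handler. Lock the media pipeline, verify the configured
 * format against the connected subdev, validate and initialize the VSP1
 * pipeline, and finally start the vb2 queue.
 */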
static int
vsp1_video_streamon(struct file *file, void *fh, enum v4l2_buf_type type)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);
	struct vsp1_pipeline *pipe;
	int ret;

	if (video->queue.owner && video->queue.owner != file->private_data)
		return -EBUSY;

	video->sequence = 0;

	/* Start streaming on the pipeline. No link touching an entity in the
	 * pipeline can be activated or deactivated once streaming is started.
	 *
	 * Use the VSP1 pipeline object embedded in the first video object that
	 * starts streaming.
	 */
	pipe = video->video.entity.pipe
	     ? to_vsp1_pipeline(&video->video.entity) : &video->pipe;

	ret = media_entity_pipeline_start(&video->video.entity, &pipe->pipe);
	if (ret < 0)
		return ret;

	/* Verify that the configured format matches the output of the connected
	 * subdev.
	 */
	ret = vsp1_video_verify_format(video);
	if (ret < 0)
		goto err_stop;

	ret = vsp1_video_pipeline_init(pipe, video);
	if (ret < 0)
		goto err_stop;

	/* Start the queue. */
	ret = vb2_streamon(&video->queue, type);
	if (ret < 0)
		goto err_cleanup;

	return 0;

err_cleanup:
	vsp1_video_pipeline_cleanup(pipe);
err_stop:
	media_entity_pipeline_stop(&video->video.entity);
	return ret;
}
static const struct v4l2_ioctl_ops vsp1_video_ioctl_ops = {
	.vidioc_querycap		= vsp1_video_querycap,
	.vidioc_g_fmt_vid_cap_mplane	= vsp1_video_get_format,
	.vidioc_s_fmt_vid_cap_mplane	= vsp1_video_set_format,
	.vidioc_try_fmt_vid_cap_mplane	= vsp1_video_try_format,
	.vidioc_g_fmt_vid_out_mplane	= vsp1_video_get_format,
	.vidioc_s_fmt_vid_out_mplane	= vsp1_video_set_format,
	.vidioc_try_fmt_vid_out_mplane	= vsp1_video_try_format,
	.vidioc_reqbufs			= vb2_ioctl_reqbufs,
	.vidioc_querybuf		= vb2_ioctl_querybuf,
	.vidioc_qbuf			= vb2_ioctl_qbuf,
	.vidioc_dqbuf			= vb2_ioctl_dqbuf,
	.vidioc_create_bufs		= vb2_ioctl_create_bufs,
	.vidioc_prepare_buf		= vb2_ioctl_prepare_buf,
	.vidioc_streamon		= vsp1_video_streamon,
	.vidioc_streamoff		= vb2_ioctl_streamoff,
};
/* -----------------------------------------------------------------------------
 * V4L2 File Operations
 */
static int vsp1_video_open(struct file *file)
{
	struct vsp1_video *video = video_drvdata(file);
	struct v4l2_fh *vfh;
	int ret = 0;

	vfh = kzalloc(sizeof(*vfh), GFP_KERNEL);
	if (vfh == NULL)
		return -ENOMEM;

	v4l2_fh_init(vfh, &video->video);
	v4l2_fh_add(vfh);

	file->private_data = vfh;

	ret = vsp1_device_get(video->vsp1);
	if (ret < 0) {
		v4l2_fh_del(vfh);
		kfree(vfh);
	}

	return ret;
}
static int vsp1_video_release(struct file *file)
{
	struct vsp1_video *video = video_drvdata(file);
	struct v4l2_fh *vfh = file->private_data;

	mutex_lock(&video->lock);
	if (video->queue.owner == vfh) {
		vb2_queue_release(&video->queue);
		video->queue.owner = NULL;
	}
	mutex_unlock(&video->lock);

	vsp1_device_put(video->vsp1);

	v4l2_fh_release(file);

	file->private_data = NULL;

	return 0;
}
static struct v4l2_file_operations vsp1_video_fops = {
	.owner = THIS_MODULE,
	.unlocked_ioctl = video_ioctl2,
	.open = vsp1_video_open,
	.release = vsp1_video_release,
	.poll = vb2_fop_poll,
	.mmap = vb2_fop_mmap,
};
/* -----------------------------------------------------------------------------
 * Initialization and Cleanup
 */
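
/*
 * Create the video node for an RPF or WPF: pick the buffer type and pad
 * direction from the entity type, set the default format, initialize the vb2
 * queue with the DMA contiguous allocator and register the video device.
 */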
struct vsp1_video *vsp1_video_create(struct vsp1_device *vsp1,
				     struct vsp1_rwpf *rwpf)
{
	struct vsp1_video *video;
	const char *direction;
	int ret;

	video = devm_kzalloc(vsp1->dev, sizeof(*video), GFP_KERNEL);
	if (!video)
		return ERR_PTR(-ENOMEM);

	rwpf->video = video;

	video->vsp1 = vsp1;
	video->rwpf = rwpf;

	if (rwpf->entity.type == VSP1_ENTITY_RPF) {
		direction = "input";
		video->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
		video->pad.flags = MEDIA_PAD_FL_SOURCE;
		video->video.vfl_dir = VFL_DIR_TX;
	} else {
		direction = "output";
		video->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		video->pad.flags = MEDIA_PAD_FL_SINK;
		video->video.vfl_dir = VFL_DIR_RX;
	}

	mutex_init(&video->lock);
	spin_lock_init(&video->irqlock);
	INIT_LIST_HEAD(&video->irqqueue);

	vsp1_pipeline_init(&video->pipe);
	video->pipe.frame_end = vsp1_video_pipeline_frame_end;

	/* Initialize the media entity... */
	ret = media_entity_pads_init(&video->video.entity, 1, &video->pad);
	if (ret < 0)
		return ERR_PTR(ret);

	/* ... and the format ... */
	rwpf->fmtinfo = vsp1_get_format_info(VSP1_VIDEO_DEF_FORMAT);
	rwpf->format.pixelformat = rwpf->fmtinfo->fourcc;
	rwpf->format.colorspace = V4L2_COLORSPACE_SRGB;
	rwpf->format.field = V4L2_FIELD_NONE;
	rwpf->format.width = VSP1_VIDEO_DEF_WIDTH;
	rwpf->format.height = VSP1_VIDEO_DEF_HEIGHT;
	rwpf->format.num_planes = 1;
	rwpf->format.plane_fmt[0].bytesperline =
		rwpf->format.width * rwpf->fmtinfo->bpp[0] / 8;
	rwpf->format.plane_fmt[0].sizeimage =
		rwpf->format.plane_fmt[0].bytesperline * rwpf->format.height;

	/* ... and the video node... */
	video->video.v4l2_dev = &video->vsp1->v4l2_dev;
	video->video.fops = &vsp1_video_fops;
	snprintf(video->video.name, sizeof(video->video.name), "%s %s",
		 rwpf->entity.subdev.name, direction);
	video->video.vfl_type = VFL_TYPE_GRABBER;
	video->video.release = video_device_release_empty;
	video->video.ioctl_ops = &vsp1_video_ioctl_ops;

	video_set_drvdata(&video->video, video);

	/* ... and the buffers queue... */
	video->alloc_ctx = vb2_dma_contig_init_ctx(video->vsp1->dev);
	if (IS_ERR(video->alloc_ctx)) {
		ret = PTR_ERR(video->alloc_ctx);
		goto error;
	}

	video->queue.type = video->type;
	video->queue.io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
	video->queue.lock = &video->lock;
	video->queue.drv_priv = video;
	video->queue.buf_struct_size = sizeof(struct vsp1_vb2_buffer);
	video->queue.ops = &vsp1_video_queue_qops;
	video->queue.mem_ops = &vb2_dma_contig_memops;
	video->queue.timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	ret = vb2_queue_init(&video->queue);
	if (ret < 0) {
		dev_err(video->vsp1->dev, "failed to initialize vb2 queue\n");
		goto error;
	}

	/* ... and register the video device. */
	video->video.queue = &video->queue;
	ret = video_register_device(&video->video, VFL_TYPE_GRABBER, -1);
	if (ret < 0) {
		dev_err(video->vsp1->dev, "failed to register video device\n");
		goto error;
	}

	return video;

error:
	vb2_dma_contig_cleanup_ctx(video->alloc_ctx);
	vsp1_video_cleanup(video);
	return ERR_PTR(ret);
}
void vsp1_video_cleanup(struct vsp1_video *video)
{
	if (video_is_registered(&video->video))
		video_unregister_device(&video->video);

	vb2_dma_contig_cleanup_ctx(video->alloc_ctx);
	media_entity_cleanup(&video->video.entity);
}