/*
 * vsp1_video.c -- R-Car VSP1 Video Node
 *
 * Copyright (C) 2013-2015 Renesas Electronics Corporation
 *
 * Contact: Laurent Pinchart (laurent.pinchart@ideasonboard.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/list.h>
#include <linux/module.h>
#include <linux/mutex.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/v4l2-mediabus.h>
#include <linux/videodev2.h>
#include <linux/wait.h>

#include <media/media-entity.h>
#include <media/v4l2-dev.h>
#include <media/v4l2-fh.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-subdev.h>
#include <media/videobuf2-v4l2.h>
#include <media/videobuf2-dma-contig.h>

#include "vsp1.h"
#include "vsp1_bru.h"
#include "vsp1_entity.h"
#include "vsp1_rwpf.h"
#include "vsp1_uds.h"
#include "vsp1_video.h"

#define VSP1_VIDEO_DEF_FORMAT		V4L2_PIX_FMT_YUYV
#define VSP1_VIDEO_DEF_WIDTH		1024
#define VSP1_VIDEO_DEF_HEIGHT		768

#define VSP1_VIDEO_MIN_WIDTH		2U
#define VSP1_VIDEO_MAX_WIDTH		8190U
#define VSP1_VIDEO_MIN_HEIGHT		2U
#define VSP1_VIDEO_MAX_HEIGHT		8190U

/* -----------------------------------------------------------------------------
 * Helper functions
 */

static const struct vsp1_format_info vsp1_video_formats[] = {
	{ V4L2_PIX_FMT_RGB332, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_RGB_332, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 8, 0, 0 }, false, false, 1, 1, false },
	{ V4L2_PIX_FMT_ARGB444, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_ARGB_4444, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 16, 0, 0 }, false, false, 1, 1, true },
	{ V4L2_PIX_FMT_XRGB444, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_XRGB_4444, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 16, 0, 0 }, false, false, 1, 1, true },
	{ V4L2_PIX_FMT_ARGB555, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_ARGB_1555, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 16, 0, 0 }, false, false, 1, 1, true },
	{ V4L2_PIX_FMT_XRGB555, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_XRGB_1555, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 16, 0, 0 }, false, false, 1, 1, false },
	{ V4L2_PIX_FMT_RGB565, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_RGB_565, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 16, 0, 0 }, false, false, 1, 1, false },
	{ V4L2_PIX_FMT_BGR24, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_BGR_888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 24, 0, 0 }, false, false, 1, 1, false },
	{ V4L2_PIX_FMT_RGB24, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_RGB_888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 24, 0, 0 }, false, false, 1, 1, false },
	{ V4L2_PIX_FMT_ABGR32, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_ARGB_8888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS,
	  1, { 32, 0, 0 }, false, false, 1, 1, true },
	{ V4L2_PIX_FMT_XBGR32, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_ARGB_8888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS,
	  1, { 32, 0, 0 }, false, false, 1, 1, false },
	{ V4L2_PIX_FMT_ARGB32, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_ARGB_8888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 32, 0, 0 }, false, false, 1, 1, true },
	{ V4L2_PIX_FMT_XRGB32, MEDIA_BUS_FMT_ARGB8888_1X32,
	  VI6_FMT_ARGB_8888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 32, 0, 0 }, false, false, 1, 1, false },
	{ V4L2_PIX_FMT_UYVY, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 16, 0, 0 }, false, false, 2, 1, false },
	{ V4L2_PIX_FMT_VYUY, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 16, 0, 0 }, false, true, 2, 1, false },
	{ V4L2_PIX_FMT_YUYV, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 16, 0, 0 }, true, false, 2, 1, false },
	{ V4L2_PIX_FMT_YVYU, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  1, { 16, 0, 0 }, true, true, 2, 1, false },
	{ V4L2_PIX_FMT_NV12M, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_Y_UV_420, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  2, { 8, 16, 0 }, false, false, 2, 2, false },
	{ V4L2_PIX_FMT_NV21M, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_Y_UV_420, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  2, { 8, 16, 0 }, false, true, 2, 2, false },
	{ V4L2_PIX_FMT_NV16M, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_Y_UV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  2, { 8, 16, 0 }, false, false, 2, 1, false },
	{ V4L2_PIX_FMT_NV61M, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_Y_UV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  2, { 8, 16, 0 }, false, true, 2, 1, false },
	{ V4L2_PIX_FMT_YUV420M, MEDIA_BUS_FMT_AYUV8_1X32,
	  VI6_FMT_Y_U_V_420, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
	  VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
	  3, { 8, 8, 8 }, false, false, 2, 2, false },
};

/*
 * vsp1_get_format_info - Retrieve format information for a 4CC
 * @fourcc: the format 4CC
 *
 * Return a pointer to the format information structure corresponding to the
 * given V4L2 format 4CC, or NULL if no corresponding format can be found.
 */
static const struct vsp1_format_info *vsp1_get_format_info(u32 fourcc)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(vsp1_video_formats); ++i) {
		const struct vsp1_format_info *info = &vsp1_video_formats[i];

		if (info->fourcc == fourcc)
			return info;
	}

	return NULL;
}

static struct v4l2_subdev *
vsp1_video_remote_subdev(struct media_pad *local, u32 *pad)
{
	struct media_pad *remote;

	remote = media_entity_remote_pad(local);
	if (remote == NULL ||
	    media_entity_type(remote->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
		return NULL;

	if (pad)
		*pad = remote->index;

	return media_entity_to_v4l2_subdev(remote->entity);
}

static int vsp1_video_verify_format(struct vsp1_video *video)
{
	struct v4l2_subdev_format fmt;
	struct v4l2_subdev *subdev;
	int ret;

	subdev = vsp1_video_remote_subdev(&video->pad, &fmt.pad);
	if (subdev == NULL)
		return -EINVAL;

	fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
	ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt);
	if (ret < 0)
		return ret == -ENOIOCTLCMD ? -EINVAL : ret;

	if (video->fmtinfo->mbus != fmt.format.code ||
	    video->format.height != fmt.format.height ||
	    video->format.width != fmt.format.width)
		return -EINVAL;

	return 0;
}

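/*
 * __vsp1_video_try_format - Adjust a format to the hardware constraints
 *
 * Clamp the width, height and per-plane stride of the requested format to
 * what the VSP1 can handle, and optionally return the matching format
 * information structure through @fmtinfo.
 */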
static int __vsp1_video_try_format(struct vsp1_video *video,
				   struct v4l2_pix_format_mplane *pix,
				   const struct vsp1_format_info **fmtinfo)
{
	static const u32 xrgb_formats[][2] = {
		{ V4L2_PIX_FMT_RGB444, V4L2_PIX_FMT_XRGB444 },
		{ V4L2_PIX_FMT_RGB555, V4L2_PIX_FMT_XRGB555 },
		{ V4L2_PIX_FMT_BGR32, V4L2_PIX_FMT_XBGR32 },
		{ V4L2_PIX_FMT_RGB32, V4L2_PIX_FMT_XRGB32 },
	};

	const struct vsp1_format_info *info;
	unsigned int width = pix->width;
	unsigned int height = pix->height;
	unsigned int i;

	/* Backward compatibility: replace deprecated RGB formats by their XRGB
	 * equivalent. This selects the format older userspace applications want
	 * while still exposing the new format.
	 */
	for (i = 0; i < ARRAY_SIZE(xrgb_formats); ++i) {
		if (xrgb_formats[i][0] == pix->pixelformat) {
			pix->pixelformat = xrgb_formats[i][1];
			break;
		}
	}

	/* Retrieve format information and select the default format if the
	 * requested format isn't supported.
	 */
	info = vsp1_get_format_info(pix->pixelformat);
	if (info == NULL)
		info = vsp1_get_format_info(VSP1_VIDEO_DEF_FORMAT);

	pix->pixelformat = info->fourcc;
	pix->colorspace = V4L2_COLORSPACE_SRGB;
	pix->field = V4L2_FIELD_NONE;
	memset(pix->reserved, 0, sizeof(pix->reserved));

	/* Align the width and height for YUV 4:2:2 and 4:2:0 formats. */
	width = round_down(width, info->hsub);
	height = round_down(height, info->vsub);

	/* Clamp the width and height. */
	pix->width = clamp(width, VSP1_VIDEO_MIN_WIDTH, VSP1_VIDEO_MAX_WIDTH);
	pix->height = clamp(height, VSP1_VIDEO_MIN_HEIGHT,
			    VSP1_VIDEO_MAX_HEIGHT);

	/* Compute and clamp the stride and image size. While not documented in
	 * the datasheet, strides not aligned to a multiple of 128 bytes result
	 * in image corruption.
	 */
	for (i = 0; i < min(info->planes, 2U); ++i) {
		unsigned int hsub = i > 0 ? info->hsub : 1;
		unsigned int vsub = i > 0 ? info->vsub : 1;
		unsigned int align = 128;
		unsigned int bpl;

		bpl = clamp_t(unsigned int, pix->plane_fmt[i].bytesperline,
			      pix->width / hsub * info->bpp[i] / 8,
			      round_down(65535U, align));

		pix->plane_fmt[i].bytesperline = round_up(bpl, align);
		pix->plane_fmt[i].sizeimage = pix->plane_fmt[i].bytesperline
					    * pix->height / vsub;
	}

	if (info->planes == 3) {
		/* The second and third planes must have the same stride. */
		pix->plane_fmt[2].bytesperline = pix->plane_fmt[1].bytesperline;
		pix->plane_fmt[2].sizeimage = pix->plane_fmt[1].sizeimage;
	}

	pix->num_planes = info->planes;

	if (fmtinfo)
		*fmtinfo = info;

	return 0;
}

static bool
vsp1_video_format_adjust(struct vsp1_video *video,
			 const struct v4l2_pix_format_mplane *format,
			 struct v4l2_pix_format_mplane *adjust)
{
	unsigned int i;

	*adjust = *format;
	__vsp1_video_try_format(video, adjust, NULL);

	if (format->width != adjust->width ||
	    format->height != adjust->height ||
	    format->pixelformat != adjust->pixelformat ||
	    format->num_planes != adjust->num_planes)
		return false;

	for (i = 0; i < format->num_planes; ++i) {
		if (format->plane_fmt[i].bytesperline !=
		    adjust->plane_fmt[i].bytesperline)
			return false;

		adjust->plane_fmt[i].sizeimage =
			max(adjust->plane_fmt[i].sizeimage,
			    format->plane_fmt[i].sizeimage);
	}

	return true;
}

/* -----------------------------------------------------------------------------
 * Pipeline Management
 */

static int vsp1_pipeline_validate_branch(struct vsp1_pipeline *pipe,
					 struct vsp1_rwpf *input,
					 struct vsp1_rwpf *output)
{
	struct vsp1_entity *entity;
	unsigned int entities = 0;
	struct media_pad *pad;
	bool bru_found = false;

	input->location.left = 0;
	input->location.top = 0;

	pad = media_entity_remote_pad(&input->entity.pads[RWPF_PAD_SOURCE]);

	while (1) {
		if (pad == NULL)
			return -EPIPE;

		/* We've reached a video node, that shouldn't have happened. */
		if (media_entity_type(pad->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
			return -EPIPE;

		entity = to_vsp1_entity(media_entity_to_v4l2_subdev(pad->entity));

		/* A BRU is present in the pipeline, store the compose rectangle
		 * location in the input RPF for use when configuring the RPF.
		 */
		if (entity->type == VSP1_ENTITY_BRU) {
			struct vsp1_bru *bru = to_bru(&entity->subdev);
			struct v4l2_rect *rect =
				&bru->inputs[pad->index].compose;

			bru->inputs[pad->index].rpf = input;

			input->location.left = rect->left;
			input->location.top = rect->top;

			bru_found = true;
		}

		/* We've reached the WPF, we're done. */
		if (entity->type == VSP1_ENTITY_WPF)
			break;

		/* Ensure the branch has no loop. */
		if (entities & (1 << entity->subdev.entity.id))
			return -EPIPE;

		entities |= 1 << entity->subdev.entity.id;

		/* UDS can't be chained. */
		if (entity->type == VSP1_ENTITY_UDS) {
			if (pipe->uds)
				return -EPIPE;

			pipe->uds = entity;
			pipe->uds_input = bru_found ? pipe->bru
					: &input->entity;
		}

		/* Follow the source link. The link setup operations ensure
		 * that the output fan-out can't be more than one, there is thus
		 * no need to verify here that only a single source link is
		 * activated.
		 */
		pad = &entity->pads[entity->source_pad];
		pad = media_entity_remote_pad(pad);
	}

	/* The last entity must be the output WPF. */
	if (entity != &output->entity)
		return -EPIPE;

	return 0;
}

static void __vsp1_pipeline_cleanup(struct vsp1_pipeline *pipe)
{
	if (pipe->bru) {
		struct vsp1_bru *bru = to_bru(&pipe->bru->subdev);
		unsigned int i;

		for (i = 0; i < ARRAY_SIZE(bru->inputs); ++i)
			bru->inputs[i].rpf = NULL;
	}

	INIT_LIST_HEAD(&pipe->entities);
	pipe->state = VSP1_PIPELINE_STOPPED;
	pipe->buffers_ready = 0;
	pipe->num_video = 0;
	pipe->num_inputs = 0;
	pipe->output = NULL;
	pipe->bru = NULL;
	pipe->lif = NULL;
	pipe->uds = NULL;
}

static int vsp1_pipeline_validate(struct vsp1_pipeline *pipe,
				  struct vsp1_video *video)
{
	struct media_entity_graph graph;
	struct media_entity *entity = &video->video.entity;
	struct media_device *mdev = entity->parent;
	unsigned int i;
	int ret;

	mutex_lock(&mdev->graph_mutex);

	/* Walk the graph to locate the entities and video nodes. */
	media_entity_graph_walk_start(&graph, entity);

	while ((entity = media_entity_graph_walk_next(&graph))) {
		struct v4l2_subdev *subdev;
		struct vsp1_rwpf *rwpf;
		struct vsp1_entity *e;

		if (media_entity_type(entity) != MEDIA_ENT_T_V4L2_SUBDEV) {
			pipe->num_video++;
			continue;
		}

		subdev = media_entity_to_v4l2_subdev(entity);
		e = to_vsp1_entity(subdev);
		list_add_tail(&e->list_pipe, &pipe->entities);

		if (e->type == VSP1_ENTITY_RPF) {
			rwpf = to_rwpf(subdev);
			pipe->inputs[pipe->num_inputs++] = rwpf;
			rwpf->video.pipe_index = pipe->num_inputs;
		} else if (e->type == VSP1_ENTITY_WPF) {
			rwpf = to_rwpf(subdev);
			pipe->output = to_rwpf(subdev);
			rwpf->video.pipe_index = 0;
		} else if (e->type == VSP1_ENTITY_LIF) {
			pipe->lif = e;
		} else if (e->type == VSP1_ENTITY_BRU) {
			pipe->bru = e;
		}
	}

	mutex_unlock(&mdev->graph_mutex);

	/* We need one output and at least one input. */
	if (pipe->num_inputs == 0 || !pipe->output) {
		ret = -EPIPE;
		goto error;
	}

	/* Follow links downstream for each input and make sure the graph
	 * contains no loop and that all branches end at the output WPF.
	 */
	for (i = 0; i < pipe->num_inputs; ++i) {
		ret = vsp1_pipeline_validate_branch(pipe, pipe->inputs[i],
						    pipe->output);
		if (ret < 0)
			goto error;
	}

	return 0;

error:
	__vsp1_pipeline_cleanup(pipe);
	return ret;
}

static int vsp1_pipeline_init(struct vsp1_pipeline *pipe,
			      struct vsp1_video *video)
{
	int ret;

	mutex_lock(&pipe->lock);

	/* If we're the first user validate and initialize the pipeline. */
	if (pipe->use_count == 0) {
		ret = vsp1_pipeline_validate(pipe, video);
		if (ret < 0)
			goto done;
	}

	pipe->use_count++;
	ret = 0;

done:
	mutex_unlock(&pipe->lock);
	return ret;
}

static void vsp1_pipeline_cleanup(struct vsp1_pipeline *pipe)
{
	mutex_lock(&pipe->lock);

	/* If we're the last user clean up the pipeline. */
	if (--pipe->use_count == 0)
		__vsp1_pipeline_cleanup(pipe);

	mutex_unlock(&pipe->lock);
}

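/*
 * vsp1_pipeline_run - Start processing of one frame
 *
 * Kick the hardware by writing the start command to the output WPF command
 * register. The pipeline is restarted from the frame completion interrupt
 * handler (vsp1_pipeline_frame_end()) once new buffers are ready.
 */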
static void vsp1_pipeline_run(struct vsp1_pipeline *pipe)
{
	struct vsp1_device *vsp1 = pipe->output->entity.vsp1;

	vsp1_write(vsp1, VI6_CMD(pipe->output->entity.index), VI6_CMD_STRCMD);
	pipe->state = VSP1_PIPELINE_RUNNING;
	pipe->buffers_ready = 0;
}

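/* Used as the wait_event_timeout() condition when stopping a pipeline. */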
static bool vsp1_pipeline_stopped(struct vsp1_pipeline *pipe)
{
	unsigned long flags;
	bool stopped;

	spin_lock_irqsave(&pipe->irqlock, flags);
	stopped = pipe->state == VSP1_PIPELINE_STOPPED;
	spin_unlock_irqrestore(&pipe->irqlock, flags);

	return stopped;
}

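/*
 * vsp1_pipeline_stop - Stop a running pipeline
 *
 * Request a stop, wait up to 500ms for the frame-end interrupt to report the
 * stopped state, then disconnect the entities from the DPR routing and stop
 * their subdev streams. Return -ETIMEDOUT if the hardware didn't stop in time.
 */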
static int vsp1_pipeline_stop(struct vsp1_pipeline *pipe)
{
	struct vsp1_entity *entity;
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&pipe->irqlock, flags);
	if (pipe->state == VSP1_PIPELINE_RUNNING)
		pipe->state = VSP1_PIPELINE_STOPPING;
	spin_unlock_irqrestore(&pipe->irqlock, flags);

	ret = wait_event_timeout(pipe->wq, vsp1_pipeline_stopped(pipe),
				 msecs_to_jiffies(500));
	ret = ret == 0 ? -ETIMEDOUT : 0;

	list_for_each_entry(entity, &pipe->entities, list_pipe) {
		if (entity->route && entity->route->reg)
			vsp1_write(entity->vsp1, entity->route->reg,
				   VI6_DPR_NODE_UNUSED);

		v4l2_subdev_call(&entity->subdev, video, s_stream, 0);
	}

	return ret;
}

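/*
 * vsp1_pipeline_ready - Check whether all video nodes of the pipeline have
 * buffers queued
 *
 * Buffer availability is tracked in pipe->buffers_ready with one bit per
 * video node: bit 0 for the output WPF node and bits 1 to num_inputs for the
 * input RPF nodes. In DU output mode (LIF present) the WPF doesn't write to
 * memory, so the output bit isn't required.
 */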
static bool vsp1_pipeline_ready(struct vsp1_pipeline *pipe)
{
	unsigned int mask;

	mask = ((1 << pipe->num_inputs) - 1) << 1;
	if (!pipe->lif)
		mask |= 1 << 0;

	return pipe->buffers_ready == mask;
}

/*
 * vsp1_video_complete_buffer - Complete the current buffer
 * @video: the video node
 *
 * This function completes the current buffer by filling its sequence number,
 * time stamp and payload size, and hands it back to the videobuf core.
 *
 * When operating in DU output mode (deep pipeline to the DU through the LIF),
 * the VSP1 needs to constantly supply frames to the display. In that case, if
 * no other buffer is queued, reuse the one that has just been processed instead
 * of handing it back to the videobuf core.
 *
 * Return the next queued buffer or NULL if the queue is empty.
 */
static struct vsp1_video_buffer *
vsp1_video_complete_buffer(struct vsp1_video *video)
{
	struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
	struct vsp1_video_buffer *next = NULL;
	struct vsp1_video_buffer *done;
	unsigned long flags;
	unsigned int i;

	spin_lock_irqsave(&video->irqlock, flags);

	if (list_empty(&video->irqqueue)) {
		spin_unlock_irqrestore(&video->irqlock, flags);
		return NULL;
	}

	done = list_first_entry(&video->irqqueue,
				struct vsp1_video_buffer, queue);

	/* In DU output mode reuse the buffer if the list is singular. */
	if (pipe->lif && list_is_singular(&video->irqqueue)) {
		spin_unlock_irqrestore(&video->irqlock, flags);
		return done;
	}

	list_del(&done->queue);

	if (!list_empty(&video->irqqueue))
		next = list_first_entry(&video->irqqueue,
					struct vsp1_video_buffer, queue);

	spin_unlock_irqrestore(&video->irqlock, flags);

	done->buf.sequence = video->sequence++;
	v4l2_get_timestamp(&done->buf.timestamp);
	for (i = 0; i < done->buf.vb2_buf.num_planes; ++i)
		vb2_set_plane_payload(&done->buf.vb2_buf, i, done->length[i]);
	vb2_buffer_done(&done->buf.vb2_buf, VB2_BUF_STATE_DONE);

	return next;
}

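/*
 * vsp1_video_frame_end - Complete the current buffer on one video node and
 * queue the next one to the hardware, marking the node as ready in the
 * pipeline buffers_ready mask.
 */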
static void vsp1_video_frame_end(struct vsp1_pipeline *pipe,
				 struct vsp1_video *video)
{
	struct vsp1_video_buffer *buf;
	unsigned long flags;

	buf = vsp1_video_complete_buffer(video);
	if (buf == NULL)
		return;

	spin_lock_irqsave(&pipe->irqlock, flags);

	video->ops->queue(video, buf);
	pipe->buffers_ready |= 1 << video->pipe_index;

	spin_unlock_irqrestore(&pipe->irqlock, flags);
}

void vsp1_pipeline_frame_end(struct vsp1_pipeline *pipe)
{
	enum vsp1_pipeline_state state;
	unsigned long flags;
	unsigned int i;

	if (pipe == NULL)
		return;

	/* Complete buffers on all video nodes. */
	for (i = 0; i < pipe->num_inputs; ++i)
		vsp1_video_frame_end(pipe, &pipe->inputs[i]->video);

	if (!pipe->lif)
		vsp1_video_frame_end(pipe, &pipe->output->video);

	spin_lock_irqsave(&pipe->irqlock, flags);

	state = pipe->state;
	pipe->state = VSP1_PIPELINE_STOPPED;

	/* If a stop has been requested, mark the pipeline as stopped and
	 * wake up the waiters.
	 */
	if (state == VSP1_PIPELINE_STOPPING) {
		wake_up(&pipe->wq);
		spin_unlock_irqrestore(&pipe->irqlock, flags);
		return;
	}

	/* Restart the pipeline if ready. */
	if (vsp1_pipeline_ready(pipe))
		vsp1_pipeline_run(pipe);

	spin_unlock_irqrestore(&pipe->irqlock, flags);
}

/*
 * Propagate the alpha value through the pipeline.
 *
 * As the UDS has restricted scaling capabilities when the alpha component needs
 * to be scaled, we disable alpha scaling when the UDS input has a fixed alpha
 * value. The UDS then outputs a fixed alpha value which needs to be programmed
 * from the input RPF alpha.
 */
void vsp1_pipeline_propagate_alpha(struct vsp1_pipeline *pipe,
				   struct vsp1_entity *input,
				   unsigned int alpha)
{
	struct vsp1_entity *entity;
	struct media_pad *pad;

	pad = media_entity_remote_pad(&input->pads[RWPF_PAD_SOURCE]);

	while (pad) {
		if (media_entity_type(pad->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
			break;

		entity = to_vsp1_entity(media_entity_to_v4l2_subdev(pad->entity));

		/* The BRU background color has a fixed alpha value set to 255,
		 * the output alpha value is thus always equal to 255.
		 */
		if (entity->type == VSP1_ENTITY_BRU)
			alpha = 255;

		if (entity->type == VSP1_ENTITY_UDS) {
			struct vsp1_uds *uds = to_uds(&entity->subdev);

			vsp1_uds_set_alpha(uds, alpha);
			break;
		}

		pad = &entity->pads[entity->source_pad];
		pad = media_entity_remote_pad(pad);
	}
}

void vsp1_pipelines_suspend(struct vsp1_device *vsp1)
{
	unsigned long flags;
	unsigned int i;
	int ret;

	/* To avoid increasing the system suspend time needlessly, loop over the
	 * pipelines twice, first to set them all to the stopping state, and then
	 * to wait for the stop to complete.
	 */
	for (i = 0; i < vsp1->pdata.wpf_count; ++i) {
		struct vsp1_rwpf *wpf = vsp1->wpf[i];
		struct vsp1_pipeline *pipe;

		if (wpf == NULL)
			continue;

		pipe = to_vsp1_pipeline(&wpf->entity.subdev.entity);
		if (pipe == NULL)
			continue;

		spin_lock_irqsave(&pipe->irqlock, flags);
		if (pipe->state == VSP1_PIPELINE_RUNNING)
			pipe->state = VSP1_PIPELINE_STOPPING;
		spin_unlock_irqrestore(&pipe->irqlock, flags);
	}

	for (i = 0; i < vsp1->pdata.wpf_count; ++i) {
		struct vsp1_rwpf *wpf = vsp1->wpf[i];
		struct vsp1_pipeline *pipe;

		if (wpf == NULL)
			continue;

		pipe = to_vsp1_pipeline(&wpf->entity.subdev.entity);
		if (pipe == NULL)
			continue;

		ret = wait_event_timeout(pipe->wq, vsp1_pipeline_stopped(pipe),
					 msecs_to_jiffies(500));
		if (ret == 0)
			dev_warn(vsp1->dev, "pipeline %u stop timeout\n",
				 wpf->entity.index);
	}
}

void vsp1_pipelines_resume(struct vsp1_device *vsp1)
{
	unsigned int i;

	/* Resume all running pipelines. */
	for (i = 0; i < vsp1->pdata.wpf_count; ++i) {
		struct vsp1_rwpf *wpf = vsp1->wpf[i];
		struct vsp1_pipeline *pipe;

		if (wpf == NULL)
			continue;

		pipe = to_vsp1_pipeline(&wpf->entity.subdev.entity);
		if (pipe == NULL)
			continue;

		if (vsp1_pipeline_ready(pipe))
			vsp1_pipeline_run(pipe);
	}
}

/* -----------------------------------------------------------------------------
 * videobuf2 Queue Operations
 */

static int
vsp1_video_queue_setup(struct vb2_queue *vq, const void *parg,
		       unsigned int *nbuffers, unsigned int *nplanes,
		       unsigned int sizes[], void *alloc_ctxs[])
{
	const struct v4l2_format *fmt = parg;
	struct vsp1_video *video = vb2_get_drv_priv(vq);
	const struct v4l2_pix_format_mplane *format;
	struct v4l2_pix_format_mplane pix_mp;
	unsigned int i;

	if (fmt) {
		/* Make sure the format is valid and adjust the sizeimage field
		 * if needed.
		 */
		if (!vsp1_video_format_adjust(video, &fmt->fmt.pix_mp, &pix_mp))
			return -EINVAL;

		format = &pix_mp;
	} else {
		format = &video->format;
	}

	*nplanes = format->num_planes;

	for (i = 0; i < format->num_planes; ++i) {
		sizes[i] = format->plane_fmt[i].sizeimage;
		alloc_ctxs[i] = video->alloc_ctx;
	}

	return 0;
}

*vb
)
824 struct vb2_v4l2_buffer
*vbuf
= to_vb2_v4l2_buffer(vb
);
825 struct vsp1_video
*video
= vb2_get_drv_priv(vb
->vb2_queue
);
826 struct vsp1_video_buffer
*buf
= to_vsp1_video_buffer(vbuf
);
827 const struct v4l2_pix_format_mplane
*format
= &video
->format
;
830 if (vb
->num_planes
< format
->num_planes
)
833 for (i
= 0; i
< vb
->num_planes
; ++i
) {
834 buf
->addr
[i
] = vb2_dma_contig_plane_dma_addr(vb
, i
);
835 buf
->length
[i
] = vb2_plane_size(vb
, i
);
837 if (buf
->length
[i
] < format
->plane_fmt
[i
].sizeimage
)
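/*
 * Add the buffer to the IRQ queue. If the queue was empty the hardware has no
 * buffer to process for this video node, so hand the new buffer directly to
 * the hardware and start the pipeline if all video nodes are now ready.
 */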
static void vsp1_video_buffer_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct vsp1_video *video = vb2_get_drv_priv(vb->vb2_queue);
	struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
	struct vsp1_video_buffer *buf = to_vsp1_video_buffer(vbuf);
	unsigned long flags;
	bool empty;

	spin_lock_irqsave(&video->irqlock, flags);
	empty = list_empty(&video->irqqueue);
	list_add_tail(&buf->queue, &video->irqqueue);
	spin_unlock_irqrestore(&video->irqlock, flags);

	if (!empty)
		return;

	spin_lock_irqsave(&pipe->irqlock, flags);

	video->ops->queue(video, buf);
	pipe->buffers_ready |= 1 << video->pipe_index;

	if (vb2_is_streaming(&video->queue) &&
	    vsp1_pipeline_ready(pipe))
		vsp1_pipeline_run(pipe);

	spin_unlock_irqrestore(&pipe->irqlock, flags);
}

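/*
 * Connect the source entity to its sink in the DPR by writing the sink's
 * input node index to the source's routing register.
 */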
static void vsp1_entity_route_setup(struct vsp1_entity *source)
{
	struct vsp1_entity *sink;

	if (source->route->reg == 0)
		return;

	sink = container_of(source->sink, struct vsp1_entity, subdev.entity);
	vsp1_write(source->vsp1, source->route->reg,
		   sink->route->inputs[source->sink_pad]);
}

static int vsp1_video_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct vsp1_video *video = vb2_get_drv_priv(vq);
	struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
	struct vsp1_entity *entity;
	unsigned long flags;
	int ret;

	mutex_lock(&pipe->lock);
	if (pipe->stream_count == pipe->num_video - 1) {
		if (pipe->uds) {
			struct vsp1_uds *uds = to_uds(&pipe->uds->subdev);

			/* If a BRU is present in the pipeline before the UDS,
			 * the alpha component doesn't need to be scaled as the
			 * BRU output alpha value is fixed to 255. Otherwise we
			 * need to scale the alpha component only when available
			 * at the input RPF.
			 */
			if (pipe->uds_input->type == VSP1_ENTITY_BRU) {
				uds->scale_alpha = false;
			} else {
				struct vsp1_rwpf *rpf =
					to_rwpf(&pipe->uds_input->subdev);

				uds->scale_alpha = rpf->video.fmtinfo->alpha;
			}
		}

		list_for_each_entry(entity, &pipe->entities, list_pipe) {
			vsp1_entity_route_setup(entity);

			ret = v4l2_subdev_call(&entity->subdev, video,
					       s_stream, 1);
			if (ret < 0) {
				mutex_unlock(&pipe->lock);
				return ret;
			}
		}
	}

	pipe->stream_count++;
	mutex_unlock(&pipe->lock);

	spin_lock_irqsave(&pipe->irqlock, flags);
	if (vsp1_pipeline_ready(pipe))
		vsp1_pipeline_run(pipe);
	spin_unlock_irqrestore(&pipe->irqlock, flags);

	return 0;
}

static void vsp1_video_stop_streaming(struct vb2_queue *vq)
{
	struct vsp1_video *video = vb2_get_drv_priv(vq);
	struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
	struct vsp1_video_buffer *buffer;
	unsigned long flags;
	int ret;

	mutex_lock(&pipe->lock);
	if (--pipe->stream_count == 0) {
		/* Stop the pipeline. */
		ret = vsp1_pipeline_stop(pipe);
		if (ret == -ETIMEDOUT)
			dev_err(video->vsp1->dev, "pipeline stop timeout\n");
	}
	mutex_unlock(&pipe->lock);

	vsp1_pipeline_cleanup(pipe);
	media_entity_pipeline_stop(&video->video.entity);

	/* Remove all buffers from the IRQ queue. */
	spin_lock_irqsave(&video->irqlock, flags);
	list_for_each_entry(buffer, &video->irqqueue, queue)
		vb2_buffer_done(&buffer->buf.vb2_buf, VB2_BUF_STATE_ERROR);
	INIT_LIST_HEAD(&video->irqqueue);
	spin_unlock_irqrestore(&video->irqlock, flags);
}

static struct vb2_ops vsp1_video_queue_qops = {
	.queue_setup = vsp1_video_queue_setup,
	.buf_prepare = vsp1_video_buffer_prepare,
	.buf_queue = vsp1_video_buffer_queue,
	.wait_prepare = vb2_ops_wait_prepare,
	.wait_finish = vb2_ops_wait_finish,
	.start_streaming = vsp1_video_start_streaming,
	.stop_streaming = vsp1_video_stop_streaming,
};

/* -----------------------------------------------------------------------------
 * V4L2 ioctls
 */

static int
vsp1_video_querycap(struct file *file, void *fh, struct v4l2_capability *cap)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);

	cap->capabilities = V4L2_CAP_DEVICE_CAPS | V4L2_CAP_STREAMING
			  | V4L2_CAP_VIDEO_CAPTURE_MPLANE
			  | V4L2_CAP_VIDEO_OUTPUT_MPLANE;

	if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		cap->device_caps = V4L2_CAP_VIDEO_CAPTURE_MPLANE
				 | V4L2_CAP_STREAMING;
	else
		cap->device_caps = V4L2_CAP_VIDEO_OUTPUT_MPLANE
				 | V4L2_CAP_STREAMING;

	strlcpy(cap->driver, "vsp1", sizeof(cap->driver));
	strlcpy(cap->card, video->video.name, sizeof(cap->card));
	snprintf(cap->bus_info, sizeof(cap->bus_info), "platform:%s",
		 dev_name(video->vsp1->dev));

	return 0;
}

static int
vsp1_video_get_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);

	if (format->type != video->queue.type)
		return -EINVAL;

	mutex_lock(&video->lock);
	format->fmt.pix_mp = video->format;
	mutex_unlock(&video->lock);

	return 0;
}

static int
vsp1_video_try_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);

	if (format->type != video->queue.type)
		return -EINVAL;

	return __vsp1_video_try_format(video, &format->fmt.pix_mp, NULL);
}

static int
vsp1_video_set_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);
	const struct vsp1_format_info *info;
	int ret;

	if (format->type != video->queue.type)
		return -EINVAL;

	ret = __vsp1_video_try_format(video, &format->fmt.pix_mp, &info);
	if (ret < 0)
		return ret;

	mutex_lock(&video->lock);

	if (vb2_is_busy(&video->queue)) {
		ret = -EBUSY;
		goto done;
	}

	video->format = format->fmt.pix_mp;
	video->fmtinfo = info;

done:
	mutex_unlock(&video->lock);
	return ret;
}

static int
vsp1_video_streamon(struct file *file, void *fh, enum v4l2_buf_type type)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);
	struct vsp1_pipeline *pipe;
	int ret;

	if (video->queue.owner && video->queue.owner != file->private_data)
		return -EBUSY;

	video->sequence = 0;

	/* Start streaming on the pipeline. No link touching an entity in the
	 * pipeline can be activated or deactivated once streaming is started.
	 *
	 * Use the VSP1 pipeline object embedded in the first video object that
	 * starts streaming.
	 */
	pipe = video->video.entity.pipe
	     ? to_vsp1_pipeline(&video->video.entity) : &video->pipe;

	ret = media_entity_pipeline_start(&video->video.entity, &pipe->pipe);
	if (ret < 0)
		return ret;

	/* Verify that the configured format matches the output of the connected
	 * subdev.
	 */
	ret = vsp1_video_verify_format(video);
	if (ret < 0)
		goto err_stop;

	ret = vsp1_pipeline_init(pipe, video);
	if (ret < 0)
		goto err_stop;

	/* Start the queue. */
	ret = vb2_streamon(&video->queue, type);
	if (ret < 0)
		goto err_cleanup;

	return 0;

err_cleanup:
	vsp1_pipeline_cleanup(pipe);
err_stop:
	media_entity_pipeline_stop(&video->video.entity);
	return ret;
}

static const struct v4l2_ioctl_ops vsp1_video_ioctl_ops = {
	.vidioc_querycap = vsp1_video_querycap,
	.vidioc_g_fmt_vid_cap_mplane = vsp1_video_get_format,
	.vidioc_s_fmt_vid_cap_mplane = vsp1_video_set_format,
	.vidioc_try_fmt_vid_cap_mplane = vsp1_video_try_format,
	.vidioc_g_fmt_vid_out_mplane = vsp1_video_get_format,
	.vidioc_s_fmt_vid_out_mplane = vsp1_video_set_format,
	.vidioc_try_fmt_vid_out_mplane = vsp1_video_try_format,
	.vidioc_reqbufs = vb2_ioctl_reqbufs,
	.vidioc_querybuf = vb2_ioctl_querybuf,
	.vidioc_qbuf = vb2_ioctl_qbuf,
	.vidioc_dqbuf = vb2_ioctl_dqbuf,
	.vidioc_create_bufs = vb2_ioctl_create_bufs,
	.vidioc_prepare_buf = vb2_ioctl_prepare_buf,
	.vidioc_streamon = vsp1_video_streamon,
	.vidioc_streamoff = vb2_ioctl_streamoff,
};

/* -----------------------------------------------------------------------------
 * V4L2 File Operations
 */

static int vsp1_video_open(struct file *file)
{
	struct vsp1_video *video = video_drvdata(file);
	struct v4l2_fh *vfh;
	int ret = 0;

	vfh = kzalloc(sizeof(*vfh), GFP_KERNEL);
	if (vfh == NULL)
		return -ENOMEM;

	v4l2_fh_init(vfh, &video->video);
	v4l2_fh_add(vfh);

	file->private_data = vfh;

	ret = vsp1_device_get(video->vsp1);
	if (ret < 0) {
		v4l2_fh_del(vfh);
		kfree(vfh);
	}

	return ret;
}

static int vsp1_video_release(struct file *file)
{
	struct vsp1_video *video = video_drvdata(file);
	struct v4l2_fh *vfh = file->private_data;

	mutex_lock(&video->lock);
	if (video->queue.owner == vfh) {
		vb2_queue_release(&video->queue);
		video->queue.owner = NULL;
	}
	mutex_unlock(&video->lock);

	vsp1_device_put(video->vsp1);

	v4l2_fh_release(file);

	file->private_data = NULL;

	return 0;
}

static struct v4l2_file_operations vsp1_video_fops = {
	.owner = THIS_MODULE,
	.unlocked_ioctl = video_ioctl2,
	.open = vsp1_video_open,
	.release = vsp1_video_release,
	.poll = vb2_fop_poll,
	.mmap = vb2_fop_mmap,
};

/* -----------------------------------------------------------------------------
 * Initialization and Cleanup
 */

int vsp1_video_init(struct vsp1_video *video, struct vsp1_entity *rwpf)
{
	const char *direction;
	int ret;

	switch (video->type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
		direction = "output";
		video->pad.flags = MEDIA_PAD_FL_SINK;
		break;

	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
		direction = "input";
		video->pad.flags = MEDIA_PAD_FL_SOURCE;
		video->video.vfl_dir = VFL_DIR_TX;
		break;

	default:
		return -EINVAL;
	}

	mutex_init(&video->lock);
	spin_lock_init(&video->irqlock);
	INIT_LIST_HEAD(&video->irqqueue);

	mutex_init(&video->pipe.lock);
	spin_lock_init(&video->pipe.irqlock);
	INIT_LIST_HEAD(&video->pipe.entities);
	init_waitqueue_head(&video->pipe.wq);
	video->pipe.state = VSP1_PIPELINE_STOPPED;

	/* Initialize the media entity... */
	ret = media_entity_init(&video->video.entity, 1, &video->pad, 0);
	if (ret < 0)
		return ret;

	/* ... and the format ... */
	video->fmtinfo = vsp1_get_format_info(VSP1_VIDEO_DEF_FORMAT);
	video->format.pixelformat = video->fmtinfo->fourcc;
	video->format.colorspace = V4L2_COLORSPACE_SRGB;
	video->format.field = V4L2_FIELD_NONE;
	video->format.width = VSP1_VIDEO_DEF_WIDTH;
	video->format.height = VSP1_VIDEO_DEF_HEIGHT;
	video->format.num_planes = 1;
	video->format.plane_fmt[0].bytesperline =
		video->format.width * video->fmtinfo->bpp[0] / 8;
	video->format.plane_fmt[0].sizeimage =
		video->format.plane_fmt[0].bytesperline * video->format.height;

	/* ... and the video node... */
	video->video.v4l2_dev = &video->vsp1->v4l2_dev;
	video->video.fops = &vsp1_video_fops;
	snprintf(video->video.name, sizeof(video->video.name), "%s %s",
		 rwpf->subdev.name, direction);
	video->video.vfl_type = VFL_TYPE_GRABBER;
	video->video.release = video_device_release_empty;
	video->video.ioctl_ops = &vsp1_video_ioctl_ops;

	video_set_drvdata(&video->video, video);

	/* ... and the buffers queue... */
	video->alloc_ctx = vb2_dma_contig_init_ctx(video->vsp1->dev);
	if (IS_ERR(video->alloc_ctx)) {
		ret = PTR_ERR(video->alloc_ctx);
		goto error;
	}

	video->queue.type = video->type;
	video->queue.io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
	video->queue.lock = &video->lock;
	video->queue.drv_priv = video;
	video->queue.buf_struct_size = sizeof(struct vsp1_video_buffer);
	video->queue.ops = &vsp1_video_queue_qops;
	video->queue.mem_ops = &vb2_dma_contig_memops;
	video->queue.timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	ret = vb2_queue_init(&video->queue);
	if (ret < 0) {
		dev_err(video->vsp1->dev, "failed to initialize vb2 queue\n");
		goto error;
	}

	/* ... and register the video device. */
	video->video.queue = &video->queue;
	ret = video_register_device(&video->video, VFL_TYPE_GRABBER, -1);
	if (ret < 0) {
		dev_err(video->vsp1->dev, "failed to register video device\n");
		goto error;
	}

	return 0;

error:
	vb2_dma_contig_cleanup_ctx(video->alloc_ctx);
	vsp1_video_cleanup(video);
	return ret;
}

void vsp1_video_cleanup(struct vsp1_video *video)
{
	if (video_is_registered(&video->video))
		video_unregister_device(&video->video);

	vb2_dma_contig_cleanup_ctx(video->alloc_ctx);
	media_entity_cleanup(&video->video.entity);
}