// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (c) 2011 - 2012 Samsung Electronics Co., Ltd.
 *		http://www.samsung.com
 *
 * Samsung EXYNOS5 SoC series G-Scaler driver
 */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/bug.h>
#include <linux/interrupt.h>
#include <linux/workqueue.h>
#include <linux/device.h>
#include <linux/platform_device.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/clk.h>

#include <media/v4l2-ioctl.h>

#include "gsc-core.h"

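/*
 * Ask the currently running m2m context to stop: set GSC_CTX_STOP_REQ and
 * wait, with a timeout, for the bit to be cleared once the hardware has
 * finished the frame it is processing.
 */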
static int gsc_m2m_ctx_stop_req(struct gsc_ctx *ctx)
{
	struct gsc_ctx *curr_ctx;
	struct gsc_dev *gsc = ctx->gsc_dev;
	int ret;

	curr_ctx = v4l2_m2m_get_curr_priv(gsc->m2m.m2m_dev);
	if (!gsc_m2m_pending(gsc) || (curr_ctx != ctx))
		return 0;

	gsc_ctx_state_lock_set(GSC_CTX_STOP_REQ, ctx);
	ret = wait_event_timeout(gsc->irq_queue,
			!gsc_ctx_state_is_set(GSC_CTX_STOP_REQ, ctx),
			GSC_SHUTDOWN_TIMEOUT);

	return ret == 0 ? -ETIMEDOUT : ret;
}

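/*
 * Abort the active job. If the stop request times out, or an abort was
 * already flagged on the context, clear the pending state bits and finish
 * the job with the buffers marked as errored.
 */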
static void __gsc_m2m_job_abort(struct gsc_ctx *ctx)
{
	int ret;

	ret = gsc_m2m_ctx_stop_req(ctx);
	if ((ret == -ETIMEDOUT) || (ctx->state & GSC_CTX_ABORT)) {
		gsc_ctx_state_lock_clear(GSC_CTX_STOP_REQ | GSC_CTX_ABORT, ctx);
		gsc_m2m_job_finish(ctx, VB2_BUF_STATE_ERROR);
	}
}

static int gsc_m2m_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct gsc_ctx *ctx = q->drv_priv;
	int ret;

	ret = pm_runtime_get_sync(&ctx->gsc_dev->pdev->dev);
	return ret > 0 ? 0 : ret;
}

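/*
 * Return all source and destination buffers still queued on the m2m
 * context to userspace with VB2_BUF_STATE_ERROR.
 */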
static void __gsc_m2m_cleanup_queue(struct gsc_ctx *ctx)
{
	struct vb2_v4l2_buffer *src_vb, *dst_vb;

	while (v4l2_m2m_num_src_bufs_ready(ctx->m2m_ctx) > 0) {
		src_vb = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
		v4l2_m2m_buf_done(src_vb, VB2_BUF_STATE_ERROR);
	}

	while (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) > 0) {
		dst_vb = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);
		v4l2_m2m_buf_done(dst_vb, VB2_BUF_STATE_ERROR);
	}
}

static void gsc_m2m_stop_streaming(struct vb2_queue *q)
{
	struct gsc_ctx *ctx = q->drv_priv;

	__gsc_m2m_job_abort(ctx);

	__gsc_m2m_cleanup_queue(ctx);

	pm_runtime_put(&ctx->gsc_dev->pdev->dev);
}

void gsc_m2m_job_finish(struct gsc_ctx *ctx, int vb_state)
{
	struct vb2_v4l2_buffer *src_vb, *dst_vb;

	if (!ctx || !ctx->m2m_ctx)
		return;

	src_vb = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
	dst_vb = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);

	if (src_vb && dst_vb) {
		dst_vb->vb2_buf.timestamp = src_vb->vb2_buf.timestamp;
		dst_vb->timecode = src_vb->timecode;
		dst_vb->flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
		dst_vb->flags |= src_vb->flags
				 & V4L2_BUF_FLAG_TSTAMP_SRC_MASK;

		v4l2_m2m_buf_done(src_vb, vb_state);
		v4l2_m2m_buf_done(dst_vb, vb_state);

		v4l2_m2m_job_finish(ctx->gsc_dev->m2m.m2m_dev,
				    ctx->m2m_ctx);
	}
}

static void gsc_m2m_job_abort(void *priv)
{
	__gsc_m2m_job_abort((struct gsc_ctx *)priv);
}

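/*
 * Resolve the DMA addresses of the next source and destination buffers
 * into the context's frame descriptors and copy the source timestamp to
 * the destination buffer.
 */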
static int gsc_get_bufs(struct gsc_ctx *ctx)
{
	struct gsc_frame *s_frame, *d_frame;
	struct vb2_v4l2_buffer *src_vb, *dst_vb;
	int ret;

	s_frame = &ctx->s_frame;
	d_frame = &ctx->d_frame;

	src_vb = v4l2_m2m_next_src_buf(ctx->m2m_ctx);
	ret = gsc_prepare_addr(ctx, &src_vb->vb2_buf, s_frame, &s_frame->addr);
	if (ret)
		return ret;

	dst_vb = v4l2_m2m_next_dst_buf(ctx->m2m_ctx);
	ret = gsc_prepare_addr(ctx, &dst_vb->vb2_buf, d_frame, &d_frame->addr);
	if (ret)
		return ret;

	dst_vb->vb2_buf.timestamp = src_vb->vb2_buf.timestamp;

	return 0;
}

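/*
 * m2m device_run callback, invoked by the v4l2-m2m framework once both a
 * source and a destination buffer are ready. It programs the buffer
 * addresses and, when the context parameters changed, the full scaler
 * configuration, then kicks the hardware. It runs with gsc->slock held to
 * serialize against the interrupt path, which touches the same state.
 */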
static void gsc_m2m_device_run(void *priv)
{
	struct gsc_ctx *ctx = priv;
	struct gsc_dev *gsc;
	unsigned long flags;
	int ret;
	bool is_set = false;

	if (WARN(!ctx, "null hardware context\n"))
		return;

	gsc = ctx->gsc_dev;
	spin_lock_irqsave(&gsc->slock, flags);

	set_bit(ST_M2M_PEND, &gsc->state);

	/* Reconfigure hardware if the context has changed. */
	if (gsc->m2m.ctx != ctx) {
		pr_debug("gsc->m2m.ctx = 0x%p, current_ctx = 0x%p",
			 gsc->m2m.ctx, ctx);
		ctx->state |= GSC_PARAMS;
		gsc->m2m.ctx = ctx;
	}

	is_set = ctx->state & GSC_CTX_STOP_REQ;
	if (is_set) {
		ctx->state &= ~GSC_CTX_STOP_REQ;
		ctx->state |= GSC_CTX_ABORT;
		wake_up(&gsc->irq_queue);
		goto put_device;
	}

	ret = gsc_get_bufs(ctx);
	if (ret) {
		pr_err("Wrong address");
		goto put_device;
	}

	gsc_set_prefbuf(gsc, &ctx->s_frame);
	gsc_hw_set_input_addr(gsc, &ctx->s_frame.addr, GSC_M2M_BUF_NUM);
	gsc_hw_set_output_addr(gsc, &ctx->d_frame.addr, GSC_M2M_BUF_NUM);

	if (ctx->state & GSC_PARAMS) {
		gsc_hw_set_input_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_output_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_frm_done_irq_mask(gsc, false);
		gsc_hw_set_gsc_irq_enable(gsc, true);

		if (gsc_set_scaler_info(ctx)) {
			pr_err("Scaler setup error");
			goto put_device;
		}

		gsc_hw_set_input_path(ctx);
		gsc_hw_set_in_size(ctx);
		gsc_hw_set_in_image_format(ctx);

		gsc_hw_set_output_path(ctx);
		gsc_hw_set_out_size(ctx);
		gsc_hw_set_out_image_format(ctx);

		gsc_hw_set_prescaler(ctx);
		gsc_hw_set_mainscaler(ctx);
		gsc_hw_set_rotation(ctx);
		gsc_hw_set_global_alpha(ctx);
	}

	/* update shadow registers */
	gsc_hw_set_sfr_update(ctx);

	ctx->state &= ~GSC_PARAMS;
	gsc_hw_enable_control(gsc, true);

	spin_unlock_irqrestore(&gsc->slock, flags);
	return;

put_device:
	ctx->state &= ~GSC_PARAMS;
	spin_unlock_irqrestore(&gsc->slock, flags);
}

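/*
 * videobuf2 queue operations, shared by the OUTPUT (source) and CAPTURE
 * (destination) queues of the mem2mem context.
 */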
static int gsc_m2m_queue_setup(struct vb2_queue *vq,
			unsigned int *num_buffers, unsigned int *num_planes,
			unsigned int sizes[], struct device *alloc_devs[])
{
	struct gsc_ctx *ctx = vb2_get_drv_priv(vq);
	struct gsc_frame *frame;
	int i;

	frame = ctx_get_frame(ctx, vq->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	if (!frame->fmt)
		return -EINVAL;

	*num_planes = frame->fmt->num_planes;
	for (i = 0; i < frame->fmt->num_planes; i++)
		sizes[i] = frame->payload[i];
	return 0;
}

static int gsc_m2m_buf_prepare(struct vb2_buffer *vb)
{
	struct gsc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct gsc_frame *frame;
	int i;

	frame = ctx_get_frame(ctx, vb->vb2_queue->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	if (V4L2_TYPE_IS_CAPTURE(vb->vb2_queue->type)) {
		for (i = 0; i < frame->fmt->num_planes; i++)
			vb2_set_plane_payload(vb, i, frame->payload[i]);
	}

	return 0;
}

static void gsc_m2m_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct gsc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);

	pr_debug("ctx: %p, ctx->state: 0x%x", ctx, ctx->state);

	if (ctx->m2m_ctx)
		v4l2_m2m_buf_queue(ctx->m2m_ctx, vbuf);
}

static const struct vb2_ops gsc_m2m_qops = {
	.queue_setup	 = gsc_m2m_queue_setup,
	.buf_prepare	 = gsc_m2m_buf_prepare,
	.buf_queue	 = gsc_m2m_buf_queue,
	.wait_prepare	 = vb2_ops_wait_prepare,
	.wait_finish	 = vb2_ops_wait_finish,
	.stop_streaming	 = gsc_m2m_stop_streaming,
	.start_streaming = gsc_m2m_start_streaming,
};

static int gsc_m2m_querycap(struct file *file, void *fh,
			    struct v4l2_capability *cap)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct gsc_dev *gsc = ctx->gsc_dev;

	strscpy(cap->driver, GSC_MODULE_NAME, sizeof(cap->driver));
	strscpy(cap->card, GSC_MODULE_NAME " gscaler", sizeof(cap->card));
	snprintf(cap->bus_info, sizeof(cap->bus_info), "platform:%s",
		 dev_name(&gsc->pdev->dev));
	return 0;
}

static int gsc_m2m_enum_fmt(struct file *file, void *priv,
			    struct v4l2_fmtdesc *f)
{
	return gsc_enum_fmt(f);
}

static int gsc_m2m_g_fmt_mplane(struct file *file, void *fh,
				struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return gsc_g_fmt_mplane(ctx, f);
}

static int gsc_m2m_try_fmt_mplane(struct file *file, void *fh,
				  struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return gsc_try_fmt_mplane(ctx, f);
}

static int gsc_m2m_s_fmt_mplane(struct file *file, void *fh,
				struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct vb2_queue *vq;
	struct gsc_frame *frame;
	struct v4l2_pix_format_mplane *pix;
	int i, ret = 0;

	ret = gsc_m2m_try_fmt_mplane(file, fh, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);

	if (vb2_is_streaming(vq)) {
		pr_err("queue (%d) busy", f->type);
		return -EBUSY;
	}

	if (V4L2_TYPE_IS_OUTPUT(f->type))
		frame = &ctx->s_frame;
	else
		frame = &ctx->d_frame;

	pix = &f->fmt.pix_mp;
	frame->fmt = find_fmt(&pix->pixelformat, NULL, 0);
	frame->colorspace = pix->colorspace;
	if (!frame->fmt)
		return -EINVAL;

	for (i = 0; i < frame->fmt->num_planes; i++)
		frame->payload[i] = pix->plane_fmt[i].sizeimage;

	gsc_set_frame_size(frame, pix->width, pix->height);

	if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		gsc_ctx_state_lock_set(GSC_PARAMS | GSC_DST_FMT, ctx);
	else
		gsc_ctx_state_lock_set(GSC_PARAMS | GSC_SRC_FMT, ctx);

	pr_debug("f_w: %d, f_h: %d", frame->f_width, frame->f_height);

	return 0;
}

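/*
 * REQBUFS: cap the requested buffer count at the per-variant limit for the
 * direction (in_buf_cnt for the OUTPUT queue, out_buf_cnt for CAPTURE)
 * before handing the request to the m2m core.
 */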
static int gsc_m2m_reqbufs(struct file *file, void *fh,
			   struct v4l2_requestbuffers *reqbufs)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct gsc_dev *gsc = ctx->gsc_dev;
	u32 max_cnt;

	max_cnt = (reqbufs->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) ?
		gsc->variant->in_buf_cnt : gsc->variant->out_buf_cnt;
	if (reqbufs->count > max_cnt)
		return -EINVAL;

	return v4l2_m2m_reqbufs(file, ctx->m2m_ctx, reqbufs);
}

static int gsc_m2m_expbuf(struct file *file, void *fh,
			  struct v4l2_exportbuffer *eb)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return v4l2_m2m_expbuf(file, ctx->m2m_ctx, eb);
}

static int gsc_m2m_querybuf(struct file *file, void *fh,
			    struct v4l2_buffer *buf)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return v4l2_m2m_querybuf(file, ctx->m2m_ctx, buf);
}

static int gsc_m2m_qbuf(struct file *file, void *fh,
			struct v4l2_buffer *buf)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return v4l2_m2m_qbuf(file, ctx->m2m_ctx, buf);
}

static int gsc_m2m_dqbuf(struct file *file, void *fh,
			 struct v4l2_buffer *buf)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return v4l2_m2m_dqbuf(file, ctx->m2m_ctx, buf);
}

static int gsc_m2m_streamon(struct file *file, void *fh,
			    enum v4l2_buf_type type)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	/* The source and target color format need to be set */
	if (V4L2_TYPE_IS_OUTPUT(type)) {
		if (!gsc_ctx_state_is_set(GSC_SRC_FMT, ctx))
			return -EINVAL;
	} else if (!gsc_ctx_state_is_set(GSC_DST_FMT, ctx)) {
		return -EINVAL;
	}

	return v4l2_m2m_streamon(file, ctx->m2m_ctx, type);
}

static int gsc_m2m_streamoff(struct file *file, void *fh,
			     enum v4l2_buf_type type)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return v4l2_m2m_streamoff(file, ctx->m2m_ctx, type);
}

/* Return 1 if rectangle a is enclosed in rectangle b, or 0 otherwise. */
static int is_rectangle_enclosed(struct v4l2_rect *a, struct v4l2_rect *b)
{
	if (a->left < b->left || a->top < b->top)
		return 0;

	if (a->left + a->width > b->left + b->width)
		return 0;

	if (a->top + a->height > b->top + b->height)
		return 0;

	return 1;
}

static int gsc_m2m_g_selection(struct file *file, void *fh,
			       struct v4l2_selection *s)
{
	struct gsc_frame *frame;
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	if ((s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) &&
	    (s->type != V4L2_BUF_TYPE_VIDEO_OUTPUT))
		return -EINVAL;

	frame = ctx_get_frame(ctx, s->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	switch (s->target) {
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
	case V4L2_SEL_TGT_CROP_BOUNDS:
	case V4L2_SEL_TGT_CROP_DEFAULT:
		s->r.left = 0;
		s->r.top = 0;
		s->r.width = frame->f_width;
		s->r.height = frame->f_height;
		return 0;

	case V4L2_SEL_TGT_COMPOSE:
	case V4L2_SEL_TGT_CROP:
		s->r.left = frame->crop.left;
		s->r.top = frame->crop.top;
		s->r.width = frame->crop.width;
		s->r.height = frame->crop.height;
		return 0;
	}

	return -EINVAL;
}

static int gsc_m2m_s_selection(struct file *file, void *fh,
			       struct v4l2_selection *s)
{
	struct gsc_frame *frame;
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct gsc_variant *variant = ctx->gsc_dev->variant;
	struct v4l2_selection sel = *s;
	int ret;

	if ((s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) &&
	    (s->type != V4L2_BUF_TYPE_VIDEO_OUTPUT))
		return -EINVAL;

	ret = gsc_try_selection(ctx, &sel);
	if (ret)
		return ret;

	if (s->flags & V4L2_SEL_FLAG_LE &&
	    !is_rectangle_enclosed(&sel.r, &s->r))
		return -ERANGE;

	if (s->flags & V4L2_SEL_FLAG_GE &&
	    !is_rectangle_enclosed(&s->r, &sel.r))
		return -ERANGE;

	s->r = sel.r;

	switch (s->target) {
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
	case V4L2_SEL_TGT_COMPOSE:
		frame = &ctx->s_frame;
		break;

	case V4L2_SEL_TGT_CROP_BOUNDS:
	case V4L2_SEL_TGT_CROP:
	case V4L2_SEL_TGT_CROP_DEFAULT:
		frame = &ctx->d_frame;
		break;

	default:
		return -EINVAL;
	}

	/* Check to see if scaling ratio is within supported range */
	if (gsc_ctx_state_is_set(GSC_DST_FMT | GSC_SRC_FMT, ctx)) {
		if (s->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
			ret = gsc_check_scaler_ratio(variant, sel.r.width,
				sel.r.height, ctx->d_frame.crop.width,
				ctx->d_frame.crop.height,
				ctx->gsc_ctrls.rotate->val, ctx->out_path);
		} else {
			ret = gsc_check_scaler_ratio(variant,
				ctx->s_frame.crop.width,
				ctx->s_frame.crop.height, sel.r.width,
				sel.r.height, ctx->gsc_ctrls.rotate->val,
				ctx->out_path);
		}

		if (ret) {
			pr_err("Out of scaler range");
			return -EINVAL;
		}
	}

	frame->crop = sel.r;

	gsc_ctx_state_lock_set(GSC_PARAMS, ctx);
	return 0;
}

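/*
 * V4L2 ioctl dispatch table. A typical mem2mem client sets the formats on
 * the OUTPUT and CAPTURE queues, requests buffers on both, queues them and
 * starts streaming; each queued source/destination pair is then processed
 * by gsc_m2m_device_run().
 */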
static const struct v4l2_ioctl_ops gsc_m2m_ioctl_ops = {
	.vidioc_querycap		= gsc_m2m_querycap,
	.vidioc_enum_fmt_vid_cap	= gsc_m2m_enum_fmt,
	.vidioc_enum_fmt_vid_out	= gsc_m2m_enum_fmt,
	.vidioc_g_fmt_vid_cap_mplane	= gsc_m2m_g_fmt_mplane,
	.vidioc_g_fmt_vid_out_mplane	= gsc_m2m_g_fmt_mplane,
	.vidioc_try_fmt_vid_cap_mplane	= gsc_m2m_try_fmt_mplane,
	.vidioc_try_fmt_vid_out_mplane	= gsc_m2m_try_fmt_mplane,
	.vidioc_s_fmt_vid_cap_mplane	= gsc_m2m_s_fmt_mplane,
	.vidioc_s_fmt_vid_out_mplane	= gsc_m2m_s_fmt_mplane,
	.vidioc_reqbufs			= gsc_m2m_reqbufs,
	.vidioc_expbuf			= gsc_m2m_expbuf,
	.vidioc_querybuf		= gsc_m2m_querybuf,
	.vidioc_qbuf			= gsc_m2m_qbuf,
	.vidioc_dqbuf			= gsc_m2m_dqbuf,
	.vidioc_streamon		= gsc_m2m_streamon,
	.vidioc_streamoff		= gsc_m2m_streamoff,
	.vidioc_g_selection		= gsc_m2m_g_selection,
	.vidioc_s_selection		= gsc_m2m_s_selection
};

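/*
 * Per-context queue initialization callback for v4l2_m2m_ctx_init(): set up
 * the OUTPUT (source) and CAPTURE (destination) vb2 queues with DMA-contig
 * memory ops and timestamp copying.
 */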
static int queue_init(void *priv, struct vb2_queue *src_vq,
		      struct vb2_queue *dst_vq)
{
	struct gsc_ctx *ctx = priv;
	int ret;

	memset(src_vq, 0, sizeof(*src_vq));
	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
	src_vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
	src_vq->drv_priv = ctx;
	src_vq->ops = &gsc_m2m_qops;
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	src_vq->lock = &ctx->gsc_dev->lock;
	src_vq->dev = &ctx->gsc_dev->pdev->dev;

	ret = vb2_queue_init(src_vq);
	if (ret)
		return ret;

	memset(dst_vq, 0, sizeof(*dst_vq));
	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	dst_vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
	dst_vq->drv_priv = ctx;
	dst_vq->ops = &gsc_m2m_qops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	dst_vq->lock = &ctx->gsc_dev->lock;
	dst_vq->dev = &ctx->gsc_dev->pdev->dev;

	return vb2_queue_init(dst_vq);
}

static int gsc_m2m_open(struct file *file)
{
	struct gsc_dev *gsc = video_drvdata(file);
	struct gsc_ctx *ctx = NULL;
	int ret;

	pr_debug("pid: %d, state: 0x%lx", task_pid_nr(current), gsc->state);

	if (mutex_lock_interruptible(&gsc->lock))
		return -ERESTARTSYS;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx) {
		ret = -ENOMEM;
		goto unlock;
	}

	v4l2_fh_init(&ctx->fh, gsc->m2m.vfd);
	ret = gsc_ctrls_create(ctx);
	if (ret)
		goto error_fh;

	/* Use separate control handler per file handle */
	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
	file->private_data = &ctx->fh;
	v4l2_fh_add(&ctx->fh);

	ctx->gsc_dev = gsc;
	/* Default color format */
	ctx->s_frame.fmt = get_format(0);
	ctx->d_frame.fmt = get_format(0);
	/* Setup the device context for mem2mem mode. */
	ctx->state = GSC_CTX_M2M;
	ctx->in_path = GSC_DMA;
	ctx->out_path = GSC_DMA;

	ctx->m2m_ctx = v4l2_m2m_ctx_init(gsc->m2m.m2m_dev, ctx, queue_init);
	if (IS_ERR(ctx->m2m_ctx)) {
		pr_err("Failed to initialize m2m context");
		ret = PTR_ERR(ctx->m2m_ctx);
		goto error_ctrls;
	}

	if (gsc->m2m.refcnt++ == 0)
		set_bit(ST_M2M_OPEN, &gsc->state);

	pr_debug("gsc m2m driver is opened, ctx(0x%p)", ctx);

	mutex_unlock(&gsc->lock);
	return 0;

error_ctrls:
	gsc_ctrls_delete(ctx);
	v4l2_fh_del(&ctx->fh);
error_fh:
	v4l2_fh_exit(&ctx->fh);
	kfree(ctx);
unlock:
	mutex_unlock(&gsc->lock);
	return ret;
}

static int gsc_m2m_release(struct file *file)
{
	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
	struct gsc_dev *gsc = ctx->gsc_dev;

	pr_debug("pid: %d, state: 0x%lx, refcnt= %d",
		 task_pid_nr(current), gsc->state, gsc->m2m.refcnt);

	mutex_lock(&gsc->lock);

	v4l2_m2m_ctx_release(ctx->m2m_ctx);
	gsc_ctrls_delete(ctx);
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);

	if (--gsc->m2m.refcnt <= 0)
		clear_bit(ST_M2M_OPEN, &gsc->state);
	kfree(ctx);

	mutex_unlock(&gsc->lock);
	return 0;
}

static __poll_t gsc_m2m_poll(struct file *file,
			     struct poll_table_struct *wait)
{
	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
	struct gsc_dev *gsc = ctx->gsc_dev;
	__poll_t ret;

	if (mutex_lock_interruptible(&gsc->lock))
		return EPOLLERR;

	ret = v4l2_m2m_poll(file, ctx->m2m_ctx, wait);
	mutex_unlock(&gsc->lock);

	return ret;
}

static int gsc_m2m_mmap(struct file *file, struct vm_area_struct *vma)
{
	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
	struct gsc_dev *gsc = ctx->gsc_dev;
	int ret;

	if (mutex_lock_interruptible(&gsc->lock))
		return -ERESTARTSYS;

	ret = v4l2_m2m_mmap(file, ctx->m2m_ctx, vma);
	mutex_unlock(&gsc->lock);

	return ret;
}

static const struct v4l2_file_operations gsc_m2m_fops = {
	.owner		= THIS_MODULE,
	.open		= gsc_m2m_open,
	.release	= gsc_m2m_release,
	.poll		= gsc_m2m_poll,
	.unlocked_ioctl	= video_ioctl2,
	.mmap		= gsc_m2m_mmap,
};

static const struct v4l2_m2m_ops gsc_m2m_ops = {
	.device_run	= gsc_m2m_device_run,
	.job_abort	= gsc_m2m_job_abort,
};

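/*
 * Create the mem2mem video device node for one G-Scaler instance and hook
 * it up to the v4l2-m2m framework.
 */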
int gsc_register_m2m_device(struct gsc_dev *gsc)
{
	struct platform_device *pdev;
	int ret;

	if (!gsc)
		return -ENODEV;

	pdev = gsc->pdev;

	gsc->vdev.fops		= &gsc_m2m_fops;
	gsc->vdev.ioctl_ops	= &gsc_m2m_ioctl_ops;
	gsc->vdev.release	= video_device_release_empty;
	gsc->vdev.lock		= &gsc->lock;
	gsc->vdev.vfl_dir	= VFL_DIR_M2M;
	gsc->vdev.v4l2_dev	= &gsc->v4l2_dev;
	gsc->vdev.device_caps	= V4L2_CAP_STREAMING |
				  V4L2_CAP_VIDEO_M2M_MPLANE;
	snprintf(gsc->vdev.name, sizeof(gsc->vdev.name), "%s.%d:m2m",
		 GSC_MODULE_NAME, gsc->id);

	video_set_drvdata(&gsc->vdev, gsc);

	gsc->m2m.vfd = &gsc->vdev;
	gsc->m2m.m2m_dev = v4l2_m2m_init(&gsc_m2m_ops);
	if (IS_ERR(gsc->m2m.m2m_dev)) {
		dev_err(&pdev->dev, "failed to initialize v4l2-m2m device\n");
		return PTR_ERR(gsc->m2m.m2m_dev);
	}

	ret = video_register_device(&gsc->vdev, VFL_TYPE_VIDEO, -1);
	if (ret) {
		dev_err(&pdev->dev,
			"%s(): failed to register video device\n", __func__);
		goto err_m2m_release;
	}

	pr_debug("gsc m2m driver registered as /dev/video%d", gsc->vdev.num);
	return 0;

err_m2m_release:
	v4l2_m2m_release(gsc->m2m.m2m_dev);

	return ret;
}

void gsc_unregister_m2m_device(struct gsc_dev *gsc)
{
	if (gsc) {
		v4l2_m2m_release(gsc->m2m.m2m_dev);
		video_unregister_device(&gsc->vdev);
	}
}