/*
 * Copyright (c) 2011 - 2012 Samsung Electronics Co., Ltd.
 *		http://www.samsung.com
 *
 * Samsung EXYNOS5 SoC series G-Scaler driver
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 2 of the License,
 * or (at your option) any later version.
 */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/bug.h>
#include <linux/interrupt.h>
#include <linux/workqueue.h>
#include <linux/device.h>
#include <linux/platform_device.h>
#include <linux/list.h>
#include <linux/io.h>
#include <linux/slab.h>
#include <linux/clk.h>

#include <media/v4l2-ioctl.h>

#include "gsc-core.h"

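/*
 * Ask the job currently running on @ctx to stop and wait until
 * GSC_CTX_STOP_REQ has been cleared again or GSC_SHUTDOWN_TIMEOUT expires.
 * Returns the remaining timeout on success and -ETIMEDOUT on timeout.
 */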
static int gsc_m2m_ctx_stop_req(struct gsc_ctx *ctx)
{
	struct gsc_ctx *curr_ctx;
	struct gsc_dev *gsc = ctx->gsc_dev;
	int ret;

	curr_ctx = v4l2_m2m_get_curr_priv(gsc->m2m.m2m_dev);
	if (!gsc_m2m_pending(gsc) || (curr_ctx != ctx))
		return 0;

	gsc_ctx_state_lock_set(GSC_CTX_STOP_REQ, ctx);
	ret = wait_event_timeout(gsc->irq_queue,
			!gsc_ctx_state_is_set(GSC_CTX_STOP_REQ, ctx),
			GSC_SHUTDOWN_TIMEOUT);

	return ret == 0 ? -ETIMEDOUT : ret;
}

static void __gsc_m2m_job_abort(struct gsc_ctx *ctx)
{
	int ret;

	ret = gsc_m2m_ctx_stop_req(ctx);
	if ((ret == -ETIMEDOUT) || (ctx->state & GSC_CTX_ABORT)) {
		gsc_ctx_state_lock_clear(GSC_CTX_STOP_REQ | GSC_CTX_ABORT, ctx);
		gsc_m2m_job_finish(ctx, VB2_BUF_STATE_ERROR);
	}
}

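/*
 * vb2 start_streaming handler: power up the G-Scaler through runtime PM.
 * pm_runtime_get_sync() may return a positive value when the device was
 * already active; any positive return is treated as success here.
 */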
static int gsc_m2m_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct gsc_ctx *ctx = q->drv_priv;
	int ret;

	ret = pm_runtime_get_sync(&ctx->gsc_dev->pdev->dev);
	return ret > 0 ? 0 : ret;
}

static void __gsc_m2m_cleanup_queue(struct gsc_ctx *ctx)
{
	struct vb2_v4l2_buffer *src_vb, *dst_vb;

	while (v4l2_m2m_num_src_bufs_ready(ctx->m2m_ctx) > 0) {
		src_vb = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
		v4l2_m2m_buf_done(src_vb, VB2_BUF_STATE_ERROR);
	}

	while (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) > 0) {
		dst_vb = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);
		v4l2_m2m_buf_done(dst_vb, VB2_BUF_STATE_ERROR);
	}
}

static void gsc_m2m_stop_streaming(struct vb2_queue *q)
{
	struct gsc_ctx *ctx = q->drv_priv;

	__gsc_m2m_job_abort(ctx);

	__gsc_m2m_cleanup_queue(ctx);

	pm_runtime_put(&ctx->gsc_dev->pdev->dev);
}

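/*
 * Complete the current mem2mem job: copy the timestamp, timecode and
 * timestamp-source flags from the source buffer to the destination buffer,
 * mark both buffers done with @vb_state and notify the m2m framework.
 */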
void gsc_m2m_job_finish(struct gsc_ctx *ctx, int vb_state)
{
	struct vb2_v4l2_buffer *src_vb, *dst_vb;

	if (!ctx || !ctx->m2m_ctx)
		return;

	src_vb = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
	dst_vb = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);

	if (src_vb && dst_vb) {
		dst_vb->vb2_buf.timestamp = src_vb->vb2_buf.timestamp;
		dst_vb->timecode = src_vb->timecode;
		dst_vb->flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
		dst_vb->flags |=
			src_vb->flags
			& V4L2_BUF_FLAG_TSTAMP_SRC_MASK;

		v4l2_m2m_buf_done(src_vb, vb_state);
		v4l2_m2m_buf_done(dst_vb, vb_state);

		v4l2_m2m_job_finish(ctx->gsc_dev->m2m.m2m_dev,
				    ctx->m2m_ctx);
	}
}

static void gsc_m2m_job_abort(void *priv)
{
	__gsc_m2m_job_abort((struct gsc_ctx *)priv);
}

static int gsc_get_bufs(struct gsc_ctx *ctx)
{
	struct gsc_frame *s_frame, *d_frame;
	struct vb2_v4l2_buffer *src_vb, *dst_vb;
	int ret;

	s_frame = &ctx->s_frame;
	d_frame = &ctx->d_frame;

	src_vb = v4l2_m2m_next_src_buf(ctx->m2m_ctx);
	ret = gsc_prepare_addr(ctx, &src_vb->vb2_buf, s_frame, &s_frame->addr);
	if (ret)
		return ret;

	dst_vb = v4l2_m2m_next_dst_buf(ctx->m2m_ctx);
	ret = gsc_prepare_addr(ctx, &dst_vb->vb2_buf, d_frame, &d_frame->addr);
	if (ret)
		return ret;

	dst_vb->vb2_buf.timestamp = src_vb->vb2_buf.timestamp;

	return 0;
}

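/*
 * m2m device_run handler, called with one source and one destination buffer
 * ready. Under gsc->slock it programs the DMA addresses for both frames and,
 * when GSC_PARAMS is set (context switch or new format/selection), reprograms
 * the whole scaler pipeline before kicking the hardware.
 */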
static void gsc_m2m_device_run(void *priv)
{
	struct gsc_ctx *ctx = priv;
	struct gsc_dev *gsc;
	unsigned long flags;
	int ret;
	bool is_set = false;

	if (WARN(!ctx, "null hardware context\n"))
		return;

	gsc = ctx->gsc_dev;
	spin_lock_irqsave(&gsc->slock, flags);

	set_bit(ST_M2M_PEND, &gsc->state);

	/* Reconfigure hardware if the context has changed. */
	if (gsc->m2m.ctx != ctx) {
		pr_debug("gsc->m2m.ctx = 0x%p, current_ctx = 0x%p",
				gsc->m2m.ctx, ctx);
		ctx->state |= GSC_PARAMS;
		gsc->m2m.ctx = ctx;
	}

	is_set = ctx->state & GSC_CTX_STOP_REQ;
	if (is_set) {
		ctx->state &= ~GSC_CTX_STOP_REQ;
		ctx->state |= GSC_CTX_ABORT;
		wake_up(&gsc->irq_queue);
		goto put_device;
	}

	ret = gsc_get_bufs(ctx);
	if (ret) {
		pr_err("Wrong address");
		goto put_device;
	}

	gsc_set_prefbuf(gsc, &ctx->s_frame);
	gsc_hw_set_input_addr(gsc, &ctx->s_frame.addr, GSC_M2M_BUF_NUM);
	gsc_hw_set_output_addr(gsc, &ctx->d_frame.addr, GSC_M2M_BUF_NUM);

	if (ctx->state & GSC_PARAMS) {
		gsc_hw_set_input_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_output_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_frm_done_irq_mask(gsc, false);
		gsc_hw_set_gsc_irq_enable(gsc, true);

		if (gsc_set_scaler_info(ctx)) {
			pr_err("Scaler setup error");
			goto put_device;
		}

		gsc_hw_set_input_path(ctx);
		gsc_hw_set_in_size(ctx);
		gsc_hw_set_in_image_format(ctx);

		gsc_hw_set_output_path(ctx);
		gsc_hw_set_out_size(ctx);
		gsc_hw_set_out_image_format(ctx);

		gsc_hw_set_prescaler(ctx);
		gsc_hw_set_mainscaler(ctx);
		gsc_hw_set_rotation(ctx);
		gsc_hw_set_global_alpha(ctx);
	}

	/* update shadow registers */
	gsc_hw_set_sfr_update(ctx);

	ctx->state &= ~GSC_PARAMS;
	gsc_hw_enable_control(gsc, true);

	spin_unlock_irqrestore(&gsc->slock, flags);
	return;

put_device:
	ctx->state &= ~GSC_PARAMS;
	spin_unlock_irqrestore(&gsc->slock, flags);
}

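/*
 * vb2 queue_setup handler: report the number of planes and the per-plane
 * buffer sizes from the format currently set on the queue.
 */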
static int gsc_m2m_queue_setup(struct vb2_queue *vq,
			unsigned int *num_buffers, unsigned int *num_planes,
			unsigned int sizes[], struct device *alloc_devs[])
{
	struct gsc_ctx *ctx = vb2_get_drv_priv(vq);
	struct gsc_frame *frame;
	int i;

	frame = ctx_get_frame(ctx, vq->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	if (!frame->fmt)
		return -EINVAL;

	*num_planes = frame->fmt->num_planes;
	for (i = 0; i < frame->fmt->num_planes; i++)
		sizes[i] = frame->payload[i];
	return 0;
}

static int gsc_m2m_buf_prepare(struct vb2_buffer *vb)
{
	struct gsc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct gsc_frame *frame;
	int i;

	frame = ctx_get_frame(ctx, vb->vb2_queue->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	if (!V4L2_TYPE_IS_OUTPUT(vb->vb2_queue->type)) {
		for (i = 0; i < frame->fmt->num_planes; i++)
			vb2_set_plane_payload(vb, i, frame->payload[i]);
	}

	return 0;
}

static void gsc_m2m_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct gsc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);

	pr_debug("ctx: %p, ctx->state: 0x%x", ctx, ctx->state);

	if (ctx->m2m_ctx)
		v4l2_m2m_buf_queue(ctx->m2m_ctx, vbuf);
}

static const struct vb2_ops gsc_m2m_qops = {
	.queue_setup	 = gsc_m2m_queue_setup,
	.buf_prepare	 = gsc_m2m_buf_prepare,
	.buf_queue	 = gsc_m2m_buf_queue,
	.wait_prepare	 = vb2_ops_wait_prepare,
	.wait_finish	 = vb2_ops_wait_finish,
	.stop_streaming	 = gsc_m2m_stop_streaming,
	.start_streaming = gsc_m2m_start_streaming,
};

static int gsc_m2m_querycap(struct file *file, void *fh,
			   struct v4l2_capability *cap)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct gsc_dev *gsc = ctx->gsc_dev;

	strlcpy(cap->driver, GSC_MODULE_NAME, sizeof(cap->driver));
	strlcpy(cap->card, GSC_MODULE_NAME " gscaler", sizeof(cap->card));
	snprintf(cap->bus_info, sizeof(cap->bus_info), "platform:%s",
		 dev_name(&gsc->pdev->dev));
	cap->device_caps = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M_MPLANE;
	cap->capabilities = cap->device_caps | V4L2_CAP_DEVICE_CAPS;

	return 0;
}

static int gsc_m2m_enum_fmt_mplane(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	return gsc_enum_fmt_mplane(f);
}

static int gsc_m2m_g_fmt_mplane(struct file *file, void *fh,
			     struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return gsc_g_fmt_mplane(ctx, f);
}

static int gsc_m2m_try_fmt_mplane(struct file *file, void *fh,
				  struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return gsc_try_fmt_mplane(ctx, f);
}

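/*
 * S_FMT handler: validate the format with try_fmt, refuse changes while the
 * queue is streaming, then store the negotiated format and per-plane payload
 * sizes and flag the context so the hardware is reprogrammed on the next run.
 */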
static int gsc_m2m_s_fmt_mplane(struct file *file, void *fh,
				struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct vb2_queue *vq;
	struct gsc_frame *frame;
	struct v4l2_pix_format_mplane *pix;
	int i, ret = 0;

	ret = gsc_m2m_try_fmt_mplane(file, fh, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);

	if (vb2_is_streaming(vq)) {
		pr_err("queue (%d) busy", f->type);
		return -EBUSY;
	}

	if (V4L2_TYPE_IS_OUTPUT(f->type))
		frame = &ctx->s_frame;
	else
		frame = &ctx->d_frame;

	pix = &f->fmt.pix_mp;
	frame->fmt = find_fmt(&pix->pixelformat, NULL, 0);
	frame->colorspace = pix->colorspace;
	if (!frame->fmt)
		return -EINVAL;

	for (i = 0; i < frame->fmt->num_planes; i++)
		frame->payload[i] = pix->plane_fmt[i].sizeimage;

	gsc_set_frame_size(frame, pix->width, pix->height);

	if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		gsc_ctx_state_lock_set(GSC_PARAMS | GSC_DST_FMT, ctx);
	else
		gsc_ctx_state_lock_set(GSC_PARAMS | GSC_SRC_FMT, ctx);

	pr_debug("f_w: %d, f_h: %d", frame->f_width, frame->f_height);

	return 0;
}

static int gsc_m2m_reqbufs(struct file *file, void *fh,
			  struct v4l2_requestbuffers *reqbufs)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct gsc_dev *gsc = ctx->gsc_dev;
	u32 max_cnt;

	max_cnt = (reqbufs->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) ?
		gsc->variant->in_buf_cnt : gsc->variant->out_buf_cnt;
	if (reqbufs->count > max_cnt)
		return -EINVAL;

	return v4l2_m2m_reqbufs(file, ctx->m2m_ctx, reqbufs);
}

static int gsc_m2m_expbuf(struct file *file, void *fh,
				struct v4l2_exportbuffer *eb)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_expbuf(file, ctx->m2m_ctx, eb);
}

static int gsc_m2m_querybuf(struct file *file, void *fh,
					struct v4l2_buffer *buf)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_querybuf(file, ctx->m2m_ctx, buf);
}

static int gsc_m2m_qbuf(struct file *file, void *fh,
			  struct v4l2_buffer *buf)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_qbuf(file, ctx->m2m_ctx, buf);
}

static int gsc_m2m_dqbuf(struct file *file, void *fh,
			   struct v4l2_buffer *buf)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_dqbuf(file, ctx->m2m_ctx, buf);
}

static int gsc_m2m_streamon(struct file *file, void *fh,
			   enum v4l2_buf_type type)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	/* The source and target color format need to be set */
	if (V4L2_TYPE_IS_OUTPUT(type)) {
		if (!gsc_ctx_state_is_set(GSC_SRC_FMT, ctx))
			return -EINVAL;
	} else if (!gsc_ctx_state_is_set(GSC_DST_FMT, ctx)) {
		return -EINVAL;
	}

	return v4l2_m2m_streamon(file, ctx->m2m_ctx, type);
}

static int gsc_m2m_streamoff(struct file *file, void *fh,
			    enum v4l2_buf_type type)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_streamoff(file, ctx->m2m_ctx, type);
}

/* Return 1 if rectangle a is enclosed in rectangle b, or 0 otherwise. */
static int is_rectangle_enclosed(struct v4l2_rect *a, struct v4l2_rect *b)
{
	if (a->left < b->left || a->top < b->top)
		return 0;

	if (a->left + a->width > b->left + b->width)
		return 0;

	if (a->top + a->height > b->top + b->height)
		return 0;

	return 1;
}

static int gsc_m2m_g_selection(struct file *file, void *fh,
			       struct v4l2_selection *s)
{
	struct gsc_frame *frame;
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	if ((s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) &&
	    (s->type != V4L2_BUF_TYPE_VIDEO_OUTPUT))
		return -EINVAL;

	frame = ctx_get_frame(ctx, s->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	switch (s->target) {
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
	case V4L2_SEL_TGT_CROP_BOUNDS:
	case V4L2_SEL_TGT_CROP_DEFAULT:
		s->r.left = 0;
		s->r.top = 0;
		s->r.width = frame->f_width;
		s->r.height = frame->f_height;
		return 0;

	case V4L2_SEL_TGT_COMPOSE:
	case V4L2_SEL_TGT_CROP:
		s->r.left = frame->crop.left;
		s->r.top = frame->crop.top;
		s->r.width = frame->crop.width;
		s->r.height = frame->crop.height;
		return 0;
	}

	return -EINVAL;
}

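/*
 * S_SELECTION handler: let gsc_try_crop() adjust the requested rectangle,
 * honour the LE/GE constraint flags, pick the source frame for compose
 * targets and the destination frame for crop targets, and reject rectangles
 * that would exceed the supported scaling ratio.
 */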
static int gsc_m2m_s_selection(struct file *file, void *fh,
				struct v4l2_selection *s)
{
	struct gsc_frame *frame;
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct v4l2_crop cr;
	struct gsc_variant *variant = ctx->gsc_dev->variant;
	int ret;

	cr.type = s->type;
	cr.c = s->r;

	if ((s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) &&
	    (s->type != V4L2_BUF_TYPE_VIDEO_OUTPUT))
		return -EINVAL;

	ret = gsc_try_crop(ctx, &cr);
	if (ret)
		return ret;

	if (s->flags & V4L2_SEL_FLAG_LE &&
	    !is_rectangle_enclosed(&cr.c, &s->r))
		return -ERANGE;

	if (s->flags & V4L2_SEL_FLAG_GE &&
	    !is_rectangle_enclosed(&s->r, &cr.c))
		return -ERANGE;

	s->r = cr.c;

	switch (s->target) {
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
	case V4L2_SEL_TGT_COMPOSE:
		frame = &ctx->s_frame;
		break;

	case V4L2_SEL_TGT_CROP_BOUNDS:
	case V4L2_SEL_TGT_CROP:
	case V4L2_SEL_TGT_CROP_DEFAULT:
		frame = &ctx->d_frame;
		break;

	default:
		return -EINVAL;
	}

	/* Check to see if scaling ratio is within supported range */
	if (gsc_ctx_state_is_set(GSC_DST_FMT | GSC_SRC_FMT, ctx)) {
		if (s->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
			ret = gsc_check_scaler_ratio(variant, cr.c.width,
				cr.c.height, ctx->d_frame.crop.width,
				ctx->d_frame.crop.height,
				ctx->gsc_ctrls.rotate->val, ctx->out_path);
		} else {
			ret = gsc_check_scaler_ratio(variant,
				ctx->s_frame.crop.width,
				ctx->s_frame.crop.height, cr.c.width,
				cr.c.height, ctx->gsc_ctrls.rotate->val,
				ctx->out_path);
		}

		if (ret) {
			pr_err("Out of scaler range");
			return -EINVAL;
		}
	}

	frame->crop = cr.c;

	gsc_ctx_state_lock_set(GSC_PARAMS, ctx);
	return 0;
}

static const struct v4l2_ioctl_ops gsc_m2m_ioctl_ops = {
	.vidioc_querycap		= gsc_m2m_querycap,
	.vidioc_enum_fmt_vid_cap_mplane	= gsc_m2m_enum_fmt_mplane,
	.vidioc_enum_fmt_vid_out_mplane	= gsc_m2m_enum_fmt_mplane,
	.vidioc_g_fmt_vid_cap_mplane	= gsc_m2m_g_fmt_mplane,
	.vidioc_g_fmt_vid_out_mplane	= gsc_m2m_g_fmt_mplane,
	.vidioc_try_fmt_vid_cap_mplane	= gsc_m2m_try_fmt_mplane,
	.vidioc_try_fmt_vid_out_mplane	= gsc_m2m_try_fmt_mplane,
	.vidioc_s_fmt_vid_cap_mplane	= gsc_m2m_s_fmt_mplane,
	.vidioc_s_fmt_vid_out_mplane	= gsc_m2m_s_fmt_mplane,
	.vidioc_reqbufs			= gsc_m2m_reqbufs,
	.vidioc_expbuf			= gsc_m2m_expbuf,
	.vidioc_querybuf		= gsc_m2m_querybuf,
	.vidioc_qbuf			= gsc_m2m_qbuf,
	.vidioc_dqbuf			= gsc_m2m_dqbuf,
	.vidioc_streamon		= gsc_m2m_streamon,
	.vidioc_streamoff		= gsc_m2m_streamoff,
	.vidioc_g_selection		= gsc_m2m_g_selection,
	.vidioc_s_selection		= gsc_m2m_s_selection
};

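/*
 * m2m queue initialization callback: both the output (source) and capture
 * (destination) vb2 queues use the DMA-contig allocator and copy timestamps
 * from the source buffer to the destination buffer.
 */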
static int queue_init(void *priv, struct vb2_queue *src_vq,
			struct vb2_queue *dst_vq)
{
	struct gsc_ctx *ctx = priv;
	int ret;

	memset(src_vq, 0, sizeof(*src_vq));
	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
	src_vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
	src_vq->drv_priv = ctx;
	src_vq->ops = &gsc_m2m_qops;
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	src_vq->lock = &ctx->gsc_dev->lock;
	src_vq->dev = &ctx->gsc_dev->pdev->dev;

	ret = vb2_queue_init(src_vq);
	if (ret)
		return ret;

	memset(dst_vq, 0, sizeof(*dst_vq));
	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	dst_vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
	dst_vq->drv_priv = ctx;
	dst_vq->ops = &gsc_m2m_qops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	dst_vq->lock = &ctx->gsc_dev->lock;
	dst_vq->dev = &ctx->gsc_dev->pdev->dev;

	return vb2_queue_init(dst_vq);
}

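/*
 * Each open file handle gets its own gsc_ctx with a private control handler,
 * default formats and an m2m context; the first open sets ST_M2M_OPEN in the
 * device state.
 */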
static int gsc_m2m_open(struct file *file)
{
	struct gsc_dev *gsc = video_drvdata(file);
	struct gsc_ctx *ctx = NULL;
	int ret;

	pr_debug("pid: %d, state: 0x%lx", task_pid_nr(current), gsc->state);

	if (mutex_lock_interruptible(&gsc->lock))
		return -ERESTARTSYS;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx) {
		ret = -ENOMEM;
		goto unlock;
	}

	v4l2_fh_init(&ctx->fh, gsc->m2m.vfd);
	ret = gsc_ctrls_create(ctx);
	if (ret)
		goto error_fh;

	/* Use separate control handler per file handle */
	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
	file->private_data = &ctx->fh;
	v4l2_fh_add(&ctx->fh);

	ctx->gsc_dev = gsc;
	/* Default color format */
	ctx->s_frame.fmt = get_format(0);
	ctx->d_frame.fmt = get_format(0);
	/* Setup the device context for mem2mem mode. */
	ctx->state = GSC_CTX_M2M;
	ctx->flags = 0;
	ctx->in_path = GSC_DMA;
	ctx->out_path = GSC_DMA;

	ctx->m2m_ctx = v4l2_m2m_ctx_init(gsc->m2m.m2m_dev, ctx, queue_init);
	if (IS_ERR(ctx->m2m_ctx)) {
		pr_err("Failed to initialize m2m context");
		ret = PTR_ERR(ctx->m2m_ctx);
		goto error_ctrls;
	}

	if (gsc->m2m.refcnt++ == 0)
		set_bit(ST_M2M_OPEN, &gsc->state);

	pr_debug("gsc m2m driver is opened, ctx(0x%p)", ctx);

	mutex_unlock(&gsc->lock);
	return 0;

error_ctrls:
	gsc_ctrls_delete(ctx);
	v4l2_fh_del(&ctx->fh);
error_fh:
	v4l2_fh_exit(&ctx->fh);
	kfree(ctx);
unlock:
	mutex_unlock(&gsc->lock);
	return ret;
}

static int gsc_m2m_release(struct file *file)
{
	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
	struct gsc_dev *gsc = ctx->gsc_dev;

	pr_debug("pid: %d, state: 0x%lx, refcnt= %d",
		task_pid_nr(current), gsc->state, gsc->m2m.refcnt);

	mutex_lock(&gsc->lock);

	v4l2_m2m_ctx_release(ctx->m2m_ctx);
	gsc_ctrls_delete(ctx);
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);

	if (--gsc->m2m.refcnt <= 0)
		clear_bit(ST_M2M_OPEN, &gsc->state);
	kfree(ctx);

	mutex_unlock(&gsc->lock);
	return 0;
}

static __poll_t gsc_m2m_poll(struct file *file,
			     struct poll_table_struct *wait)
{
	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
	struct gsc_dev *gsc = ctx->gsc_dev;
	__poll_t ret;

	if (mutex_lock_interruptible(&gsc->lock))
		return EPOLLERR;

	ret = v4l2_m2m_poll(file, ctx->m2m_ctx, wait);
	mutex_unlock(&gsc->lock);

	return ret;
}

static int gsc_m2m_mmap(struct file *file, struct vm_area_struct *vma)
{
	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
	struct gsc_dev *gsc = ctx->gsc_dev;
	int ret;

	if (mutex_lock_interruptible(&gsc->lock))
		return -ERESTARTSYS;

	ret = v4l2_m2m_mmap(file, ctx->m2m_ctx, vma);
	mutex_unlock(&gsc->lock);

	return ret;
}

static const struct v4l2_file_operations gsc_m2m_fops = {
	.owner		= THIS_MODULE,
	.open		= gsc_m2m_open,
	.release	= gsc_m2m_release,
	.poll		= gsc_m2m_poll,
	.unlocked_ioctl	= video_ioctl2,
	.mmap		= gsc_m2m_mmap,
};

static const struct v4l2_m2m_ops gsc_m2m_ops = {
	.device_run	= gsc_m2m_device_run,
	.job_abort	= gsc_m2m_job_abort,
};

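/*
 * Register the mem2mem video device node for @gsc and create the v4l2-m2m
 * device that dispatches queued jobs to gsc_m2m_device_run().
 */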
int gsc_register_m2m_device(struct gsc_dev *gsc)
{
	struct platform_device *pdev;
	int ret;

	if (!gsc)
		return -ENODEV;

	pdev = gsc->pdev;

	gsc->vdev.fops		= &gsc_m2m_fops;
	gsc->vdev.ioctl_ops	= &gsc_m2m_ioctl_ops;
	gsc->vdev.release	= video_device_release_empty;
	gsc->vdev.lock		= &gsc->lock;
	gsc->vdev.vfl_dir	= VFL_DIR_M2M;
	gsc->vdev.v4l2_dev	= &gsc->v4l2_dev;
	snprintf(gsc->vdev.name, sizeof(gsc->vdev.name), "%s.%d:m2m",
					GSC_MODULE_NAME, gsc->id);

	video_set_drvdata(&gsc->vdev, gsc);

	gsc->m2m.vfd = &gsc->vdev;
	gsc->m2m.m2m_dev = v4l2_m2m_init(&gsc_m2m_ops);
	if (IS_ERR(gsc->m2m.m2m_dev)) {
		dev_err(&pdev->dev, "failed to initialize v4l2-m2m device\n");
		return PTR_ERR(gsc->m2m.m2m_dev);
	}

	ret = video_register_device(&gsc->vdev, VFL_TYPE_GRABBER, -1);
	if (ret) {
		dev_err(&pdev->dev,
			"%s(): failed to register video device\n", __func__);
		goto err_m2m_release;
	}

	pr_debug("gsc m2m driver registered as /dev/video%d", gsc->vdev.num);
	return 0;

err_m2m_release:
	v4l2_m2m_release(gsc->m2m.m2m_dev);

	return ret;
}

void gsc_unregister_m2m_device(struct gsc_dev *gsc)
{
	if (gsc) {
		v4l2_m2m_release(gsc->m2m.m2m_dev);
		video_unregister_device(&gsc->vdev);
	}
}