/*
 * Copyright (c) 2011 - 2012 Samsung Electronics Co., Ltd.
 *		http://www.samsung.com
 *
 * Samsung EXYNOS5 SoC series G-Scaler driver
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 2 of the License,
 * or (at your option) any later version.
 */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/bug.h>
#include <linux/interrupt.h>
#include <linux/workqueue.h>
#include <linux/device.h>
#include <linux/platform_device.h>
#include <linux/list.h>
#include <linux/io.h>
#include <linux/slab.h>
#include <linux/clk.h>

#include <media/v4l2-ioctl.h>

#include "gsc-core.h"
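/*
 * Request the currently running m2m job to stop: GSC_CTX_STOP_REQ is set and
 * the caller sleeps on gsc->irq_queue until the flag is cleared again
 * (presumably by the frame-done interrupt path outside this file) or until
 * GSC_SHUTDOWN_TIMEOUT expires.  If this context is not the one the m2m core
 * is currently running, there is nothing to wait for.
 */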
static int gsc_m2m_ctx_stop_req(struct gsc_ctx *ctx)
{
	struct gsc_ctx *curr_ctx;
	struct gsc_dev *gsc = ctx->gsc_dev;
	int ret;

	curr_ctx = v4l2_m2m_get_curr_priv(gsc->m2m.m2m_dev);
	if (!gsc_m2m_pending(gsc) || (curr_ctx != ctx))
		return 0;

	gsc_ctx_state_lock_set(GSC_CTX_STOP_REQ, ctx);
	ret = wait_event_timeout(gsc->irq_queue,
			!gsc_ctx_state_is_set(GSC_CTX_STOP_REQ, ctx),
			GSC_SHUTDOWN_TIMEOUT);

	return ret == 0 ? -ETIMEDOUT : ret;
}
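/*
 * Abort helper shared by stop_streaming and the m2m job_abort callback.  If
 * the stop request timed out, or the context was already flagged with
 * GSC_CTX_ABORT, the pending job is completed here with VB2_BUF_STATE_ERROR
 * instead of waiting for the interrupt handler.
 */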
static void __gsc_m2m_job_abort(struct gsc_ctx *ctx)
{
	int ret;

	ret = gsc_m2m_ctx_stop_req(ctx);
	if ((ret == -ETIMEDOUT) || (ctx->state & GSC_CTX_ABORT)) {
		gsc_ctx_state_lock_clear(GSC_CTX_STOP_REQ | GSC_CTX_ABORT, ctx);
		gsc_m2m_job_finish(ctx, VB2_BUF_STATE_ERROR);
	}
}
static int gsc_m2m_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct gsc_ctx *ctx = q->drv_priv;
	int ret;

	ret = pm_runtime_get_sync(&ctx->gsc_dev->pdev->dev);
	return ret > 0 ? 0 : ret;
}
static void gsc_m2m_stop_streaming(struct vb2_queue *q)
{
	struct gsc_ctx *ctx = q->drv_priv;

	__gsc_m2m_job_abort(ctx);

	pm_runtime_put(&ctx->gsc_dev->pdev->dev);
}
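/*
 * gsc_m2m_job_finish - return the current source/destination buffer pair
 *
 * Called from the abort path above and, as a non-static symbol, from the
 * interrupt handling code elsewhere in the driver.  Timestamp, timecode and
 * the timestamp-source flags are copied from the source to the destination
 * buffer before both are handed back to videobuf2 in @vb_state.
 */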
void gsc_m2m_job_finish(struct gsc_ctx *ctx, int vb_state)
{
	struct vb2_v4l2_buffer *src_vb, *dst_vb;

	if (!ctx || !ctx->m2m_ctx)
		return;

	src_vb = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
	dst_vb = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);

	if (src_vb && dst_vb) {
		dst_vb->vb2_buf.timestamp = src_vb->vb2_buf.timestamp;
		dst_vb->timecode = src_vb->timecode;
		dst_vb->flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
		dst_vb->flags |= src_vb->flags & V4L2_BUF_FLAG_TSTAMP_SRC_MASK;

		v4l2_m2m_buf_done(src_vb, vb_state);
		v4l2_m2m_buf_done(dst_vb, vb_state);

		v4l2_m2m_job_finish(ctx->gsc_dev->m2m.m2m_dev,
				    ctx->m2m_ctx);
	}
}
static void gsc_m2m_job_abort(void *priv)
{
	__gsc_m2m_job_abort((struct gsc_ctx *)priv);
}
static int gsc_get_bufs(struct gsc_ctx *ctx)
{
	struct gsc_frame *s_frame, *d_frame;
	struct vb2_v4l2_buffer *src_vb, *dst_vb;
	int ret;

	s_frame = &ctx->s_frame;
	d_frame = &ctx->d_frame;

	src_vb = v4l2_m2m_next_src_buf(ctx->m2m_ctx);
	ret = gsc_prepare_addr(ctx, &src_vb->vb2_buf, s_frame, &s_frame->addr);
	if (ret)
		return ret;

	dst_vb = v4l2_m2m_next_dst_buf(ctx->m2m_ctx);
	ret = gsc_prepare_addr(ctx, &dst_vb->vb2_buf, d_frame, &d_frame->addr);
	if (ret)
		return ret;

	dst_vb->vb2_buf.timestamp = src_vb->vb2_buf.timestamp;

	return 0;
}
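/*
 * gsc_m2m_device_run - the v4l2-mem2mem .device_run callback
 *
 * Programs DMA addresses for the next buffer pair on every run, but performs
 * a full hardware reconfiguration (paths, sizes, formats, scaler, rotation)
 * only when GSC_PARAMS is set, i.e. when the owning context has changed or a
 * format/selection was modified.  The shadow registers are then committed and
 * the transfer is started, all under gsc->slock.
 */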
static void gsc_m2m_device_run(void *priv)
{
	struct gsc_ctx *ctx = priv;
	struct gsc_dev *gsc;
	unsigned long flags;
	int ret;
	bool is_set = false;

	if (WARN(!ctx, "null hardware context\n"))
		return;

	gsc = ctx->gsc_dev;
	spin_lock_irqsave(&gsc->slock, flags);

	set_bit(ST_M2M_PEND, &gsc->state);

	/* Reconfigure hardware if the context has changed. */
	if (gsc->m2m.ctx != ctx) {
		pr_debug("gsc->m2m.ctx = 0x%p, current_ctx = 0x%p",
			 gsc->m2m.ctx, ctx);
		ctx->state |= GSC_PARAMS;
		gsc->m2m.ctx = ctx;
	}

	is_set = ctx->state & GSC_CTX_STOP_REQ;
	if (is_set) {
		ctx->state &= ~GSC_CTX_STOP_REQ;
		ctx->state |= GSC_CTX_ABORT;
		wake_up(&gsc->irq_queue);
		goto put_device;
	}

	ret = gsc_get_bufs(ctx);
	if (ret) {
		pr_err("Wrong address");
		goto put_device;
	}

	gsc_set_prefbuf(gsc, &ctx->s_frame);
	gsc_hw_set_input_addr(gsc, &ctx->s_frame.addr, GSC_M2M_BUF_NUM);
	gsc_hw_set_output_addr(gsc, &ctx->d_frame.addr, GSC_M2M_BUF_NUM);

	if (ctx->state & GSC_PARAMS) {
		gsc_hw_set_input_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_output_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_frm_done_irq_mask(gsc, false);
		gsc_hw_set_gsc_irq_enable(gsc, true);

		if (gsc_set_scaler_info(ctx)) {
			pr_err("Scaler setup error");
			goto put_device;
		}

		gsc_hw_set_input_path(ctx);
		gsc_hw_set_in_size(ctx);
		gsc_hw_set_in_image_format(ctx);

		gsc_hw_set_output_path(ctx);
		gsc_hw_set_out_size(ctx);
		gsc_hw_set_out_image_format(ctx);

		gsc_hw_set_prescaler(ctx);
		gsc_hw_set_mainscaler(ctx);
		gsc_hw_set_rotation(ctx);
		gsc_hw_set_global_alpha(ctx);
	}

	/* update shadow registers */
	gsc_hw_set_sfr_update(ctx);

	ctx->state &= ~GSC_PARAMS;
	gsc_hw_enable_control(gsc, true);

	spin_unlock_irqrestore(&gsc->slock, flags);
	return;

put_device:
	ctx->state &= ~GSC_PARAMS;
	spin_unlock_irqrestore(&gsc->slock, flags);
}
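/*
 * videobuf2 queue operations: queue_setup reports one buffer plane per color
 * plane of the negotiated format, sized from the payload computed at S_FMT
 * time; buf_prepare sets the payload on capture buffers; buf_queue hands
 * buffers over to the v4l2-mem2mem core.
 */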
static int gsc_m2m_queue_setup(struct vb2_queue *vq,
			unsigned int *num_buffers, unsigned int *num_planes,
			unsigned int sizes[], void *allocators[])
{
	struct gsc_ctx *ctx = vb2_get_drv_priv(vq);
	struct gsc_frame *frame;
	int i;

	frame = ctx_get_frame(ctx, vq->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	if (!frame->fmt)
		return -EINVAL;

	*num_planes = frame->fmt->num_planes;
	for (i = 0; i < frame->fmt->num_planes; i++) {
		sizes[i] = frame->payload[i];
		allocators[i] = ctx->gsc_dev->alloc_ctx;
	}
	return 0;
}
static int gsc_m2m_buf_prepare(struct vb2_buffer *vb)
{
	struct gsc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct gsc_frame *frame;
	int i;

	frame = ctx_get_frame(ctx, vb->vb2_queue->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	if (!V4L2_TYPE_IS_OUTPUT(vb->vb2_queue->type)) {
		for (i = 0; i < frame->fmt->num_planes; i++)
			vb2_set_plane_payload(vb, i, frame->payload[i]);
	}

	return 0;
}
static void gsc_m2m_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct gsc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);

	pr_debug("ctx: %p, ctx->state: 0x%x", ctx, ctx->state);

	if (ctx->m2m_ctx)
		v4l2_m2m_buf_queue(ctx->m2m_ctx, vbuf);
}
static struct vb2_ops gsc_m2m_qops = {
	.queue_setup	 = gsc_m2m_queue_setup,
	.buf_prepare	 = gsc_m2m_buf_prepare,
	.buf_queue	 = gsc_m2m_buf_queue,
	.wait_prepare	 = vb2_ops_wait_prepare,
	.wait_finish	 = vb2_ops_wait_finish,
	.stop_streaming	 = gsc_m2m_stop_streaming,
	.start_streaming = gsc_m2m_start_streaming,
};
static int gsc_m2m_querycap(struct file *file, void *fh,
			    struct v4l2_capability *cap)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct gsc_dev *gsc = ctx->gsc_dev;

	strlcpy(cap->driver, gsc->pdev->name, sizeof(cap->driver));
	strlcpy(cap->card, gsc->pdev->name, sizeof(cap->card));
	strlcpy(cap->bus_info, "platform", sizeof(cap->bus_info));
	cap->device_caps = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M_MPLANE |
		V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_OUTPUT_MPLANE;

	cap->capabilities = cap->device_caps | V4L2_CAP_DEVICE_CAPS;
	return 0;
}
static int gsc_m2m_enum_fmt_mplane(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	return gsc_enum_fmt_mplane(f);
}
static int gsc_m2m_g_fmt_mplane(struct file *file, void *fh,
				struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return gsc_g_fmt_mplane(ctx, f);
}
static int gsc_m2m_try_fmt_mplane(struct file *file, void *fh,
				  struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return gsc_try_fmt_mplane(ctx, f);
}
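/*
 * S_FMT is rejected while the selected queue is streaming.  The negotiated
 * format is cached in ctx->s_frame or ctx->d_frame (including per-plane
 * payload sizes) and GSC_SRC_FMT/GSC_DST_FMT is set so that streamon can
 * verify that both sides have been configured.
 */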
static int gsc_m2m_s_fmt_mplane(struct file *file, void *fh,
				struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct vb2_queue *vq;
	struct gsc_frame *frame;
	struct v4l2_pix_format_mplane *pix;
	int i, ret = 0;

	ret = gsc_m2m_try_fmt_mplane(file, fh, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);

	if (vb2_is_streaming(vq)) {
		pr_err("queue (%d) busy", f->type);
		return -EBUSY;
	}

	if (V4L2_TYPE_IS_OUTPUT(f->type))
		frame = &ctx->s_frame;
	else
		frame = &ctx->d_frame;

	pix = &f->fmt.pix_mp;
	frame->fmt = find_fmt(&pix->pixelformat, NULL, 0);
	frame->colorspace = pix->colorspace;
	if (!frame->fmt)
		return -EINVAL;

	for (i = 0; i < frame->fmt->num_planes; i++)
		frame->payload[i] = pix->plane_fmt[i].sizeimage;

	gsc_set_frame_size(frame, pix->width, pix->height);

	if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		gsc_ctx_state_lock_set(GSC_PARAMS | GSC_DST_FMT, ctx);
	else
		gsc_ctx_state_lock_set(GSC_PARAMS | GSC_SRC_FMT, ctx);

	pr_debug("f_w: %d, f_h: %d", frame->f_width, frame->f_height);

	return 0;
}
static int gsc_m2m_reqbufs(struct file *file, void *fh,
			   struct v4l2_requestbuffers *reqbufs)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct gsc_dev *gsc = ctx->gsc_dev;
	u32 max_cnt;

	max_cnt = (reqbufs->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) ?
		gsc->variant->in_buf_cnt : gsc->variant->out_buf_cnt;

	if (reqbufs->count > max_cnt) {
		return -EINVAL;
	} else if (reqbufs->count == 0) {
		if (reqbufs->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
			gsc_ctx_state_lock_clear(GSC_SRC_FMT, ctx);
		else
			gsc_ctx_state_lock_clear(GSC_DST_FMT, ctx);
	}

	return v4l2_m2m_reqbufs(file, ctx->m2m_ctx, reqbufs);
}
static int gsc_m2m_expbuf(struct file *file, void *fh,
			  struct v4l2_exportbuffer *eb)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_expbuf(file, ctx->m2m_ctx, eb);
}
static int gsc_m2m_querybuf(struct file *file, void *fh,
			    struct v4l2_buffer *buf)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_querybuf(file, ctx->m2m_ctx, buf);
}
static int gsc_m2m_qbuf(struct file *file, void *fh,
			struct v4l2_buffer *buf)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_qbuf(file, ctx->m2m_ctx, buf);
}
static int gsc_m2m_dqbuf(struct file *file, void *fh,
			 struct v4l2_buffer *buf)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_dqbuf(file, ctx->m2m_ctx, buf);
}
static int gsc_m2m_streamon(struct file *file, void *fh,
			    enum v4l2_buf_type type)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	/* The source and target color formats need to be set. */
	if (V4L2_TYPE_IS_OUTPUT(type)) {
		if (!gsc_ctx_state_is_set(GSC_SRC_FMT, ctx))
			return -EINVAL;
	} else if (!gsc_ctx_state_is_set(GSC_DST_FMT, ctx)) {
		return -EINVAL;
	}

	return v4l2_m2m_streamon(file, ctx->m2m_ctx, type);
}
static int gsc_m2m_streamoff(struct file *file, void *fh,
			     enum v4l2_buf_type type)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_streamoff(file, ctx->m2m_ctx, type);
}
/* Return 1 if rectangle a is enclosed in rectangle b, or 0 otherwise. */
static int is_rectangle_enclosed(struct v4l2_rect *a, struct v4l2_rect *b)
{
	if (a->left < b->left || a->top < b->top)
		return 0;

	if (a->left + a->width > b->left + b->width)
		return 0;

	if (a->top + a->height > b->top + b->height)
		return 0;

	return 1;
}
static int gsc_m2m_g_selection(struct file *file, void *fh,
			       struct v4l2_selection *s)
{
	struct gsc_frame *frame;
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	if ((s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) &&
	    (s->type != V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE))
		return -EINVAL;

	frame = ctx_get_frame(ctx, s->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	switch (s->target) {
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
	case V4L2_SEL_TGT_CROP_BOUNDS:
	case V4L2_SEL_TGT_CROP_DEFAULT:
		s->r.left = 0;
		s->r.top = 0;
		s->r.width = frame->f_width;
		s->r.height = frame->f_height;
		return 0;

	case V4L2_SEL_TGT_COMPOSE:
	case V4L2_SEL_TGT_CROP:
		s->r.left = frame->crop.left;
		s->r.top = frame->crop.top;
		s->r.width = frame->crop.width;
		s->r.height = frame->crop.height;
		return 0;
	}

	return -EINVAL;
}
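/*
 * S_SELECTION: compose targets are applied to the source frame and crop
 * targets to the destination frame.  The rectangle adjusted by gsc_try_crop()
 * is checked against V4L2_SEL_FLAG_LE/GE and, once both formats are set,
 * against the scaler ratio limits of this variant before being stored.
 */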
static int gsc_m2m_s_selection(struct file *file, void *fh,
			       struct v4l2_selection *s)
{
	struct gsc_frame *frame;
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct v4l2_crop cr;
	struct gsc_variant *variant = ctx->gsc_dev->variant;
	int ret;

	cr.type = s->type;
	cr.c = s->r;

	if ((s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) &&
	    (s->type != V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE))
		return -EINVAL;

	ret = gsc_try_crop(ctx, &cr);
	if (ret)
		return ret;

	if (s->flags & V4L2_SEL_FLAG_LE &&
	    !is_rectangle_enclosed(&cr.c, &s->r))
		return -ERANGE;

	if (s->flags & V4L2_SEL_FLAG_GE &&
	    !is_rectangle_enclosed(&s->r, &cr.c))
		return -ERANGE;

	s->r = cr.c;

	switch (s->target) {
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
	case V4L2_SEL_TGT_COMPOSE:
		frame = &ctx->s_frame;
		break;

	case V4L2_SEL_TGT_CROP_BOUNDS:
	case V4L2_SEL_TGT_CROP:
	case V4L2_SEL_TGT_CROP_DEFAULT:
		frame = &ctx->d_frame;
		break;

	default:
		return -EINVAL;
	}

	/* Check to see if scaling ratio is within supported range */
	if (gsc_ctx_state_is_set(GSC_DST_FMT | GSC_SRC_FMT, ctx)) {
		if (s->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
			ret = gsc_check_scaler_ratio(variant, cr.c.width,
				cr.c.height, ctx->d_frame.crop.width,
				ctx->d_frame.crop.height,
				ctx->gsc_ctrls.rotate->val, ctx->out_path);
		} else {
			ret = gsc_check_scaler_ratio(variant,
				ctx->s_frame.crop.width,
				ctx->s_frame.crop.height, cr.c.width,
				cr.c.height, ctx->gsc_ctrls.rotate->val,
				ctx->out_path);
		}

		if (ret) {
			pr_err("Out of scaler range");
			return -EINVAL;
		}
	}

	frame->crop = cr.c;

	gsc_ctx_state_lock_set(GSC_PARAMS, ctx);
	return 0;
}
static const struct v4l2_ioctl_ops gsc_m2m_ioctl_ops = {
	.vidioc_querycap		= gsc_m2m_querycap,
	.vidioc_enum_fmt_vid_cap_mplane	= gsc_m2m_enum_fmt_mplane,
	.vidioc_enum_fmt_vid_out_mplane	= gsc_m2m_enum_fmt_mplane,
	.vidioc_g_fmt_vid_cap_mplane	= gsc_m2m_g_fmt_mplane,
	.vidioc_g_fmt_vid_out_mplane	= gsc_m2m_g_fmt_mplane,
	.vidioc_try_fmt_vid_cap_mplane	= gsc_m2m_try_fmt_mplane,
	.vidioc_try_fmt_vid_out_mplane	= gsc_m2m_try_fmt_mplane,
	.vidioc_s_fmt_vid_cap_mplane	= gsc_m2m_s_fmt_mplane,
	.vidioc_s_fmt_vid_out_mplane	= gsc_m2m_s_fmt_mplane,
	.vidioc_reqbufs			= gsc_m2m_reqbufs,
	.vidioc_expbuf			= gsc_m2m_expbuf,
	.vidioc_querybuf		= gsc_m2m_querybuf,
	.vidioc_qbuf			= gsc_m2m_qbuf,
	.vidioc_dqbuf			= gsc_m2m_dqbuf,
	.vidioc_streamon		= gsc_m2m_streamon,
	.vidioc_streamoff		= gsc_m2m_streamoff,
	.vidioc_g_selection		= gsc_m2m_g_selection,
	.vidioc_s_selection		= gsc_m2m_s_selection
};
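/*
 * queue_init is passed to v4l2_m2m_ctx_init() and configures both vb2 queues
 * identically except for the buffer type: MMAP/USERPTR/DMABUF I/O with the
 * dma-contig allocator, timestamps copied from OUTPUT to CAPTURE buffers, and
 * the per-device mutex used for queue serialization.
 */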
static int queue_init(void *priv, struct vb2_queue *src_vq,
		      struct vb2_queue *dst_vq)
{
	struct gsc_ctx *ctx = priv;
	int ret;

	memset(src_vq, 0, sizeof(*src_vq));
	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
	src_vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
	src_vq->drv_priv = ctx;
	src_vq->ops = &gsc_m2m_qops;
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	src_vq->lock = &ctx->gsc_dev->lock;

	ret = vb2_queue_init(src_vq);
	if (ret)
		return ret;

	memset(dst_vq, 0, sizeof(*dst_vq));
	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	dst_vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
	dst_vq->drv_priv = ctx;
	dst_vq->ops = &gsc_m2m_qops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	dst_vq->lock = &ctx->gsc_dev->lock;

	return vb2_queue_init(dst_vq);
}
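/*
 * Each open file handle gets its own gsc_ctx with a private control handler
 * and m2m context; source and destination paths default to DMA and to the
 * first entry of the driver's format table (get_format(0)).
 */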
static int gsc_m2m_open(struct file *file)
{
	struct gsc_dev *gsc = video_drvdata(file);
	struct gsc_ctx *ctx = NULL;
	int ret;

	pr_debug("pid: %d, state: 0x%lx", task_pid_nr(current), gsc->state);

	if (mutex_lock_interruptible(&gsc->lock))
		return -ERESTARTSYS;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx) {
		ret = -ENOMEM;
		goto unlock;
	}

	v4l2_fh_init(&ctx->fh, gsc->m2m.vfd);
	ret = gsc_ctrls_create(ctx);
	if (ret)
		goto error_fh;

	/* Use separate control handler per file handle */
	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
	file->private_data = &ctx->fh;
	v4l2_fh_add(&ctx->fh);

	ctx->gsc_dev = gsc;
	/* Default color format */
	ctx->s_frame.fmt = get_format(0);
	ctx->d_frame.fmt = get_format(0);
	/* Setup the device context for mem2mem mode. */
	ctx->state = GSC_CTX_M2M;
	ctx->flags = 0;
	ctx->in_path = GSC_DMA;
	ctx->out_path = GSC_DMA;

	ctx->m2m_ctx = v4l2_m2m_ctx_init(gsc->m2m.m2m_dev, ctx, queue_init);
	if (IS_ERR(ctx->m2m_ctx)) {
		pr_err("Failed to initialize m2m context");
		ret = PTR_ERR(ctx->m2m_ctx);
		goto error_ctrls;
	}

	if (gsc->m2m.refcnt++ == 0)
		set_bit(ST_M2M_OPEN, &gsc->state);

	pr_debug("gsc m2m driver is opened, ctx(0x%p)", ctx);

	mutex_unlock(&gsc->lock);
	return 0;

error_ctrls:
	gsc_ctrls_delete(ctx);
error_fh:
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);
	kfree(ctx);
unlock:
	mutex_unlock(&gsc->lock);
	return ret;
}
static int gsc_m2m_release(struct file *file)
{
	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
	struct gsc_dev *gsc = ctx->gsc_dev;

	pr_debug("pid: %d, state: 0x%lx, refcnt= %d",
		 task_pid_nr(current), gsc->state, gsc->m2m.refcnt);

	mutex_lock(&gsc->lock);

	v4l2_m2m_ctx_release(ctx->m2m_ctx);
	gsc_ctrls_delete(ctx);
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);

	if (--gsc->m2m.refcnt <= 0)
		clear_bit(ST_M2M_OPEN, &gsc->state);
	kfree(ctx);

	mutex_unlock(&gsc->lock);
	return 0;
}
static unsigned int gsc_m2m_poll(struct file *file,
				 struct poll_table_struct *wait)
{
	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
	struct gsc_dev *gsc = ctx->gsc_dev;
	int ret;

	if (mutex_lock_interruptible(&gsc->lock))
		return -ERESTARTSYS;

	ret = v4l2_m2m_poll(file, ctx->m2m_ctx, wait);
	mutex_unlock(&gsc->lock);

	return ret;
}
static int gsc_m2m_mmap(struct file *file, struct vm_area_struct *vma)
{
	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
	struct gsc_dev *gsc = ctx->gsc_dev;
	int ret;

	if (mutex_lock_interruptible(&gsc->lock))
		return -ERESTARTSYS;

	ret = v4l2_m2m_mmap(file, ctx->m2m_ctx, vma);
	mutex_unlock(&gsc->lock);

	return ret;
}
static const struct v4l2_file_operations gsc_m2m_fops = {
	.owner		= THIS_MODULE,
	.open		= gsc_m2m_open,
	.release	= gsc_m2m_release,
	.poll		= gsc_m2m_poll,
	.unlocked_ioctl	= video_ioctl2,
	.mmap		= gsc_m2m_mmap,
};
static struct v4l2_m2m_ops gsc_m2m_ops = {
	.device_run	= gsc_m2m_device_run,
	.job_abort	= gsc_m2m_job_abort,
};
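/*
 * Registration entry points used by the rest of the driver:
 * gsc_register_m2m_device() sets up the video device node and the
 * v4l2-mem2mem device, gsc_unregister_m2m_device() releases the latter.
 */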
int gsc_register_m2m_device(struct gsc_dev *gsc)
{
	struct platform_device *pdev;
	int ret;

	if (!gsc)
		return -ENODEV;

	pdev = gsc->pdev;

	gsc->vdev.fops		= &gsc_m2m_fops;
	gsc->vdev.ioctl_ops	= &gsc_m2m_ioctl_ops;
	gsc->vdev.release	= video_device_release_empty;
	gsc->vdev.lock		= &gsc->lock;
	gsc->vdev.vfl_dir	= VFL_DIR_M2M;
	gsc->vdev.v4l2_dev	= &gsc->v4l2_dev;
	snprintf(gsc->vdev.name, sizeof(gsc->vdev.name), "%s.%d:m2m",
		 GSC_MODULE_NAME, gsc->id);

	video_set_drvdata(&gsc->vdev, gsc);

	gsc->m2m.vfd = &gsc->vdev;
	gsc->m2m.m2m_dev = v4l2_m2m_init(&gsc_m2m_ops);
	if (IS_ERR(gsc->m2m.m2m_dev)) {
		dev_err(&pdev->dev, "failed to initialize v4l2-m2m device\n");
		ret = PTR_ERR(gsc->m2m.m2m_dev);
		goto err_m2m_init;
	}

	ret = video_register_device(&gsc->vdev, VFL_TYPE_GRABBER, -1);
	if (ret) {
		dev_err(&pdev->dev,
			"%s(): failed to register video device\n", __func__);
		goto err_video_register;
	}

	pr_debug("gsc m2m driver registered as /dev/video%d", gsc->vdev.num);
	return 0;

err_video_register:
	v4l2_m2m_release(gsc->m2m.m2m_dev);
err_m2m_init:
	video_device_release(gsc->m2m.vfd);

	return ret;
}
void gsc_unregister_m2m_device(struct gsc_dev *gsc)
{
	if (gsc)
		v4l2_m2m_release(gsc->m2m.m2m_dev);
}