drivers/media/platform/exynos-gsc/gsc-m2m.c

/*
 * Copyright (c) 2011 - 2012 Samsung Electronics Co., Ltd.
 *		http://www.samsung.com
 *
 * Samsung EXYNOS5 SoC series G-Scaler driver
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 2 of the License,
 * or (at your option) any later version.
 */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/bug.h>
#include <linux/interrupt.h>
#include <linux/workqueue.h>
#include <linux/device.h>
#include <linux/platform_device.h>
#include <linux/list.h>
#include <linux/io.h>
#include <linux/slab.h>
#include <linux/clk.h>

#include <media/v4l2-ioctl.h>

#include "gsc-core.h"

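/*
 * Ask the job currently running on behalf of @ctx to stop and wait for it
 * to do so, giving up after GSC_SHUTDOWN_TIMEOUT.
 */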
static int gsc_m2m_ctx_stop_req(struct gsc_ctx *ctx)
{
	struct gsc_ctx *curr_ctx;
	struct gsc_dev *gsc = ctx->gsc_dev;
	int ret;

	curr_ctx = v4l2_m2m_get_curr_priv(gsc->m2m.m2m_dev);
	if (!gsc_m2m_pending(gsc) || (curr_ctx != ctx))
		return 0;

	gsc_ctx_state_lock_set(GSC_CTX_STOP_REQ, ctx);
	ret = wait_event_timeout(gsc->irq_queue,
			!gsc_ctx_state_is_set(GSC_CTX_STOP_REQ, ctx),
			GSC_SHUTDOWN_TIMEOUT);

	return ret == 0 ? -ETIMEDOUT : ret;
}

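/*
 * Abort the active job: if the stop request timed out or an abort is
 * already pending, clear the state flags and complete the job with an
 * error status.
 */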
static void __gsc_m2m_job_abort(struct gsc_ctx *ctx)
{
	int ret;

	ret = gsc_m2m_ctx_stop_req(ctx);
	if ((ret == -ETIMEDOUT) || (ctx->state & GSC_CTX_ABORT)) {
		gsc_ctx_state_lock_clear(GSC_CTX_STOP_REQ | GSC_CTX_ABORT, ctx);
		gsc_m2m_job_finish(ctx, VB2_BUF_STATE_ERROR);
	}
}

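/* vb2 start_streaming callback: power up the G-Scaler via runtime PM. */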
static int gsc_m2m_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct gsc_ctx *ctx = q->drv_priv;
	int ret;

	ret = pm_runtime_get_sync(&ctx->gsc_dev->pdev->dev);
	return ret > 0 ? 0 : ret;
}

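/* Give back all queued source and destination buffers as erroneous. */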
static void __gsc_m2m_cleanup_queue(struct gsc_ctx *ctx)
{
	struct vb2_v4l2_buffer *src_vb, *dst_vb;

	while (v4l2_m2m_num_src_bufs_ready(ctx->m2m_ctx) > 0) {
		src_vb = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
		v4l2_m2m_buf_done(src_vb, VB2_BUF_STATE_ERROR);
	}

	while (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) > 0) {
		dst_vb = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);
		v4l2_m2m_buf_done(dst_vb, VB2_BUF_STATE_ERROR);
	}
}

static void gsc_m2m_stop_streaming(struct vb2_queue *q)
{
	struct gsc_ctx *ctx = q->drv_priv;

	__gsc_m2m_job_abort(ctx);

	__gsc_m2m_cleanup_queue(ctx);

	pm_runtime_put(&ctx->gsc_dev->pdev->dev);
}

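/*
 * Complete the current mem2mem transaction: copy the timestamp and
 * timestamp-source flags from the source to the destination buffer,
 * return both buffers with @vb_state and let the m2m core schedule
 * the next queued job.
 */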
void gsc_m2m_job_finish(struct gsc_ctx *ctx, int vb_state)
{
	struct vb2_v4l2_buffer *src_vb, *dst_vb;

	if (!ctx || !ctx->m2m_ctx)
		return;

	src_vb = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
	dst_vb = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);

	if (src_vb && dst_vb) {
		dst_vb->vb2_buf.timestamp = src_vb->vb2_buf.timestamp;
		dst_vb->timecode = src_vb->timecode;
		dst_vb->flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
		dst_vb->flags |=
			src_vb->flags
			& V4L2_BUF_FLAG_TSTAMP_SRC_MASK;

		v4l2_m2m_buf_done(src_vb, vb_state);
		v4l2_m2m_buf_done(dst_vb, vb_state);

		v4l2_m2m_job_finish(ctx->gsc_dev->m2m.m2m_dev,
				    ctx->m2m_ctx);
	}
}

static void gsc_m2m_job_abort(void *priv)
{
	__gsc_m2m_job_abort((struct gsc_ctx *)priv);
}

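/* Resolve the DMA addresses of the next source/destination buffer pair. */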
static int gsc_get_bufs(struct gsc_ctx *ctx)
{
	struct gsc_frame *s_frame, *d_frame;
	struct vb2_v4l2_buffer *src_vb, *dst_vb;
	int ret;

	s_frame = &ctx->s_frame;
	d_frame = &ctx->d_frame;

	src_vb = v4l2_m2m_next_src_buf(ctx->m2m_ctx);
	ret = gsc_prepare_addr(ctx, &src_vb->vb2_buf, s_frame, &s_frame->addr);
	if (ret)
		return ret;

	dst_vb = v4l2_m2m_next_dst_buf(ctx->m2m_ctx);
	ret = gsc_prepare_addr(ctx, &dst_vb->vb2_buf, d_frame, &d_frame->addr);
	if (ret)
		return ret;

	dst_vb->vb2_buf.timestamp = src_vb->vb2_buf.timestamp;

	return 0;
}

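/*
 * m2m device_run callback: program the buffer addresses, reconfigure the
 * scaler when the context parameters have changed and start processing.
 */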
static void gsc_m2m_device_run(void *priv)
{
	struct gsc_ctx *ctx = priv;
	struct gsc_dev *gsc;
	unsigned long flags;
	int ret;
	bool is_set = false;

	if (WARN(!ctx, "null hardware context\n"))
		return;

	gsc = ctx->gsc_dev;
	spin_lock_irqsave(&gsc->slock, flags);

	set_bit(ST_M2M_PEND, &gsc->state);

	/* Reconfigure hardware if the context has changed. */
	if (gsc->m2m.ctx != ctx) {
		pr_debug("gsc->m2m.ctx = 0x%p, current_ctx = 0x%p",
			 gsc->m2m.ctx, ctx);
		ctx->state |= GSC_PARAMS;
		gsc->m2m.ctx = ctx;
	}

	is_set = ctx->state & GSC_CTX_STOP_REQ;
	if (is_set) {
		ctx->state &= ~GSC_CTX_STOP_REQ;
		ctx->state |= GSC_CTX_ABORT;
		wake_up(&gsc->irq_queue);
		goto put_device;
	}

	ret = gsc_get_bufs(ctx);
	if (ret) {
		pr_err("Wrong address");
		goto put_device;
	}

	gsc_set_prefbuf(gsc, &ctx->s_frame);
	gsc_hw_set_input_addr(gsc, &ctx->s_frame.addr, GSC_M2M_BUF_NUM);
	gsc_hw_set_output_addr(gsc, &ctx->d_frame.addr, GSC_M2M_BUF_NUM);

	if (ctx->state & GSC_PARAMS) {
		gsc_hw_set_input_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_output_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_frm_done_irq_mask(gsc, false);
		gsc_hw_set_gsc_irq_enable(gsc, true);

		if (gsc_set_scaler_info(ctx)) {
			pr_err("Scaler setup error");
			goto put_device;
		}

		gsc_hw_set_input_path(ctx);
		gsc_hw_set_in_size(ctx);
		gsc_hw_set_in_image_format(ctx);

		gsc_hw_set_output_path(ctx);
		gsc_hw_set_out_size(ctx);
		gsc_hw_set_out_image_format(ctx);

		gsc_hw_set_prescaler(ctx);
		gsc_hw_set_mainscaler(ctx);
		gsc_hw_set_rotation(ctx);
		gsc_hw_set_global_alpha(ctx);
	}

	/* update shadow registers */
	gsc_hw_set_sfr_update(ctx);

	ctx->state &= ~GSC_PARAMS;
	gsc_hw_enable_control(gsc, true);

	spin_unlock_irqrestore(&gsc->slock, flags);
	return;

put_device:
	ctx->state &= ~GSC_PARAMS;
	spin_unlock_irqrestore(&gsc->slock, flags);
}

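/*
 * vb2 queue_setup callback: report the plane count and per-plane sizes
 * of the currently configured format.
 */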
static int gsc_m2m_queue_setup(struct vb2_queue *vq,
			unsigned int *num_buffers, unsigned int *num_planes,
			unsigned int sizes[], struct device *alloc_devs[])
{
	struct gsc_ctx *ctx = vb2_get_drv_priv(vq);
	struct gsc_frame *frame;
	int i;

	frame = ctx_get_frame(ctx, vq->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	if (!frame->fmt)
		return -EINVAL;

	*num_planes = frame->fmt->num_planes;
	for (i = 0; i < frame->fmt->num_planes; i++)
		sizes[i] = frame->payload[i];
	return 0;
}

static int gsc_m2m_buf_prepare(struct vb2_buffer *vb)
{
	struct gsc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct gsc_frame *frame;
	int i;

	frame = ctx_get_frame(ctx, vb->vb2_queue->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	if (!V4L2_TYPE_IS_OUTPUT(vb->vb2_queue->type)) {
		for (i = 0; i < frame->fmt->num_planes; i++)
			vb2_set_plane_payload(vb, i, frame->payload[i]);
	}

	return 0;
}

static void gsc_m2m_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct gsc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);

	pr_debug("ctx: %p, ctx->state: 0x%x", ctx, ctx->state);

	if (ctx->m2m_ctx)
		v4l2_m2m_buf_queue(ctx->m2m_ctx, vbuf);
}

static const struct vb2_ops gsc_m2m_qops = {
	.queue_setup	 = gsc_m2m_queue_setup,
	.buf_prepare	 = gsc_m2m_buf_prepare,
	.buf_queue	 = gsc_m2m_buf_queue,
	.wait_prepare	 = vb2_ops_wait_prepare,
	.wait_finish	 = vb2_ops_wait_finish,
	.stop_streaming	 = gsc_m2m_stop_streaming,
	.start_streaming = gsc_m2m_start_streaming,
};

static int gsc_m2m_querycap(struct file *file, void *fh,
			    struct v4l2_capability *cap)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct gsc_dev *gsc = ctx->gsc_dev;

	strlcpy(cap->driver, GSC_MODULE_NAME, sizeof(cap->driver));
	strlcpy(cap->card, GSC_MODULE_NAME " gscaler", sizeof(cap->card));
	snprintf(cap->bus_info, sizeof(cap->bus_info), "platform:%s",
		 dev_name(&gsc->pdev->dev));
	cap->device_caps = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M_MPLANE;
	cap->capabilities = cap->device_caps | V4L2_CAP_DEVICE_CAPS;
	return 0;
}

static int gsc_m2m_enum_fmt_mplane(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	return gsc_enum_fmt_mplane(f);
}

static int gsc_m2m_g_fmt_mplane(struct file *file, void *fh,
				struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return gsc_g_fmt_mplane(ctx, f);
}

static int gsc_m2m_try_fmt_mplane(struct file *file, void *fh,
				  struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return gsc_try_fmt_mplane(ctx, f);
}

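/*
 * Apply a validated format to the source or destination frame and record
 * the per-plane payload sizes.
 */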
static int gsc_m2m_s_fmt_mplane(struct file *file, void *fh,
				struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct vb2_queue *vq;
	struct gsc_frame *frame;
	struct v4l2_pix_format_mplane *pix;
	int i, ret = 0;

	ret = gsc_m2m_try_fmt_mplane(file, fh, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);

	if (vb2_is_streaming(vq)) {
		pr_err("queue (%d) busy", f->type);
		return -EBUSY;
	}

	if (V4L2_TYPE_IS_OUTPUT(f->type))
		frame = &ctx->s_frame;
	else
		frame = &ctx->d_frame;

	pix = &f->fmt.pix_mp;
	frame->fmt = find_fmt(&pix->pixelformat, NULL, 0);
	frame->colorspace = pix->colorspace;
	if (!frame->fmt)
		return -EINVAL;

	for (i = 0; i < frame->fmt->num_planes; i++)
		frame->payload[i] = pix->plane_fmt[i].sizeimage;

	gsc_set_frame_size(frame, pix->width, pix->height);

	if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		gsc_ctx_state_lock_set(GSC_PARAMS | GSC_DST_FMT, ctx);
	else
		gsc_ctx_state_lock_set(GSC_PARAMS | GSC_SRC_FMT, ctx);

	pr_debug("f_w: %d, f_h: %d", frame->f_width, frame->f_height);

	return 0;
}

static int gsc_m2m_reqbufs(struct file *file, void *fh,
			   struct v4l2_requestbuffers *reqbufs)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct gsc_dev *gsc = ctx->gsc_dev;
	u32 max_cnt;

	max_cnt = (reqbufs->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) ?
		gsc->variant->in_buf_cnt : gsc->variant->out_buf_cnt;
	if (reqbufs->count > max_cnt)
		return -EINVAL;

	return v4l2_m2m_reqbufs(file, ctx->m2m_ctx, reqbufs);
}

static int gsc_m2m_expbuf(struct file *file, void *fh,
			  struct v4l2_exportbuffer *eb)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_expbuf(file, ctx->m2m_ctx, eb);
}

static int gsc_m2m_querybuf(struct file *file, void *fh,
			    struct v4l2_buffer *buf)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_querybuf(file, ctx->m2m_ctx, buf);
}

static int gsc_m2m_qbuf(struct file *file, void *fh,
			struct v4l2_buffer *buf)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_qbuf(file, ctx->m2m_ctx, buf);
}

static int gsc_m2m_dqbuf(struct file *file, void *fh,
			 struct v4l2_buffer *buf)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_dqbuf(file, ctx->m2m_ctx, buf);
}

static int gsc_m2m_streamon(struct file *file, void *fh,
			    enum v4l2_buf_type type)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	/* The source and target color format need to be set */
	if (V4L2_TYPE_IS_OUTPUT(type)) {
		if (!gsc_ctx_state_is_set(GSC_SRC_FMT, ctx))
			return -EINVAL;
	} else if (!gsc_ctx_state_is_set(GSC_DST_FMT, ctx)) {
		return -EINVAL;
	}

	return v4l2_m2m_streamon(file, ctx->m2m_ctx, type);
}

static int gsc_m2m_streamoff(struct file *file, void *fh,
			     enum v4l2_buf_type type)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_streamoff(file, ctx->m2m_ctx, type);
}

/* Return 1 if rectangle a is enclosed in rectangle b, or 0 otherwise. */
static int is_rectangle_enclosed(struct v4l2_rect *a, struct v4l2_rect *b)
{
	if (a->left < b->left || a->top < b->top)
		return 0;

	if (a->left + a->width > b->left + b->width)
		return 0;

	if (a->top + a->height > b->top + b->height)
		return 0;

	return 1;
}

static int gsc_m2m_g_selection(struct file *file, void *fh,
			       struct v4l2_selection *s)
{
	struct gsc_frame *frame;
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	if ((s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) &&
	    (s->type != V4L2_BUF_TYPE_VIDEO_OUTPUT))
		return -EINVAL;

	frame = ctx_get_frame(ctx, s->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	switch (s->target) {
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
	case V4L2_SEL_TGT_CROP_BOUNDS:
	case V4L2_SEL_TGT_CROP_DEFAULT:
		s->r.left = 0;
		s->r.top = 0;
		s->r.width = frame->f_width;
		s->r.height = frame->f_height;
		return 0;

	case V4L2_SEL_TGT_COMPOSE:
	case V4L2_SEL_TGT_CROP:
		s->r.left = frame->crop.left;
		s->r.top = frame->crop.top;
		s->r.width = frame->crop.width;
		s->r.height = frame->crop.height;
		return 0;
	}

	return -EINVAL;
}

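/*
 * Set a crop/compose rectangle after validating it against the hardware
 * limits and, once both formats are set, the supported scaling ratio.
 */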
static int gsc_m2m_s_selection(struct file *file, void *fh,
			       struct v4l2_selection *s)
{
	struct gsc_frame *frame;
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct v4l2_crop cr;
	struct gsc_variant *variant = ctx->gsc_dev->variant;
	int ret;

	cr.type = s->type;
	cr.c = s->r;

	if ((s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) &&
	    (s->type != V4L2_BUF_TYPE_VIDEO_OUTPUT))
		return -EINVAL;

	ret = gsc_try_crop(ctx, &cr);
	if (ret)
		return ret;

	if (s->flags & V4L2_SEL_FLAG_LE &&
	    !is_rectangle_enclosed(&cr.c, &s->r))
		return -ERANGE;

	if (s->flags & V4L2_SEL_FLAG_GE &&
	    !is_rectangle_enclosed(&s->r, &cr.c))
		return -ERANGE;

	s->r = cr.c;

	switch (s->target) {
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
	case V4L2_SEL_TGT_COMPOSE:
		frame = &ctx->s_frame;
		break;

	case V4L2_SEL_TGT_CROP_BOUNDS:
	case V4L2_SEL_TGT_CROP:
	case V4L2_SEL_TGT_CROP_DEFAULT:
		frame = &ctx->d_frame;
		break;

	default:
		return -EINVAL;
	}

	/* Check to see if scaling ratio is within supported range */
	if (gsc_ctx_state_is_set(GSC_DST_FMT | GSC_SRC_FMT, ctx)) {
		if (s->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
			ret = gsc_check_scaler_ratio(variant, cr.c.width,
				cr.c.height, ctx->d_frame.crop.width,
				ctx->d_frame.crop.height,
				ctx->gsc_ctrls.rotate->val, ctx->out_path);
		} else {
			ret = gsc_check_scaler_ratio(variant,
				ctx->s_frame.crop.width,
				ctx->s_frame.crop.height, cr.c.width,
				cr.c.height, ctx->gsc_ctrls.rotate->val,
				ctx->out_path);
		}

		if (ret) {
			pr_err("Out of scaler range");
			return -EINVAL;
		}
	}

	frame->crop = cr.c;

	gsc_ctx_state_lock_set(GSC_PARAMS, ctx);
	return 0;
}

static const struct v4l2_ioctl_ops gsc_m2m_ioctl_ops = {
	.vidioc_querycap		= gsc_m2m_querycap,
	.vidioc_enum_fmt_vid_cap_mplane	= gsc_m2m_enum_fmt_mplane,
	.vidioc_enum_fmt_vid_out_mplane	= gsc_m2m_enum_fmt_mplane,
	.vidioc_g_fmt_vid_cap_mplane	= gsc_m2m_g_fmt_mplane,
	.vidioc_g_fmt_vid_out_mplane	= gsc_m2m_g_fmt_mplane,
	.vidioc_try_fmt_vid_cap_mplane	= gsc_m2m_try_fmt_mplane,
	.vidioc_try_fmt_vid_out_mplane	= gsc_m2m_try_fmt_mplane,
	.vidioc_s_fmt_vid_cap_mplane	= gsc_m2m_s_fmt_mplane,
	.vidioc_s_fmt_vid_out_mplane	= gsc_m2m_s_fmt_mplane,
	.vidioc_reqbufs			= gsc_m2m_reqbufs,
	.vidioc_expbuf			= gsc_m2m_expbuf,
	.vidioc_querybuf		= gsc_m2m_querybuf,
	.vidioc_qbuf			= gsc_m2m_qbuf,
	.vidioc_dqbuf			= gsc_m2m_dqbuf,
	.vidioc_streamon		= gsc_m2m_streamon,
	.vidioc_streamoff		= gsc_m2m_streamoff,
	.vidioc_g_selection		= gsc_m2m_g_selection,
	.vidioc_s_selection		= gsc_m2m_s_selection
};

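/* Initialize the source (output) and destination (capture) vb2 queues. */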
static int queue_init(void *priv, struct vb2_queue *src_vq,
		      struct vb2_queue *dst_vq)
{
	struct gsc_ctx *ctx = priv;
	int ret;

	memset(src_vq, 0, sizeof(*src_vq));
	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
	src_vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
	src_vq->drv_priv = ctx;
	src_vq->ops = &gsc_m2m_qops;
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	src_vq->lock = &ctx->gsc_dev->lock;
	src_vq->dev = &ctx->gsc_dev->pdev->dev;

	ret = vb2_queue_init(src_vq);
	if (ret)
		return ret;

	memset(dst_vq, 0, sizeof(*dst_vq));
	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	dst_vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
	dst_vq->drv_priv = ctx;
	dst_vq->ops = &gsc_m2m_qops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	dst_vq->lock = &ctx->gsc_dev->lock;
	dst_vq->dev = &ctx->gsc_dev->pdev->dev;

	return vb2_queue_init(dst_vq);
}

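/*
 * open(): allocate a per-file context with its own control handler and
 * m2m context, defaulting both ends to DMA (memory-to-memory) paths.
 */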
static int gsc_m2m_open(struct file *file)
{
	struct gsc_dev *gsc = video_drvdata(file);
	struct gsc_ctx *ctx = NULL;
	int ret;

	pr_debug("pid: %d, state: 0x%lx", task_pid_nr(current), gsc->state);

	if (mutex_lock_interruptible(&gsc->lock))
		return -ERESTARTSYS;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx) {
		ret = -ENOMEM;
		goto unlock;
	}

	v4l2_fh_init(&ctx->fh, gsc->m2m.vfd);
	ret = gsc_ctrls_create(ctx);
	if (ret)
		goto error_fh;

	/* Use separate control handler per file handle */
	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
	file->private_data = &ctx->fh;
	v4l2_fh_add(&ctx->fh);

	ctx->gsc_dev = gsc;
	/* Default color format */
	ctx->s_frame.fmt = get_format(0);
	ctx->d_frame.fmt = get_format(0);
	/* Setup the device context for mem2mem mode. */
	ctx->state = GSC_CTX_M2M;
	ctx->flags = 0;
	ctx->in_path = GSC_DMA;
	ctx->out_path = GSC_DMA;

	ctx->m2m_ctx = v4l2_m2m_ctx_init(gsc->m2m.m2m_dev, ctx, queue_init);
	if (IS_ERR(ctx->m2m_ctx)) {
		pr_err("Failed to initialize m2m context");
		ret = PTR_ERR(ctx->m2m_ctx);
		goto error_ctrls;
	}

	if (gsc->m2m.refcnt++ == 0)
		set_bit(ST_M2M_OPEN, &gsc->state);

	pr_debug("gsc m2m driver is opened, ctx(0x%p)", ctx);

	mutex_unlock(&gsc->lock);
	return 0;

error_ctrls:
	gsc_ctrls_delete(ctx);
	v4l2_fh_del(&ctx->fh);
error_fh:
	v4l2_fh_exit(&ctx->fh);
	kfree(ctx);
unlock:
	mutex_unlock(&gsc->lock);
	return ret;
}

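/* release(): drop the context and clear ST_M2M_OPEN when the last user exits. */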
static int gsc_m2m_release(struct file *file)
{
	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
	struct gsc_dev *gsc = ctx->gsc_dev;

	pr_debug("pid: %d, state: 0x%lx, refcnt= %d",
		 task_pid_nr(current), gsc->state, gsc->m2m.refcnt);

	mutex_lock(&gsc->lock);

	v4l2_m2m_ctx_release(ctx->m2m_ctx);
	gsc_ctrls_delete(ctx);
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);

	if (--gsc->m2m.refcnt <= 0)
		clear_bit(ST_M2M_OPEN, &gsc->state);
	kfree(ctx);

	mutex_unlock(&gsc->lock);
	return 0;
}

static __poll_t gsc_m2m_poll(struct file *file,
			     struct poll_table_struct *wait)
{
	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
	struct gsc_dev *gsc = ctx->gsc_dev;
	__poll_t ret;

	if (mutex_lock_interruptible(&gsc->lock))
		return EPOLLERR;

	ret = v4l2_m2m_poll(file, ctx->m2m_ctx, wait);
	mutex_unlock(&gsc->lock);

	return ret;
}

static int gsc_m2m_mmap(struct file *file, struct vm_area_struct *vma)
{
	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
	struct gsc_dev *gsc = ctx->gsc_dev;
	int ret;

	if (mutex_lock_interruptible(&gsc->lock))
		return -ERESTARTSYS;

	ret = v4l2_m2m_mmap(file, ctx->m2m_ctx, vma);
	mutex_unlock(&gsc->lock);

	return ret;
}

static const struct v4l2_file_operations gsc_m2m_fops = {
	.owner		= THIS_MODULE,
	.open		= gsc_m2m_open,
	.release	= gsc_m2m_release,
	.poll		= gsc_m2m_poll,
	.unlocked_ioctl	= video_ioctl2,
	.mmap		= gsc_m2m_mmap,
};

static const struct v4l2_m2m_ops gsc_m2m_ops = {
	.device_run	= gsc_m2m_device_run,
	.job_abort	= gsc_m2m_job_abort,
};

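/* Register the mem2mem video device node for this G-Scaler instance. */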
int gsc_register_m2m_device(struct gsc_dev *gsc)
{
	struct platform_device *pdev;
	int ret;

	if (!gsc)
		return -ENODEV;

	pdev = gsc->pdev;

	gsc->vdev.fops		= &gsc_m2m_fops;
	gsc->vdev.ioctl_ops	= &gsc_m2m_ioctl_ops;
	gsc->vdev.release	= video_device_release_empty;
	gsc->vdev.lock		= &gsc->lock;
	gsc->vdev.vfl_dir	= VFL_DIR_M2M;
	gsc->vdev.v4l2_dev	= &gsc->v4l2_dev;
	snprintf(gsc->vdev.name, sizeof(gsc->vdev.name), "%s.%d:m2m",
		 GSC_MODULE_NAME, gsc->id);

	video_set_drvdata(&gsc->vdev, gsc);

	gsc->m2m.vfd = &gsc->vdev;
	gsc->m2m.m2m_dev = v4l2_m2m_init(&gsc_m2m_ops);
	if (IS_ERR(gsc->m2m.m2m_dev)) {
		dev_err(&pdev->dev, "failed to initialize v4l2-m2m device\n");
		return PTR_ERR(gsc->m2m.m2m_dev);
	}

	ret = video_register_device(&gsc->vdev, VFL_TYPE_GRABBER, -1);
	if (ret) {
		dev_err(&pdev->dev,
			"%s(): failed to register video device\n", __func__);
		goto err_m2m_release;
	}

	pr_debug("gsc m2m driver registered as /dev/video%d", gsc->vdev.num);
	return 0;

err_m2m_release:
	v4l2_m2m_release(gsc->m2m.m2m_dev);

	return ret;
}

void gsc_unregister_m2m_device(struct gsc_dev *gsc)
{
	if (gsc) {
		v4l2_m2m_release(gsc->m2m.m2m_dev);
		video_unregister_device(&gsc->vdev);
	}
}