// SPDX-License-Identifier: GPL-2.0
/*
 * sh-mobile VEU mem2mem driver
 *
 * Copyright (C) 2012 Renesas Electronics Corporation
 * Author: Guennadi Liakhovetski, <g.liakhovetski@gmx.de>
 * Copyright (C) 2008 Magnus Damm
 */
#include <linux/err.h>
#include <linux/fs.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>
#include <linux/types.h>
#include <linux/videodev2.h>

#include <media/v4l2-dev.h>
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-image-sizes.h>
#include <media/videobuf2-dma-contig.h>
#define VEU_STR 0x00		/* start register */
#define VEU_SWR 0x10		/* src: line length */
#define VEU_SSR 0x14		/* src: image size */
#define VEU_SAYR 0x18		/* src: y/rgb plane address */
#define VEU_SACR 0x1c		/* src: c plane address */
#define VEU_BSSR 0x20		/* bundle mode register */
#define VEU_EDWR 0x30		/* dst: line length */
#define VEU_DAYR 0x34		/* dst: y/rgb plane address */
#define VEU_DACR 0x38		/* dst: c plane address */
#define VEU_TRCR 0x50		/* transform control */
#define VEU_RFCR 0x54		/* resize scale */
#define VEU_RFSR 0x58		/* resize clip */
#define VEU_ENHR 0x5c		/* enhance */
#define VEU_FMCR 0x70		/* filter mode */
#define VEU_VTCR 0x74		/* lowpass vertical */
#define VEU_HTCR 0x78		/* lowpass horizontal */
#define VEU_APCR 0x80		/* color match */
#define VEU_ECCR 0x84		/* color replace */
#define VEU_AFXR 0x90		/* fixed mode */
#define VEU_SWPR 0x94		/* swap */
#define VEU_EIER 0xa0		/* interrupt mask */
#define VEU_EVTR 0xa4		/* interrupt event */
#define VEU_STAR 0xb0		/* status */
#define VEU_BSRR 0xb4		/* reset */

#define VEU_MCR00 0x200		/* color conversion matrix coefficient 00 */
#define VEU_MCR01 0x204		/* color conversion matrix coefficient 01 */
#define VEU_MCR02 0x208		/* color conversion matrix coefficient 02 */
#define VEU_MCR10 0x20c		/* color conversion matrix coefficient 10 */
#define VEU_MCR11 0x210		/* color conversion matrix coefficient 11 */
#define VEU_MCR12 0x214		/* color conversion matrix coefficient 12 */
#define VEU_MCR20 0x218		/* color conversion matrix coefficient 20 */
#define VEU_MCR21 0x21c		/* color conversion matrix coefficient 21 */
#define VEU_MCR22 0x220		/* color conversion matrix coefficient 22 */
#define VEU_COFFR 0x224		/* color conversion offset */
#define VEU_CBR   0x228		/* color conversion clip */
/*
 * 4092x4092 max size is the normal case. In some cases it can be reduced to
 * 2048x2048, in other cases it can be 4092x8188 or even 8188x8188.
 */
#define MAX_W 4092
#define MAX_H 4092
#define MIN_W 8
#define MIN_H 8
#define ALIGN_W 4

/* 3 buffers of 2048 x 1536 - 3 megapixels @ 16bpp */
#define VIDEO_MEM_LIMIT ALIGN(2048 * 1536 * 2 * 3, 1024 * 1024)

#define MEM2MEM_DEF_TRANSLEN 1
struct sh_veu_file {
	struct v4l2_fh fh;
	struct sh_veu_dev *veu_dev;
	bool cfg_needed;
};
struct sh_veu_format {
	u32 fourcc;
	unsigned int depth;
	unsigned int ydepth;
};

/* video data format */
struct sh_veu_vfmt {
	/* Replace with v4l2_rect */
	struct v4l2_rect frame;
	unsigned int bytesperline;
	unsigned int offset_y;
	unsigned int offset_c;
	const struct sh_veu_format *fmt;
};
struct sh_veu_dev {
	struct v4l2_device v4l2_dev;
	struct video_device vdev;
	struct v4l2_m2m_dev *m2m_dev;
	struct device *dev;
	struct v4l2_m2m_ctx *m2m_ctx;
	struct sh_veu_vfmt vfmt_out;
	struct sh_veu_vfmt vfmt_in;
	/* Only single user per direction so far */
	struct sh_veu_file *capture;
	struct sh_veu_file *output;
	struct mutex fop_lock;
	void __iomem *base;
	spinlock_t lock;
	bool is_2h;
	unsigned int xaction;
	bool aborting;
};
enum sh_veu_fmt_idx {
	SH_VEU_FMT_NV12,
	SH_VEU_FMT_NV16,
	SH_VEU_FMT_NV24,
	SH_VEU_FMT_RGB332,
	SH_VEU_FMT_RGB444,
	SH_VEU_FMT_RGB565,
	SH_VEU_FMT_RGB666,
	SH_VEU_FMT_RGB24,
};
#define DEFAULT_IN_WIDTH	VGA_WIDTH
#define DEFAULT_IN_HEIGHT	VGA_HEIGHT
#define DEFAULT_IN_FMTIDX	SH_VEU_FMT_NV12
#define DEFAULT_OUT_WIDTH	VGA_WIDTH
#define DEFAULT_OUT_HEIGHT	VGA_HEIGHT
#define DEFAULT_OUT_FMTIDX	SH_VEU_FMT_RGB565
/*
 * Alignment: Y-plane should be 4-byte aligned for NV12 and NV16, and 8-byte
 * aligned for NV24.
 */
static const struct sh_veu_format sh_veu_fmt[] = {
	[SH_VEU_FMT_NV12]   = { .ydepth = 8,  .depth = 12, .fourcc = V4L2_PIX_FMT_NV12 },
	[SH_VEU_FMT_NV16]   = { .ydepth = 8,  .depth = 16, .fourcc = V4L2_PIX_FMT_NV16 },
	[SH_VEU_FMT_NV24]   = { .ydepth = 8,  .depth = 24, .fourcc = V4L2_PIX_FMT_NV24 },
	[SH_VEU_FMT_RGB332] = { .ydepth = 8,  .depth = 8,  .fourcc = V4L2_PIX_FMT_RGB332 },
	[SH_VEU_FMT_RGB444] = { .ydepth = 16, .depth = 16, .fourcc = V4L2_PIX_FMT_RGB444 },
	[SH_VEU_FMT_RGB565] = { .ydepth = 16, .depth = 16, .fourcc = V4L2_PIX_FMT_RGB565 },
	[SH_VEU_FMT_RGB666] = { .ydepth = 32, .depth = 32, .fourcc = V4L2_PIX_FMT_BGR666 },
	[SH_VEU_FMT_RGB24]  = { .ydepth = 24, .depth = 24, .fourcc = V4L2_PIX_FMT_RGB24 },
};
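
/*
 * Note on the table above: .ydepth is the number of bits per pixel in the Y
 * (or single RGB) plane, while .depth is the total bits per pixel across all
 * planes. The driver derives bytesperline from ydepth and sizeimage from the
 * depth/ydepth ratio, see sh_veu_try_fmt() and sh_veu_g_fmt().
 */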
#define DEFAULT_IN_VFMT (struct sh_veu_vfmt){ \
	.frame = { \
		.width = VGA_WIDTH, \
		.height = VGA_HEIGHT, \
	}, \
	.bytesperline = (VGA_WIDTH * sh_veu_fmt[DEFAULT_IN_FMTIDX].ydepth) >> 3, \
	.fmt = &sh_veu_fmt[DEFAULT_IN_FMTIDX], \
}

#define DEFAULT_OUT_VFMT (struct sh_veu_vfmt){ \
	.frame = { \
		.width = VGA_WIDTH, \
		.height = VGA_HEIGHT, \
	}, \
	.bytesperline = (VGA_WIDTH * sh_veu_fmt[DEFAULT_OUT_FMTIDX].ydepth) >> 3, \
	.fmt = &sh_veu_fmt[DEFAULT_OUT_FMTIDX], \
}
/*
 * TODO: add support for further output formats:
 */
static const int sh_veu_fmt_out[] = {
	SH_VEU_FMT_NV12,
	SH_VEU_FMT_NV16,
	SH_VEU_FMT_NV24,
	SH_VEU_FMT_RGB332,
	SH_VEU_FMT_RGB444,
	SH_VEU_FMT_RGB565,
	SH_VEU_FMT_RGB666,
	SH_VEU_FMT_RGB24,
};

/*
 * TODO: add support for further input formats:
 */
static const int sh_veu_fmt_in[] = {
	SH_VEU_FMT_NV12,
	SH_VEU_FMT_NV16,
	SH_VEU_FMT_NV24,
	SH_VEU_FMT_RGB565,
};
static enum v4l2_colorspace sh_veu_4cc2cspace(u32 fourcc)
{
	switch (fourcc) {
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV24:
		return V4L2_COLORSPACE_SMPTE170M;
	case V4L2_PIX_FMT_RGB332:
	case V4L2_PIX_FMT_RGB444:
	case V4L2_PIX_FMT_RGB565:
	case V4L2_PIX_FMT_BGR666:
	case V4L2_PIX_FMT_RGB24:
		return V4L2_COLORSPACE_SRGB;
	default:
		return V4L2_COLORSPACE_SMPTE170M;
	}
}
static u32 sh_veu_reg_read(struct sh_veu_dev *veu, unsigned int reg)
{
	return ioread32(veu->base + reg);
}

static void sh_veu_reg_write(struct sh_veu_dev *veu, unsigned int reg,
			     u32 value)
{
	iowrite32(value, veu->base + reg);
}
/* ========== mem2mem callbacks ========== */

static void sh_veu_job_abort(void *priv)
{
	struct sh_veu_dev *veu = priv;

	/* Will cancel the transaction in the next interrupt handler */
	veu->aborting = true;
}
static void sh_veu_process(struct sh_veu_dev *veu,
			   struct vb2_buffer *src_buf,
			   struct vb2_buffer *dst_buf)
{
	dma_addr_t addr = vb2_dma_contig_plane_dma_addr(dst_buf, 0);

	sh_veu_reg_write(veu, VEU_DAYR, addr + veu->vfmt_out.offset_y);
	sh_veu_reg_write(veu, VEU_DACR, veu->vfmt_out.offset_c ?
			 addr + veu->vfmt_out.offset_c : 0);
	dev_dbg(veu->dev, "%s(): dst base %lx, y: %x, c: %x\n", __func__,
		(unsigned long)addr,
		veu->vfmt_out.offset_y, veu->vfmt_out.offset_c);

	addr = vb2_dma_contig_plane_dma_addr(src_buf, 0);
	sh_veu_reg_write(veu, VEU_SAYR, addr + veu->vfmt_in.offset_y);
	sh_veu_reg_write(veu, VEU_SACR, veu->vfmt_in.offset_c ?
			 addr + veu->vfmt_in.offset_c : 0);
	dev_dbg(veu->dev, "%s(): src base %lx, y: %x, c: %x\n", __func__,
		(unsigned long)addr,
		veu->vfmt_in.offset_y, veu->vfmt_in.offset_c);

	sh_veu_reg_write(veu, VEU_STR, 1);

	sh_veu_reg_write(veu, VEU_EIER, 1); /* enable interrupt in VEU */
}
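
/*
 * Note: for the RGB formats offset_c is left at 0 by sh_veu_colour_offset()
 * below, so the C-plane address registers (VEU_SACR/VEU_DACR) are simply
 * written as 0 - only the single interleaved plane is used in that case.
 */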
/*
 * sh_veu_device_run() - prepares and starts the device
 *
 * This will be called by the framework when it decides to schedule a particular
 * instance.
 */
static void sh_veu_device_run(void *priv)
{
	struct sh_veu_dev *veu = priv;
	struct vb2_v4l2_buffer *src_buf, *dst_buf;

	src_buf = v4l2_m2m_next_src_buf(veu->m2m_ctx);
	dst_buf = v4l2_m2m_next_dst_buf(veu->m2m_ctx);

	if (src_buf && dst_buf)
		sh_veu_process(veu, &src_buf->vb2_buf, &dst_buf->vb2_buf);
}
/* ========== video ioctls ========== */

static bool sh_veu_is_streamer(struct sh_veu_dev *veu, struct sh_veu_file *veu_file,
			       enum v4l2_buf_type type)
{
	return (type == V4L2_BUF_TYPE_VIDEO_CAPTURE &&
		veu_file == veu->capture) ||
		(type == V4L2_BUF_TYPE_VIDEO_OUTPUT &&
		 veu_file == veu->output);
}
static int sh_veu_queue_init(void *priv, struct vb2_queue *src_vq,
			     struct vb2_queue *dst_vq);
/*
 * It is not unusual to have video nodes open()ed multiple times. While some
 * V4L2 operations are non-intrusive, like querying formats and various
 * parameters, others, like setting formats, starting and stopping streaming,
 * queuing and dequeuing buffers, directly affect hardware configuration and /
 * or execution. This function verifies availability of the requested interface
 * and, if available, reserves it for the requesting user.
 */
static int sh_veu_stream_init(struct sh_veu_dev *veu, struct sh_veu_file *veu_file,
			      enum v4l2_buf_type type)
{
	struct sh_veu_file **stream;

	switch (type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
		stream = &veu->capture;
		break;
	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
		stream = &veu->output;
		break;
	default:
		return -EINVAL;
	}

	if (*stream == veu_file)
		return 0;

	if (*stream)
		return -EBUSY;

	*stream = veu_file;

	return 0;
}
static int sh_veu_context_init(struct sh_veu_dev *veu)
{
	if (veu->m2m_ctx)
		return 0;

	veu->m2m_ctx = v4l2_m2m_ctx_init(veu->m2m_dev, veu,
					 sh_veu_queue_init);

	return PTR_ERR_OR_ZERO(veu->m2m_ctx);
}
static int sh_veu_querycap(struct file *file, void *priv,
			   struct v4l2_capability *cap)
{
	strscpy(cap->driver, "sh-veu", sizeof(cap->driver));
	strscpy(cap->card, "sh-mobile VEU", sizeof(cap->card));
	strscpy(cap->bus_info, "platform:sh-veu", sizeof(cap->bus_info));

	return 0;
}
static int sh_veu_enum_fmt(struct v4l2_fmtdesc *f, const int *fmt, int fmt_num)
{
	if (f->index >= fmt_num)
		return -EINVAL;

	f->pixelformat = sh_veu_fmt[fmt[f->index]].fourcc;

	return 0;
}
static int sh_veu_enum_fmt_vid_cap(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	return sh_veu_enum_fmt(f, sh_veu_fmt_out, ARRAY_SIZE(sh_veu_fmt_out));
}

static int sh_veu_enum_fmt_vid_out(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	return sh_veu_enum_fmt(f, sh_veu_fmt_in, ARRAY_SIZE(sh_veu_fmt_in));
}
static struct sh_veu_vfmt *sh_veu_get_vfmt(struct sh_veu_dev *veu,
					   enum v4l2_buf_type type)
{
	switch (type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
		return &veu->vfmt_out;
	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
		return &veu->vfmt_in;
	default:
		return NULL;
	}
}
static int sh_veu_g_fmt(struct sh_veu_file *veu_file, struct v4l2_format *f)
{
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct sh_veu_dev *veu = veu_file->veu_dev;
	struct sh_veu_vfmt *vfmt;

	vfmt = sh_veu_get_vfmt(veu, f->type);

	pix->width		= vfmt->frame.width;
	pix->height		= vfmt->frame.height;
	pix->field		= V4L2_FIELD_NONE;
	pix->pixelformat	= vfmt->fmt->fourcc;
	pix->colorspace		= sh_veu_4cc2cspace(pix->pixelformat);
	pix->bytesperline	= vfmt->bytesperline;
	pix->sizeimage		= vfmt->bytesperline * pix->height *
		vfmt->fmt->depth / vfmt->fmt->ydepth;
	dev_dbg(veu->dev, "%s(): type: %d, size %u @ %ux%u, fmt %x\n", __func__,
		f->type, pix->sizeimage, pix->width, pix->height, pix->pixelformat);

	return 0;
}
static int sh_veu_g_fmt_vid_out(struct file *file, void *priv,
				struct v4l2_format *f)
{
	return sh_veu_g_fmt(priv, f);
}

static int sh_veu_g_fmt_vid_cap(struct file *file, void *priv,
				struct v4l2_format *f)
{
	return sh_veu_g_fmt(priv, f);
}
static int sh_veu_try_fmt(struct v4l2_format *f, const struct sh_veu_format *fmt)
{
	struct v4l2_pix_format *pix = &f->fmt.pix;
	unsigned int y_bytes_used;

	/*
	 * V4L2 specification suggests, that the driver should correct the
	 * format struct if any of the dimensions is unsupported
	 */
	switch (pix->field) {
	default:
	case V4L2_FIELD_ANY:
		pix->field = V4L2_FIELD_NONE;
		/* fall through: continue handling V4L2_FIELD_NONE */
	case V4L2_FIELD_NONE:
		break;
	}

	v4l_bound_align_image(&pix->width, MIN_W, MAX_W, ALIGN_W,
			      &pix->height, MIN_H, MAX_H, 0, 0);

	y_bytes_used = (pix->width * fmt->ydepth) >> 3;

	if (pix->bytesperline < y_bytes_used)
		pix->bytesperline = y_bytes_used;
	pix->sizeimage = pix->height * pix->bytesperline * fmt->depth / fmt->ydepth;

	pix->pixelformat = fmt->fourcc;
	pix->colorspace = sh_veu_4cc2cspace(pix->pixelformat);

	pr_debug("%s(): type: %d, size %u\n", __func__, f->type, pix->sizeimage);

	return 0;
}
static const struct sh_veu_format *sh_veu_find_fmt(const struct v4l2_format *f)
{
	const int *fmt;
	int i, n, dflt;

	pr_debug("%s(%d;%d)\n", __func__, f->type, f->fmt.pix.field);

	switch (f->type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
		fmt = sh_veu_fmt_out;
		n = ARRAY_SIZE(sh_veu_fmt_out);
		dflt = DEFAULT_OUT_FMTIDX;
		break;
	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
	default:
		fmt = sh_veu_fmt_in;
		n = ARRAY_SIZE(sh_veu_fmt_in);
		dflt = DEFAULT_IN_FMTIDX;
		break;
	}

	for (i = 0; i < n; i++)
		if (sh_veu_fmt[fmt[i]].fourcc == f->fmt.pix.pixelformat)
			return &sh_veu_fmt[fmt[i]];

	return &sh_veu_fmt[dflt];
}
static int sh_veu_try_fmt_vid_cap(struct file *file, void *priv,
				  struct v4l2_format *f)
{
	const struct sh_veu_format *fmt;

	fmt = sh_veu_find_fmt(f);

	return sh_veu_try_fmt(f, fmt);
}

static int sh_veu_try_fmt_vid_out(struct file *file, void *priv,
				  struct v4l2_format *f)
{
	const struct sh_veu_format *fmt;

	fmt = sh_veu_find_fmt(f);

	return sh_veu_try_fmt(f, fmt);
}
static void sh_veu_colour_offset(struct sh_veu_dev *veu, struct sh_veu_vfmt *vfmt)
{
	/* dst_left and dst_top validity will be verified in CROP / COMPOSE */
	unsigned int left = vfmt->frame.left & ~0x03;
	unsigned int top = vfmt->frame.top;
	dma_addr_t offset = (dma_addr_t)top * veu->vfmt_out.bytesperline +
		(((dma_addr_t)left * veu->vfmt_out.fmt->depth) >> 3);
	unsigned int y_line;

	vfmt->offset_y = offset;

	switch (vfmt->fmt->fourcc) {
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV24:
		y_line = ALIGN(vfmt->frame.width, 16);
		vfmt->offset_c = offset + y_line * vfmt->frame.height;
		break;
	case V4L2_PIX_FMT_RGB332:
	case V4L2_PIX_FMT_RGB444:
	case V4L2_PIX_FMT_RGB565:
	case V4L2_PIX_FMT_BGR666:
	case V4L2_PIX_FMT_RGB24:
		vfmt->offset_c = 0;
		break;
	}
}
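
/*
 * For the NV* formats the C plane is placed directly after the Y plane,
 * with the Y line length rounded up to a multiple of 16 (matching the
 * src_stride programmed in sh_veu_configure()); the RGB formats are
 * single-plane, so offset_c stays 0 there.
 */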
static int sh_veu_s_fmt(struct sh_veu_file *veu_file, struct v4l2_format *f)
{
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct sh_veu_dev *veu = veu_file->veu_dev;
	struct sh_veu_vfmt *vfmt;
	struct vb2_queue *vq;
	int ret = sh_veu_context_init(veu);
	if (ret < 0)
		return ret;

	vq = v4l2_m2m_get_vq(veu->m2m_ctx, f->type);
	if (!vq)
		return -EINVAL;

	if (vb2_is_busy(vq)) {
		v4l2_err(&veu_file->veu_dev->v4l2_dev, "%s queue busy\n", __func__);
		return -EBUSY;
	}

	vfmt = sh_veu_get_vfmt(veu, f->type);
	/* called after try_fmt(), hence vfmt != NULL. Implicit BUG_ON() below */

	vfmt->fmt = sh_veu_find_fmt(f);
	/* vfmt->fmt != NULL following the same argument as above */
	vfmt->frame.width = pix->width;
	vfmt->frame.height = pix->height;
	vfmt->bytesperline = pix->bytesperline;

	sh_veu_colour_offset(veu, vfmt);

	/*
	 * We could also verify and require configuration only if any parameters
	 * actually have changed, but it is unlikely, that the user requests the
	 * same configuration several times without closing the device.
	 */
	veu_file->cfg_needed = true;

	dev_dbg(veu->dev,
		"Setting format for type %d, wxh: %dx%d, fmt: %x\n",
		f->type, pix->width, pix->height, vfmt->fmt->fourcc);

	return 0;
}
static int sh_veu_s_fmt_vid_cap(struct file *file, void *priv,
				struct v4l2_format *f)
{
	int ret = sh_veu_try_fmt_vid_cap(file, priv, f);
	if (ret)
		return ret;

	return sh_veu_s_fmt(priv, f);
}

static int sh_veu_s_fmt_vid_out(struct file *file, void *priv,
				struct v4l2_format *f)
{
	int ret = sh_veu_try_fmt_vid_out(file, priv, f);
	if (ret)
		return ret;

	return sh_veu_s_fmt(priv, f);
}
static int sh_veu_reqbufs(struct file *file, void *priv,
			  struct v4l2_requestbuffers *reqbufs)
{
	struct sh_veu_file *veu_file = priv;
	struct sh_veu_dev *veu = veu_file->veu_dev;
	int ret = sh_veu_context_init(veu);
	if (ret < 0)
		return ret;

	ret = sh_veu_stream_init(veu, veu_file, reqbufs->type);
	if (ret < 0)
		return ret;

	return v4l2_m2m_reqbufs(file, veu->m2m_ctx, reqbufs);
}
static int sh_veu_querybuf(struct file *file, void *priv,
			   struct v4l2_buffer *buf)
{
	struct sh_veu_file *veu_file = priv;

	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, buf->type))
		return -EBUSY;

	return v4l2_m2m_querybuf(file, veu_file->veu_dev->m2m_ctx, buf);
}
static int sh_veu_qbuf(struct file *file, void *priv, struct v4l2_buffer *buf)
{
	struct sh_veu_file *veu_file = priv;

	dev_dbg(veu_file->veu_dev->dev, "%s(%d)\n", __func__, buf->type);
	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, buf->type))
		return -EBUSY;

	return v4l2_m2m_qbuf(file, veu_file->veu_dev->m2m_ctx, buf);
}

static int sh_veu_dqbuf(struct file *file, void *priv, struct v4l2_buffer *buf)
{
	struct sh_veu_file *veu_file = priv;

	dev_dbg(veu_file->veu_dev->dev, "%s(%d)\n", __func__, buf->type);
	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, buf->type))
		return -EBUSY;

	return v4l2_m2m_dqbuf(file, veu_file->veu_dev->m2m_ctx, buf);
}
static void sh_veu_calc_scale(struct sh_veu_dev *veu,
			      int size_in, int size_out, int crop_out,
			      u32 *mant, u32 *frac, u32 *rep)
{
	u32 fixpoint;

	/* calculate FRAC and MANT */
	*rep = *mant = *frac = 0;

	if (size_in == size_out) {
		if (crop_out != size_out)
			*mant = 1; /* needed for cropping */
		return;
	}

	/* VEU2H special upscale */
	if (veu->is_2h && size_out > size_in) {
		u32 fixpoint = (4096 * size_in) / size_out;
		*mant = fixpoint / 4096;
		*frac = (fixpoint - (*mant * 4096)) & ~0x07;

		switch (*frac) {
		case 0x800:
			*rep = 1;
			break;
		case 0x400:
			*rep = 3;
			break;
		case 0x200:
			*rep = 7;
			break;
		}
		if (*rep)
			return;
	}

	fixpoint = (4096 * (size_in - 1)) / (size_out + 1);
	*mant = fixpoint / 4096;
	*frac = fixpoint - (*mant * 4096);

	if (*frac & 0x07) {
		/*
		 * FIXME: do we really have to round down twice in the
		 * up-scaling case?
		 */
		*frac &= ~0x07;
		if (size_out > size_in)
			*frac -= 8; /* round down if scaling up */
		else
			*frac += 8; /* round up if scaling down */
	}
}
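
/*
 * mant/frac form a fixed-point scale factor: mant is the integer part and
 * frac the fractional part in 1/4096 steps, i.e. a 4.12 layout within the
 * 16-bit fields that sh_veu_scale_v()/sh_veu_scale_h() pack into VEU_RFCR.
 */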
static unsigned long sh_veu_scale_v(struct sh_veu_dev *veu,
				    int size_in, int size_out, int crop_out)
{
	u32 mant, frac, value, rep;

	sh_veu_calc_scale(veu, size_in, size_out, crop_out, &mant, &frac, &rep);

	/* set scale */
	value = (sh_veu_reg_read(veu, VEU_RFCR) & ~0xffff0000) |
		(((mant << 12) | frac) << 16);
	sh_veu_reg_write(veu, VEU_RFCR, value);

	/* set clip */
	value = (sh_veu_reg_read(veu, VEU_RFSR) & ~0xffff0000) |
		(((rep << 12) | crop_out) << 16);
	sh_veu_reg_write(veu, VEU_RFSR, value);

	return ALIGN((size_in * crop_out) / size_out, 4);
}
static unsigned long sh_veu_scale_h(struct sh_veu_dev *veu,
				    int size_in, int size_out, int crop_out)
{
	u32 mant, frac, value, rep;

	sh_veu_calc_scale(veu, size_in, size_out, crop_out, &mant, &frac, &rep);

	/* set scale */
	value = (sh_veu_reg_read(veu, VEU_RFCR) & ~0xffff) |
		(mant << 12) | frac;
	sh_veu_reg_write(veu, VEU_RFCR, value);

	/* set clip */
	value = (sh_veu_reg_read(veu, VEU_RFSR) & ~0xffff) |
		(rep << 12) | crop_out;
	sh_veu_reg_write(veu, VEU_RFSR, value);

	return ALIGN((size_in * crop_out) / size_out, 4);
}
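
/*
 * The resize registers are shared between both directions: the vertical
 * scale/clip values live in the upper 16 bits of VEU_RFCR/VEU_RFSR and the
 * horizontal ones in the lower 16 bits, which is why both helpers do a
 * read-modify-write on the same registers.
 */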
static void sh_veu_configure(struct sh_veu_dev *veu)
{
	u32 src_width, src_stride, src_height;
	u32 dst_width, dst_stride, dst_height;
	u32 real_w, real_h;

	/* reset VEU */
	sh_veu_reg_write(veu, VEU_BSRR, 0x100);

	src_width = veu->vfmt_in.frame.width;
	src_height = veu->vfmt_in.frame.height;
	src_stride = ALIGN(veu->vfmt_in.frame.width, 16);

	dst_width = real_w = veu->vfmt_out.frame.width;
	dst_height = real_h = veu->vfmt_out.frame.height;
	/* Datasheet is unclear - whether it's always number of bytes or not */
	dst_stride = veu->vfmt_out.bytesperline;

	/*
	 * So far real_w == dst_width && real_h == dst_height, but it wasn't
	 * necessarily the case in the original vidix driver, so, it may change
	 * here in the future too.
	 */
	src_width = sh_veu_scale_h(veu, src_width, real_w, dst_width);
	src_height = sh_veu_scale_v(veu, src_height, real_h, dst_height);

	sh_veu_reg_write(veu, VEU_SWR, src_stride);
	sh_veu_reg_write(veu, VEU_SSR, src_width | (src_height << 16));
	sh_veu_reg_write(veu, VEU_BSSR, 0); /* not using bundle mode */

	sh_veu_reg_write(veu, VEU_EDWR, dst_stride);
	sh_veu_reg_write(veu, VEU_DACR, 0); /* unused for RGB */

	sh_veu_reg_write(veu, VEU_SWPR, 0x67);
	sh_veu_reg_write(veu, VEU_TRCR, (6 << 16) | (0 << 14) | 2 | 4);

	if (veu->is_2h) {
		sh_veu_reg_write(veu, VEU_MCR00, 0x0cc5);
		sh_veu_reg_write(veu, VEU_MCR01, 0x0950);
		sh_veu_reg_write(veu, VEU_MCR02, 0x0000);

		sh_veu_reg_write(veu, VEU_MCR10, 0x397f);
		sh_veu_reg_write(veu, VEU_MCR11, 0x0950);
		sh_veu_reg_write(veu, VEU_MCR12, 0x3ccd);

		sh_veu_reg_write(veu, VEU_MCR20, 0x0000);
		sh_veu_reg_write(veu, VEU_MCR21, 0x0950);
		sh_veu_reg_write(veu, VEU_MCR22, 0x1023);

		sh_veu_reg_write(veu, VEU_COFFR, 0x00800010);
	}
}
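
/*
 * The matrix coefficient values above are fixed; they presumably encode a
 * BT.601-style YCbCr to RGB conversion (note VEU_COFFR = 0x00800010, i.e.
 * the usual 128 chroma / 16 luma level offsets).
 */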
static int sh_veu_streamon(struct file *file, void *priv,
			   enum v4l2_buf_type type)
{
	struct sh_veu_file *veu_file = priv;

	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, type))
		return -EBUSY;

	if (veu_file->cfg_needed) {
		struct sh_veu_dev *veu = veu_file->veu_dev;
		veu_file->cfg_needed = false;
		sh_veu_configure(veu_file->veu_dev);
		veu->xaction = 0;
		veu->aborting = false;
	}

	return v4l2_m2m_streamon(file, veu_file->veu_dev->m2m_ctx, type);
}
static int sh_veu_streamoff(struct file *file, void *priv,
			    enum v4l2_buf_type type)
{
	struct sh_veu_file *veu_file = priv;

	if (!sh_veu_is_streamer(veu_file->veu_dev, veu_file, type))
		return -EBUSY;

	return v4l2_m2m_streamoff(file, veu_file->veu_dev->m2m_ctx, type);
}
static const struct v4l2_ioctl_ops sh_veu_ioctl_ops = {
	.vidioc_querycap	= sh_veu_querycap,

	.vidioc_enum_fmt_vid_cap	= sh_veu_enum_fmt_vid_cap,
	.vidioc_g_fmt_vid_cap		= sh_veu_g_fmt_vid_cap,
	.vidioc_try_fmt_vid_cap		= sh_veu_try_fmt_vid_cap,
	.vidioc_s_fmt_vid_cap		= sh_veu_s_fmt_vid_cap,

	.vidioc_enum_fmt_vid_out	= sh_veu_enum_fmt_vid_out,
	.vidioc_g_fmt_vid_out		= sh_veu_g_fmt_vid_out,
	.vidioc_try_fmt_vid_out		= sh_veu_try_fmt_vid_out,
	.vidioc_s_fmt_vid_out		= sh_veu_s_fmt_vid_out,

	.vidioc_reqbufs			= sh_veu_reqbufs,
	.vidioc_querybuf		= sh_veu_querybuf,

	.vidioc_qbuf			= sh_veu_qbuf,
	.vidioc_dqbuf			= sh_veu_dqbuf,

	.vidioc_streamon		= sh_veu_streamon,
	.vidioc_streamoff		= sh_veu_streamoff,
};
/* ========== Queue operations ========== */

static int sh_veu_queue_setup(struct vb2_queue *vq,
			      unsigned int *nbuffers, unsigned int *nplanes,
			      unsigned int sizes[], struct device *alloc_devs[])
{
	struct sh_veu_dev *veu = vb2_get_drv_priv(vq);
	struct sh_veu_vfmt *vfmt = sh_veu_get_vfmt(veu, vq->type);
	unsigned int count = *nbuffers;
	unsigned int size = vfmt->bytesperline * vfmt->frame.height *
		vfmt->fmt->depth / vfmt->fmt->ydepth;

	if (count < 2)
		*nbuffers = count = 2;

	if (size * count > VIDEO_MEM_LIMIT) {
		count = VIDEO_MEM_LIMIT / size;
		*nbuffers = count;
	}

	if (*nplanes)
		return sizes[0] < size ? -EINVAL : 0;

	*nplanes = 1;
	sizes[0] = size;

	dev_dbg(veu->dev, "get %d buffer(s) of size %d each.\n", count, size);

	return 0;
}
static int sh_veu_buf_prepare(struct vb2_buffer *vb)
{
	struct sh_veu_dev *veu = vb2_get_drv_priv(vb->vb2_queue);
	struct sh_veu_vfmt *vfmt;
	unsigned int sizeimage;

	vfmt = sh_veu_get_vfmt(veu, vb->vb2_queue->type);
	sizeimage = vfmt->bytesperline * vfmt->frame.height *
		vfmt->fmt->depth / vfmt->fmt->ydepth;

	if (vb2_plane_size(vb, 0) < sizeimage) {
		dev_dbg(veu->dev, "%s data will not fit into plane (%lu < %u)\n",
			__func__, vb2_plane_size(vb, 0), sizeimage);
		return -EINVAL;
	}

	vb2_set_plane_payload(vb, 0, sizeimage);

	return 0;
}
static void sh_veu_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct sh_veu_dev *veu = vb2_get_drv_priv(vb->vb2_queue);
	dev_dbg(veu->dev, "%s(%d)\n", __func__, vb->type);
	v4l2_m2m_buf_queue(veu->m2m_ctx, vbuf);
}
static const struct vb2_ops sh_veu_qops = {
	.queue_setup	= sh_veu_queue_setup,
	.buf_prepare	= sh_veu_buf_prepare,
	.buf_queue	= sh_veu_buf_queue,
	.wait_prepare	= vb2_ops_wait_prepare,
	.wait_finish	= vb2_ops_wait_finish,
};
static int sh_veu_queue_init(void *priv, struct vb2_queue *src_vq,
			     struct vb2_queue *dst_vq)
{
	struct sh_veu_dev *veu = priv;
	int ret;

	memset(src_vq, 0, sizeof(*src_vq));
	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
	src_vq->io_modes = VB2_MMAP | VB2_USERPTR;
	src_vq->drv_priv = veu;
	src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	src_vq->ops = &sh_veu_qops;
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->lock = &veu->fop_lock;
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	src_vq->dev = veu->v4l2_dev.dev;

	ret = vb2_queue_init(src_vq);
	if (ret < 0)
		return ret;

	memset(dst_vq, 0, sizeof(*dst_vq));
	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	dst_vq->io_modes = VB2_MMAP | VB2_USERPTR;
	dst_vq->drv_priv = veu;
	dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	dst_vq->ops = &sh_veu_qops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->lock = &veu->fop_lock;
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	dst_vq->dev = veu->v4l2_dev.dev;

	return vb2_queue_init(dst_vq);
}
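
/*
 * In the mem2mem model the OUTPUT queue (src_vq) carries the frames the
 * application feeds in and the CAPTURE queue (dst_vq) returns the converted
 * frames; both share the same fop_lock, and timestamps are copied from
 * source to destination buffers (V4L2_BUF_FLAG_TIMESTAMP_COPY).
 */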
/* ========== File operations ========== */

static int sh_veu_open(struct file *file)
{
	struct sh_veu_dev *veu = video_drvdata(file);
	struct sh_veu_file *veu_file;

	veu_file = kzalloc(sizeof(*veu_file), GFP_KERNEL);
	if (!veu_file)
		return -ENOMEM;

	v4l2_fh_init(&veu_file->fh, video_devdata(file));
	veu_file->veu_dev = veu;
	veu_file->cfg_needed = true;

	file->private_data = veu_file;

	pm_runtime_get_sync(veu->dev);
	v4l2_fh_add(&veu_file->fh);

	dev_dbg(veu->dev, "Created instance %p\n", veu_file);

	return 0;
}
*file
)
981 struct sh_veu_dev
*veu
= video_drvdata(file
);
982 struct sh_veu_file
*veu_file
= file
->private_data
;
984 dev_dbg(veu
->dev
, "Releasing instance %p\n", veu_file
);
986 if (veu_file
== veu
->capture
) {
988 vb2_queue_release(v4l2_m2m_get_vq(veu
->m2m_ctx
, V4L2_BUF_TYPE_VIDEO_CAPTURE
));
991 if (veu_file
== veu
->output
) {
993 vb2_queue_release(v4l2_m2m_get_vq(veu
->m2m_ctx
, V4L2_BUF_TYPE_VIDEO_OUTPUT
));
996 if (!veu
->output
&& !veu
->capture
&& veu
->m2m_ctx
) {
997 v4l2_m2m_ctx_release(veu
->m2m_ctx
);
1001 pm_runtime_put(veu
->dev
);
1002 v4l2_fh_del(&veu_file
->fh
);
1003 v4l2_fh_exit(&veu_file
->fh
);
static __poll_t sh_veu_poll(struct file *file,
			    struct poll_table_struct *wait)
{
	struct sh_veu_file *veu_file = file->private_data;

	return v4l2_m2m_poll(file, veu_file->veu_dev->m2m_ctx, wait);
}

static int sh_veu_mmap(struct file *file, struct vm_area_struct *vma)
{
	struct sh_veu_file *veu_file = file->private_data;

	return v4l2_m2m_mmap(file, veu_file->veu_dev->m2m_ctx, vma);
}
static const struct v4l2_file_operations sh_veu_fops = {
	.owner		= THIS_MODULE,
	.open		= sh_veu_open,
	.release	= sh_veu_release,
	.poll		= sh_veu_poll,
	.unlocked_ioctl	= video_ioctl2,
	.mmap		= sh_veu_mmap,
};
static const struct video_device sh_veu_videodev = {
	.name		= "sh-veu",
	.fops		= &sh_veu_fops,
	.ioctl_ops	= &sh_veu_ioctl_ops,
	.minor		= -1,
	.release	= video_device_release_empty,
	.vfl_dir	= VFL_DIR_M2M,
	.device_caps	= V4L2_CAP_VIDEO_M2M | V4L2_CAP_STREAMING,
};
static const struct v4l2_m2m_ops sh_veu_m2m_ops = {
	.device_run	= sh_veu_device_run,
	.job_abort	= sh_veu_job_abort,
};
static irqreturn_t sh_veu_bh(int irq, void *dev_id)
{
	struct sh_veu_dev *veu = dev_id;

	if (veu->xaction == MEM2MEM_DEF_TRANSLEN || veu->aborting) {
		v4l2_m2m_job_finish(veu->m2m_dev, veu->m2m_ctx);
		veu->xaction = 0;
	} else {
		sh_veu_device_run(veu);
	}

	return IRQ_HANDLED;
}
static irqreturn_t sh_veu_isr(int irq, void *dev_id)
{
	struct sh_veu_dev *veu = dev_id;
	struct vb2_v4l2_buffer *dst;
	struct vb2_v4l2_buffer *src;
	u32 status = sh_veu_reg_read(veu, VEU_EVTR);

	/* bundle read mode not used */
	if (!(status & 1))
		return IRQ_NONE;

	/* disable interrupt in VEU */
	sh_veu_reg_write(veu, VEU_EIER, 0);
	/* halt operation */
	sh_veu_reg_write(veu, VEU_STR, 0);
	/* ack int, write 0 to clear bits */
	sh_veu_reg_write(veu, VEU_EVTR, status & ~1);

	/* conversion completed */
	dst = v4l2_m2m_dst_buf_remove(veu->m2m_ctx);
	src = v4l2_m2m_src_buf_remove(veu->m2m_ctx);
	if (!src || !dst)
		return IRQ_NONE;

	dst->vb2_buf.timestamp = src->vb2_buf.timestamp;
	dst->flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
	dst->flags |=
		src->flags & V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
	dst->timecode = src->timecode;

	spin_lock(&veu->lock);
	v4l2_m2m_buf_done(src, VB2_BUF_STATE_DONE);
	v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE);
	spin_unlock(&veu->lock);

	veu->xaction++;

	return IRQ_WAKE_THREAD;
}
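
/*
 * The hard IRQ handler above acknowledges the hardware and completes the
 * vb2 buffers, then returns IRQ_WAKE_THREAD to run the threaded handler
 * (sh_veu_bh), which either finishes the m2m job or kicks off the next
 * transaction.
 */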
static int sh_veu_probe(struct platform_device *pdev)
{
	struct sh_veu_dev *veu;
	struct resource *reg_res;
	struct video_device *vdev;
	int irq, ret;

	reg_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	irq = platform_get_irq(pdev, 0);

	if (!reg_res || irq <= 0) {
		dev_err(&pdev->dev, "Insufficient VEU platform information.\n");
		return -ENODEV;
	}

	veu = devm_kzalloc(&pdev->dev, sizeof(*veu), GFP_KERNEL);
	if (!veu)
		return -ENOMEM;

	veu->is_2h = resource_size(reg_res) == 0x22c;

	veu->base = devm_ioremap_resource(&pdev->dev, reg_res);
	if (IS_ERR(veu->base))
		return PTR_ERR(veu->base);

	ret = devm_request_threaded_irq(&pdev->dev, irq, sh_veu_isr, sh_veu_bh,
					0, "veu", veu);
	if (ret < 0)
		return ret;

	ret = v4l2_device_register(&pdev->dev, &veu->v4l2_dev);
	if (ret < 0) {
		dev_err(&pdev->dev, "Error registering v4l2 device\n");
		return ret;
	}

	vdev = &veu->vdev;

	*vdev = sh_veu_videodev;
	vdev->v4l2_dev = &veu->v4l2_dev;
	spin_lock_init(&veu->lock);
	mutex_init(&veu->fop_lock);
	vdev->lock = &veu->fop_lock;

	video_set_drvdata(vdev, veu);

	veu->dev	= &pdev->dev;
	veu->vfmt_out	= DEFAULT_OUT_VFMT;
	veu->vfmt_in	= DEFAULT_IN_VFMT;

	veu->m2m_dev = v4l2_m2m_init(&sh_veu_m2m_ops);
	if (IS_ERR(veu->m2m_dev)) {
		ret = PTR_ERR(veu->m2m_dev);
		v4l2_err(&veu->v4l2_dev, "Failed to init mem2mem device: %d\n", ret);
		goto em2minit;
	}

	pm_runtime_enable(&pdev->dev);
	pm_runtime_resume(&pdev->dev);

	ret = video_register_device(vdev, VFL_TYPE_GRABBER, -1);
	pm_runtime_suspend(&pdev->dev);
	if (ret < 0)
		goto evidreg;

	return ret;

evidreg:
	pm_runtime_disable(&pdev->dev);
	v4l2_m2m_release(veu->m2m_dev);
em2minit:
	v4l2_device_unregister(&veu->v4l2_dev);
	return ret;
}
static int sh_veu_remove(struct platform_device *pdev)
{
	struct v4l2_device *v4l2_dev = platform_get_drvdata(pdev);
	struct sh_veu_dev *veu = container_of(v4l2_dev,
					      struct sh_veu_dev, v4l2_dev);

	video_unregister_device(&veu->vdev);
	pm_runtime_disable(&pdev->dev);
	v4l2_m2m_release(veu->m2m_dev);
	v4l2_device_unregister(&veu->v4l2_dev);

	return 0;
}
static struct platform_driver __refdata sh_veu_pdrv = {
	.remove		= sh_veu_remove,
	.driver		= {
		.name	= "sh_veu",
	},
};

module_platform_driver_probe(sh_veu_pdrv, sh_veu_probe);
MODULE_DESCRIPTION("sh-mobile VEU mem2mem driver");
MODULE_AUTHOR("Guennadi Liakhovetski, <g.liakhovetski@gmx.de>");
MODULE_LICENSE("GPL v2");