/*
 * Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
 * Copyright (C) 2017 Linaro Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 */
#include <linux/clk.h>
#include <linux/iopoll.h>
#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>
#include <media/videobuf2-dma-sg.h>
#include <media/v4l2-mem2mem.h>
#include <asm/div64.h>

#include "core.h"
#include "helpers.h"
#include "hfi_helper.h"
#include "hfi_venus_io.h"
struct intbuf {
	struct list_head list;
	u32 type;
	size_t size;
	void *va;
	dma_addr_t da;
	unsigned long attrs;
};
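/*
 * Map a V4L2 compressed pixel format to its HFI codec and check whether
 * that codec is advertised by the core for the given session type
 * (encoder or decoder).
 */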
bool venus_helper_check_codec(struct venus_inst *inst, u32 v4l2_pixfmt)
{
	struct venus_core *core = inst->core;
	u32 session_type = inst->session_type;
	u32 codec;

	switch (v4l2_pixfmt) {
	case V4L2_PIX_FMT_H264:
		codec = HFI_VIDEO_CODEC_H264;
		break;
	case V4L2_PIX_FMT_H263:
		codec = HFI_VIDEO_CODEC_H263;
		break;
	case V4L2_PIX_FMT_MPEG1:
		codec = HFI_VIDEO_CODEC_MPEG1;
		break;
	case V4L2_PIX_FMT_MPEG2:
		codec = HFI_VIDEO_CODEC_MPEG2;
		break;
	case V4L2_PIX_FMT_MPEG4:
		codec = HFI_VIDEO_CODEC_MPEG4;
		break;
	case V4L2_PIX_FMT_VC1_ANNEX_G:
	case V4L2_PIX_FMT_VC1_ANNEX_L:
		codec = HFI_VIDEO_CODEC_VC1;
		break;
	case V4L2_PIX_FMT_VP8:
		codec = HFI_VIDEO_CODEC_VP8;
		break;
	case V4L2_PIX_FMT_VP9:
		codec = HFI_VIDEO_CODEC_VP9;
		break;
	case V4L2_PIX_FMT_XVID:
		codec = HFI_VIDEO_CODEC_DIVX;
		break;
	case V4L2_PIX_FMT_HEVC:
		codec = HFI_VIDEO_CODEC_HEVC;
		break;
	default:
		return false;
	}

	if (session_type == VIDC_SESSION_TYPE_ENC && core->enc_codecs & codec)
		return true;

	if (session_type == VIDC_SESSION_TYPE_DEC && core->dec_codecs & codec)
		return true;

	return false;
}
EXPORT_SYMBOL_GPL(venus_helper_check_codec);
static int venus_helper_queue_dpb_bufs(struct venus_inst *inst)
{
	struct intbuf *buf;
	int ret = 0;

	list_for_each_entry(buf, &inst->dpbbufs, list) {
		struct hfi_frame_data fdata;

		memset(&fdata, 0, sizeof(fdata));
		fdata.alloc_len = buf->size;
		fdata.device_addr = buf->da;
		fdata.buffer_type = buf->type;

		ret = hfi_session_process_buf(inst, &fdata);
		if (ret)
			goto fail;
	}

fail:
	return ret;
}
int venus_helper_free_dpb_bufs(struct venus_inst *inst)
{
	struct intbuf *buf, *n;

	list_for_each_entry_safe(buf, n, &inst->dpbbufs, list) {
		list_del_init(&buf->list);
		dma_free_attrs(inst->core->dev, buf->size, buf->va, buf->da,
			       buf->attrs);
		kfree(buf);
	}

	INIT_LIST_HEAD(&inst->dpbbufs);

	return 0;
}
EXPORT_SYMBOL_GPL(venus_helper_free_dpb_bufs);
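/*
 * Allocate the decoded-picture-buffer (DPB) pool used in split output
 * mode. The buffer count comes from the firmware minimum buffer-count
 * requirement and the size from the configured output buffer size for
 * the selected DPB buffer type.
 */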
int venus_helper_alloc_dpb_bufs(struct venus_inst *inst)
{
	struct venus_core *core = inst->core;
	struct device *dev = core->dev;
	enum hfi_version ver = core->res->hfi_version;
	struct hfi_buffer_requirements bufreq;
	u32 buftype = inst->dpb_buftype;
	unsigned int dpb_size = 0;
	struct intbuf *buf;
	unsigned int i;
	u32 count;
	int ret;

	/* no need to allocate dpb buffers */
	if (!inst->dpb_fmt)
		return 0;

	if (inst->dpb_buftype == HFI_BUFFER_OUTPUT)
		dpb_size = inst->output_buf_size;
	else if (inst->dpb_buftype == HFI_BUFFER_OUTPUT2)
		dpb_size = inst->output2_buf_size;

	if (!dpb_size)
		return 0;

	ret = venus_helper_get_bufreq(inst, buftype, &bufreq);
	if (ret)
		return ret;

	count = HFI_BUFREQ_COUNT_MIN(&bufreq, ver);

	for (i = 0; i < count; i++) {
		buf = kzalloc(sizeof(*buf), GFP_KERNEL);
		if (!buf) {
			ret = -ENOMEM;
			goto fail;
		}

		buf->type = buftype;
		buf->size = dpb_size;
		buf->attrs = DMA_ATTR_WRITE_COMBINE |
			     DMA_ATTR_NO_KERNEL_MAPPING;
		buf->va = dma_alloc_attrs(dev, buf->size, &buf->da, GFP_KERNEL,
					  buf->attrs);
		if (!buf->va) {
			kfree(buf);
			ret = -ENOMEM;
			goto fail;
		}

		list_add_tail(&buf->list, &inst->dpbbufs);
	}

	return 0;

fail:
	venus_helper_free_dpb_bufs(inst);
	return ret;
}
EXPORT_SYMBOL_GPL(venus_helper_alloc_dpb_bufs);
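/*
 * Allocate and hand to the firmware all internal (scratch/persist)
 * buffers of the given type, as reported by the buffer-requirements
 * property. A missing requirement is not treated as an error.
 */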
static int intbufs_set_buffer(struct venus_inst *inst, u32 type)
{
	struct venus_core *core = inst->core;
	struct device *dev = core->dev;
	struct hfi_buffer_requirements bufreq;
	struct hfi_buffer_desc bd;
	struct intbuf *buf;
	unsigned int i;
	int ret;

	ret = venus_helper_get_bufreq(inst, type, &bufreq);
	if (ret)
		return 0;

	if (!bufreq.size)
		return 0;

	for (i = 0; i < bufreq.count_actual; i++) {
		buf = kzalloc(sizeof(*buf), GFP_KERNEL);
		if (!buf) {
			ret = -ENOMEM;
			goto fail;
		}

		buf->type = bufreq.type;
		buf->size = bufreq.size;
		buf->attrs = DMA_ATTR_WRITE_COMBINE |
			     DMA_ATTR_NO_KERNEL_MAPPING;
		buf->va = dma_alloc_attrs(dev, buf->size, &buf->da, GFP_KERNEL,
					  buf->attrs);
		if (!buf->va) {
			ret = -ENOMEM;
			goto fail;
		}

		memset(&bd, 0, sizeof(bd));
		bd.buffer_size = buf->size;
		bd.buffer_type = buf->type;
		bd.num_buffers = 1;
		bd.device_addr = buf->da;

		ret = hfi_session_set_buffers(inst, &bd);
		if (ret) {
			dev_err(dev, "set session buffers failed\n");
			goto dma_free;
		}

		list_add_tail(&buf->list, &inst->internalbufs);
	}

	return 0;

dma_free:
	dma_free_attrs(dev, buf->size, buf->va, buf->da, buf->attrs);
fail:
	kfree(buf);
	return ret;
}
static int intbufs_unset_buffers(struct venus_inst *inst)
{
	struct hfi_buffer_desc bd = {0};
	struct intbuf *buf, *n;
	int ret = 0;

	list_for_each_entry_safe(buf, n, &inst->internalbufs, list) {
		bd.buffer_size = buf->size;
		bd.buffer_type = buf->type;
		bd.num_buffers = 1;
		bd.device_addr = buf->da;
		bd.response_required = true;

		ret = hfi_session_unset_buffers(inst, &bd);

		list_del_init(&buf->list);
		dma_free_attrs(inst->core->dev, buf->size, buf->va, buf->da,
			       buf->attrs);
		kfree(buf);
	}

	return ret;
}
static const unsigned int intbuf_types_1xx[] = {
	HFI_BUFFER_INTERNAL_SCRATCH(HFI_VERSION_1XX),
	HFI_BUFFER_INTERNAL_SCRATCH_1(HFI_VERSION_1XX),
	HFI_BUFFER_INTERNAL_SCRATCH_2(HFI_VERSION_1XX),
	HFI_BUFFER_INTERNAL_PERSIST,
	HFI_BUFFER_INTERNAL_PERSIST_1,
};

static const unsigned int intbuf_types_4xx[] = {
	HFI_BUFFER_INTERNAL_SCRATCH(HFI_VERSION_4XX),
	HFI_BUFFER_INTERNAL_SCRATCH_1(HFI_VERSION_4XX),
	HFI_BUFFER_INTERNAL_SCRATCH_2(HFI_VERSION_4XX),
	HFI_BUFFER_INTERNAL_PERSIST,
	HFI_BUFFER_INTERNAL_PERSIST_1,
};
static int intbufs_alloc(struct venus_inst *inst)
{
	const unsigned int *intbuf;
	size_t arr_sz, i;
	int ret;

	if (IS_V4(inst->core)) {
		arr_sz = ARRAY_SIZE(intbuf_types_4xx);
		intbuf = intbuf_types_4xx;
	} else {
		arr_sz = ARRAY_SIZE(intbuf_types_1xx);
		intbuf = intbuf_types_1xx;
	}

	for (i = 0; i < arr_sz; i++) {
		ret = intbufs_set_buffer(inst, intbuf[i]);
		if (ret)
			goto error;
	}

	return 0;

error:
	intbufs_unset_buffers(inst);
	return ret;
}

static int intbufs_free(struct venus_inst *inst)
{
	return intbufs_unset_buffers(inst);
}
static u32 load_per_instance(struct venus_inst *inst)
{
	u32 mbs;

	if (!inst || !(inst->state >= INST_INIT && inst->state < INST_STOP))
		return 0;

	mbs = (ALIGN(inst->width, 16) / 16) * (ALIGN(inst->height, 16) / 16);

	return mbs * inst->fps;
}
static u32 load_per_type(struct venus_core *core, u32 session_type)
{
	struct venus_inst *inst = NULL;
	u32 mbs_per_sec = 0;

	mutex_lock(&core->lock);
	list_for_each_entry(inst, &core->instances, list) {
		if (inst->session_type != session_type)
			continue;

		mbs_per_sec += load_per_instance(inst);
	}
	mutex_unlock(&core->lock);

	return mbs_per_sec;
}
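/*
 * Pick a frequency from the platform frequency table based on the total
 * macroblocks-per-second load of all active encoder and decoder
 * instances, and apply it to the video core clocks.
 */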
static int load_scale_clocks(struct venus_core *core)
{
	const struct freq_tbl *table = core->res->freq_tbl;
	unsigned int num_rows = core->res->freq_tbl_size;
	unsigned long freq = table[0].freq;
	struct clk *clk = core->clks[0];
	struct device *dev = core->dev;
	u32 mbs_per_sec;
	unsigned int i;
	int ret;

	mbs_per_sec = load_per_type(core, VIDC_SESSION_TYPE_ENC) +
		      load_per_type(core, VIDC_SESSION_TYPE_DEC);

	if (mbs_per_sec > core->res->max_load)
		dev_warn(dev, "HW is overloaded, needed: %d max: %d\n",
			 mbs_per_sec, core->res->max_load);

	if (!mbs_per_sec && num_rows > 1) {
		freq = table[num_rows - 1].freq;
		goto set_freq;
	}

	for (i = 0; i < num_rows; i++) {
		if (mbs_per_sec > table[i].load)
			break;
		freq = table[i].freq;
	}

set_freq:

	ret = clk_set_rate(clk, freq);
	if (ret)
		goto err;

	ret = clk_set_rate(core->core0_clk, freq);
	if (ret)
		goto err;

	ret = clk_set_rate(core->core1_clk, freq);
	if (ret)
		goto err;

	return 0;

err:
	dev_err(dev, "failed to set clock rate %lu (%d)\n", freq, ret);
	return ret;
}
static void fill_buffer_desc(const struct venus_buffer *buf,
			     struct hfi_buffer_desc *bd, bool response)
{
	memset(bd, 0, sizeof(*bd));
	bd->buffer_type = HFI_BUFFER_OUTPUT;
	bd->buffer_size = buf->size;
	bd->num_buffers = 1;
	bd->device_addr = buf->dma_addr;
	bd->response_required = response;
}
static void return_buf_error(struct venus_inst *inst,
			     struct vb2_v4l2_buffer *vbuf)
{
	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;

	if (vbuf->vb2_buf.type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
		v4l2_m2m_src_buf_remove_by_buf(m2m_ctx, vbuf);
	else
		v4l2_m2m_dst_buf_remove_by_buf(m2m_ctx, vbuf);

	v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR);
}
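/*
 * Translate a vb2 buffer into an HFI frame descriptor and queue it to
 * the firmware, either on the bitstream input port (OUTPUT_MPLANE) or
 * on the raw output port (CAPTURE_MPLANE) of the session.
 */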
static int
session_process_buf(struct venus_inst *inst, struct vb2_v4l2_buffer *vbuf)
{
	struct venus_buffer *buf = to_venus_buffer(vbuf);
	struct vb2_buffer *vb = &vbuf->vb2_buf;
	unsigned int type = vb->type;
	struct hfi_frame_data fdata;
	int ret;

	memset(&fdata, 0, sizeof(fdata));
	fdata.alloc_len = buf->size;
	fdata.device_addr = buf->dma_addr;
	fdata.timestamp = vb->timestamp;
	do_div(fdata.timestamp, NSEC_PER_USEC);
	fdata.flags = 0;
	fdata.clnt_data = vbuf->vb2_buf.index;

	if (type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
		fdata.buffer_type = HFI_BUFFER_INPUT;
		fdata.filled_len = vb2_get_plane_payload(vb, 0);
		fdata.offset = vb->planes[0].data_offset;

		if (vbuf->flags & V4L2_BUF_FLAG_LAST || !fdata.filled_len)
			fdata.flags |= HFI_BUFFERFLAG_EOS;
	} else if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
		if (inst->session_type == VIDC_SESSION_TYPE_ENC)
			fdata.buffer_type = HFI_BUFFER_OUTPUT;
		else
			fdata.buffer_type = inst->opb_buftype;
		fdata.filled_len = 0;
		fdata.offset = 0;
	}

	ret = hfi_session_process_buf(inst, &fdata);
	if (ret)
		return ret;

	return 0;
}
static bool is_dynamic_bufmode(struct venus_inst *inst)
{
	struct venus_core *core = inst->core;
	struct venus_caps *caps;

	caps = venus_caps_by_codec(core, inst->hfi_codec, inst->session_type);
	if (!caps)
		return false;

	return caps->cap_bufs_mode_dynamic;
}
static int session_unregister_bufs(struct venus_inst *inst)
{
	struct venus_buffer *buf, *n;
	struct hfi_buffer_desc bd;
	int ret = 0;

	if (is_dynamic_bufmode(inst))
		return 0;

	list_for_each_entry_safe(buf, n, &inst->registeredbufs, reg_list) {
		fill_buffer_desc(buf, &bd, true);
		ret = hfi_session_unset_buffers(inst, &bd);
		list_del_init(&buf->reg_list);
	}

	return ret;
}
static int session_register_bufs(struct venus_inst *inst)
{
	struct venus_core *core = inst->core;
	struct device *dev = core->dev;
	struct hfi_buffer_desc bd;
	struct venus_buffer *buf;
	int ret = 0;

	if (is_dynamic_bufmode(inst))
		return 0;

	list_for_each_entry(buf, &inst->registeredbufs, reg_list) {
		fill_buffer_desc(buf, &bd, false);
		ret = hfi_session_set_buffers(inst, &bd);
		if (ret) {
			dev_err(dev, "%s: set buffer failed\n", __func__);
			break;
		}
	}

	return ret;
}
static u32 to_hfi_raw_fmt(u32 v4l2_fmt)
{
	switch (v4l2_fmt) {
	case V4L2_PIX_FMT_NV12:
		return HFI_COLOR_FORMAT_NV12;
	case V4L2_PIX_FMT_NV21:
		return HFI_COLOR_FORMAT_NV21;
	default:
		break;
	}

	return 0;
}
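/*
 * Query the firmware buffer requirements for the session and return the
 * entry matching the requested buffer type, if any.
 */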
int venus_helper_get_bufreq(struct venus_inst *inst, u32 type,
			    struct hfi_buffer_requirements *req)
{
	u32 ptype = HFI_PROPERTY_CONFIG_BUFFER_REQUIREMENTS;
	union hfi_get_property hprop;
	unsigned int i;
	int ret;

	if (req)
		memset(req, 0, sizeof(*req));

	ret = hfi_session_get_property(inst, ptype, &hprop);
	if (ret)
		return ret;

	ret = -EINVAL;

	for (i = 0; i < HFI_BUFFER_TYPE_MAX; i++) {
		if (hprop.bufreq[i].type != type)
			continue;

		if (req)
			memcpy(req, &hprop.bufreq[i], sizeof(*req));
		ret = 0;
		break;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(venus_helper_get_bufreq);
static u32 get_framesize_raw_nv12(u32 width, u32 height)
{
	u32 y_stride, uv_stride, y_plane;
	u32 y_sclines, uv_sclines, uv_plane;
	u32 size;

	y_stride = ALIGN(width, 128);
	uv_stride = ALIGN(width, 128);
	y_sclines = ALIGN(height, 32);
	uv_sclines = ALIGN(((height + 1) >> 1), 16);

	y_plane = y_stride * y_sclines;
	uv_plane = uv_stride * uv_sclines + SZ_4K;
	size = y_plane + uv_plane + SZ_8K;

	return ALIGN(size, SZ_4K);
}
static u32 get_framesize_raw_nv12_ubwc(u32 width, u32 height)
{
	u32 y_meta_stride, y_meta_plane;
	u32 y_stride, y_plane;
	u32 uv_meta_stride, uv_meta_plane;
	u32 uv_stride, uv_plane;
	u32 extradata = SZ_16K;

	y_meta_stride = ALIGN(DIV_ROUND_UP(width, 32), 64);
	y_meta_plane = y_meta_stride * ALIGN(DIV_ROUND_UP(height, 8), 16);
	y_meta_plane = ALIGN(y_meta_plane, SZ_4K);

	y_stride = ALIGN(width, 128);
	y_plane = ALIGN(y_stride * ALIGN(height, 32), SZ_4K);

	uv_meta_stride = ALIGN(DIV_ROUND_UP(width / 2, 16), 64);
	uv_meta_plane = uv_meta_stride * ALIGN(DIV_ROUND_UP(height / 2, 8), 16);
	uv_meta_plane = ALIGN(uv_meta_plane, SZ_4K);

	uv_stride = ALIGN(width, 128);
	uv_plane = ALIGN(uv_stride * ALIGN(height / 2, 32), SZ_4K);

	return ALIGN(y_meta_plane + y_plane + uv_meta_plane + uv_plane +
		     max(extradata, y_stride * 48), SZ_4K);
}
u32 venus_helper_get_framesz_raw(u32 hfi_fmt, u32 width, u32 height)
{
	switch (hfi_fmt) {
	case HFI_COLOR_FORMAT_NV12:
	case HFI_COLOR_FORMAT_NV21:
		return get_framesize_raw_nv12(width, height);
	case HFI_COLOR_FORMAT_NV12_UBWC:
		return get_framesize_raw_nv12_ubwc(width, height);
	default:
		return 0;
	}
}
EXPORT_SYMBOL_GPL(venus_helper_get_framesz_raw);
u32 venus_helper_get_framesz(u32 v4l2_fmt, u32 width, u32 height)
{
	u32 hfi_fmt, sz;
	bool compressed;

	switch (v4l2_fmt) {
	case V4L2_PIX_FMT_MPEG:
	case V4L2_PIX_FMT_H264:
	case V4L2_PIX_FMT_H264_NO_SC:
	case V4L2_PIX_FMT_H264_MVC:
	case V4L2_PIX_FMT_H263:
	case V4L2_PIX_FMT_MPEG1:
	case V4L2_PIX_FMT_MPEG2:
	case V4L2_PIX_FMT_MPEG4:
	case V4L2_PIX_FMT_XVID:
	case V4L2_PIX_FMT_VC1_ANNEX_G:
	case V4L2_PIX_FMT_VC1_ANNEX_L:
	case V4L2_PIX_FMT_VP8:
	case V4L2_PIX_FMT_VP9:
	case V4L2_PIX_FMT_HEVC:
		compressed = true;
		break;
	default:
		compressed = false;
		break;
	}

	if (compressed) {
		sz = ALIGN(height, 32) * ALIGN(width, 32) * 3 / 2 / 2;
		return ALIGN(sz, SZ_4K);
	}

	hfi_fmt = to_hfi_raw_fmt(v4l2_fmt);
	if (!hfi_fmt)
		return 0;

	return venus_helper_get_framesz_raw(hfi_fmt, width, height);
}
EXPORT_SYMBOL_GPL(venus_helper_get_framesz);
int venus_helper_set_input_resolution(struct venus_inst *inst,
				      unsigned int width, unsigned int height)
{
	u32 ptype = HFI_PROPERTY_PARAM_FRAME_SIZE;
	struct hfi_framesize fs;

	fs.buffer_type = HFI_BUFFER_INPUT;
	fs.width = width;
	fs.height = height;

	return hfi_session_set_property(inst, ptype, &fs);
}
EXPORT_SYMBOL_GPL(venus_helper_set_input_resolution);

int venus_helper_set_output_resolution(struct venus_inst *inst,
				       unsigned int width, unsigned int height,
				       u32 buftype)
{
	u32 ptype = HFI_PROPERTY_PARAM_FRAME_SIZE;
	struct hfi_framesize fs;

	fs.buffer_type = buftype;
	fs.width = width;
	fs.height = height;

	return hfi_session_set_property(inst, ptype, &fs);
}
EXPORT_SYMBOL_GPL(venus_helper_set_output_resolution);
int venus_helper_set_work_mode(struct venus_inst *inst, u32 mode)
{
	const u32 ptype = HFI_PROPERTY_PARAM_WORK_MODE;
	struct hfi_video_work_mode wm;

	if (!IS_V4(inst->core))
		return 0;

	wm.video_work_mode = mode;

	return hfi_session_set_property(inst, ptype, &wm);
}
EXPORT_SYMBOL_GPL(venus_helper_set_work_mode);

int venus_helper_set_core_usage(struct venus_inst *inst, u32 usage)
{
	const u32 ptype = HFI_PROPERTY_CONFIG_VIDEOCORES_USAGE;
	struct hfi_videocores_usage_type cu;

	if (!IS_V4(inst->core))
		return 0;

	cu.video_core_enable_mask = usage;

	return hfi_session_set_property(inst, ptype, &cu);
}
EXPORT_SYMBOL_GPL(venus_helper_set_core_usage);
int venus_helper_set_num_bufs(struct venus_inst *inst, unsigned int input_bufs,
			      unsigned int output_bufs,
			      unsigned int output2_bufs)
{
	u32 ptype = HFI_PROPERTY_PARAM_BUFFER_COUNT_ACTUAL;
	struct hfi_buffer_count_actual buf_count;
	int ret;

	buf_count.type = HFI_BUFFER_INPUT;
	buf_count.count_actual = input_bufs;

	ret = hfi_session_set_property(inst, ptype, &buf_count);
	if (ret)
		return ret;

	buf_count.type = HFI_BUFFER_OUTPUT;
	buf_count.count_actual = output_bufs;

	ret = hfi_session_set_property(inst, ptype, &buf_count);
	if (ret)
		return ret;

	if (output2_bufs) {
		buf_count.type = HFI_BUFFER_OUTPUT2;
		buf_count.count_actual = output2_bufs;

		ret = hfi_session_set_property(inst, ptype, &buf_count);
	}

	return ret;
}
EXPORT_SYMBOL_GPL(venus_helper_set_num_bufs);
int venus_helper_set_raw_format(struct venus_inst *inst, u32 hfi_format,
				u32 buftype)
{
	const u32 ptype = HFI_PROPERTY_PARAM_UNCOMPRESSED_FORMAT_SELECT;
	struct hfi_uncompressed_format_select fmt;

	fmt.buffer_type = buftype;
	fmt.format = hfi_format;

	return hfi_session_set_property(inst, ptype, &fmt);
}
EXPORT_SYMBOL_GPL(venus_helper_set_raw_format);

int venus_helper_set_color_format(struct venus_inst *inst, u32 pixfmt)
{
	u32 hfi_format, buftype;

	if (inst->session_type == VIDC_SESSION_TYPE_DEC)
		buftype = HFI_BUFFER_OUTPUT;
	else if (inst->session_type == VIDC_SESSION_TYPE_ENC)
		buftype = HFI_BUFFER_INPUT;
	else
		return -EINVAL;

	hfi_format = to_hfi_raw_fmt(pixfmt);
	if (!hfi_format)
		return -EINVAL;

	return venus_helper_set_raw_format(inst, hfi_format, buftype);
}
EXPORT_SYMBOL_GPL(venus_helper_set_color_format);
int venus_helper_set_multistream(struct venus_inst *inst, bool out_en,
				 bool out2_en)
{
	struct hfi_multi_stream multi = {0};
	u32 ptype = HFI_PROPERTY_PARAM_VDEC_MULTI_STREAM;
	int ret;

	multi.buffer_type = HFI_BUFFER_OUTPUT;
	multi.enable = out_en;

	ret = hfi_session_set_property(inst, ptype, &multi);
	if (ret)
		return ret;

	multi.buffer_type = HFI_BUFFER_OUTPUT2;
	multi.enable = out2_en;

	return hfi_session_set_property(inst, ptype, &multi);
}
EXPORT_SYMBOL_GPL(venus_helper_set_multistream);
int venus_helper_set_dyn_bufmode(struct venus_inst *inst)
{
	const u32 ptype = HFI_PROPERTY_PARAM_BUFFER_ALLOC_MODE;
	struct hfi_buffer_alloc_mode mode;
	int ret;

	if (!is_dynamic_bufmode(inst))
		return 0;

	mode.type = HFI_BUFFER_OUTPUT;
	mode.mode = HFI_BUFFER_MODE_DYNAMIC;

	ret = hfi_session_set_property(inst, ptype, &mode);
	if (ret)
		return ret;

	mode.type = HFI_BUFFER_OUTPUT2;

	return hfi_session_set_property(inst, ptype, &mode);
}
EXPORT_SYMBOL_GPL(venus_helper_set_dyn_bufmode);

int venus_helper_set_bufsize(struct venus_inst *inst, u32 bufsize, u32 buftype)
{
	const u32 ptype = HFI_PROPERTY_PARAM_BUFFER_SIZE_ACTUAL;
	struct hfi_buffer_size_actual bufsz;

	bufsz.type = buftype;
	bufsz.size = bufsize;

	return hfi_session_set_property(inst, ptype, &bufsz);
}
EXPORT_SYMBOL_GPL(venus_helper_set_bufsize);
unsigned int venus_helper_get_opb_size(struct venus_inst *inst)
{
	/* the encoder has only one output */
	if (inst->session_type == VIDC_SESSION_TYPE_ENC)
		return inst->output_buf_size;

	if (inst->opb_buftype == HFI_BUFFER_OUTPUT)
		return inst->output_buf_size;
	else if (inst->opb_buftype == HFI_BUFFER_OUTPUT2)
		return inst->output2_buf_size;

	return 0;
}
EXPORT_SYMBOL_GPL(venus_helper_get_opb_size);
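/*
 * Worker for the delayed-process list: buffers still marked READONLY
 * (still referenced by the firmware) are skipped, the rest are removed
 * from the list and queued back to the firmware.
 */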
static void delayed_process_buf_func(struct work_struct *work)
{
	struct venus_buffer *buf, *n;
	struct venus_inst *inst;
	int ret;

	inst = container_of(work, struct venus_inst, delayed_process_work);

	mutex_lock(&inst->lock);

	if (!(inst->streamon_out & inst->streamon_cap))
		goto unlock;

	list_for_each_entry_safe(buf, n, &inst->delayed_process, ref_list) {
		if (buf->flags & HFI_BUFFERFLAG_READONLY)
			continue;

		ret = session_process_buf(inst, &buf->vb);
		if (ret)
			return_buf_error(inst, &buf->vb);

		list_del_init(&buf->ref_list);
	}

unlock:
	mutex_unlock(&inst->lock);
}
void venus_helper_release_buf_ref(struct venus_inst *inst, unsigned int idx)
{
	struct venus_buffer *buf;

	list_for_each_entry(buf, &inst->registeredbufs, reg_list) {
		if (buf->vb.vb2_buf.index == idx) {
			buf->flags &= ~HFI_BUFFERFLAG_READONLY;
			schedule_work(&inst->delayed_process_work);
			break;
		}
	}
}
EXPORT_SYMBOL_GPL(venus_helper_release_buf_ref);

void venus_helper_acquire_buf_ref(struct vb2_v4l2_buffer *vbuf)
{
	struct venus_buffer *buf = to_venus_buffer(vbuf);

	buf->flags |= HFI_BUFFERFLAG_READONLY;
}
EXPORT_SYMBOL_GPL(venus_helper_acquire_buf_ref);

static int is_buf_refed(struct venus_inst *inst, struct vb2_v4l2_buffer *vbuf)
{
	struct venus_buffer *buf = to_venus_buffer(vbuf);

	if (buf->flags & HFI_BUFFERFLAG_READONLY) {
		list_add_tail(&buf->ref_list, &inst->delayed_process);
		schedule_work(&inst->delayed_process_work);
		return 1;
	}

	return 0;
}
struct vb2_v4l2_buffer *
venus_helper_find_buf(struct venus_inst *inst, unsigned int type, u32 idx)
{
	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;

	if (type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
		return v4l2_m2m_src_buf_remove_by_idx(m2m_ctx, idx);
	else
		return v4l2_m2m_dst_buf_remove_by_idx(m2m_ctx, idx);
}
EXPORT_SYMBOL_GPL(venus_helper_find_buf);

int venus_helper_vb2_buf_init(struct vb2_buffer *vb)
{
	struct venus_inst *inst = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct venus_buffer *buf = to_venus_buffer(vbuf);
	struct sg_table *sgt;

	sgt = vb2_dma_sg_plane_desc(vb, 0);
	if (!sgt)
		return -EFAULT;

	buf->size = vb2_plane_size(vb, 0);
	buf->dma_addr = sg_dma_address(sgt->sgl);

	if (vb->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		list_add_tail(&buf->reg_list, &inst->registeredbufs);

	return 0;
}
EXPORT_SYMBOL_GPL(venus_helper_vb2_buf_init);
int venus_helper_vb2_buf_prepare(struct vb2_buffer *vb)
{
	struct venus_inst *inst = vb2_get_drv_priv(vb->vb2_queue);
	unsigned int out_buf_size = venus_helper_get_opb_size(inst);

	if (vb->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE &&
	    vb2_plane_size(vb, 0) < out_buf_size)
		return -EINVAL;
	if (vb->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE &&
	    vb2_plane_size(vb, 0) < inst->input_buf_size)
		return -EINVAL;

	return 0;
}
EXPORT_SYMBOL_GPL(venus_helper_vb2_buf_prepare);
void venus_helper_vb2_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct venus_inst *inst = vb2_get_drv_priv(vb->vb2_queue);
	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
	int ret;

	mutex_lock(&inst->lock);

	v4l2_m2m_buf_queue(m2m_ctx, vbuf);

	if (!(inst->streamon_out & inst->streamon_cap))
		goto unlock;

	ret = is_buf_refed(inst, vbuf);
	if (ret)
		goto unlock;

	ret = session_process_buf(inst, vbuf);
	if (ret)
		return_buf_error(inst, vbuf);

unlock:
	mutex_unlock(&inst->lock);
}
EXPORT_SYMBOL_GPL(venus_helper_vb2_buf_queue);

void venus_helper_buffers_done(struct venus_inst *inst,
			       enum vb2_buffer_state state)
{
	struct vb2_v4l2_buffer *buf;

	while ((buf = v4l2_m2m_src_buf_remove(inst->m2m_ctx)))
		v4l2_m2m_buf_done(buf, state);
	while ((buf = v4l2_m2m_dst_buf_remove(inst->m2m_ctx)))
		v4l2_m2m_buf_done(buf, state);
}
EXPORT_SYMBOL_GPL(venus_helper_buffers_done);
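/*
 * Stop streaming on a queue: if both queues were streaming, tear down
 * the HFI session, release internal and DPB buffers, rescale the clocks
 * and return all pending vb2 buffers in the error state.
 */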
void venus_helper_vb2_stop_streaming(struct vb2_queue *q)
{
	struct venus_inst *inst = vb2_get_drv_priv(q);
	struct venus_core *core = inst->core;
	int ret;

	mutex_lock(&inst->lock);

	if (inst->streamon_out & inst->streamon_cap) {
		ret = hfi_session_stop(inst);
		ret |= hfi_session_unload_res(inst);
		ret |= session_unregister_bufs(inst);
		ret |= intbufs_free(inst);
		ret |= hfi_session_deinit(inst);

		if (inst->session_error || core->sys_error)
			ret = -EIO;

		if (ret)
			hfi_session_abort(inst);

		venus_helper_free_dpb_bufs(inst);

		load_scale_clocks(core);
		INIT_LIST_HEAD(&inst->registeredbufs);
	}

	venus_helper_buffers_done(inst, VB2_BUF_STATE_ERROR);

	if (q->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
		inst->streamon_out = 0;
	else
		inst->streamon_cap = 0;

	mutex_unlock(&inst->lock);
}
EXPORT_SYMBOL_GPL(venus_helper_vb2_stop_streaming);
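/*
 * Bring up the HFI session: allocate internal buffers, register the
 * capture buffers, scale the clocks, load resources, start the session
 * and queue the DPB buffers, unwinding each step on failure.
 */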
int venus_helper_vb2_start_streaming(struct venus_inst *inst)
{
	struct venus_core *core = inst->core;
	int ret;

	ret = intbufs_alloc(inst);
	if (ret)
		return ret;

	ret = session_register_bufs(inst);
	if (ret)
		goto err_bufs_free;

	load_scale_clocks(core);

	ret = hfi_session_load_res(inst);
	if (ret)
		goto err_unreg_bufs;

	ret = hfi_session_start(inst);
	if (ret)
		goto err_unload_res;

	ret = venus_helper_queue_dpb_bufs(inst);
	if (ret)
		goto err_session_stop;

	return 0;

err_session_stop:
	hfi_session_stop(inst);
err_unload_res:
	hfi_session_unload_res(inst);
err_unreg_bufs:
	session_unregister_bufs(inst);
err_bufs_free:
	intbufs_free(inst);
	return ret;
}
EXPORT_SYMBOL_GPL(venus_helper_vb2_start_streaming);
void venus_helper_m2m_device_run(void *priv)
{
	struct venus_inst *inst = priv;
	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
	struct v4l2_m2m_buffer *buf, *n;
	int ret;

	mutex_lock(&inst->lock);

	v4l2_m2m_for_each_dst_buf_safe(m2m_ctx, buf, n) {
		ret = session_process_buf(inst, &buf->vb);
		if (ret)
			return_buf_error(inst, &buf->vb);
	}

	v4l2_m2m_for_each_src_buf_safe(m2m_ctx, buf, n) {
		ret = session_process_buf(inst, &buf->vb);
		if (ret)
			return_buf_error(inst, &buf->vb);
	}

	mutex_unlock(&inst->lock);
}
EXPORT_SYMBOL_GPL(venus_helper_m2m_device_run);

void venus_helper_m2m_job_abort(void *priv)
{
	struct venus_inst *inst = priv;

	v4l2_m2m_job_finish(inst->m2m_dev, inst->m2m_ctx);
}
EXPORT_SYMBOL_GPL(venus_helper_m2m_job_abort);

void venus_helper_init_instance(struct venus_inst *inst)
{
	if (inst->session_type == VIDC_SESSION_TYPE_DEC) {
		INIT_LIST_HEAD(&inst->delayed_process);
		INIT_WORK(&inst->delayed_process_work,
			  delayed_process_buf_func);
	}
}
EXPORT_SYMBOL_GPL(venus_helper_init_instance);
static bool find_fmt_from_caps(struct venus_caps *caps, u32 buftype, u32 fmt)
{
	unsigned int i;

	for (i = 0; i < caps->num_fmts; i++) {
		if (caps->fmts[i].buftype == buftype &&
		    caps->fmts[i].fmt == fmt)
			return true;
	}

	return false;
}
int venus_helper_get_out_fmts(struct venus_inst *inst, u32 v4l2_fmt,
			      u32 *out_fmt, u32 *out2_fmt, bool ubwc)
{
	struct venus_core *core = inst->core;
	struct venus_caps *caps;
	u32 ubwc_fmt, fmt = to_hfi_raw_fmt(v4l2_fmt);
	bool found, found_ubwc;

	*out_fmt = *out2_fmt = 0;

	if (!fmt)
		return -EINVAL;

	caps = venus_caps_by_codec(core, inst->hfi_codec, inst->session_type);
	if (!caps)
		return -EINVAL;

	if (ubwc) {
		ubwc_fmt = fmt | HFI_COLOR_FORMAT_UBWC_BASE;
		found_ubwc = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT,
						ubwc_fmt);
		found = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT2, fmt);

		if (found_ubwc && found) {
			*out_fmt = ubwc_fmt;
			*out2_fmt = fmt;
			return 0;
		}
	}

	found = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT, fmt);
	if (found) {
		*out_fmt = fmt;
		*out2_fmt = 0;
		return 0;
	}

	found = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT2, fmt);
	if (found) {
		*out_fmt = 0;
		*out2_fmt = fmt;
		return 0;
	}

	return -EINVAL;
}
EXPORT_SYMBOL_GPL(venus_helper_get_out_fmts);
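/*
 * Toggle the video core power control in the wrapper registers. On 3xx
 * the control write is fire-and-forget; on 4xx the power status bit is
 * polled until it reflects the requested state.
 */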
int venus_helper_power_enable(struct venus_core *core, u32 session_type,
			      bool enable)
{
	void __iomem *ctrl, *stat;
	u32 val;
	int ret;

	if (!IS_V3(core) && !IS_V4(core))
		return 0;

	if (IS_V3(core)) {
		if (session_type == VIDC_SESSION_TYPE_DEC)
			ctrl = core->base + WRAPPER_VDEC_VCODEC_POWER_CONTROL;
		else
			ctrl = core->base + WRAPPER_VENC_VCODEC_POWER_CONTROL;
		if (enable)
			writel(0, ctrl);
		else
			writel(1, ctrl);

		return 0;
	}

	if (session_type == VIDC_SESSION_TYPE_DEC) {
		ctrl = core->base + WRAPPER_VCODEC0_MMCC_POWER_CONTROL;
		stat = core->base + WRAPPER_VCODEC0_MMCC_POWER_STATUS;
	} else {
		ctrl = core->base + WRAPPER_VCODEC1_MMCC_POWER_CONTROL;
		stat = core->base + WRAPPER_VCODEC1_MMCC_POWER_STATUS;
	}

	if (enable) {
		writel(0, ctrl);

		ret = readl_poll_timeout(stat, val, val & BIT(1), 1, 100);
		if (ret)
			return ret;
	} else {
		writel(1, ctrl);

		ret = readl_poll_timeout(stat, val, !(val & BIT(1)), 1, 100);
		if (ret)
			return ret;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(venus_helper_power_enable);