/*
 * Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
 * Copyright (C) 2017 Linaro Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

#include <linux/clk.h>
#include <linux/iopoll.h>
#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>
#include <media/videobuf2-dma-sg.h>
#include <media/v4l2-mem2mem.h>
#include <asm/div64.h>

#include "core.h"
#include "helpers.h"
#include "hfi_helper.h"
#include "hfi_venus_io.h"

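/*
 * struct intbuf below tracks one driver-allocated buffer handed to the
 * firmware (internal scratch/persist buffers and decoder DPB buffers):
 * its HFI buffer type, size, kernel virtual address, device (IOVA)
 * address and the DMA attributes used for the allocation.
 */
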
struct intbuf {
	struct list_head list;
	u32 type;
	size_t size;
	void *va;
	dma_addr_t da;
	unsigned long attrs;
};

bool venus_helper_check_codec(struct venus_inst *inst, u32 v4l2_pixfmt)
{
	struct venus_core *core = inst->core;
	u32 session_type = inst->session_type;
	u32 codec;

	switch (v4l2_pixfmt) {
	case V4L2_PIX_FMT_H264:
		codec = HFI_VIDEO_CODEC_H264;
		break;
	case V4L2_PIX_FMT_H263:
		codec = HFI_VIDEO_CODEC_H263;
		break;
	case V4L2_PIX_FMT_MPEG1:
		codec = HFI_VIDEO_CODEC_MPEG1;
		break;
	case V4L2_PIX_FMT_MPEG2:
		codec = HFI_VIDEO_CODEC_MPEG2;
		break;
	case V4L2_PIX_FMT_MPEG4:
		codec = HFI_VIDEO_CODEC_MPEG4;
		break;
	case V4L2_PIX_FMT_VC1_ANNEX_G:
	case V4L2_PIX_FMT_VC1_ANNEX_L:
		codec = HFI_VIDEO_CODEC_VC1;
		break;
	case V4L2_PIX_FMT_VP8:
		codec = HFI_VIDEO_CODEC_VP8;
		break;
	case V4L2_PIX_FMT_VP9:
		codec = HFI_VIDEO_CODEC_VP9;
		break;
	case V4L2_PIX_FMT_XVID:
		codec = HFI_VIDEO_CODEC_DIVX;
		break;
	case V4L2_PIX_FMT_HEVC:
		codec = HFI_VIDEO_CODEC_HEVC;
		break;
	default:
		return false;
	}

	if (session_type == VIDC_SESSION_TYPE_ENC && core->enc_codecs & codec)
		return true;

	if (session_type == VIDC_SESSION_TYPE_DEC && core->dec_codecs & codec)
		return true;

	return false;
}
EXPORT_SYMBOL_GPL(venus_helper_check_codec);

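/*
 * Queue every pre-allocated DPB (decoded picture buffer) to the firmware.
 * Each list entry is wrapped in an hfi_frame_data descriptor and handed to
 * hfi_session_process_buf(); on the first failure the remaining buffers are
 * left unqueued and the error is returned to the caller.
 */
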
static int venus_helper_queue_dpb_bufs(struct venus_inst *inst)
{
	struct intbuf *buf;
	int ret = 0;

	list_for_each_entry(buf, &inst->dpbbufs, list) {
		struct hfi_frame_data fdata;

		memset(&fdata, 0, sizeof(fdata));
		fdata.alloc_len = buf->size;
		fdata.device_addr = buf->da;
		fdata.buffer_type = buf->type;

		ret = hfi_session_process_buf(inst, &fdata);
		if (ret)
			break;
	}

	return ret;
}

int venus_helper_free_dpb_bufs(struct venus_inst *inst)
{
	struct intbuf *buf, *n;

	list_for_each_entry_safe(buf, n, &inst->dpbbufs, list) {
		list_del_init(&buf->list);
		dma_free_attrs(inst->core->dev, buf->size, buf->va, buf->da,
			       buf->attrs);
		kfree(buf);
	}

	INIT_LIST_HEAD(&inst->dpbbufs);

	return 0;
}
EXPORT_SYMBOL_GPL(venus_helper_free_dpb_bufs);

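/*
 * Allocate the decoder's DPB pool. The buffer type (OUTPUT vs OUTPUT2) and
 * per-buffer size come from the instance, the minimum count from the
 * firmware buffer requirements; each buffer is allocated write-combined
 * with no kernel mapping and linked into inst->dpbbufs.
 */
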
int venus_helper_alloc_dpb_bufs(struct venus_inst *inst)
{
	struct venus_core *core = inst->core;
	struct device *dev = core->dev;
	enum hfi_version ver = core->res->hfi_version;
	struct hfi_buffer_requirements bufreq;
	u32 buftype = inst->dpb_buftype;
	unsigned int dpb_size = 0;
	struct intbuf *buf;
	unsigned int i;
	u32 count;
	int ret;

	/* no need to allocate dpb buffers */
	if (!inst->dpb_fmt)
		return 0;

	if (inst->dpb_buftype == HFI_BUFFER_OUTPUT)
		dpb_size = inst->output_buf_size;
	else if (inst->dpb_buftype == HFI_BUFFER_OUTPUT2)
		dpb_size = inst->output2_buf_size;

	ret = venus_helper_get_bufreq(inst, buftype, &bufreq);
	if (ret)
		return ret;

	count = HFI_BUFREQ_COUNT_MIN(&bufreq, ver);

	for (i = 0; i < count; i++) {
		buf = kzalloc(sizeof(*buf), GFP_KERNEL);
		if (!buf) {
			ret = -ENOMEM;
			goto fail;
		}

		buf->type = buftype;
		buf->size = dpb_size;
		buf->attrs = DMA_ATTR_WRITE_COMBINE |
			     DMA_ATTR_NO_KERNEL_MAPPING;
		buf->va = dma_alloc_attrs(dev, buf->size, &buf->da, GFP_KERNEL,
					  buf->attrs);
		if (!buf->va) {
			kfree(buf);
			ret = -ENOMEM;
			goto fail;
		}

		list_add_tail(&buf->list, &inst->dpbbufs);
	}

	return 0;

fail:
	venus_helper_free_dpb_bufs(inst);
	return ret;
}
EXPORT_SYMBOL_GPL(venus_helper_alloc_dpb_bufs);

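/*
 * Allocate and register one class of firmware-internal buffers (scratch or
 * persist). The count and size come from the firmware buffer requirements
 * for @type; every allocation is announced to the firmware with
 * hfi_session_set_buffers() and then tracked on inst->internalbufs.
 */
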
static int intbufs_set_buffer(struct venus_inst *inst, u32 type)
{
	struct venus_core *core = inst->core;
	struct device *dev = core->dev;
	struct hfi_buffer_requirements bufreq;
	struct hfi_buffer_desc bd;
	struct intbuf *buf;
	unsigned int i;
	int ret;

	ret = venus_helper_get_bufreq(inst, type, &bufreq);
	if (ret)
		return ret;

	for (i = 0; i < bufreq.count_actual; i++) {
		buf = kzalloc(sizeof(*buf), GFP_KERNEL);
		if (!buf) {
			ret = -ENOMEM;
			goto fail;
		}

		buf->type = bufreq.type;
		buf->size = bufreq.size;
		buf->attrs = DMA_ATTR_WRITE_COMBINE |
			     DMA_ATTR_NO_KERNEL_MAPPING;
		buf->va = dma_alloc_attrs(dev, buf->size, &buf->da, GFP_KERNEL,
					  buf->attrs);
		if (!buf->va) {
			ret = -ENOMEM;
			goto fail;
		}

		memset(&bd, 0, sizeof(bd));
		bd.buffer_size = buf->size;
		bd.buffer_type = buf->type;
		bd.device_addr = buf->da;

		ret = hfi_session_set_buffers(inst, &bd);
		if (ret) {
			dev_err(dev, "set session buffers failed\n");
			goto dma_free;
		}

		list_add_tail(&buf->list, &inst->internalbufs);
	}

	return 0;

dma_free:
	dma_free_attrs(dev, buf->size, buf->va, buf->da, buf->attrs);
fail:
	kfree(buf);
	return ret;
}

static int intbufs_unset_buffers(struct venus_inst *inst)
{
	struct hfi_buffer_desc bd = {0};
	struct intbuf *buf, *n;
	int ret = 0;

	list_for_each_entry_safe(buf, n, &inst->internalbufs, list) {
		bd.buffer_size = buf->size;
		bd.buffer_type = buf->type;
		bd.device_addr = buf->da;
		bd.response_required = true;

		ret = hfi_session_unset_buffers(inst, &bd);

		list_del_init(&buf->list);
		dma_free_attrs(inst->core->dev, buf->size, buf->va, buf->da,
			       buf->attrs);
		kfree(buf);
	}

	return ret;
}

static const unsigned int intbuf_types_1xx[] = {
	HFI_BUFFER_INTERNAL_SCRATCH(HFI_VERSION_1XX),
	HFI_BUFFER_INTERNAL_SCRATCH_1(HFI_VERSION_1XX),
	HFI_BUFFER_INTERNAL_SCRATCH_2(HFI_VERSION_1XX),
	HFI_BUFFER_INTERNAL_PERSIST,
	HFI_BUFFER_INTERNAL_PERSIST_1,
};

static const unsigned int intbuf_types_4xx[] = {
	HFI_BUFFER_INTERNAL_SCRATCH(HFI_VERSION_4XX),
	HFI_BUFFER_INTERNAL_SCRATCH_1(HFI_VERSION_4XX),
	HFI_BUFFER_INTERNAL_SCRATCH_2(HFI_VERSION_4XX),
	HFI_BUFFER_INTERNAL_PERSIST,
	HFI_BUFFER_INTERNAL_PERSIST_1,
};

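/*
 * Allocate every internal buffer type the firmware needs for this session.
 * The set of types differs between pre-4xx and 4xx HFI versions; on any
 * failure the buffers registered so far are torn down again.
 */
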
static int intbufs_alloc(struct venus_inst *inst)
{
	const unsigned int *intbuf;
	size_t arr_sz, i;
	int ret;

	if (IS_V4(inst->core)) {
		arr_sz = ARRAY_SIZE(intbuf_types_4xx);
		intbuf = intbuf_types_4xx;
	} else {
		arr_sz = ARRAY_SIZE(intbuf_types_1xx);
		intbuf = intbuf_types_1xx;
	}

	for (i = 0; i < arr_sz; i++) {
		ret = intbufs_set_buffer(inst, intbuf[i]);
		if (ret)
			goto error;
	}

	return 0;

error:
	intbufs_unset_buffers(inst);
	return ret;
}

static int intbufs_free(struct venus_inst *inst)
{
	return intbufs_unset_buffers(inst);
}

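/*
 * Clock scaling: the load of an instance is expressed in macroblocks per
 * second (width x height in 16x16 macroblocks times the frame rate). The
 * per-type and total loads below drive the frequency table lookup in
 * load_scale_clocks().
 */
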
static u32 load_per_instance(struct venus_inst *inst)
{
	u32 mbs;

	if (!inst || !(inst->state >= INST_INIT && inst->state < INST_STOP))
		return 0;

	mbs = (ALIGN(inst->width, 16) / 16) * (ALIGN(inst->height, 16) / 16);

	return mbs * inst->fps;
}

static u32 load_per_type(struct venus_core *core, u32 session_type)
{
	struct venus_inst *inst = NULL;
	u32 mbs_per_sec = 0;

	mutex_lock(&core->lock);
	list_for_each_entry(inst, &core->instances, list) {
		if (inst->session_type != session_type)
			continue;

		mbs_per_sec += load_per_instance(inst);
	}
	mutex_unlock(&core->lock);

	return mbs_per_sec;
}

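/*
 * Pick a core clock rate from the platform frequency table that covers the
 * combined encoder + decoder macroblock/s load, warn if the hardware is
 * overloaded, and program the vcodec core clocks accordingly.
 */
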
static int load_scale_clocks(struct venus_core *core)
{
	const struct freq_tbl *table = core->res->freq_tbl;
	unsigned int num_rows = core->res->freq_tbl_size;
	unsigned long freq = table[0].freq;
	struct clk *clk = core->clks[0];
	struct device *dev = core->dev;
	u32 mbs_per_sec;
	unsigned int i;
	int ret;

	mbs_per_sec = load_per_type(core, VIDC_SESSION_TYPE_ENC) +
		      load_per_type(core, VIDC_SESSION_TYPE_DEC);

	if (mbs_per_sec > core->res->max_load)
		dev_warn(dev, "HW is overloaded, needed: %d max: %d\n",
			 mbs_per_sec, core->res->max_load);

	if (!mbs_per_sec && num_rows > 1) {
		freq = table[num_rows - 1].freq;
		goto set_freq;
	}

	for (i = 0; i < num_rows; i++) {
		if (mbs_per_sec > table[i].load)
			break;
		freq = table[i].freq;
	}

set_freq:
	ret = clk_set_rate(clk, freq);
	if (ret)
		goto err;

	ret = clk_set_rate(core->core0_clk, freq);
	if (ret)
		goto err;

	ret = clk_set_rate(core->core1_clk, freq);
	if (ret)
		goto err;

	return 0;

err:
	dev_err(dev, "failed to set clock rate %lu (%d)\n", freq, ret);
	return ret;
}

static void fill_buffer_desc(const struct venus_buffer *buf,
			     struct hfi_buffer_desc *bd, bool response)
{
	memset(bd, 0, sizeof(*bd));
	bd->buffer_type = HFI_BUFFER_OUTPUT;
	bd->buffer_size = buf->size;
	bd->device_addr = buf->dma_addr;
	bd->response_required = response;
}

static void return_buf_error(struct venus_inst *inst,
			     struct vb2_v4l2_buffer *vbuf)
{
	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;

	if (vbuf->vb2_buf.type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
		v4l2_m2m_src_buf_remove_by_buf(m2m_ctx, vbuf);
	else
		v4l2_m2m_dst_buf_remove_by_buf(m2m_ctx, vbuf);

	v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR);
}

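/*
 * Translate a vb2 buffer into an hfi_frame_data descriptor and queue it to
 * the firmware: OUTPUT_MPLANE buffers become HFI input (bitstream) buffers,
 * CAPTURE_MPLANE buffers become HFI output buffers (or the decoder's OPB
 * type), with the timestamp converted from nanoseconds to microseconds.
 */
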
static int
session_process_buf(struct venus_inst *inst, struct vb2_v4l2_buffer *vbuf)
{
	struct venus_buffer *buf = to_venus_buffer(vbuf);
	struct vb2_buffer *vb = &vbuf->vb2_buf;
	unsigned int type = vb->type;
	struct hfi_frame_data fdata;
	int ret;

	memset(&fdata, 0, sizeof(fdata));
	fdata.alloc_len = buf->size;
	fdata.device_addr = buf->dma_addr;
	fdata.timestamp = vb->timestamp;
	do_div(fdata.timestamp, NSEC_PER_USEC);
	fdata.clnt_data = vbuf->vb2_buf.index;

	if (!fdata.timestamp)
		fdata.flags |= HFI_BUFFERFLAG_TIMESTAMPINVALID;

	if (type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
		fdata.buffer_type = HFI_BUFFER_INPUT;
		fdata.filled_len = vb2_get_plane_payload(vb, 0);
		fdata.offset = vb->planes[0].data_offset;

		if (vbuf->flags & V4L2_BUF_FLAG_LAST || !fdata.filled_len)
			fdata.flags |= HFI_BUFFERFLAG_EOS;
	} else if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
		if (inst->session_type == VIDC_SESSION_TYPE_ENC)
			fdata.buffer_type = HFI_BUFFER_OUTPUT;
		else
			fdata.buffer_type = inst->opb_buftype;
		fdata.filled_len = 0;
	}

	ret = hfi_session_process_buf(inst, &fdata);
	if (ret)
		return ret;

	return 0;
}

static bool is_dynamic_bufmode(struct venus_inst *inst)
{
	struct venus_core *core = inst->core;
	struct venus_caps *caps;

	caps = venus_caps_by_codec(core, inst->hfi_codec, inst->session_type);
	if (!caps)
		return false;

	return caps->cap_bufs_mode_dynamic;
}

static int session_unregister_bufs(struct venus_inst *inst)
{
	struct venus_buffer *buf, *n;
	struct hfi_buffer_desc bd;
	int ret = 0;

	if (is_dynamic_bufmode(inst))
		return 0;

	list_for_each_entry_safe(buf, n, &inst->registeredbufs, reg_list) {
		fill_buffer_desc(buf, &bd, true);
		ret = hfi_session_unset_buffers(inst, &bd);
		list_del_init(&buf->reg_list);
	}

	return ret;
}

static int session_register_bufs(struct venus_inst *inst)
{
	struct venus_core *core = inst->core;
	struct device *dev = core->dev;
	struct hfi_buffer_desc bd;
	struct venus_buffer *buf;
	int ret = 0;

	if (is_dynamic_bufmode(inst))
		return 0;

	list_for_each_entry(buf, &inst->registeredbufs, reg_list) {
		fill_buffer_desc(buf, &bd, false);
		ret = hfi_session_set_buffers(inst, &bd);
		if (ret) {
			dev_err(dev, "%s: set buffer failed\n", __func__);
			break;
		}
	}

	return ret;
}

static u32 to_hfi_raw_fmt(u32 v4l2_fmt)
{
	switch (v4l2_fmt) {
	case V4L2_PIX_FMT_NV12:
		return HFI_COLOR_FORMAT_NV12;
	case V4L2_PIX_FMT_NV21:
		return HFI_COLOR_FORMAT_NV21;
	default:
		return 0;
	}
}

int venus_helper_get_bufreq(struct venus_inst *inst, u32 type,
			    struct hfi_buffer_requirements *req)
{
	u32 ptype = HFI_PROPERTY_CONFIG_BUFFER_REQUIREMENTS;
	union hfi_get_property hprop;
	unsigned int i;
	int ret;

	memset(req, 0, sizeof(*req));

	ret = hfi_session_get_property(inst, ptype, &hprop);
	if (ret)
		return ret;

	ret = -EINVAL;

	for (i = 0; i < HFI_BUFFER_TYPE_MAX; i++) {
		if (hprop.bufreq[i].type != type)
			continue;

		memcpy(req, &hprop.bufreq[i], sizeof(*req));
		ret = 0;
		break;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(venus_helper_get_bufreq);

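/*
 * Raw (uncompressed) frame size helpers. NV12 uses a 128-byte aligned luma
 * and chroma stride with 32/16 aligned scanline counts; the UBWC variant
 * additionally accounts for the compressed-plane metadata. Both round the
 * total up to a 4K page.
 */
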
static u32 get_framesize_raw_nv12(u32 width, u32 height)
{
	u32 y_stride, uv_stride, y_plane;
	u32 y_sclines, uv_sclines, uv_plane;
	u32 size;

	y_stride = ALIGN(width, 128);
	uv_stride = ALIGN(width, 128);
	y_sclines = ALIGN(height, 32);
	uv_sclines = ALIGN(((height + 1) >> 1), 16);

	y_plane = y_stride * y_sclines;
	uv_plane = uv_stride * uv_sclines + SZ_4K;
	size = y_plane + uv_plane + SZ_8K;

	return ALIGN(size, SZ_4K);
}

static u32 get_framesize_raw_nv12_ubwc(u32 width, u32 height)
{
	u32 y_meta_stride, y_meta_plane;
	u32 y_stride, y_plane;
	u32 uv_meta_stride, uv_meta_plane;
	u32 uv_stride, uv_plane;
	u32 extradata = SZ_16K;

	y_meta_stride = ALIGN(DIV_ROUND_UP(width, 32), 64);
	y_meta_plane = y_meta_stride * ALIGN(DIV_ROUND_UP(height, 8), 16);
	y_meta_plane = ALIGN(y_meta_plane, SZ_4K);

	y_stride = ALIGN(width, 128);
	y_plane = ALIGN(y_stride * ALIGN(height, 32), SZ_4K);

	uv_meta_stride = ALIGN(DIV_ROUND_UP(width / 2, 16), 64);
	uv_meta_plane = uv_meta_stride * ALIGN(DIV_ROUND_UP(height / 2, 8), 16);
	uv_meta_plane = ALIGN(uv_meta_plane, SZ_4K);

	uv_stride = ALIGN(width, 128);
	uv_plane = ALIGN(uv_stride * ALIGN(height / 2, 32), SZ_4K);

	return ALIGN(y_meta_plane + y_plane + uv_meta_plane + uv_plane +
		     max(extradata, y_stride * 48), SZ_4K);
}

u32 venus_helper_get_framesz_raw(u32 hfi_fmt, u32 width, u32 height)
{
	switch (hfi_fmt) {
	case HFI_COLOR_FORMAT_NV12:
	case HFI_COLOR_FORMAT_NV21:
		return get_framesize_raw_nv12(width, height);
	case HFI_COLOR_FORMAT_NV12_UBWC:
		return get_framesize_raw_nv12_ubwc(width, height);
	default:
		return 0;
	}
}
EXPORT_SYMBOL_GPL(venus_helper_get_framesz_raw);

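/*
 * venus_helper_get_framesz() below maps a V4L2 pixel format to a worst-case
 * plane size: compressed formats get a fixed fraction of the aligned frame
 * area, raw formats are delegated to venus_helper_get_framesz_raw(). A
 * typical call site (sketch only, pixmp is a hypothetical
 * v4l2_pix_format_mplane being negotiated, not code from this file):
 *
 *	pixmp->plane_fmt[0].sizeimage =
 *		venus_helper_get_framesz(pixmp->pixelformat,
 *					 pixmp->width, pixmp->height);
 */
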
u32 venus_helper_get_framesz(u32 v4l2_fmt, u32 width, u32 height)
{
	u32 hfi_fmt, sz;
	bool compressed;

	switch (v4l2_fmt) {
	case V4L2_PIX_FMT_MPEG:
	case V4L2_PIX_FMT_H264:
	case V4L2_PIX_FMT_H264_NO_SC:
	case V4L2_PIX_FMT_H264_MVC:
	case V4L2_PIX_FMT_H263:
	case V4L2_PIX_FMT_MPEG1:
	case V4L2_PIX_FMT_MPEG2:
	case V4L2_PIX_FMT_MPEG4:
	case V4L2_PIX_FMT_XVID:
	case V4L2_PIX_FMT_VC1_ANNEX_G:
	case V4L2_PIX_FMT_VC1_ANNEX_L:
	case V4L2_PIX_FMT_VP8:
	case V4L2_PIX_FMT_VP9:
	case V4L2_PIX_FMT_HEVC:
		compressed = true;
		break;
	default:
		compressed = false;
		break;
	}

	if (compressed) {
		sz = ALIGN(height, 32) * ALIGN(width, 32) * 3 / 2 / 2;
		return ALIGN(sz, SZ_4K);
	}

	hfi_fmt = to_hfi_raw_fmt(v4l2_fmt);
	if (!hfi_fmt)
		return 0;

	return venus_helper_get_framesz_raw(hfi_fmt, width, height);
}
EXPORT_SYMBOL_GPL(venus_helper_get_framesz);

int venus_helper_set_input_resolution(struct venus_inst *inst,
				      unsigned int width, unsigned int height)
{
	u32 ptype = HFI_PROPERTY_PARAM_FRAME_SIZE;
	struct hfi_framesize fs;

	fs.buffer_type = HFI_BUFFER_INPUT;
	fs.width = width;
	fs.height = height;

	return hfi_session_set_property(inst, ptype, &fs);
}
EXPORT_SYMBOL_GPL(venus_helper_set_input_resolution);

int venus_helper_set_output_resolution(struct venus_inst *inst,
				       unsigned int width, unsigned int height,
				       u32 buftype)
{
	u32 ptype = HFI_PROPERTY_PARAM_FRAME_SIZE;
	struct hfi_framesize fs;

	fs.buffer_type = buftype;
	fs.width = width;
	fs.height = height;

	return hfi_session_set_property(inst, ptype, &fs);
}
EXPORT_SYMBOL_GPL(venus_helper_set_output_resolution);

int venus_helper_set_work_mode(struct venus_inst *inst, u32 mode)
{
	const u32 ptype = HFI_PROPERTY_PARAM_WORK_MODE;
	struct hfi_video_work_mode wm;

	if (!IS_V4(inst->core))
		return 0;

	wm.video_work_mode = mode;

	return hfi_session_set_property(inst, ptype, &wm);
}
EXPORT_SYMBOL_GPL(venus_helper_set_work_mode);

int venus_helper_set_core_usage(struct venus_inst *inst, u32 usage)
{
	const u32 ptype = HFI_PROPERTY_CONFIG_VIDEOCORES_USAGE;
	struct hfi_videocores_usage_type cu;

	if (!IS_V4(inst->core))
		return 0;

	cu.video_core_enable_mask = usage;

	return hfi_session_set_property(inst, ptype, &cu);
}
EXPORT_SYMBOL_GPL(venus_helper_set_core_usage);

int venus_helper_set_num_bufs(struct venus_inst *inst, unsigned int input_bufs,
			      unsigned int output_bufs,
			      unsigned int output2_bufs)
{
	u32 ptype = HFI_PROPERTY_PARAM_BUFFER_COUNT_ACTUAL;
	struct hfi_buffer_count_actual buf_count;
	int ret;

	buf_count.type = HFI_BUFFER_INPUT;
	buf_count.count_actual = input_bufs;

	ret = hfi_session_set_property(inst, ptype, &buf_count);
	if (ret)
		return ret;

	buf_count.type = HFI_BUFFER_OUTPUT;
	buf_count.count_actual = output_bufs;

	ret = hfi_session_set_property(inst, ptype, &buf_count);
	if (ret)
		return ret;

	if (output2_bufs) {
		buf_count.type = HFI_BUFFER_OUTPUT2;
		buf_count.count_actual = output2_bufs;

		ret = hfi_session_set_property(inst, ptype, &buf_count);
	}

	return ret;
}
EXPORT_SYMBOL_GPL(venus_helper_set_num_bufs);

int venus_helper_set_raw_format(struct venus_inst *inst, u32 hfi_format,
				u32 buftype)
{
	const u32 ptype = HFI_PROPERTY_PARAM_UNCOMPRESSED_FORMAT_SELECT;
	struct hfi_uncompressed_format_select fmt;

	fmt.buffer_type = buftype;
	fmt.format = hfi_format;

	return hfi_session_set_property(inst, ptype, &fmt);
}
EXPORT_SYMBOL_GPL(venus_helper_set_raw_format);

int venus_helper_set_color_format(struct venus_inst *inst, u32 pixfmt)
{
	u32 hfi_format, buftype;

	if (inst->session_type == VIDC_SESSION_TYPE_DEC)
		buftype = HFI_BUFFER_OUTPUT;
	else if (inst->session_type == VIDC_SESSION_TYPE_ENC)
		buftype = HFI_BUFFER_INPUT;
	else
		return -EINVAL;

	hfi_format = to_hfi_raw_fmt(pixfmt);
	if (!hfi_format)
		return -EINVAL;

	return venus_helper_set_raw_format(inst, hfi_format, buftype);
}
EXPORT_SYMBOL_GPL(venus_helper_set_color_format);

int venus_helper_set_multistream(struct venus_inst *inst, bool out_en,
				 bool out2_en)
{
	struct hfi_multi_stream multi = {0};
	u32 ptype = HFI_PROPERTY_PARAM_VDEC_MULTI_STREAM;
	int ret;

	multi.buffer_type = HFI_BUFFER_OUTPUT;
	multi.enable = out_en;

	ret = hfi_session_set_property(inst, ptype, &multi);
	if (ret)
		return ret;

	multi.buffer_type = HFI_BUFFER_OUTPUT2;
	multi.enable = out2_en;

	return hfi_session_set_property(inst, ptype, &multi);
}
EXPORT_SYMBOL_GPL(venus_helper_set_multistream);

int venus_helper_set_dyn_bufmode(struct venus_inst *inst)
{
	const u32 ptype = HFI_PROPERTY_PARAM_BUFFER_ALLOC_MODE;
	struct hfi_buffer_alloc_mode mode;
	int ret;

	if (!is_dynamic_bufmode(inst))
		return 0;

	mode.type = HFI_BUFFER_OUTPUT;
	mode.mode = HFI_BUFFER_MODE_DYNAMIC;

	ret = hfi_session_set_property(inst, ptype, &mode);
	if (ret)
		return ret;

	mode.type = HFI_BUFFER_OUTPUT2;

	return hfi_session_set_property(inst, ptype, &mode);
}
EXPORT_SYMBOL_GPL(venus_helper_set_dyn_bufmode);

int venus_helper_set_bufsize(struct venus_inst *inst, u32 bufsize, u32 buftype)
{
	const u32 ptype = HFI_PROPERTY_PARAM_BUFFER_SIZE_ACTUAL;
	struct hfi_buffer_size_actual bufsz;

	bufsz.type = buftype;
	bufsz.size = bufsize;

	return hfi_session_set_property(inst, ptype, &bufsz);
}
EXPORT_SYMBOL_GPL(venus_helper_set_bufsize);

unsigned int venus_helper_get_opb_size(struct venus_inst *inst)
{
	/* the encoder has only one output */
	if (inst->session_type == VIDC_SESSION_TYPE_ENC)
		return inst->output_buf_size;

	if (inst->opb_buftype == HFI_BUFFER_OUTPUT)
		return inst->output_buf_size;
	else if (inst->opb_buftype == HFI_BUFFER_OUTPUT2)
		return inst->output2_buf_size;

	return 0;
}
EXPORT_SYMBOL_GPL(venus_helper_get_opb_size);

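/*
 * Decoder capture buffers the firmware still references are marked
 * HFI_BUFFERFLAG_READONLY and parked on inst->delayed_process instead of
 * being requeued immediately; delayed_process_buf_func() requeues them from
 * a workqueue once venus_helper_release_buf_ref() clears the flag.
 */
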
static void delayed_process_buf_func(struct work_struct *work)
{
	struct venus_buffer *buf, *n;
	struct venus_inst *inst;
	int ret;

	inst = container_of(work, struct venus_inst, delayed_process_work);

	mutex_lock(&inst->lock);

	if (!(inst->streamon_out & inst->streamon_cap))
		goto unlock;

	list_for_each_entry_safe(buf, n, &inst->delayed_process, ref_list) {
		if (buf->flags & HFI_BUFFERFLAG_READONLY)
			continue;

		ret = session_process_buf(inst, &buf->vb);
		if (ret)
			return_buf_error(inst, &buf->vb);

		list_del_init(&buf->ref_list);
	}
unlock:
	mutex_unlock(&inst->lock);
}

void venus_helper_release_buf_ref(struct venus_inst *inst, unsigned int idx)
{
	struct venus_buffer *buf;

	list_for_each_entry(buf, &inst->registeredbufs, reg_list) {
		if (buf->vb.vb2_buf.index == idx) {
			buf->flags &= ~HFI_BUFFERFLAG_READONLY;
			schedule_work(&inst->delayed_process_work);
			break;
		}
	}
}
EXPORT_SYMBOL_GPL(venus_helper_release_buf_ref);

void venus_helper_acquire_buf_ref(struct vb2_v4l2_buffer *vbuf)
{
	struct venus_buffer *buf = to_venus_buffer(vbuf);

	buf->flags |= HFI_BUFFERFLAG_READONLY;
}
EXPORT_SYMBOL_GPL(venus_helper_acquire_buf_ref);

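/*
 * If the firmware still holds a reference to this capture buffer, defer it
 * to the delayed-process list and kick the worker; the return value tells
 * the caller whether the buffer was deferred.
 */
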
static int is_buf_refed(struct venus_inst *inst, struct vb2_v4l2_buffer *vbuf)
{
	struct venus_buffer *buf = to_venus_buffer(vbuf);

	if (buf->flags & HFI_BUFFERFLAG_READONLY) {
		list_add_tail(&buf->ref_list, &inst->delayed_process);
		schedule_work(&inst->delayed_process_work);
		return 1;
	}

	return 0;
}

struct vb2_v4l2_buffer *
venus_helper_find_buf(struct venus_inst *inst, unsigned int type, u32 idx)
{
	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;

	if (type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
		return v4l2_m2m_src_buf_remove_by_idx(m2m_ctx, idx);
	else
		return v4l2_m2m_dst_buf_remove_by_idx(m2m_ctx, idx);
}
EXPORT_SYMBOL_GPL(venus_helper_find_buf);

int venus_helper_vb2_buf_init(struct vb2_buffer *vb)
{
	struct venus_inst *inst = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct venus_buffer *buf = to_venus_buffer(vbuf);
	struct sg_table *sgt;

	sgt = vb2_dma_sg_plane_desc(vb, 0);
	if (!sgt)
		return -EFAULT;

	buf->size = vb2_plane_size(vb, 0);
	buf->dma_addr = sg_dma_address(sgt->sgl);

	if (vb->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		list_add_tail(&buf->reg_list, &inst->registeredbufs);

	return 0;
}
EXPORT_SYMBOL_GPL(venus_helper_vb2_buf_init);

int venus_helper_vb2_buf_prepare(struct vb2_buffer *vb)
{
	struct venus_inst *inst = vb2_get_drv_priv(vb->vb2_queue);
	unsigned int out_buf_size = venus_helper_get_opb_size(inst);

	if (vb->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE &&
	    vb2_plane_size(vb, 0) < out_buf_size)
		return -EINVAL;
	if (vb->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE &&
	    vb2_plane_size(vb, 0) < inst->input_buf_size)
		return -EINVAL;

	return 0;
}
EXPORT_SYMBOL_GPL(venus_helper_vb2_buf_prepare);

void venus_helper_vb2_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct venus_inst *inst = vb2_get_drv_priv(vb->vb2_queue);
	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
	int ret;

	mutex_lock(&inst->lock);

	v4l2_m2m_buf_queue(m2m_ctx, vbuf);

	if (!(inst->streamon_out & inst->streamon_cap))
		goto unlock;

	ret = is_buf_refed(inst, vbuf);
	if (ret)
		goto unlock;

	ret = session_process_buf(inst, vbuf);
	if (ret)
		return_buf_error(inst, vbuf);

unlock:
	mutex_unlock(&inst->lock);
}
EXPORT_SYMBOL_GPL(venus_helper_vb2_buf_queue);

void venus_helper_buffers_done(struct venus_inst *inst,
			       enum vb2_buffer_state state)
{
	struct vb2_v4l2_buffer *buf;

	while ((buf = v4l2_m2m_src_buf_remove(inst->m2m_ctx)))
		v4l2_m2m_buf_done(buf, state);
	while ((buf = v4l2_m2m_dst_buf_remove(inst->m2m_ctx)))
		v4l2_m2m_buf_done(buf, state);
}
EXPORT_SYMBOL_GPL(venus_helper_buffers_done);

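/*
 * vb2 start/stop streaming helpers shared by the decoder and encoder: stop
 * tears the HFI session down (or aborts it on error), frees DPB and internal
 * buffers, rescales the clocks and returns all queued buffers in error
 * state; start allocates and registers buffers, loads resources and starts
 * the session, unwinding everything on failure.
 */
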
void venus_helper_vb2_stop_streaming(struct vb2_queue *q)
{
	struct venus_inst *inst = vb2_get_drv_priv(q);
	struct venus_core *core = inst->core;
	int ret;

	mutex_lock(&inst->lock);

	if (inst->streamon_out & inst->streamon_cap) {
		ret = hfi_session_stop(inst);
		ret |= hfi_session_unload_res(inst);
		ret |= session_unregister_bufs(inst);
		ret |= intbufs_free(inst);
		ret |= hfi_session_deinit(inst);

		if (inst->session_error || core->sys_error)
			ret = -EIO;

		if (ret)
			hfi_session_abort(inst);

		venus_helper_free_dpb_bufs(inst);

		load_scale_clocks(core);
		INIT_LIST_HEAD(&inst->registeredbufs);
	}

	venus_helper_buffers_done(inst, VB2_BUF_STATE_ERROR);

	if (q->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
		inst->streamon_out = 0;
	else
		inst->streamon_cap = 0;

	mutex_unlock(&inst->lock);
}
EXPORT_SYMBOL_GPL(venus_helper_vb2_stop_streaming);

int venus_helper_vb2_start_streaming(struct venus_inst *inst)
{
	struct venus_core *core = inst->core;
	int ret;

	ret = intbufs_alloc(inst);
	if (ret)
		return ret;

	ret = session_register_bufs(inst);
	if (ret)
		goto err_bufs_free;

	load_scale_clocks(core);

	ret = hfi_session_load_res(inst);
	if (ret)
		goto err_unreg_bufs;

	ret = hfi_session_start(inst);
	if (ret)
		goto err_unload_res;

	ret = venus_helper_queue_dpb_bufs(inst);
	if (ret)
		goto err_session_stop;

	return 0;

err_session_stop:
	hfi_session_stop(inst);
err_unload_res:
	hfi_session_unload_res(inst);
err_unreg_bufs:
	session_unregister_bufs(inst);
err_bufs_free:
	intbufs_free(inst);
	return ret;
}
EXPORT_SYMBOL_GPL(venus_helper_vb2_start_streaming);

void venus_helper_m2m_device_run(void *priv)
{
	struct venus_inst *inst = priv;
	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
	struct v4l2_m2m_buffer *buf, *n;
	int ret;

	mutex_lock(&inst->lock);

	v4l2_m2m_for_each_dst_buf_safe(m2m_ctx, buf, n) {
		ret = session_process_buf(inst, &buf->vb);
		if (ret)
			return_buf_error(inst, &buf->vb);
	}

	v4l2_m2m_for_each_src_buf_safe(m2m_ctx, buf, n) {
		ret = session_process_buf(inst, &buf->vb);
		if (ret)
			return_buf_error(inst, &buf->vb);
	}

	mutex_unlock(&inst->lock);
}
EXPORT_SYMBOL_GPL(venus_helper_m2m_device_run);

void venus_helper_m2m_job_abort(void *priv)
{
	struct venus_inst *inst = priv;

	v4l2_m2m_job_finish(inst->m2m_dev, inst->m2m_ctx);
}
EXPORT_SYMBOL_GPL(venus_helper_m2m_job_abort);

void venus_helper_init_instance(struct venus_inst *inst)
{
	if (inst->session_type == VIDC_SESSION_TYPE_DEC) {
		INIT_LIST_HEAD(&inst->delayed_process);
		INIT_WORK(&inst->delayed_process_work,
			  delayed_process_buf_func);
	}
}
EXPORT_SYMBOL_GPL(venus_helper_init_instance);

static bool find_fmt_from_caps(struct venus_caps *caps, u32 buftype, u32 fmt)
{
	unsigned int i;

	for (i = 0; i < caps->num_fmts; i++) {
		if (caps->fmts[i].buftype == buftype &&
		    caps->fmts[i].fmt == fmt)
			return true;
	}

	return false;
}

int venus_helper_get_out_fmts(struct venus_inst *inst, u32 v4l2_fmt,
			      u32 *out_fmt, u32 *out2_fmt, bool ubwc)
{
	struct venus_core *core = inst->core;
	struct venus_caps *caps;
	u32 ubwc_fmt, fmt = to_hfi_raw_fmt(v4l2_fmt);
	bool found, found_ubwc;

	*out_fmt = *out2_fmt = 0;

	if (!fmt)
		return -EINVAL;

	caps = venus_caps_by_codec(core, inst->hfi_codec, inst->session_type);
	if (!caps)
		return -EINVAL;

	if (ubwc) {
		ubwc_fmt = fmt | HFI_COLOR_FORMAT_UBWC_BASE;
		found_ubwc = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT,
						ubwc_fmt);
		found = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT2, fmt);

		if (found_ubwc && found) {
			*out_fmt = ubwc_fmt;
			*out2_fmt = fmt;
			return 0;
		}
	}

	found = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT, fmt);
	if (found) {
		*out_fmt = fmt;
		*out2_fmt = 0;
		return 0;
	}

	found = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT2, fmt);
	if (found) {
		*out_fmt = 0;
		*out2_fmt = fmt;
		return 0;
	}

	return -EINVAL;
}
EXPORT_SYMBOL_GPL(venus_helper_get_out_fmts);

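/*
 * On Venus 3xx/4xx the vdec/venc runtime-PM paths use this helper to gate
 * vcodec power collapse: it pokes the wrapper power-control register for the
 * decoder or encoder core and, on 4xx, polls the MMCC power status bit until
 * the hardware acknowledges the transition.
 */
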
int venus_helper_power_enable(struct venus_core *core, u32 session_type,
			      bool enable)
{
	void __iomem *ctrl, *stat;
	u32 val;
	int ret;

	if (!IS_V3(core) && !IS_V4(core))
		return 0;

	if (IS_V3(core)) {
		if (session_type == VIDC_SESSION_TYPE_DEC)
			ctrl = core->base + WRAPPER_VDEC_VCODEC_POWER_CONTROL;
		else
			ctrl = core->base + WRAPPER_VENC_VCODEC_POWER_CONTROL;
		if (enable)
			writel(0, ctrl);
		else
			writel(1, ctrl);

		return 0;
	}

	if (session_type == VIDC_SESSION_TYPE_DEC) {
		ctrl = core->base + WRAPPER_VCODEC0_MMCC_POWER_CONTROL;
		stat = core->base + WRAPPER_VCODEC0_MMCC_POWER_STATUS;
	} else {
		ctrl = core->base + WRAPPER_VCODEC1_MMCC_POWER_CONTROL;
		stat = core->base + WRAPPER_VCODEC1_MMCC_POWER_STATUS;
	}

	if (enable) {
		writel(0, ctrl);

		ret = readl_poll_timeout(stat, val, val & BIT(1), 1, 100);
		if (ret)
			return ret;
	} else {
		writel(1, ctrl);

		ret = readl_poll_timeout(stat, val, !(val & BIT(1)), 1, 100);
		if (ret)
			return ret;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(venus_helper_power_enable);