// SPDX-License-Identifier: GPL-2.0
/*
 * Hantro VP9 codec driver
 *
 * Copyright (C) 2021 Collabora Ltd.
 */
#include "media/videobuf2-core.h"
#include "media/videobuf2-dma-contig.h"
#include "media/videobuf2-v4l2.h"
#include <linux/kernel.h>
#include <linux/vmalloc.h>
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-vp9.h>

#include "hantro.h"
#include "hantro_vp9.h"
#include "hantro_g2_regs.h"

enum hantro_ref_frames {
	INTRA_FRAME = 0,
	LAST_FRAME = 1,
	GOLDEN_FRAME = 2,
	ALTREF_FRAME = 3,
	MAX_REF_FRAMES = 4
};

static int start_prepare_run(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame **dec_params)
{
	const struct v4l2_ctrl_vp9_compressed_hdr *prob_updates;
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	struct v4l2_ctrl *ctrl;
	unsigned int fctx_idx;

	/* v4l2-specific stuff */
	hantro_start_prepare_run(ctx);

	ctrl = v4l2_ctrl_find(&ctx->ctrl_handler, V4L2_CID_STATELESS_VP9_FRAME);
	if (WARN_ON(!ctrl))
		return -EINVAL;
	*dec_params = ctrl->p_cur.p;

	ctrl = v4l2_ctrl_find(&ctx->ctrl_handler, V4L2_CID_STATELESS_VP9_COMPRESSED_HDR);
	if (WARN_ON(!ctrl))
		return -EINVAL;
	prob_updates = ctrl->p_cur.p;
	vp9_ctx->cur.tx_mode = prob_updates->tx_mode;

	/*
	 * vp9 stuff
	 *
	 * by this point the userspace has done all parts of 6.2 uncompressed_header()
	 * except this fragment:
	 * if ( FrameIsIntra || error_resilient_mode ) {
	 *	setup_past_independence ( )
	 *	if ( frame_type == KEY_FRAME || error_resilient_mode == 1 ||
	 *	     reset_frame_context == 3 ) {
	 *		for ( i = 0; i < 4; i ++ ) {
	 *			save_probs( i )
	 *		}
	 *	} else if ( reset_frame_context == 2 ) {
	 *		save_probs( frame_context_idx )
	 *	}
	 *	frame_context_idx = 0
	 * }
	 */
	fctx_idx = v4l2_vp9_reset_frame_ctx(*dec_params, vp9_ctx->frame_context);
	vp9_ctx->cur.frame_context_idx = fctx_idx;

	/* 6.1 frame(sz): load_probs() and load_probs2() */
	vp9_ctx->probability_tables = vp9_ctx->frame_context[fctx_idx];

	/*
	 * The userspace has also performed 6.3 compressed_header(), but handling the
	 * probs in a special way. All probs which need updating, except MV-related,
	 * have been read from the bitstream and translated through inv_map_table[],
	 * but no 6.3.6 inv_recenter_nonneg(v, m) has been performed. The values passed
	 * by userspace are either translated values (there are no 0 values in
	 * inv_map_table[]), or zero to indicate no update. All MV-related probs which need
	 * updating have been read from the bitstream and (mv_prob << 1) | 1 has been
	 * performed. The values passed by userspace are either new values
	 * to replace old ones (the above mentioned shift and bitwise or never result in
	 * a zero) or zero to indicate no update.
	 * fw_update_probs() performs actual probs updates or leaves probs as-is
	 * for values for which a zero was passed from userspace.
	 */
	v4l2_vp9_fw_update_probs(&vp9_ctx->probability_tables, prob_updates, *dec_params);

	return 0;
}

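/*
 * Reference frames are looked up on the CAPTURE queue by timestamp, as is
 * usual for stateless V4L2 decoders; get_ref_buf() falls back to the current
 * destination buffer so the hardware is always handed a valid address.
 */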
static struct hantro_decoded_buffer *
get_ref_buf(struct hantro_ctx *ctx, struct vb2_v4l2_buffer *dst, u64 timestamp)
{
	struct v4l2_m2m_ctx *m2m_ctx = ctx->fh.m2m_ctx;
	struct vb2_queue *cap_q = &m2m_ctx->cap_q_ctx.q;
	struct vb2_buffer *buf;

	/*
	 * If a ref is unused or invalid, address of current destination
	 * buffer is returned.
	 */
	buf = vb2_find_buffer(cap_q, timestamp);
	if (!buf)
		buf = &dst->vb2_buf;

	return vb2_to_hantro_decoded_buf(buf);
}

static void update_dec_buf_info(struct hantro_decoded_buffer *buf,
				const struct v4l2_ctrl_vp9_frame *dec_params)
{
	buf->vp9.width = dec_params->frame_width_minus_1 + 1;
	buf->vp9.height = dec_params->frame_height_minus_1 + 1;
	buf->vp9.bit_depth = dec_params->bit_depth;
}

static void update_ctx_cur_info(struct hantro_vp9_dec_hw_ctx *vp9_ctx,
				struct hantro_decoded_buffer *buf,
				const struct v4l2_ctrl_vp9_frame *dec_params)
{
	vp9_ctx->cur.valid = true;
	vp9_ctx->cur.reference_mode = dec_params->reference_mode;
	vp9_ctx->cur.interpolation_filter = dec_params->interpolation_filter;
	vp9_ctx->cur.flags = dec_params->flags;
	vp9_ctx->cur.timestamp = buf->base.vb.vb2_buf.timestamp;
}

static void config_output(struct hantro_ctx *ctx,
			  struct hantro_decoded_buffer *dst,
			  const struct v4l2_ctrl_vp9_frame *dec_params)
{
	dma_addr_t luma_addr, chroma_addr, mv_addr;

	hantro_reg_write(ctx->dev, &g2_out_dis, 0);
	if (!ctx->dev->variant->legacy_regs)
		hantro_reg_write(ctx->dev, &g2_output_format, 0);

	luma_addr = hantro_get_dec_buf_addr(ctx, &dst->base.vb.vb2_buf);
	hantro_write_addr(ctx->dev, G2_OUT_LUMA_ADDR, luma_addr);

	chroma_addr = luma_addr + hantro_g2_chroma_offset(ctx);
	hantro_write_addr(ctx->dev, G2_OUT_CHROMA_ADDR, chroma_addr);
	dst->vp9.chroma_offset = hantro_g2_chroma_offset(ctx);

	mv_addr = luma_addr + hantro_g2_motion_vectors_offset(ctx);
	hantro_write_addr(ctx->dev, G2_OUT_MV_ADDR, mv_addr);
	dst->vp9.mv_offset = hantro_g2_motion_vectors_offset(ctx);
}

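/*
 * The G2 core exposes an identical width/height/scale register set for each
 * of the LAST, GOLDEN and ALTREF references, so one struct describes all
 * three and config_ref_registers() iterates over a table of them.
 */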
struct hantro_vp9_ref_reg {
	const struct hantro_reg width;
	const struct hantro_reg height;
	const struct hantro_reg hor_scale;
	const struct hantro_reg ver_scale;
	u32 y_base;
	u32 c_base;
};

static void config_ref(struct hantro_ctx *ctx,
		       struct hantro_decoded_buffer *dst,
		       const struct hantro_vp9_ref_reg *ref_reg,
		       const struct v4l2_ctrl_vp9_frame *dec_params,
		       u64 ref_ts)
{
	struct hantro_decoded_buffer *buf;
	dma_addr_t luma_addr, chroma_addr;
	u32 refw, refh;

	buf = get_ref_buf(ctx, &dst->base.vb, ref_ts);
	refw = buf->vp9.width;
	refh = buf->vp9.height;

	hantro_reg_write(ctx->dev, &ref_reg->width, refw);
	hantro_reg_write(ctx->dev, &ref_reg->height, refh);

	hantro_reg_write(ctx->dev, &ref_reg->hor_scale, (refw << 14) / dst->vp9.width);
	hantro_reg_write(ctx->dev, &ref_reg->ver_scale, (refh << 14) / dst->vp9.height);

	luma_addr = hantro_get_dec_buf_addr(ctx, &buf->base.vb.vb2_buf);
	hantro_write_addr(ctx->dev, ref_reg->y_base, luma_addr);

	chroma_addr = luma_addr + buf->vp9.chroma_offset;
	hantro_write_addr(ctx->dev, ref_reg->c_base, chroma_addr);
}

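/*
 * Note: the luma/chroma base address slots used below (0 for LAST, 4 for
 * GOLDEN, 5 for ALTREF) follow the G2 register layout in hantro_g2_regs.h.
 */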
static void config_ref_registers(struct hantro_ctx *ctx,
				 const struct v4l2_ctrl_vp9_frame *dec_params,
				 struct hantro_decoded_buffer *dst,
				 struct hantro_decoded_buffer *mv_ref)
{
	static const struct hantro_vp9_ref_reg ref_regs[] = {
		{
			/* Last */
			.width = vp9_lref_width,
			.height = vp9_lref_height,
			.hor_scale = vp9_lref_hor_scale,
			.ver_scale = vp9_lref_ver_scale,
			.y_base = G2_REF_LUMA_ADDR(0),
			.c_base = G2_REF_CHROMA_ADDR(0),
		}, {
			/* Golden */
			.width = vp9_gref_width,
			.height = vp9_gref_height,
			.hor_scale = vp9_gref_hor_scale,
			.ver_scale = vp9_gref_ver_scale,
			.y_base = G2_REF_LUMA_ADDR(4),
			.c_base = G2_REF_CHROMA_ADDR(4),
		}, {
			/* Altref */
			.width = vp9_aref_width,
			.height = vp9_aref_height,
			.hor_scale = vp9_aref_hor_scale,
			.ver_scale = vp9_aref_ver_scale,
			.y_base = G2_REF_LUMA_ADDR(5),
			.c_base = G2_REF_CHROMA_ADDR(5),
		},
	};
	dma_addr_t mv_addr;

	config_ref(ctx, dst, &ref_regs[0], dec_params, dec_params->last_frame_ts);
	config_ref(ctx, dst, &ref_regs[1], dec_params, dec_params->golden_frame_ts);
	config_ref(ctx, dst, &ref_regs[2], dec_params, dec_params->alt_frame_ts);

	mv_addr = hantro_get_dec_buf_addr(ctx, &mv_ref->base.vb.vb2_buf) +
		  mv_ref->vp9.mv_offset;
	hantro_write_addr(ctx->dev, G2_REF_MV_ADDR(0), mv_addr);

	hantro_reg_write(ctx->dev, &vp9_last_sign_bias,
			 dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_LAST ? 1 : 0);

	hantro_reg_write(ctx->dev, &vp9_gref_sign_bias,
			 dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_GOLDEN ? 1 : 0);

	hantro_reg_write(ctx->dev, &vp9_aref_sign_bias,
			 dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_ALT ? 1 : 0);
}

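/*
 * Split sbs superblocks into evenly sized tiles the way the VP9 spec does:
 * tile i spans i * sbs / tiles up to (i + 1) * sbs / tiles, so each entry
 * written here is the difference between two consecutive bounds.
 */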
static void recompute_tile_info(unsigned short *tile_info, unsigned int tiles, unsigned int sbs)
{
	int i;
	unsigned int accumulated = 0;
	unsigned int next_accumulated;

	for (i = 1; i <= tiles; ++i) {
		next_accumulated = i * sbs / tiles;
		*tile_info++ = next_accumulated - accumulated;
		accumulated = next_accumulated;
	}
}

static void
recompute_tile_rc_info(struct hantro_ctx *ctx,
		       unsigned int tile_r, unsigned int tile_c,
		       unsigned int sbs_r, unsigned int sbs_c)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;

	recompute_tile_info(vp9_ctx->tile_r_info, tile_r, sbs_r);
	recompute_tile_info(vp9_ctx->tile_c_info, tile_c, sbs_c);

	vp9_ctx->last_tile_r = tile_r;
	vp9_ctx->last_tile_c = tile_c;
	vp9_ctx->last_sbs_r = sbs_r;
	vp9_ctx->last_sbs_c = sbs_c;
}

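/*
 * For very small frames there may be more tile rows than superblock rows,
 * leaving one or two leading tile rows with zero size; fill_tile_info()
 * starts past those and folds their (zero-sized) entries into the first
 * emitted row.
 */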
static inline unsigned int first_tile_row(unsigned int tile_r, unsigned int sbs_r)
{
	if (tile_r == sbs_r + 1)
		return 1;

	if (tile_r == sbs_r + 2)
		return 2;

	return 0;
}

static void
fill_tile_info(struct hantro_ctx *ctx,
	       unsigned int tile_r, unsigned int tile_c,
	       unsigned int sbs_r, unsigned int sbs_c,
	       unsigned short *tile_mem)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	unsigned int i, j;
	bool first = true;

	for (i = first_tile_row(tile_r, sbs_r); i < tile_r; ++i) {
		unsigned short r_info = vp9_ctx->tile_r_info[i];

		if (first) {
			if (i > 0)
				r_info += vp9_ctx->tile_r_info[0];
			if (i == 2)
				r_info += vp9_ctx->tile_r_info[1];
			first = false;
		}
		for (j = 0; j < tile_c; ++j) {
			*tile_mem++ = vp9_ctx->tile_c_info[j];
			*tile_mem++ = r_info;
		}
	}
}

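/*
 * The tile size buffer handed to the hardware is a list of 16-bit
 * (width, height) pairs in superblock units, one pair per tile; with tiling
 * disabled a single pair covering the whole frame is written instead.
 */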
static void
config_tiles(struct hantro_ctx *ctx,
	     const struct v4l2_ctrl_vp9_frame *dec_params,
	     struct hantro_decoded_buffer *dst)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	struct hantro_aux_buf *misc = &vp9_ctx->misc;
	struct hantro_aux_buf *tile_edge = &vp9_ctx->tile_edge;
	dma_addr_t addr;
	unsigned short *tile_mem;
	unsigned int rows, cols;

	addr = misc->dma + vp9_ctx->tile_info_offset;
	hantro_write_addr(ctx->dev, G2_TILE_SIZES_ADDR, addr);

	tile_mem = misc->cpu + vp9_ctx->tile_info_offset;
	if (dec_params->tile_cols_log2 || dec_params->tile_rows_log2) {
		unsigned int tile_r = (1 << dec_params->tile_rows_log2);
		unsigned int tile_c = (1 << dec_params->tile_cols_log2);
		unsigned int sbs_r = hantro_vp9_num_sbs(dst->vp9.height);
		unsigned int sbs_c = hantro_vp9_num_sbs(dst->vp9.width);

		if (tile_r != vp9_ctx->last_tile_r || tile_c != vp9_ctx->last_tile_c ||
		    sbs_r != vp9_ctx->last_sbs_r || sbs_c != vp9_ctx->last_sbs_c)
			recompute_tile_rc_info(ctx, tile_r, tile_c, sbs_r, sbs_c);

		fill_tile_info(ctx, tile_r, tile_c, sbs_r, sbs_c, tile_mem);

		cols = tile_c;
		rows = tile_r;
		hantro_reg_write(ctx->dev, &g2_tile_e, 1);
	} else {
		tile_mem[0] = hantro_vp9_num_sbs(dst->vp9.width);
		tile_mem[1] = hantro_vp9_num_sbs(dst->vp9.height);

		cols = 1;
		rows = 1;
		hantro_reg_write(ctx->dev, &g2_tile_e, 0);
	}

	if (ctx->dev->variant->legacy_regs) {
		hantro_reg_write(ctx->dev, &g2_num_tile_cols_old, cols);
		hantro_reg_write(ctx->dev, &g2_num_tile_rows_old, rows);
	} else {
		hantro_reg_write(ctx->dev, &g2_num_tile_cols, cols);
		hantro_reg_write(ctx->dev, &g2_num_tile_rows, rows);
	}

	/* provide aux buffers even if no tiles are used */
	addr = tile_edge->dma;
	hantro_write_addr(ctx->dev, G2_TILE_FILTER_ADDR, addr);

	addr = tile_edge->dma + vp9_ctx->bsd_ctrl_offset;
	hantro_write_addr(ctx->dev, G2_TILE_BSD_ADDR, addr);
}

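/*
 * VP9 segmentation feature data persists across frames unless explicitly
 * updated, so the driver mirrors the enabled flags and feature values in
 * vp9_ctx and only folds in what userspace flagged for update.
 */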
static void
update_feat_and_flag(struct hantro_vp9_dec_hw_ctx *vp9_ctx,
		     const struct v4l2_vp9_segmentation *seg,
		     unsigned int feature,
		     unsigned int segid)
{
	u8 mask = V4L2_VP9_SEGMENT_FEATURE_ENABLED(feature);

	vp9_ctx->feature_data[segid][feature] = seg->feature_data[segid][feature];
	vp9_ctx->feature_enabled[segid] &= ~mask;
	vp9_ctx->feature_enabled[segid] |= (seg->feature_enabled[segid] & mask);
}

static inline s16 clip3(s16 x, s16 y, s16 z)
{
	return (z < x) ? x : (z > y) ? y : z;
}

static s16 feat_val_clip3(s16 feat_val, s16 feature_data, bool absolute, u8 clip)
{
	if (absolute)
		return feature_data;

	return clip3(0, 255, feat_val + feature_data);
}

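/*
 * Four features are programmed per segment: alternate quantizer, alternate
 * loop filter level, reference frame override and skip. Feature values are
 * either absolute or deltas against the frame-level value, depending on
 * V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE.
 */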
static void config_segment(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	const struct v4l2_vp9_segmentation *seg;
	s16 feat_val;
	unsigned char feat_id;
	unsigned int segid;
	bool segment_enabled, absolute, update_data;

	static const struct hantro_reg seg_regs[8][V4L2_VP9_SEG_LVL_MAX] = {
		{ vp9_quant_seg0, vp9_filt_level_seg0, vp9_refpic_seg0, vp9_skip_seg0 },
		{ vp9_quant_seg1, vp9_filt_level_seg1, vp9_refpic_seg1, vp9_skip_seg1 },
		{ vp9_quant_seg2, vp9_filt_level_seg2, vp9_refpic_seg2, vp9_skip_seg2 },
		{ vp9_quant_seg3, vp9_filt_level_seg3, vp9_refpic_seg3, vp9_skip_seg3 },
		{ vp9_quant_seg4, vp9_filt_level_seg4, vp9_refpic_seg4, vp9_skip_seg4 },
		{ vp9_quant_seg5, vp9_filt_level_seg5, vp9_refpic_seg5, vp9_skip_seg5 },
		{ vp9_quant_seg6, vp9_filt_level_seg6, vp9_refpic_seg6, vp9_skip_seg6 },
		{ vp9_quant_seg7, vp9_filt_level_seg7, vp9_refpic_seg7, vp9_skip_seg7 },
	};

	segment_enabled = !!(dec_params->seg.flags & V4L2_VP9_SEGMENTATION_FLAG_ENABLED);
	hantro_reg_write(ctx->dev, &vp9_segment_e, segment_enabled);
	hantro_reg_write(ctx->dev, &vp9_segment_upd_e,
			 !!(dec_params->seg.flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP));
	hantro_reg_write(ctx->dev, &vp9_segment_temp_upd_e,
			 !!(dec_params->seg.flags & V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE));

	seg = &dec_params->seg;
	absolute = !!(seg->flags & V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE);
	update_data = !!(seg->flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA);

	for (segid = 0; segid < 8; ++segid) {
		/* Quantizer segment feature */
		feat_id = V4L2_VP9_SEG_LVL_ALT_Q;
		feat_val = dec_params->quant.base_q_idx;
		if (segment_enabled) {
			if (update_data)
				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
			if (v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled, feat_id, segid))
				feat_val = feat_val_clip3(feat_val,
							  vp9_ctx->feature_data[segid][feat_id],
							  absolute, 255);
		}
		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);

		/* Loop filter segment feature */
		feat_id = V4L2_VP9_SEG_LVL_ALT_L;
		feat_val = dec_params->lf.level;
		if (segment_enabled) {
			if (update_data)
				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
			if (v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled, feat_id, segid))
				feat_val = feat_val_clip3(feat_val,
							  vp9_ctx->feature_data[segid][feat_id],
							  absolute, 63);
		}
		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);

		/* Reference frame segment feature */
		feat_id = V4L2_VP9_SEG_LVL_REF_FRAME;
		feat_val = 0;
		if (segment_enabled) {
			if (update_data)
				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
			if (!(dec_params->flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME) &&
			    v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled, feat_id, segid))
				feat_val = vp9_ctx->feature_data[segid][feat_id] + 1;
		}
		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);

		/* Skip segment feature */
		feat_id = V4L2_VP9_SEG_LVL_SKIP;
		feat_val = 0;
		if (segment_enabled) {
			if (update_data)
				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
			feat_val = v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled,
							     feat_id, segid) ? 1 : 0;
		}
		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);
	}
}

static void config_loop_filter(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	bool d = dec_params->lf.flags & V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED;

	hantro_reg_write(ctx->dev, &vp9_filt_level, dec_params->lf.level);
	hantro_reg_write(ctx->dev, &g2_out_filtering_dis, dec_params->lf.level == 0);
	hantro_reg_write(ctx->dev, &vp9_filt_sharpness, dec_params->lf.sharpness);

	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_0, d ? dec_params->lf.ref_deltas[0] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_1, d ? dec_params->lf.ref_deltas[1] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_2, d ? dec_params->lf.ref_deltas[2] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_3, d ? dec_params->lf.ref_deltas[3] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_mb_adj_0, d ? dec_params->lf.mode_deltas[0] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_mb_adj_1, d ? dec_params->lf.mode_deltas[1] : 0);
}

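/*
 * Picture dimensions are programmed twice: rounded up to whole 8x8 coding
 * blocks and, separately, in 4x4 block units of the 8-aligned size.
 */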
static void config_picture_dimensions(struct hantro_ctx *ctx, struct hantro_decoded_buffer *dst)
{
	u32 pic_w_4x4, pic_h_4x4;

	hantro_reg_write(ctx->dev, &g2_pic_width_in_cbs, (dst->vp9.width + 7) / 8);
	hantro_reg_write(ctx->dev, &g2_pic_height_in_cbs, (dst->vp9.height + 7) / 8);
	pic_w_4x4 = roundup(dst->vp9.width, 8) >> 2;
	pic_h_4x4 = roundup(dst->vp9.height, 8) >> 2;
	hantro_reg_write(ctx->dev, &g2_pic_width_4x4, pic_w_4x4);
	hantro_reg_write(ctx->dev, &g2_pic_height_4x4, pic_h_4x4);
}

static void
config_bit_depth(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	if (ctx->dev->variant->legacy_regs) {
		hantro_reg_write(ctx->dev, &g2_bit_depth_y, dec_params->bit_depth);
		hantro_reg_write(ctx->dev, &g2_bit_depth_c, dec_params->bit_depth);
		hantro_reg_write(ctx->dev, &g2_pix_shift, 0);
	} else {
		hantro_reg_write(ctx->dev, &g2_bit_depth_y_minus8, dec_params->bit_depth - 8);
		hantro_reg_write(ctx->dev, &g2_bit_depth_c_minus8, dec_params->bit_depth - 8);
	}
}

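/*
 * Lossless mode as defined by the VP9 spec: base_q_idx and all three
 * delta_q values are zero.
 */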
static inline bool is_lossless(const struct v4l2_vp9_quantization *quant)
{
	return quant->base_q_idx == 0 && quant->delta_q_uv_ac == 0 &&
	       quant->delta_q_uv_dc == 0 && quant->delta_q_y_dc == 0;
}

static void
config_quant(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	hantro_reg_write(ctx->dev, &vp9_qp_delta_y_dc, dec_params->quant.delta_q_y_dc);
	hantro_reg_write(ctx->dev, &vp9_qp_delta_ch_dc, dec_params->quant.delta_q_uv_dc);
	hantro_reg_write(ctx->dev, &vp9_qp_delta_ch_ac, dec_params->quant.delta_q_uv_ac);
	hantro_reg_write(ctx->dev, &vp9_lossless_e, is_lossless(&dec_params->quant));
}

static u32
hantro_interp_filter_from_v4l2(unsigned int interpolation_filter)
{
	switch (interpolation_filter) {
	case V4L2_VP9_INTERP_FILTER_EIGHTTAP:
		return 0x1;
	case V4L2_VP9_INTERP_FILTER_EIGHTTAP_SMOOTH:
		return 0;
	case V4L2_VP9_INTERP_FILTER_EIGHTTAP_SHARP:
		return 0x2;
	case V4L2_VP9_INTERP_FILTER_BILINEAR:
		return 0x3;
	case V4L2_VP9_INTERP_FILTER_SWITCHABLE:
		return 0x4;
	}

	return 0;
}

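/*
 * Temporal MV prediction (g2_tempor_mvp_e below) may only be enabled when
 * the previous frame's motion vectors are valid and compatible: no error
 * resilience, neither frame is a key/intra-only frame, no resolution change,
 * and the previous frame was actually shown.
 */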
static void
config_others(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params,
	      bool intra_only, bool resolution_change)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;

	hantro_reg_write(ctx->dev, &g2_idr_pic_e, intra_only);

	hantro_reg_write(ctx->dev, &vp9_transform_mode, vp9_ctx->cur.tx_mode);

	hantro_reg_write(ctx->dev, &vp9_mcomp_filt_type, intra_only ?
		0 : hantro_interp_filter_from_v4l2(dec_params->interpolation_filter));

	hantro_reg_write(ctx->dev, &vp9_high_prec_mv_e,
			 !!(dec_params->flags & V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV));

	hantro_reg_write(ctx->dev, &vp9_comp_pred_mode, dec_params->reference_mode);

	hantro_reg_write(ctx->dev, &g2_tempor_mvp_e,
			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME) &&
			 !(vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME) &&
			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_INTRA_ONLY) &&
			 !resolution_change &&
			 vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_SHOW_FRAME);

	hantro_reg_write(ctx->dev, &g2_write_mvs_e,
			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME));
}

static void
config_compound_reference(struct hantro_ctx *ctx,
			  const struct v4l2_ctrl_vp9_frame *dec_params)
{
	u32 comp_fixed_ref, comp_var_ref[2];
	bool last_ref_frame_sign_bias;
	bool golden_ref_frame_sign_bias;
	bool alt_ref_frame_sign_bias;
	bool comp_ref_allowed = 0;

	comp_fixed_ref = 0;
	comp_var_ref[0] = 0;
	comp_var_ref[1] = 0;

	last_ref_frame_sign_bias = dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_LAST;
	golden_ref_frame_sign_bias = dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_GOLDEN;
	alt_ref_frame_sign_bias = dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_ALT;

	/* 6.3.12 Frame reference mode syntax */
	comp_ref_allowed |= golden_ref_frame_sign_bias != last_ref_frame_sign_bias;
	comp_ref_allowed |= alt_ref_frame_sign_bias != last_ref_frame_sign_bias;

	if (comp_ref_allowed) {
		if (last_ref_frame_sign_bias ==
		    golden_ref_frame_sign_bias) {
			comp_fixed_ref = ALTREF_FRAME;
			comp_var_ref[0] = LAST_FRAME;
			comp_var_ref[1] = GOLDEN_FRAME;
		} else if (last_ref_frame_sign_bias ==
			   alt_ref_frame_sign_bias) {
			comp_fixed_ref = GOLDEN_FRAME;
			comp_var_ref[0] = LAST_FRAME;
			comp_var_ref[1] = ALTREF_FRAME;
		} else {
			comp_fixed_ref = LAST_FRAME;
			comp_var_ref[0] = GOLDEN_FRAME;
			comp_var_ref[1] = ALTREF_FRAME;
		}
	}

	hantro_reg_write(ctx->dev, &vp9_comp_pred_fixed_ref, comp_fixed_ref);
	hantro_reg_write(ctx->dev, &vp9_comp_pred_var_ref0, comp_var_ref[0]);
	hantro_reg_write(ctx->dev, &vp9_comp_pred_var_ref1, comp_var_ref[1]);
}

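/*
 * Innermost step of the coefficient probability copy in config_probs();
 * each group of three probabilities is padded to four bytes, with the pad
 * byte cleared (presumably to match the alignment the hardware expects).
 */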
#define INNER_LOOP							\
	do {								\
		for (m = 0; m < ARRAY_SIZE(adaptive->coef[0][0][0][0]); ++m) { \
			memcpy(adaptive->coef[i][j][k][l][m],		\
			       probs->coef[i][j][k][l][m],		\
			       sizeof(probs->coef[i][j][k][l][m]));	\
									\
			adaptive->coef[i][j][k][l][m][3] = 0;		\
		}							\
	} while (0)

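/*
 * Fill the probability table blob read by the hardware. Nine-element mode
 * probability arrays are stored as eight bytes plus a separate one-byte
 * "tail" (index 8), matching the hantro_g2_all_probs layout.
 */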
static void config_probs(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	struct hantro_aux_buf *misc = &vp9_ctx->misc;
	struct hantro_g2_all_probs *all_probs = misc->cpu;
	struct hantro_g2_probs *adaptive;
	struct hantro_g2_mv_probs *mv;
	const struct v4l2_vp9_segmentation *seg = &dec_params->seg;
	const struct v4l2_vp9_frame_context *probs = &vp9_ctx->probability_tables;
	int i, j, k, l, m;

	for (i = 0; i < ARRAY_SIZE(all_probs->kf_y_mode_prob); ++i)
		for (j = 0; j < ARRAY_SIZE(all_probs->kf_y_mode_prob[0]); ++j) {
			memcpy(all_probs->kf_y_mode_prob[i][j],
			       v4l2_vp9_kf_y_mode_prob[i][j],
			       ARRAY_SIZE(all_probs->kf_y_mode_prob[i][j]));

			all_probs->kf_y_mode_prob_tail[i][j][0] =
				v4l2_vp9_kf_y_mode_prob[i][j][8];
		}

	memcpy(all_probs->mb_segment_tree_probs, seg->tree_probs,
	       sizeof(all_probs->mb_segment_tree_probs));

	memcpy(all_probs->segment_pred_probs, seg->pred_probs,
	       sizeof(all_probs->segment_pred_probs));

	for (i = 0; i < ARRAY_SIZE(all_probs->kf_uv_mode_prob); ++i) {
		memcpy(all_probs->kf_uv_mode_prob[i], v4l2_vp9_kf_uv_mode_prob[i],
		       ARRAY_SIZE(all_probs->kf_uv_mode_prob[i]));

		all_probs->kf_uv_mode_prob_tail[i][0] = v4l2_vp9_kf_uv_mode_prob[i][8];
	}

	adaptive = &all_probs->probs;

	for (i = 0; i < ARRAY_SIZE(adaptive->inter_mode); ++i) {
		memcpy(adaptive->inter_mode[i], probs->inter_mode[i],
		       ARRAY_SIZE(probs->inter_mode[i]));

		adaptive->inter_mode[i][3] = 0;
	}

	memcpy(adaptive->is_inter, probs->is_inter, sizeof(adaptive->is_inter));

	for (i = 0; i < ARRAY_SIZE(adaptive->uv_mode); ++i) {
		memcpy(adaptive->uv_mode[i], probs->uv_mode[i],
		       sizeof(adaptive->uv_mode[i]));
		adaptive->uv_mode_tail[i][0] = probs->uv_mode[i][8];
	}

	memcpy(adaptive->tx8, probs->tx8, sizeof(adaptive->tx8));
	memcpy(adaptive->tx16, probs->tx16, sizeof(adaptive->tx16));
	memcpy(adaptive->tx32, probs->tx32, sizeof(adaptive->tx32));

	for (i = 0; i < ARRAY_SIZE(adaptive->y_mode); ++i) {
		memcpy(adaptive->y_mode[i], probs->y_mode[i],
		       ARRAY_SIZE(adaptive->y_mode[i]));

		adaptive->y_mode_tail[i][0] = probs->y_mode[i][8];
	}

	for (i = 0; i < ARRAY_SIZE(adaptive->partition[0]); ++i) {
		memcpy(adaptive->partition[0][i], v4l2_vp9_kf_partition_probs[i],
		       sizeof(v4l2_vp9_kf_partition_probs[i]));

		adaptive->partition[0][i][3] = 0;
	}

	for (i = 0; i < ARRAY_SIZE(adaptive->partition[1]); ++i) {
		memcpy(adaptive->partition[1][i], probs->partition[i],
		       sizeof(probs->partition[i]));

		adaptive->partition[1][i][3] = 0;
	}

	memcpy(adaptive->interp_filter, probs->interp_filter,
	       sizeof(adaptive->interp_filter));

	memcpy(adaptive->comp_mode, probs->comp_mode, sizeof(adaptive->comp_mode));

	memcpy(adaptive->skip, probs->skip, sizeof(adaptive->skip));

	mv = &adaptive->mv;

	memcpy(mv->joint, probs->mv.joint, sizeof(mv->joint));
	memcpy(mv->sign, probs->mv.sign, sizeof(mv->sign));
	memcpy(mv->class0_bit, probs->mv.class0_bit, sizeof(mv->class0_bit));
	memcpy(mv->fr, probs->mv.fr, sizeof(mv->fr));
	memcpy(mv->class0_hp, probs->mv.class0_hp, sizeof(mv->class0_hp));
	memcpy(mv->hp, probs->mv.hp, sizeof(mv->hp));
	memcpy(mv->classes, probs->mv.classes, sizeof(mv->classes));
	memcpy(mv->class0_fr, probs->mv.class0_fr, sizeof(mv->class0_fr));
	memcpy(mv->bits, probs->mv.bits, sizeof(mv->bits));

	memcpy(adaptive->single_ref, probs->single_ref, sizeof(adaptive->single_ref));

	memcpy(adaptive->comp_ref, probs->comp_ref, sizeof(adaptive->comp_ref));

	for (i = 0; i < ARRAY_SIZE(adaptive->coef); ++i)
		for (j = 0; j < ARRAY_SIZE(adaptive->coef[0]); ++j)
			for (k = 0; k < ARRAY_SIZE(adaptive->coef[0][0]); ++k)
				for (l = 0; l < ARRAY_SIZE(adaptive->coef[0][0][0]); ++l)
					INNER_LOOP;

	hantro_write_addr(ctx->dev, G2_VP9_PROBS_ADDR, misc->dma);
}

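/*
 * Point the hardware at the symbol count buffer; the counters collected
 * there during decoding are read back in hantro_g2_vp9_dec_done() to drive
 * backward probability adaptation.
 */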
static void config_counts(struct hantro_ctx *ctx)
{
	struct hantro_vp9_dec_hw_ctx *vp9_dec = &ctx->vp9_dec;
	struct hantro_aux_buf *misc = &vp9_dec->misc;
	dma_addr_t addr = misc->dma + vp9_dec->ctx_counters_offset;

	hantro_write_addr(ctx->dev, G2_VP9_CTX_COUNT_ADDR, addr);
}

static void config_seg_map(struct hantro_ctx *ctx,
			   const struct v4l2_ctrl_vp9_frame *dec_params,
			   bool intra_only, bool update_map)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	struct hantro_aux_buf *segment_map = &vp9_ctx->segment_map;
	dma_addr_t addr;

	if (intra_only ||
	    (dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT)) {
		memset(segment_map->cpu, 0, segment_map->size);
		memset(vp9_ctx->feature_data, 0, sizeof(vp9_ctx->feature_data));
		memset(vp9_ctx->feature_enabled, 0, sizeof(vp9_ctx->feature_enabled));
	}

	addr = segment_map->dma + vp9_ctx->active_segment * vp9_ctx->segment_map_size;
	hantro_write_addr(ctx->dev, G2_VP9_SEGMENT_READ_ADDR, addr);

	addr = segment_map->dma + (1 - vp9_ctx->active_segment) * vp9_ctx->segment_map_size;
	hantro_write_addr(ctx->dev, G2_VP9_SEGMENT_WRITE_ADDR, addr);

	if (update_map)
		vp9_ctx->active_segment = 1 - vp9_ctx->active_segment;
}

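/*
 * The frame headers have already been parsed by userspace, so decoding
 * starts right after them. Legacy cores take a 16-byte-aligned start
 * address plus a start-bit offset; newer cores take the buffer base plus
 * an aligned byte offset instead.
 */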
static void
config_source(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params,
	      struct vb2_v4l2_buffer *vb2_src)
{
	dma_addr_t stream_base, tmp_addr;
	unsigned int headres_size;
	u32 src_len, start_bit, src_buf_len;

	headres_size = dec_params->uncompressed_header_size
		     + dec_params->compressed_header_size;

	stream_base = vb2_dma_contig_plane_dma_addr(&vb2_src->vb2_buf, 0);

	tmp_addr = stream_base + headres_size;
	if (ctx->dev->variant->legacy_regs)
		hantro_write_addr(ctx->dev, G2_STREAM_ADDR, (tmp_addr & ~0xf));
	else
		hantro_write_addr(ctx->dev, G2_STREAM_ADDR, stream_base);

	start_bit = (tmp_addr & 0xf) * 8;
	hantro_reg_write(ctx->dev, &g2_start_bit, start_bit);

	src_len = vb2_get_plane_payload(&vb2_src->vb2_buf, 0);
	src_len += start_bit / 8 - headres_size;
	hantro_reg_write(ctx->dev, &g2_stream_len, src_len);

	if (!ctx->dev->variant->legacy_regs) {
		tmp_addr &= ~0xf;
		hantro_reg_write(ctx->dev, &g2_strm_start_offset, tmp_addr - stream_base);
		src_buf_len = vb2_plane_size(&vb2_src->vb2_buf, 0);
		hantro_reg_write(ctx->dev, &g2_strm_buffer_len, src_buf_len);
	}
}

static void
config_registers(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params,
		 struct vb2_v4l2_buffer *vb2_src, struct vb2_v4l2_buffer *vb2_dst)
{
	struct hantro_decoded_buffer *dst, *last, *mv_ref;
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	const struct v4l2_vp9_segmentation *seg;
	bool intra_only, resolution_change;

	/* vp9 stuff */
	dst = vb2_to_hantro_decoded_buf(&vb2_dst->vb2_buf);

	if (vp9_ctx->last.valid)
		last = get_ref_buf(ctx, &dst->base.vb, vp9_ctx->last.timestamp);
	else
		last = dst;

	update_dec_buf_info(dst, dec_params);
	update_ctx_cur_info(vp9_ctx, dst, dec_params);
	seg = &dec_params->seg;

	intra_only = !!(dec_params->flags &
			(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
			V4L2_VP9_FRAME_FLAG_INTRA_ONLY));

	if (!intra_only &&
	    !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
	    vp9_ctx->last.valid)
		mv_ref = last;
	else
		mv_ref = dst;

	resolution_change = dst->vp9.width != last->vp9.width ||
			    dst->vp9.height != last->vp9.height;

	/* configure basic registers */
	hantro_reg_write(ctx->dev, &g2_mode, VP9_DEC_MODE);
	if (!ctx->dev->variant->legacy_regs) {
		hantro_reg_write(ctx->dev, &g2_strm_swap, 0xf);
		hantro_reg_write(ctx->dev, &g2_dirmv_swap, 0xf);
		hantro_reg_write(ctx->dev, &g2_compress_swap, 0xf);
		hantro_reg_write(ctx->dev, &g2_ref_compress_bypass, 1);
	} else {
		hantro_reg_write(ctx->dev, &g2_strm_swap_old, 0x1f);
		hantro_reg_write(ctx->dev, &g2_pic_swap, 0x10);
		hantro_reg_write(ctx->dev, &g2_dirmv_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_tab0_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_tab1_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_tab2_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_tab3_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_rscan_swap, 0x10);
	}
	hantro_reg_write(ctx->dev, &g2_buswidth, BUS_WIDTH_128);
	hantro_reg_write(ctx->dev, &g2_max_burst, 16);
	hantro_reg_write(ctx->dev, &g2_apf_threshold, 8);
	hantro_reg_write(ctx->dev, &g2_clk_gate_e, 1);
	hantro_reg_write(ctx->dev, &g2_max_cb_size, 6);
	hantro_reg_write(ctx->dev, &g2_min_cb_size, 3);
	if (ctx->dev->variant->double_buffer)
		hantro_reg_write(ctx->dev, &g2_double_buffer_e, 1);

	config_output(ctx, dst, dec_params);

	if (!intra_only)
		config_ref_registers(ctx, dec_params, dst, mv_ref);

	config_tiles(ctx, dec_params, dst);
	config_segment(ctx, dec_params);
	config_loop_filter(ctx, dec_params);
	config_picture_dimensions(ctx, dst);
	config_bit_depth(ctx, dec_params);
	config_quant(ctx, dec_params);
	config_others(ctx, dec_params, intra_only, resolution_change);
	config_compound_reference(ctx, dec_params);
	config_probs(ctx, dec_params);
	config_counts(ctx);
	config_seg_map(ctx, dec_params, intra_only,
		       seg->flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP);
	config_source(ctx, dec_params, vb2_src);
}

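/*
 * Program the whole register set for one frame, then kick the core by
 * writing the DEC_E bit; completion is signalled through the G2 interrupt
 * and handled by the shared Hantro IRQ path.
 */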
int hantro_g2_vp9_dec_run(struct hantro_ctx *ctx)
{
	const struct v4l2_ctrl_vp9_frame *decode_params;
	struct vb2_v4l2_buffer *src;
	struct vb2_v4l2_buffer *dst;
	int ret;

	hantro_g2_check_idle(ctx->dev);

	ret = start_prepare_run(ctx, &decode_params);
	if (ret) {
		hantro_end_prepare_run(ctx);
		return ret;
	}

	src = hantro_get_src_buf(ctx);
	dst = hantro_get_dst_buf(ctx);

	config_registers(ctx, decode_params, src, dst);

	hantro_end_prepare_run(ctx);

	vdpu_write(ctx->dev, G2_REG_INTERRUPT_DEC_E, G2_REG_INTERRUPT);

	return 0;
}

#define copy_tx_and_skip(p1, p2)				\
do {								\
	memcpy((p1)->tx8, (p2)->tx8, sizeof((p1)->tx8));	\
	memcpy((p1)->tx16, (p2)->tx16, sizeof((p1)->tx16));	\
	memcpy((p1)->tx32, (p2)->tx32, sizeof((p1)->tx32));	\
	memcpy((p1)->skip, (p2)->skip, sizeof((p1)->skip));	\
} while (0)

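/*
 * Backward adaptation, 6.1.2 refresh_probs() of the VP9 spec: unless the
 * frame was decoded in frame-parallel mode, adapt the probabilities using
 * the symbol counts collected by the hardware, then save the result back
 * into the selected frame context.
 */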
void hantro_g2_vp9_dec_done(struct hantro_ctx *ctx)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	unsigned int fctx_idx;

	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX))
		goto out_update_last;

	fctx_idx = vp9_ctx->cur.frame_context_idx;

	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE)) {
		/* error_resilient_mode == 0 && frame_parallel_decoding_mode == 0 */
		struct v4l2_vp9_frame_context *probs = &vp9_ctx->probability_tables;
		bool frame_is_intra = vp9_ctx->cur.flags &
		    (V4L2_VP9_FRAME_FLAG_KEY_FRAME | V4L2_VP9_FRAME_FLAG_INTRA_ONLY);
		struct tx_and_skip {
			u8 tx8[2][1];
			u8 tx16[2][2];
			u8 tx32[2][3];
			u8 skip[3];
		} _tx_skip, *tx_skip = &_tx_skip;
		struct v4l2_vp9_frame_symbol_counts *counts;
		struct symbol_counts *hantro_cnts;
		u32 tx16p[2][4];
		int i;

		/* buffer the forward-updated TX and skip probs */
		if (frame_is_intra)
			copy_tx_and_skip(tx_skip, probs);

		/* 6.1.2 refresh_probs(): load_probs() and load_probs2() */
		*probs = vp9_ctx->frame_context[fctx_idx];

		/* if FrameIsIntra then undo the effect of load_probs2() */
		if (frame_is_intra)
			copy_tx_and_skip(probs, tx_skip);

		counts = &vp9_ctx->cnts;
		hantro_cnts = vp9_ctx->misc.cpu + vp9_ctx->ctx_counters_offset;
		for (i = 0; i < ARRAY_SIZE(tx16p); ++i) {
			memcpy(tx16p[i],
			       hantro_cnts->tx16x16_count[i],
			       sizeof(hantro_cnts->tx16x16_count[0]));
			tx16p[i][3] = 0;
		}
		counts->tx16p = &tx16p;

		v4l2_vp9_adapt_coef_probs(probs, counts,
					  !vp9_ctx->last.valid ||
					  vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME,
					  frame_is_intra);

		if (!frame_is_intra) {
			/* load_probs2() already done */
			u32 mv_mode[7][4];

			for (i = 0; i < ARRAY_SIZE(mv_mode); ++i) {
				mv_mode[i][0] = hantro_cnts->inter_mode_counts[i][1][0];
				mv_mode[i][1] = hantro_cnts->inter_mode_counts[i][2][0];
				mv_mode[i][2] = hantro_cnts->inter_mode_counts[i][0][0];
				mv_mode[i][3] = hantro_cnts->inter_mode_counts[i][2][1];
			}
			counts->mv_mode = &mv_mode;
			v4l2_vp9_adapt_noncoef_probs(&vp9_ctx->probability_tables, counts,
						     vp9_ctx->cur.reference_mode,
						     vp9_ctx->cur.interpolation_filter,
						     vp9_ctx->cur.tx_mode, vp9_ctx->cur.flags);
		}
	}

	vp9_ctx->frame_context[fctx_idx] = vp9_ctx->probability_tables;

out_update_last:
	vp9_ctx->last = vp9_ctx->cur;
}