/*
 * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license and patent
 * grant that can be found in the LICENSE file in the root of the source
 * tree. All contributing project authors may be found in the AUTHORS
 * file in the root of the source tree.
 */
13 #include "vpx_codec/vpx_decoder.h"
15 #include "vpx_codec/internal/vpx_codec_internal.h"
16 #include "vpx_version.h"
18 #include "onyxd_int.h"
/* Capability flag advertised by the iface: postprocessing support is a
 * compile-time option (see CONFIG_POSTPROC), so the flag is conditional.
 */
#define VP8_CAP_POSTPROC (CONFIG_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)
24 ((d&0x000000ff)<<24) | \
25 ((d&0x0000ff00)<<8) | \
26 ((d&0x00ff0000)>>8) | \
29 ((d&0x000000ff)<<8) | \
35 typedef vpx_codec_stream_info_t vp8_stream_info_t
;
/* Structures for handling memory allocations */

/* Identifiers for the memory-map segments the decoder requests from the
 * application. Only one real segment (the algorithm's private state) is
 * used; VP8_SEG_MAX terminates segment lists (see vp8_mem_req_segs).
 */
enum
{
    VP8_SEG_ALG_PRIV = 256,
    VP8_SEG_MAX
};
/* Element count of a statically-sized array. Only valid on true arrays,
 * never on pointers or decayed array parameters. The argument is
 * parenthesized so expressions like NELEMENTS(p->arr) expand safely.
 */
#define NELEMENTS(x) (sizeof(x)/sizeof((x)[0]))
45 static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t
*si
, vpx_codec_flags_t
);
53 unsigned long(*calc_sz
)(const vpx_codec_dec_cfg_t
*, vpx_codec_flags_t
);
56 static const mem_req_t vp8_mem_req_segs
[] =
58 {VP8_SEG_ALG_PRIV
, 0, 8, VPX_CODEC_MEM_ZERO
, vp8_priv_sz
},
59 {VP8_SEG_MAX
, 0, 0, 0, NULL
}
62 struct vpx_codec_alg_priv
64 vpx_codec_priv_t base
;
65 vpx_codec_mmap_t mmaps
[NELEMENTS(vp8_mem_req_segs
)-1];
66 vpx_codec_dec_cfg_t cfg
;
72 vp8_postproc_cfg_t postproc_cfg
;
78 static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t
*si
, vpx_codec_flags_t flags
)
80 /* Although this declaration is constant, we can't use it in the requested
81 * segments list because we want to define the requested segments list
82 * before defining the private type (so that the number of memory maps is
86 return sizeof(vpx_codec_alg_priv_t
);
90 static void vp8_mmap_dtor(vpx_codec_mmap_t
*mmap
)
95 static vpx_codec_err_t
vp8_mmap_alloc(vpx_codec_mmap_t
*mmap
)
100 align
= mmap
->align
? mmap
->align
- 1 : 0;
102 if (mmap
->flags
& VPX_CODEC_MEM_ZERO
)
103 mmap
->priv
= calloc(1, mmap
->sz
+ align
);
105 mmap
->priv
= malloc(mmap
->sz
+ align
);
107 res
= (mmap
->priv
) ? VPX_CODEC_OK
: VPX_CODEC_MEM_ERROR
;
108 mmap
->base
= (void *)((((uintptr_t)mmap
->priv
) + align
) & ~(uintptr_t)align
);
109 mmap
->dtor
= vp8_mmap_dtor
;
113 static vpx_codec_err_t
vp8_validate_mmaps(const vp8_stream_info_t
*si
,
114 const vpx_codec_mmap_t
*mmaps
,
115 vpx_codec_flags_t init_flags
)
118 vpx_codec_err_t res
= VPX_CODEC_OK
;
120 for (i
= 0; i
< NELEMENTS(vp8_mem_req_segs
) - 1; i
++)
122 /* Ensure the segment has been allocated */
125 res
= VPX_CODEC_MEM_ERROR
;
129 /* Verify variable size segment is big enough for the current si. */
130 if (vp8_mem_req_segs
[i
].calc_sz
)
132 vpx_codec_dec_cfg_t cfg
;
137 if (mmaps
[i
].sz
< vp8_mem_req_segs
[i
].calc_sz(&cfg
, init_flags
))
139 res
= VPX_CODEC_MEM_ERROR
;
148 static void vp8_init_ctx(vpx_codec_ctx_t
*ctx
, const vpx_codec_mmap_t
*mmap
)
152 ctx
->priv
= mmap
->base
;
153 ctx
->priv
->sz
= sizeof(*ctx
->priv
);
154 ctx
->priv
->iface
= ctx
->iface
;
155 ctx
->priv
->alg_priv
= mmap
->base
;
157 for (i
= 0; i
< NELEMENTS(ctx
->priv
->alg_priv
->mmaps
); i
++)
158 ctx
->priv
->alg_priv
->mmaps
[i
].id
= vp8_mem_req_segs
[i
].id
;
160 ctx
->priv
->alg_priv
->mmaps
[0] = *mmap
;
161 ctx
->priv
->alg_priv
->si
.sz
= sizeof(ctx
->priv
->alg_priv
->si
);
162 ctx
->priv
->init_flags
= ctx
->init_flags
;
166 /* Update the reference to the config structure to an internal copy. */
167 ctx
->priv
->alg_priv
->cfg
= *ctx
->config
.dec
;
168 ctx
->config
.dec
= &ctx
->priv
->alg_priv
->cfg
;
172 static void *mmap_lkup(vpx_codec_alg_priv_t
*ctx
, int id
)
176 for (i
= 0; i
< NELEMENTS(vp8_mem_req_segs
); i
++)
177 if (ctx
->mmaps
[i
].id
== id
)
178 return ctx
->mmaps
[i
].base
;
182 static void vp8_finalize_mmaps(vpx_codec_alg_priv_t
*ctx
)
185 ctx->pbi = mmap_lkup(ctx, VP6_SEG_PB_INSTANCE);
186 ctx->pbi->mbi.block_dx_info[0].idct_output_ptr = mmap_lkup(ctx, VP6_SEG_IDCT_BUFFER);
187 ctx->pbi->loop_filtered_block = mmap_lkup(ctx, VP6_SEG_LF_BLOCK);
188 ctx->pbi->huff = mmap_lkup(ctx, VP6_SEG_HUFF);
189 ctx->pbi->mbi.coeffs_base_ptr = mmap_lkup(ctx, VP6_SEG_COEFFS);
190 ctx->pbi->fc.above_y = mmap_lkup(ctx, VP6_SEG_ABOVEY);
191 ctx->pbi->fc.above_u = mmap_lkup(ctx, VP6_SEG_ABOVEU);
192 ctx->pbi->fc.above_v = mmap_lkup(ctx, VP6_SEG_ABOVEV);
193 ctx->pbi->prediction_mode = mmap_lkup(ctx, VP6_SEG_PRED_MODES);
194 ctx->pbi->mbmotion_vector = mmap_lkup(ctx, VP6_SEG_MV_FIELD);
195 ctx->pbi->fb_storage_ptr[0] = mmap_lkup(ctx, VP6_SEG_IMG0_STRG);
196 ctx->pbi->fb_storage_ptr[1] = mmap_lkup(ctx, VP6_SEG_IMG1_STRG);
197 ctx->pbi->fb_storage_ptr[2] = mmap_lkup(ctx, VP6_SEG_IMG2_STRG);
198 #if CONFIG_NEW_TOKENS
199 ctx->pbi->token_graph = mmap_lkup(ctx, VP6_SEG_TOKEN_GRAPH);
202 ctx->pbi->postproc.deblock.fragment_variances = mmap_lkup(ctx, VP6_SEG_DEBLOCKER);
203 ctx->pbi->fb_storage_ptr[3] = mmap_lkup(ctx, VP6_SEG_PP_IMG_STRG);
208 static vpx_codec_err_t
vp8_init(vpx_codec_ctx_t
*ctx
)
210 vpx_codec_err_t res
= VPX_CODEC_OK
;
212 /* This function only allocates space for the vpx_codec_alg_priv_t
213 * structure. More memory may be required at the time the stream
214 * information becomes known.
218 vpx_codec_mmap_t mmap
;
220 mmap
.id
= vp8_mem_req_segs
[0].id
;
221 mmap
.sz
= sizeof(vpx_codec_alg_priv_t
);
222 mmap
.align
= vp8_mem_req_segs
[0].align
;
223 mmap
.flags
= vp8_mem_req_segs
[0].flags
;
225 res
= vp8_mmap_alloc(&mmap
);
228 vp8_init_ctx(ctx
, &mmap
);
230 ctx
->priv
->alg_priv
->defer_alloc
= 1;
231 /*post processing level initialized to do nothing */
238 static vpx_codec_err_t
vp8_destroy(vpx_codec_alg_priv_t
*ctx
)
242 vp8dx_remove_decompressor(ctx
->pbi
);
244 for (i
= NELEMENTS(ctx
->mmaps
) - 1; i
>= 0; i
--)
246 if (ctx
->mmaps
[i
].dtor
)
247 ctx
->mmaps
[i
].dtor(&ctx
->mmaps
[i
]);
253 static vpx_codec_err_t
vp8_peek_si(const uint8_t *data
,
254 unsigned int data_sz
,
255 vpx_codec_stream_info_t
*si
)
258 vpx_codec_err_t res
= VPX_CODEC_OK
;
260 /*Parse from VP8 compressed data, the implies knowledge of the
262 * First 3 byte header including version, frame type and an offset
263 * Next 3 bytes are image sizewith 12 bit each for width and height
268 if (data_sz
>= 10 && !(data
[0] & 0x01)) /* I-Frame */
270 const uint8_t *c
= data
+ 3;
274 if (c
[0] != 0x9d || c
[1] != 0x01 || c
[2] != 0x2a)
275 res
= VPX_CODEC_UNSUP_BITSTREAM
;
277 si
->w
= swap2(*(const unsigned short *)(c
+ 3)) & 0x3fff;
278 si
->h
= swap2(*(const unsigned short *)(c
+ 5)) & 0x3fff;
280 //printf("w=%d, h=%d\n", si->w, si->h);
281 if (!(si
->h
| si
->w
))
282 res
= VPX_CODEC_UNSUP_BITSTREAM
;
285 res
= VPX_CODEC_UNSUP_BITSTREAM
;
292 static vpx_codec_err_t
vp8_get_si(vpx_codec_alg_priv_t
*ctx
,
293 vpx_codec_stream_info_t
*si
)
298 if (si
->sz
>= sizeof(vp8_stream_info_t
))
299 sz
= sizeof(vp8_stream_info_t
);
301 sz
= sizeof(vpx_codec_stream_info_t
);
303 memcpy(si
, &ctx
->si
, sz
);
310 static vpx_codec_err_t
311 update_error_state(vpx_codec_alg_priv_t
*ctx
,
312 const struct vpx_internal_error_info
*error
)
316 if ((res
= error
->error_code
))
317 ctx
->base
.err_detail
= error
->has_detail
325 static vpx_codec_err_t
vp8_decode(vpx_codec_alg_priv_t
*ctx
,
327 unsigned int data_sz
,
331 vpx_codec_err_t res
= VPX_CODEC_OK
;
335 /* Determine the stream parameters */
337 res
= ctx
->base
.iface
->dec
.peek_si(data
, data_sz
, &ctx
->si
);
340 /* Perform deferred allocations, if required */
341 if (!res
&& ctx
->defer_alloc
)
345 for (i
= 1; !res
&& i
< NELEMENTS(ctx
->mmaps
); i
++)
347 vpx_codec_dec_cfg_t cfg
;
351 ctx
->mmaps
[i
].id
= vp8_mem_req_segs
[i
].id
;
352 ctx
->mmaps
[i
].sz
= vp8_mem_req_segs
[i
].sz
;
353 ctx
->mmaps
[i
].align
= vp8_mem_req_segs
[i
].align
;
354 ctx
->mmaps
[i
].flags
= vp8_mem_req_segs
[i
].flags
;
356 if (!ctx
->mmaps
[i
].sz
)
357 ctx
->mmaps
[i
].sz
= vp8_mem_req_segs
[i
].calc_sz(&cfg
,
358 ctx
->base
.init_flags
);
360 res
= vp8_mmap_alloc(&ctx
->mmaps
[i
]);
364 vp8_finalize_mmaps(ctx
);
366 ctx
->defer_alloc
= 0;
369 /* Initialize the decoder instance on the first frame*/
370 if (!res
&& !ctx
->decoder_init
)
372 res
= vp8_validate_mmaps(&ctx
->si
, ctx
->mmaps
, ctx
->base
.init_flags
);
381 oxcf
.Width
= ctx
->si
.w
;
382 oxcf
.Height
= ctx
->si
.h
;
384 oxcf
.postprocess
= 0;
385 oxcf
.max_threads
= ctx
->cfg
.threads
;
387 optr
= vp8dx_create_decompressor(&oxcf
);
389 /* If postprocessing was enabled by the application and a
390 * configuration has not been provided, default it.
392 if (!ctx
->postproc_cfg_set
393 && (ctx
->base
.init_flags
& VPX_CODEC_USE_POSTPROC
))
395 ctx
->postproc_cfg
.post_proc_flag
=
396 VP8_DEBLOCK
| VP8_DEMACROBLOCK
;
397 ctx
->postproc_cfg
.deblocking_level
= 4;
398 ctx
->postproc_cfg
.noise_level
= 0;
402 res
= VPX_CODEC_ERROR
;
407 ctx
->decoder_init
= 1;
410 if (!res
&& ctx
->pbi
)
412 YV12_BUFFER_CONFIG sd
;
413 INT64 time_stamp
= 0, time_end_stamp
= 0;
415 int ppdeblocking
= 0;
418 if (ctx
->base
.init_flags
& VPX_CODEC_USE_POSTPROC
)
420 ppflag
= ctx
->postproc_cfg
.post_proc_flag
;
421 ppdeblocking
= ctx
->postproc_cfg
.deblocking_level
;
422 ppnoise
= ctx
->postproc_cfg
.noise_level
;
425 if (vp8dx_receive_compressed_data(ctx
->pbi
, data_sz
, data
, deadline
))
427 VP8D_COMP
*pbi
= (VP8D_COMP
*)ctx
->pbi
;
428 res
= update_error_state(ctx
, &pbi
->common
.error
);
431 if (!res
&& 0 == vp8dx_get_raw_frame(ctx
->pbi
, &sd
, &time_stamp
, &time_end_stamp
, ppdeblocking
, ppnoise
, ppflag
))
433 /* Align width/height */
434 unsigned int a_w
= (sd
.y_width
+ 15) & ~15;
435 unsigned int a_h
= (sd
.y_height
+ 15) & ~15;
437 vpx_img_wrap(&ctx
->img
, IMG_FMT_I420
,
438 a_w
+ 2 * VP8BORDERINPIXELS
,
439 a_h
+ 2 * VP8BORDERINPIXELS
,
442 vpx_img_set_rect(&ctx
->img
,
443 VP8BORDERINPIXELS
, VP8BORDERINPIXELS
,
444 sd
.y_width
, sd
.y_height
);
/* Returns the most recently decoded frame, or NULL when none is pending.
 * NOTE(review): the body below is truncated in this chunk — the logic that
 * consumes *iter (the "flip flop" the comment describes) is not visible
 * here, so nothing beyond the declaration of img can be asserted.
 */
453 static vpx_image_t
*vp8_get_frame(vpx_codec_alg_priv_t
*ctx
,
454 vpx_codec_iter_t
*iter
)
456 vpx_image_t
*img
= NULL
;
460 /* iter acts as a flip flop, so an image is only returned on the first
475 vpx_codec_err_t
vp8_xma_get_mmap(const vpx_codec_ctx_t
*ctx
,
476 vpx_codec_mmap_t
*mmap
,
477 vpx_codec_iter_t
*iter
)
480 const mem_req_t
*seg_iter
= *iter
;
482 /* Get address of next segment request */
486 seg_iter
= vp8_mem_req_segs
;
487 else if (seg_iter
->id
!= VP8_SEG_MAX
)
490 *iter
= (vpx_codec_iter_t
)seg_iter
;
492 if (seg_iter
->id
!= VP8_SEG_MAX
)
494 mmap
->id
= seg_iter
->id
;
495 mmap
->sz
= seg_iter
->sz
;
496 mmap
->align
= seg_iter
->align
;
497 mmap
->flags
= seg_iter
->flags
;
500 mmap
->sz
= seg_iter
->calc_sz(ctx
->config
.dec
, ctx
->init_flags
);
505 res
= VPX_CODEC_LIST_END
;
507 while (!mmap
->sz
&& res
!= VPX_CODEC_LIST_END
);
512 static vpx_codec_err_t
vp8_xma_set_mmap(vpx_codec_ctx_t
*ctx
,
513 const vpx_codec_mmap_t
*mmap
)
515 vpx_codec_err_t res
= VPX_CODEC_MEM_ERROR
;
520 if (mmap
->id
== VP8_SEG_ALG_PRIV
)
524 vp8_init_ctx(ctx
, mmap
);
532 if (ctx
->priv
->alg_priv
)
534 for (i
= 0; i
< NELEMENTS(vp8_mem_req_segs
); i
++)
536 if (ctx
->priv
->alg_priv
->mmaps
[i
].id
== mmap
->id
)
537 if (!ctx
->priv
->alg_priv
->mmaps
[i
].base
)
539 ctx
->priv
->alg_priv
->mmaps
[i
] = *mmap
;
543 done
&= (ctx
->priv
->alg_priv
->mmaps
[i
].base
!= NULL
);
549 vp8_finalize_mmaps(ctx
->priv
->alg_priv
);
550 res
= ctx
->iface
->init(ctx
);
556 static vpx_codec_err_t
image2yuvconfig(const vpx_image_t
*img
,
557 YV12_BUFFER_CONFIG
*yv12
)
559 vpx_codec_err_t res
= VPX_CODEC_OK
;
560 yv12
->y_buffer
= img
->planes
[PLANE_Y
];
561 yv12
->u_buffer
= img
->planes
[PLANE_U
];
562 yv12
->v_buffer
= img
->planes
[PLANE_V
];
564 yv12
->y_width
= img
->d_w
;
565 yv12
->y_height
= img
->d_h
;
566 yv12
->uv_width
= yv12
->y_width
/ 2;
567 yv12
->uv_height
= yv12
->y_height
/ 2;
569 yv12
->y_stride
= img
->stride
[PLANE_Y
];
570 yv12
->uv_stride
= img
->stride
[PLANE_U
];
572 yv12
->border
= (img
->stride
[PLANE_Y
] - img
->d_w
) / 2;
573 yv12
->clrtype
= (img
->fmt
== IMG_FMT_VPXI420
|| img
->fmt
== IMG_FMT_VPXYV12
);
579 static vpx_codec_err_t
vp8_set_reference(vpx_codec_alg_priv_t
*ctx
,
584 vpx_ref_frame_t
*data
= va_arg(args
, vpx_ref_frame_t
*);
588 vpx_ref_frame_t
*frame
= (vpx_ref_frame_t
*)data
;
589 YV12_BUFFER_CONFIG sd
;
591 image2yuvconfig(&frame
->img
, &sd
);
593 vp8dx_set_reference(ctx
->pbi
, frame
->frame_type
, &sd
);
597 return VPX_CODEC_INVALID_PARAM
;
601 static vpx_codec_err_t
vp8_get_reference(vpx_codec_alg_priv_t
*ctx
,
606 vpx_ref_frame_t
*data
= va_arg(args
, vpx_ref_frame_t
*);
610 vpx_ref_frame_t
*frame
= (vpx_ref_frame_t
*)data
;
611 YV12_BUFFER_CONFIG sd
;
613 image2yuvconfig(&frame
->img
, &sd
);
615 vp8dx_get_reference(ctx
->pbi
, frame
->frame_type
, &sd
);
619 return VPX_CODEC_INVALID_PARAM
;
623 static vpx_codec_err_t
vp8_set_postproc(vpx_codec_alg_priv_t
*ctx
,
627 vp8_postproc_cfg_t
*data
= va_arg(args
, vp8_postproc_cfg_t
*);
632 ctx
->postproc_cfg_set
= 1;
633 ctx
->postproc_cfg
= *((vp8_postproc_cfg_t
*)data
);
637 return VPX_CODEC_INVALID_PARAM
;
640 return VPX_CODEC_INCAPABLE
;
645 vpx_codec_ctrl_fn_map_t vp8_ctf_maps
[] =
647 {VP8_SET_REFERENCE
, vp8_set_reference
},
648 {VP8_COPY_REFERENCE
, vp8_get_reference
},
649 {VP8_SET_POSTPROC
, vp8_set_postproc
},
/* Allow the build system to append a version suffix to the iface names;
 * default to nothing so the adjacent string literals concatenate cleanly.
 */
#ifndef VERSION_STRING
#define VERSION_STRING
#endif
657 vpx_codec_iface_t vpx_codec_vp8_dx_algo
=
659 "vpx Technologies VP8 Decoder" VERSION_STRING
,
660 VPX_CODEC_INTERNAL_ABI_VERSION
,
661 VPX_CODEC_CAP_DECODER
| VP8_CAP_POSTPROC
,
662 /* vpx_codec_caps_t caps; */
663 vp8_init
, /* vpx_codec_init_fn_t init; */
664 vp8_destroy
, /* vpx_codec_destroy_fn_t destroy; */
665 vp8_ctf_maps
, /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
666 vp8_xma_get_mmap
, /* vpx_codec_get_mmap_fn_t get_mmap; */
667 vp8_xma_set_mmap
, /* vpx_codec_set_mmap_fn_t set_mmap; */
669 vp8_peek_si
, /* vpx_codec_peek_si_fn_t peek_si; */
670 vp8_get_si
, /* vpx_codec_get_si_fn_t get_si; */
671 vp8_decode
, /* vpx_codec_decode_fn_t decode; */
672 vp8_get_frame
, /* vpx_codec_frame_get_fn_t frame_get; */
674 {NOT_IMPLEMENTED
} /* encoder functions */
/*
 * BEGIN BACKWARDS COMPATIBILITY SHIM.
 */
680 vpx_codec_iface_t vpx_codec_vp8_algo
=
682 "vpx Technologies VP8 Decoder (Deprecated API)" VERSION_STRING
,
683 VPX_CODEC_INTERNAL_ABI_VERSION
,
684 VPX_CODEC_CAP_DECODER
| VP8_CAP_POSTPROC
,
685 /* vpx_codec_caps_t caps; */
686 vp8_init
, /* vpx_codec_init_fn_t init; */
687 vp8_destroy
, /* vpx_codec_destroy_fn_t destroy; */
688 vp8_ctf_maps
, /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
689 vp8_xma_get_mmap
, /* vpx_codec_get_mmap_fn_t get_mmap; */
690 vp8_xma_set_mmap
, /* vpx_codec_set_mmap_fn_t set_mmap; */
692 vp8_peek_si
, /* vpx_codec_peek_si_fn_t peek_si; */
693 vp8_get_si
, /* vpx_codec_get_si_fn_t get_si; */
694 vp8_decode
, /* vpx_codec_decode_fn_t decode; */
695 vp8_get_frame
, /* vpx_codec_frame_get_fn_t frame_get; */
697 {NOT_IMPLEMENTED
} /* encoder functions */