2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
14 #include "vpx/vpx_decoder.h"
15 #include "vpx/vp8dx.h"
16 #include "vpx/internal/vpx_codec_internal.h"
17 #include "vpx_version.h"
19 #include "onyxd_int.h"
21 #define VP8_CAP_POSTPROC (CONFIG_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)
/* NOTE(review): the fragments below are the bodies of byte-swap macros;
 * the `#define swap4(d)`/`#define swap2(d)` header lines (and, presumably,
 * the CONFIG_BIG_ENDIAN conditional around them) were lost in extraction —
 * confirm against the full file. swap2() is used by vp8_peek_si() to read
 * the 16-bit width/height fields of the keyframe header.
 */
25 ((d&0x000000ff)<<24) | \
26 ((d&0x0000ff00)<<8) | \
27 ((d&0x00ff0000)>>8) | \
30 ((d&0x000000ff)<<8) | \
/* Alias for the generic stream-info struct; vp8_get_si() copies either
 * sizeof(vp8_stream_info_t) or sizeof(vpx_codec_stream_info_t) depending
 * on the caller-declared size. */
36 typedef vpx_codec_stream_info_t vp8_stream_info_t
;
38 /* Structures for handling memory allocations */
/* NOTE(review): enum header and remaining enumerators (at least VP8_SEG_MAX,
 * used below) are missing from this extraction — confirm against full file. */
41 VP8_SEG_ALG_PRIV
= 256,
44 #define NELEMENTS(x) (sizeof(x)/sizeof(x[0]))
/* Forward declaration: computes the size of the ALG_PRIV segment. */
46 static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t
*si
, vpx_codec_flags_t
);
/* mem_req_t member: optional callback computing a segment's size from the
 * decoder config; NOTE(review): the struct header and fixed members
 * (id/sz/align/flags, inferred from the initializers below) are missing. */
54 unsigned long(*calc_sz
)(const vpx_codec_dec_cfg_t
*, vpx_codec_flags_t
);
/* Segment request table. The final VP8_SEG_MAX row is a sentinel/terminator,
 * which is why alg_priv's mmaps[] array is sized NELEMENTS(...) - 1. */
57 static const mem_req_t vp8_mem_req_segs
[] =
59 {VP8_SEG_ALG_PRIV
, 0, 8, VPX_CODEC_MEM_ZERO
, vp8_priv_sz
},
60 {VP8_SEG_MAX
, 0, 0, 0, NULL
}
/* Per-instance private state for the VP8 decoder interface.
 * NOTE(review): several members referenced elsewhere in this file
 * (pbi, si, img, img_avail, decoder_init, defer_alloc, postproc_cfg_set)
 * are missing from this extraction — confirm against the full file. */
63 struct vpx_codec_alg_priv
65 vpx_codec_priv_t base
;
/* One slot per real segment; the sentinel row of vp8_mem_req_segs is
 * excluded, hence the -1. */
66 vpx_codec_mmap_t mmaps
[NELEMENTS(vp8_mem_req_segs
)-1];
/* Internal copy of the caller's decode configuration (see vp8_init_ctx). */
67 vpx_codec_dec_cfg_t cfg
;
/* Postprocessing settings, set via VP8_SET_POSTPROC or defaulted on the
 * first decoded frame. */
73 vp8_postproc_cfg_t postproc_cfg
;
79 static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t
*si
, vpx_codec_flags_t flags
)
81 /* Although this declaration is constant, we can't use it in the requested
82 * segments list because we want to define the requested segments list
83 * before defining the private type (so that the number of memory maps is
87 return sizeof(vpx_codec_alg_priv_t
);
/* Destructor installed on each segment by vp8_mmap_alloc().
 * NOTE(review): the body is not visible in this chunk — presumably
 * free(mmap->priv); confirm against the full file. */
91 static void vp8_mmap_dtor(vpx_codec_mmap_t
*mmap
)
96 static vpx_codec_err_t
vp8_mmap_alloc(vpx_codec_mmap_t
*mmap
)
101 align
= mmap
->align
? mmap
->align
- 1 : 0;
103 if (mmap
->flags
& VPX_CODEC_MEM_ZERO
)
104 mmap
->priv
= calloc(1, mmap
->sz
+ align
);
106 mmap
->priv
= malloc(mmap
->sz
+ align
);
108 res
= (mmap
->priv
) ? VPX_CODEC_OK
: VPX_CODEC_MEM_ERROR
;
109 mmap
->base
= (void *)((((uintptr_t)mmap
->priv
) + align
) & ~(uintptr_t)align
);
110 mmap
->dtor
= vp8_mmap_dtor
;
/* Verify that every caller-supplied segment exists and is large enough for
 * the current stream parameters; returns VPX_CODEC_MEM_ERROR otherwise.
 * NOTE(review): loop body braces, the allocation check, the cfg.w/cfg.h
 * setup from *si, and the final return are missing from this extraction —
 * confirm against the full file. */
114 static vpx_codec_err_t
vp8_validate_mmaps(const vp8_stream_info_t
*si
,
115 const vpx_codec_mmap_t
*mmaps
,
116 vpx_codec_flags_t init_flags
)
119 vpx_codec_err_t res
= VPX_CODEC_OK
;
/* Iterate real segments only; the table's last row is a sentinel. */
121 for (i
= 0; i
< NELEMENTS(vp8_mem_req_segs
) - 1; i
++)
123 /* Ensure the segment has been allocated */
126 res
= VPX_CODEC_MEM_ERROR
;
130 /* Verify variable size segment is big enough for the current si. */
131 if (vp8_mem_req_segs
[i
].calc_sz
)
133 vpx_codec_dec_cfg_t cfg
;
/* NOTE(review): cfg presumably gets w/h from si here — lines not visible. */
138 if (mmaps
[i
].sz
< vp8_mem_req_segs
[i
].calc_sz(&cfg
, init_flags
))
140 res
= VPX_CODEC_MEM_ERROR
;
/* Wire a freshly allocated ALG_PRIV segment into the codec context:
 * the segment doubles as both vpx_codec_priv_t header and alg_priv body
 * (both point at mmap->base). Also seeds the per-segment id table and
 * snapshots the caller's decode config into an internal copy. */
149 static void vp8_init_ctx(vpx_codec_ctx_t
*ctx
, const vpx_codec_mmap_t
*mmap
)
153 ctx
->priv
= mmap
->base
;
154 ctx
->priv
->sz
= sizeof(*ctx
->priv
);
155 ctx
->priv
->iface
= ctx
->iface
;
156 ctx
->priv
->alg_priv
= mmap
->base
;
/* Pre-assign each slot's segment id from the request table. */
158 for (i
= 0; i
< NELEMENTS(ctx
->priv
->alg_priv
->mmaps
); i
++)
159 ctx
->priv
->alg_priv
->mmaps
[i
].id
= vp8_mem_req_segs
[i
].id
;
/* Slot 0 is the ALG_PRIV segment we were just handed. */
161 ctx
->priv
->alg_priv
->mmaps
[0] = *mmap
;
162 ctx
->priv
->alg_priv
->si
.sz
= sizeof(ctx
->priv
->alg_priv
->si
);
163 ctx
->priv
->init_flags
= ctx
->init_flags
;
/* NOTE(review): an `if (ctx->config.dec)` guard presumably wraps the copy
 * below (lines missing from extraction) — confirm against the full file. */
167 /* Update the reference to the config structure to an internal copy. */
168 ctx
->priv
->alg_priv
->cfg
= *ctx
->config
.dec
;
169 ctx
->config
.dec
= &ctx
->priv
->alg_priv
->cfg
;
173 static void *mmap_lkup(vpx_codec_alg_priv_t
*ctx
, int id
)
177 for (i
= 0; i
< NELEMENTS(vp8_mem_req_segs
); i
++)
178 if (ctx
->mmaps
[i
].id
== id
)
179 return ctx
->mmaps
[i
].base
;
183 static void vp8_finalize_mmaps(vpx_codec_alg_priv_t
*ctx
)
186 ctx->pbi = mmap_lkup(ctx, VP6_SEG_PB_INSTANCE);
187 ctx->pbi->mbi.block_dx_info[0].idct_output_ptr = mmap_lkup(ctx, VP6_SEG_IDCT_BUFFER);
188 ctx->pbi->loop_filtered_block = mmap_lkup(ctx, VP6_SEG_LF_BLOCK);
189 ctx->pbi->huff = mmap_lkup(ctx, VP6_SEG_HUFF);
190 ctx->pbi->mbi.coeffs_base_ptr = mmap_lkup(ctx, VP6_SEG_COEFFS);
191 ctx->pbi->fc.above_y = mmap_lkup(ctx, VP6_SEG_ABOVEY);
192 ctx->pbi->fc.above_u = mmap_lkup(ctx, VP6_SEG_ABOVEU);
193 ctx->pbi->fc.above_v = mmap_lkup(ctx, VP6_SEG_ABOVEV);
194 ctx->pbi->prediction_mode = mmap_lkup(ctx, VP6_SEG_PRED_MODES);
195 ctx->pbi->mbmotion_vector = mmap_lkup(ctx, VP6_SEG_MV_FIELD);
196 ctx->pbi->fb_storage_ptr[0] = mmap_lkup(ctx, VP6_SEG_IMG0_STRG);
197 ctx->pbi->fb_storage_ptr[1] = mmap_lkup(ctx, VP6_SEG_IMG1_STRG);
198 ctx->pbi->fb_storage_ptr[2] = mmap_lkup(ctx, VP6_SEG_IMG2_STRG);
200 ctx->pbi->postproc.deblock.fragment_variances = mmap_lkup(ctx, VP6_SEG_DEBLOCKER);
201 ctx->pbi->fb_storage_ptr[3] = mmap_lkup(ctx, VP6_SEG_PP_IMG_STRG);
/* Codec-interface init entry point: allocates only the ALG_PRIV segment;
 * further segments are deferred until the stream size is known (see
 * vp8_decode's defer_alloc handling).
 * NOTE(review): the `if (!ctx->priv)` guard, the success check on
 * vp8_mmap_alloc, and the final return are missing from this extraction —
 * confirm against the full file. */
206 static vpx_codec_err_t
vp8_init(vpx_codec_ctx_t
*ctx
)
208 vpx_codec_err_t res
= VPX_CODEC_OK
;
210 /* This function only allocates space for the vpx_codec_alg_priv_t
211 * structure. More memory may be required at the time the stream
212 * information becomes known.
216 vpx_codec_mmap_t mmap
;
/* Describe the ALG_PRIV segment from row 0 of the request table. */
218 mmap
.id
= vp8_mem_req_segs
[0].id
;
219 mmap
.sz
= sizeof(vpx_codec_alg_priv_t
);
220 mmap
.align
= vp8_mem_req_segs
[0].align
;
221 mmap
.flags
= vp8_mem_req_segs
[0].flags
;
223 res
= vp8_mmap_alloc(&mmap
);
227 vp8_init_ctx(ctx
, &mmap
);
/* Remaining segments are allocated lazily on the first decode call. */
229 ctx
->priv
->alg_priv
->defer_alloc
= 1;
230 /*post processing level initialized to do nothing */
237 static vpx_codec_err_t
vp8_destroy(vpx_codec_alg_priv_t
*ctx
)
241 vp8dx_remove_decompressor(ctx
->pbi
);
243 for (i
= NELEMENTS(ctx
->mmaps
) - 1; i
>= 0; i
--)
245 if (ctx
->mmaps
[i
].dtor
)
246 ctx
->mmaps
[i
].dtor(&ctx
->mmaps
[i
]);
252 static vpx_codec_err_t
vp8_peek_si(const uint8_t *data
,
253 unsigned int data_sz
,
254 vpx_codec_stream_info_t
*si
)
257 vpx_codec_err_t res
= VPX_CODEC_OK
;
259 /* Parse uncompresssed part of key frame header.
260 * 3 bytes:- including version, frame type and an offset
261 * 3 bytes:- sync code (0x9d, 0x01, 0x2a)
262 * 4 bytes:- including image width and height in the lowest 14 bits
263 * of each 2-byte value.
267 if (data_sz
>= 10 && !(data
[0] & 0x01)) /* I-Frame */
269 const uint8_t *c
= data
+ 3;
273 if (c
[0] != 0x9d || c
[1] != 0x01 || c
[2] != 0x2a)
274 res
= VPX_CODEC_UNSUP_BITSTREAM
;
276 si
->w
= swap2(*(const unsigned short *)(c
+ 3)) & 0x3fff;
277 si
->h
= swap2(*(const unsigned short *)(c
+ 5)) & 0x3fff;
279 //printf("w=%d, h=%d\n", si->w, si->h);
280 if (!(si
->h
| si
->w
))
281 res
= VPX_CODEC_UNSUP_BITSTREAM
;
284 res
= VPX_CODEC_UNSUP_BITSTREAM
;
291 static vpx_codec_err_t
vp8_get_si(vpx_codec_alg_priv_t
*ctx
,
292 vpx_codec_stream_info_t
*si
)
297 if (si
->sz
>= sizeof(vp8_stream_info_t
))
298 sz
= sizeof(vp8_stream_info_t
);
300 sz
= sizeof(vpx_codec_stream_info_t
);
302 memcpy(si
, &ctx
->si
, sz
);
/* Propagate the decoder core's internal error into the codec context and
 * return it as a vpx_codec_err_t.
 * NOTE(review): the tail of the err_detail assignment (presumably
 * `? error->detail : NULL;`) and the final return are missing from this
 * extraction — confirm against the full file. */
309 static vpx_codec_err_t
310 update_error_state(vpx_codec_alg_priv_t
*ctx
,
311 const struct vpx_internal_error_info
*error
)
315 if ((res
= error
->error_code
))
316 ctx
->base
.err_detail
= error
->has_detail
/* Main decode entry point. Visible flow: (1) peek stream parameters,
 * (2) perform deferred segment allocations on the first call,
 * (3) create the decompressor instance on the first frame and default the
 * postproc config if the app enabled postprocessing without configuring it,
 * (4) feed the compressed buffer to the core and wrap the raw frame into
 * ctx->img.
 * NOTE(review): many lines (parameter list tail, local declarations such as
 * oxcf/optr/ppflag/ppnoise, braces, else-branches, and the final return)
 * are missing from this extraction — confirm against the full file. */
324 static vpx_codec_err_t
vp8_decode(vpx_codec_alg_priv_t
*ctx
,
326 unsigned int data_sz
,
330 vpx_codec_err_t res
= VPX_CODEC_OK
;
334 /* Determine the stream parameters */
336 res
= ctx
->base
.iface
->dec
.peek_si(data
, data_sz
, &ctx
->si
);
339 /* Perform deferred allocations, if required */
340 if (!res
&& ctx
->defer_alloc
)
/* Slot 0 (ALG_PRIV) was allocated in vp8_init; start at 1. */
344 for (i
= 1; !res
&& i
< NELEMENTS(ctx
->mmaps
); i
++)
346 vpx_codec_dec_cfg_t cfg
;
350 ctx
->mmaps
[i
].id
= vp8_mem_req_segs
[i
].id
;
351 ctx
->mmaps
[i
].sz
= vp8_mem_req_segs
[i
].sz
;
352 ctx
->mmaps
[i
].align
= vp8_mem_req_segs
[i
].align
;
353 ctx
->mmaps
[i
].flags
= vp8_mem_req_segs
[i
].flags
;
/* Variable-size segments compute their size from the stream config. */
355 if (!ctx
->mmaps
[i
].sz
)
356 ctx
->mmaps
[i
].sz
= vp8_mem_req_segs
[i
].calc_sz(&cfg
,
357 ctx
->base
.init_flags
);
359 res
= vp8_mmap_alloc(&ctx
->mmaps
[i
]);
363 vp8_finalize_mmaps(ctx
);
365 ctx
->defer_alloc
= 0;
368 /* Initialize the decoder instance on the first frame*/
369 if (!res
&& !ctx
->decoder_init
)
371 res
= vp8_validate_mmaps(&ctx
->si
, ctx
->mmaps
, ctx
->base
.init_flags
);
380 oxcf
.Width
= ctx
->si
.w
;
381 oxcf
.Height
= ctx
->si
.h
;
383 oxcf
.postprocess
= 0;
384 oxcf
.max_threads
= ctx
->cfg
.threads
;
386 optr
= vp8dx_create_decompressor(&oxcf
);
388 /* If postprocessing was enabled by the application and a
389 * configuration has not been provided, default it.
391 if (!ctx
->postproc_cfg_set
392 && (ctx
->base
.init_flags
& VPX_CODEC_USE_POSTPROC
))
394 ctx
->postproc_cfg
.post_proc_flag
=
395 VP8_DEBLOCK
| VP8_DEMACROBLOCK
;
396 ctx
->postproc_cfg
.deblocking_level
= 4;
397 ctx
->postproc_cfg
.noise_level
= 0;
401 res
= VPX_CODEC_ERROR
;
406 ctx
->decoder_init
= 1;
409 if (!res
&& ctx
->pbi
)
411 YV12_BUFFER_CONFIG sd
;
412 INT64 time_stamp
= 0, time_end_stamp
= 0;
414 int ppdeblocking
= 0;
417 if (ctx
->base
.init_flags
& VPX_CODEC_USE_POSTPROC
)
419 ppflag
= ctx
->postproc_cfg
.post_proc_flag
;
420 ppdeblocking
= ctx
->postproc_cfg
.deblocking_level
;
421 ppnoise
= ctx
->postproc_cfg
.noise_level
;
/* A non-zero return from the core means the frame failed to decode;
 * surface the core's error through the codec context. */
424 if (vp8dx_receive_compressed_data(ctx
->pbi
, data_sz
, data
, deadline
))
426 VP8D_COMP
*pbi
= (VP8D_COMP
*)ctx
->pbi
;
427 res
= update_error_state(ctx
, &pbi
->common
.error
);
430 if (!res
&& 0 == vp8dx_get_raw_frame(ctx
->pbi
, &sd
, &time_stamp
, &time_end_stamp
, ppdeblocking
, ppnoise
, ppflag
))
432 /* Align width/height */
433 unsigned int a_w
= (sd
.y_width
+ 15) & ~15;
434 unsigned int a_h
= (sd
.y_height
+ 15) & ~15;
/* Wrap the decoder's frame buffer (including its borders) in a
 * vpx_image_t, then clip the visible rect to the display size. */
436 vpx_img_wrap(&ctx
->img
, VPX_IMG_FMT_I420
,
437 a_w
+ 2 * VP8BORDERINPIXELS
,
438 a_h
+ 2 * VP8BORDERINPIXELS
,
441 vpx_img_set_rect(&ctx
->img
,
442 VP8BORDERINPIXELS
, VP8BORDERINPIXELS
,
443 sd
.y_width
, sd
.y_height
);
/* Frame iterator: returns the decoded image wrapped by vp8_decode(), or
 * NULL once it has been consumed.
 * NOTE(review): the body past the flip-flop comment is missing from this
 * extraction — confirm against the full file. */
452 static vpx_image_t
*vp8_get_frame(vpx_codec_alg_priv_t
*ctx
,
453 vpx_codec_iter_t
*iter
)
455 vpx_image_t
*img
= NULL
;
459 /* iter acts as a flip flop, so an image is only returned on the first
/* XMA interface: enumerate segment requests one at a time. *iter holds the
 * position in vp8_mem_req_segs between calls; returns VPX_CODEC_LIST_END
 * when the sentinel row is reached.
 * NOTE(review): the `do {` head, the `if (!*iter)` branch, the seg_iter
 * increment, and the final return are missing from this extraction —
 * confirm against the full file (the trailing `while` implies a do/while
 * that skips zero-sized segments). */
474 vpx_codec_err_t
vp8_xma_get_mmap(const vpx_codec_ctx_t
*ctx
,
475 vpx_codec_mmap_t
*mmap
,
476 vpx_codec_iter_t
*iter
)
479 const mem_req_t
*seg_iter
= *iter
;
481 /* Get address of next segment request */
485 seg_iter
= vp8_mem_req_segs
;
486 else if (seg_iter
->id
!= VP8_SEG_MAX
)
489 *iter
= (vpx_codec_iter_t
)seg_iter
;
491 if (seg_iter
->id
!= VP8_SEG_MAX
)
493 mmap
->id
= seg_iter
->id
;
494 mmap
->sz
= seg_iter
->sz
;
495 mmap
->align
= seg_iter
->align
;
496 mmap
->flags
= seg_iter
->flags
;
/* Variable-size segments derive their size from the decode config. */
499 mmap
->sz
= seg_iter
->calc_sz(ctx
->config
.dec
, ctx
->init_flags
);
504 res
= VPX_CODEC_LIST_END
;
506 while (!mmap
->sz
&& res
!= VPX_CODEC_LIST_END
);
/* XMA interface: accept a caller-allocated segment. The ALG_PRIV segment
 * initializes the context; once every slot has a base pointer, finalize
 * the maps and run the regular init path.
 * NOTE(review): braces, the `done` declaration, the completion check and
 * final return are missing from this extraction — confirm against the
 * full file. */
511 static vpx_codec_err_t
vp8_xma_set_mmap(vpx_codec_ctx_t
*ctx
,
512 const vpx_codec_mmap_t
*mmap
)
514 vpx_codec_err_t res
= VPX_CODEC_MEM_ERROR
;
519 if (mmap
->id
== VP8_SEG_ALG_PRIV
)
523 vp8_init_ctx(ctx
, mmap
);
531 if (!res
&& ctx
->priv
->alg_priv
)
/* NOTE(review): mmaps[] has NELEMENTS(vp8_mem_req_segs) - 1 entries, so
 * iterating i < NELEMENTS(vp8_mem_req_segs) reads one slot past the end —
 * same off-by-one pattern as the segment lookup; bound should be
 * NELEMENTS(...) - 1. */
533 for (i
= 0; i
< NELEMENTS(vp8_mem_req_segs
); i
++)
535 if (ctx
->priv
->alg_priv
->mmaps
[i
].id
== mmap
->id
)
536 if (!ctx
->priv
->alg_priv
->mmaps
[i
].base
)
538 ctx
->priv
->alg_priv
->mmaps
[i
] = *mmap
;
542 done
&= (ctx
->priv
->alg_priv
->mmaps
[i
].base
!= NULL
);
548 vp8_finalize_mmaps(ctx
->priv
->alg_priv
);
549 res
= ctx
->iface
->init(ctx
);
555 static vpx_codec_err_t
image2yuvconfig(const vpx_image_t
*img
,
556 YV12_BUFFER_CONFIG
*yv12
)
558 vpx_codec_err_t res
= VPX_CODEC_OK
;
559 yv12
->y_buffer
= img
->planes
[VPX_PLANE_Y
];
560 yv12
->u_buffer
= img
->planes
[VPX_PLANE_U
];
561 yv12
->v_buffer
= img
->planes
[VPX_PLANE_V
];
563 yv12
->y_width
= img
->d_w
;
564 yv12
->y_height
= img
->d_h
;
565 yv12
->uv_width
= yv12
->y_width
/ 2;
566 yv12
->uv_height
= yv12
->y_height
/ 2;
568 yv12
->y_stride
= img
->stride
[VPX_PLANE_Y
];
569 yv12
->uv_stride
= img
->stride
[VPX_PLANE_U
];
571 yv12
->border
= (img
->stride
[VPX_PLANE_Y
] - img
->d_w
) / 2;
572 yv12
->clrtype
= (img
->fmt
== VPX_IMG_FMT_VPXI420
|| img
->fmt
== VPX_IMG_FMT_VPXYV12
);
/* VP8_SET_REFERENCE control handler: copy the caller's image into one of
 * the decoder's reference frame buffers.
 * NOTE(review): the remaining parameters (ctrl id, va_list args), the
 * `if (data)` guard and the success return are missing from this
 * extraction — confirm against the full file. */
578 static vpx_codec_err_t
vp8_set_reference(vpx_codec_alg_priv_t
*ctx
,
583 vpx_ref_frame_t
*data
= va_arg(args
, vpx_ref_frame_t
*);
587 vpx_ref_frame_t
*frame
= (vpx_ref_frame_t
*)data
;
588 YV12_BUFFER_CONFIG sd
;
/* Alias the caller's image as a YV12 buffer and hand it to the core. */
590 image2yuvconfig(&frame
->img
, &sd
);
592 vp8dx_set_reference(ctx
->pbi
, frame
->frame_type
, &sd
);
/* Reached when no reference frame struct was supplied. */
596 return VPX_CODEC_INVALID_PARAM
;
/* VP8_COPY_REFERENCE control handler: copy one of the decoder's reference
 * frame buffers into the caller's image.
 * NOTE(review): the remaining parameters (ctrl id, va_list args), the
 * `if (data)` guard and the success return are missing from this
 * extraction — confirm against the full file. */
600 static vpx_codec_err_t
vp8_get_reference(vpx_codec_alg_priv_t
*ctx
,
605 vpx_ref_frame_t
*data
= va_arg(args
, vpx_ref_frame_t
*);
609 vpx_ref_frame_t
*frame
= (vpx_ref_frame_t
*)data
;
610 YV12_BUFFER_CONFIG sd
;
/* Alias the caller's image as a YV12 buffer for the core to fill. */
612 image2yuvconfig(&frame
->img
, &sd
);
614 vp8dx_get_reference(ctx
->pbi
, frame
->frame_type
, &sd
);
/* Reached when no reference frame struct was supplied. */
618 return VPX_CODEC_INVALID_PARAM
;
/* VP8_SET_POSTPROC control handler: record the application's
 * postprocessing configuration (used by vp8_decode).
 * NOTE(review): the remaining parameters, the `if (data)` guard, the
 * CONFIG_POSTPROC conditional implied by the INCAPABLE return, and the
 * success return are missing from this extraction — confirm against the
 * full file. */
622 static vpx_codec_err_t
vp8_set_postproc(vpx_codec_alg_priv_t
*ctx
,
626 vp8_postproc_cfg_t
*data
= va_arg(args
, vp8_postproc_cfg_t
*);
/* Mark the config as explicitly set so vp8_decode won't default it. */
631 ctx
->postproc_cfg_set
= 1;
632 ctx
->postproc_cfg
= *((vp8_postproc_cfg_t
*)data
);
636 return VPX_CODEC_INVALID_PARAM
;
/* Returned when postprocessing support is compiled out. */
639 return VPX_CODEC_INCAPABLE
;
/* Control-id → handler dispatch table for vpx_codec_control().
 * NOTE(review): the terminating {-1, NULL} sentinel row is missing from
 * this extraction — confirm against the full file. */
644 vpx_codec_ctrl_fn_map_t vp8_ctf_maps
[] =
646 {VP8_SET_REFERENCE
, vp8_set_reference
},
647 {VP8_COPY_REFERENCE
, vp8_get_reference
},
648 {VP8_SET_POSTPROC
, vp8_set_postproc
},
/* Public decoder interface descriptor consumed by the vpx_codec_* API. */
653 #ifndef VERSION_STRING
654 #define VERSION_STRING
656 CODEC_INTERFACE(vpx_codec_vp8_dx
) =
658 "WebM Project VP8 Decoder" VERSION_STRING
,
659 VPX_CODEC_INTERNAL_ABI_VERSION
,
660 VPX_CODEC_CAP_DECODER
| VP8_CAP_POSTPROC
,
661 /* vpx_codec_caps_t caps; */
662 vp8_init
, /* vpx_codec_init_fn_t init; */
663 vp8_destroy
, /* vpx_codec_destroy_fn_t destroy; */
664 vp8_ctf_maps
, /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
665 vp8_xma_get_mmap
, /* vpx_codec_get_mmap_fn_t get_mmap; */
666 vp8_xma_set_mmap
, /* vpx_codec_set_mmap_fn_t set_mmap; */
668 vp8_peek_si
, /* vpx_codec_peek_si_fn_t peek_si; */
669 vp8_get_si
, /* vpx_codec_get_si_fn_t get_si; */
670 vp8_decode
, /* vpx_codec_decode_fn_t decode; */
671 vp8_get_frame
, /* vpx_codec_frame_get_fn_t frame_get; */
673 {NOT_IMPLEMENTED
} /* encoder functions */
/* Deprecated-API alias of the interface above, kept for binary/source
 * compatibility with pre-vpx_codec_vp8_dx() callers.
 * NOTE(review): the definition continues past the end of this chunk
 * (closing brace/semicolon not visible). */
677 * BEGIN BACKWARDS COMPATIBILITY SHIM.
679 vpx_codec_iface_t vpx_codec_vp8_algo
=
681 "WebM Project VP8 Decoder (Deprecated API)" VERSION_STRING
,
682 VPX_CODEC_INTERNAL_ABI_VERSION
,
683 VPX_CODEC_CAP_DECODER
| VP8_CAP_POSTPROC
,
684 /* vpx_codec_caps_t caps; */
685 vp8_init
, /* vpx_codec_init_fn_t init; */
686 vp8_destroy
, /* vpx_codec_destroy_fn_t destroy; */
687 vp8_ctf_maps
, /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
688 vp8_xma_get_mmap
, /* vpx_codec_get_mmap_fn_t get_mmap; */
689 vp8_xma_set_mmap
, /* vpx_codec_set_mmap_fn_t set_mmap; */
691 vp8_peek_si
, /* vpx_codec_peek_si_fn_t peek_si; */
692 vp8_get_si
, /* vpx_codec_get_si_fn_t get_si; */
693 vp8_decode
, /* vpx_codec_decode_fn_t decode; */
694 vp8_get_frame
, /* vpx_codec_frame_get_fn_t frame_get; */
696 {NOT_IMPLEMENTED
} /* encoder functions */