/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
14 #include "vpx/vpx_decoder.h"
15 #include "vpx/vp8dx.h"
16 #include "vpx/internal/vpx_codec_internal.h"
17 #include "vpx_version.h"
18 #include "common/onyxd.h"
19 #include "decoder/onyxd_int.h"
21 #define VP8_CAP_POSTPROC (CONFIG_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)
25 ((d&0x000000ff)<<24) | \
26 ((d&0x0000ff00)<<8) | \
27 ((d&0x00ff0000)>>8) | \
30 ((d&0x000000ff)<<8) | \
36 typedef vpx_codec_stream_info_t vp8_stream_info_t
;
38 /* Structures for handling memory allocations */
41 VP8_SEG_ALG_PRIV
= 256,
44 #define NELEMENTS(x) ((int)(sizeof(x)/sizeof(x[0])))
46 static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t
*si
, vpx_codec_flags_t
);
54 unsigned long(*calc_sz
)(const vpx_codec_dec_cfg_t
*, vpx_codec_flags_t
);
57 static const mem_req_t vp8_mem_req_segs
[] =
59 {VP8_SEG_ALG_PRIV
, 0, 8, VPX_CODEC_MEM_ZERO
, vp8_priv_sz
},
60 {VP8_SEG_MAX
, 0, 0, 0, NULL
}
63 struct vpx_codec_alg_priv
65 vpx_codec_priv_t base
;
66 vpx_codec_mmap_t mmaps
[NELEMENTS(vp8_mem_req_segs
)-1];
67 vpx_codec_dec_cfg_t cfg
;
73 vp8_postproc_cfg_t postproc_cfg
;
74 #if CONFIG_POSTPROC_VISUALIZER
75 unsigned int dbg_postproc_flag
;
76 int dbg_color_ref_frame_flag
;
77 int dbg_color_mb_modes_flag
;
78 int dbg_color_b_modes_flag
;
79 int dbg_display_mv_flag
;
86 static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t
*si
, vpx_codec_flags_t flags
)
88 /* Although this declaration is constant, we can't use it in the requested
89 * segments list because we want to define the requested segments list
90 * before defining the private type (so that the number of memory maps is
94 return sizeof(vpx_codec_alg_priv_t
);
98 static void vp8_mmap_dtor(vpx_codec_mmap_t
*mmap
)
103 static vpx_codec_err_t
vp8_mmap_alloc(vpx_codec_mmap_t
*mmap
)
108 align
= mmap
->align
? mmap
->align
- 1 : 0;
110 if (mmap
->flags
& VPX_CODEC_MEM_ZERO
)
111 mmap
->priv
= calloc(1, mmap
->sz
+ align
);
113 mmap
->priv
= malloc(mmap
->sz
+ align
);
115 res
= (mmap
->priv
) ? VPX_CODEC_OK
: VPX_CODEC_MEM_ERROR
;
116 mmap
->base
= (void *)((((uintptr_t)mmap
->priv
) + align
) & ~(uintptr_t)align
);
117 mmap
->dtor
= vp8_mmap_dtor
;
121 static vpx_codec_err_t
vp8_validate_mmaps(const vp8_stream_info_t
*si
,
122 const vpx_codec_mmap_t
*mmaps
,
123 vpx_codec_flags_t init_flags
)
126 vpx_codec_err_t res
= VPX_CODEC_OK
;
128 for (i
= 0; i
< NELEMENTS(vp8_mem_req_segs
) - 1; i
++)
130 /* Ensure the segment has been allocated */
133 res
= VPX_CODEC_MEM_ERROR
;
137 /* Verify variable size segment is big enough for the current si. */
138 if (vp8_mem_req_segs
[i
].calc_sz
)
140 vpx_codec_dec_cfg_t cfg
;
145 if (mmaps
[i
].sz
< vp8_mem_req_segs
[i
].calc_sz(&cfg
, init_flags
))
147 res
= VPX_CODEC_MEM_ERROR
;
156 static void vp8_init_ctx(vpx_codec_ctx_t
*ctx
, const vpx_codec_mmap_t
*mmap
)
160 ctx
->priv
= mmap
->base
;
161 ctx
->priv
->sz
= sizeof(*ctx
->priv
);
162 ctx
->priv
->iface
= ctx
->iface
;
163 ctx
->priv
->alg_priv
= mmap
->base
;
165 for (i
= 0; i
< NELEMENTS(ctx
->priv
->alg_priv
->mmaps
); i
++)
166 ctx
->priv
->alg_priv
->mmaps
[i
].id
= vp8_mem_req_segs
[i
].id
;
168 ctx
->priv
->alg_priv
->mmaps
[0] = *mmap
;
169 ctx
->priv
->alg_priv
->si
.sz
= sizeof(ctx
->priv
->alg_priv
->si
);
170 ctx
->priv
->init_flags
= ctx
->init_flags
;
174 /* Update the reference to the config structure to an internal copy. */
175 ctx
->priv
->alg_priv
->cfg
= *ctx
->config
.dec
;
176 ctx
->config
.dec
= &ctx
->priv
->alg_priv
->cfg
;
180 static void *mmap_lkup(vpx_codec_alg_priv_t
*ctx
, unsigned int id
)
184 for (i
= 0; i
< NELEMENTS(vp8_mem_req_segs
); i
++)
185 if (ctx
->mmaps
[i
].id
== id
)
186 return ctx
->mmaps
[i
].base
;
190 static void vp8_finalize_mmaps(vpx_codec_alg_priv_t
*ctx
)
193 ctx->pbi = mmap_lkup(ctx, VP6_SEG_PB_INSTANCE);
194 ctx->pbi->mbi.block_dx_info[0].idct_output_ptr = mmap_lkup(ctx, VP6_SEG_IDCT_BUFFER);
195 ctx->pbi->loop_filtered_block = mmap_lkup(ctx, VP6_SEG_LF_BLOCK);
196 ctx->pbi->huff = mmap_lkup(ctx, VP6_SEG_HUFF);
197 ctx->pbi->mbi.coeffs_base_ptr = mmap_lkup(ctx, VP6_SEG_COEFFS);
198 ctx->pbi->fc.above_y = mmap_lkup(ctx, VP6_SEG_ABOVEY);
199 ctx->pbi->fc.above_u = mmap_lkup(ctx, VP6_SEG_ABOVEU);
200 ctx->pbi->fc.above_v = mmap_lkup(ctx, VP6_SEG_ABOVEV);
201 ctx->pbi->prediction_mode = mmap_lkup(ctx, VP6_SEG_PRED_MODES);
202 ctx->pbi->mbmotion_vector = mmap_lkup(ctx, VP6_SEG_MV_FIELD);
203 ctx->pbi->fb_storage_ptr[0] = mmap_lkup(ctx, VP6_SEG_IMG0_STRG);
204 ctx->pbi->fb_storage_ptr[1] = mmap_lkup(ctx, VP6_SEG_IMG1_STRG);
205 ctx->pbi->fb_storage_ptr[2] = mmap_lkup(ctx, VP6_SEG_IMG2_STRG);
207 ctx->pbi->postproc.deblock.fragment_variances = mmap_lkup(ctx, VP6_SEG_DEBLOCKER);
208 ctx->pbi->fb_storage_ptr[3] = mmap_lkup(ctx, VP6_SEG_PP_IMG_STRG);
213 static vpx_codec_err_t
vp8_init(vpx_codec_ctx_t
*ctx
)
215 vpx_codec_err_t res
= VPX_CODEC_OK
;
217 /* This function only allocates space for the vpx_codec_alg_priv_t
218 * structure. More memory may be required at the time the stream
219 * information becomes known.
223 vpx_codec_mmap_t mmap
;
225 mmap
.id
= vp8_mem_req_segs
[0].id
;
226 mmap
.sz
= sizeof(vpx_codec_alg_priv_t
);
227 mmap
.align
= vp8_mem_req_segs
[0].align
;
228 mmap
.flags
= vp8_mem_req_segs
[0].flags
;
230 res
= vp8_mmap_alloc(&mmap
);
234 vp8_init_ctx(ctx
, &mmap
);
236 ctx
->priv
->alg_priv
->defer_alloc
= 1;
237 /*post processing level initialized to do nothing */
244 static vpx_codec_err_t
vp8_destroy(vpx_codec_alg_priv_t
*ctx
)
248 vp8dx_remove_decompressor(ctx
->pbi
);
250 for (i
= NELEMENTS(ctx
->mmaps
) - 1; i
>= 0; i
--)
252 if (ctx
->mmaps
[i
].dtor
)
253 ctx
->mmaps
[i
].dtor(&ctx
->mmaps
[i
]);
259 static vpx_codec_err_t
vp8_peek_si(const uint8_t *data
,
260 unsigned int data_sz
,
261 vpx_codec_stream_info_t
*si
)
263 vpx_codec_err_t res
= VPX_CODEC_OK
;
265 if(data
+ data_sz
<= data
)
266 res
= VPX_CODEC_INVALID_PARAM
;
269 /* Parse uncompresssed part of key frame header.
270 * 3 bytes:- including version, frame type and an offset
271 * 3 bytes:- sync code (0x9d, 0x01, 0x2a)
272 * 4 bytes:- including image width and height in the lowest 14 bits
273 * of each 2-byte value.
277 if (data_sz
>= 10 && !(data
[0] & 0x01)) /* I-Frame */
279 const uint8_t *c
= data
+ 3;
282 /* vet via sync code */
283 if (c
[0] != 0x9d || c
[1] != 0x01 || c
[2] != 0x2a)
284 res
= VPX_CODEC_UNSUP_BITSTREAM
;
286 si
->w
= swap2(*(const unsigned short *)(c
+ 3)) & 0x3fff;
287 si
->h
= swap2(*(const unsigned short *)(c
+ 5)) & 0x3fff;
289 /*printf("w=%d, h=%d\n", si->w, si->h);*/
290 if (!(si
->h
| si
->w
))
291 res
= VPX_CODEC_UNSUP_BITSTREAM
;
294 res
= VPX_CODEC_UNSUP_BITSTREAM
;
301 static vpx_codec_err_t
vp8_get_si(vpx_codec_alg_priv_t
*ctx
,
302 vpx_codec_stream_info_t
*si
)
307 if (si
->sz
>= sizeof(vp8_stream_info_t
))
308 sz
= sizeof(vp8_stream_info_t
);
310 sz
= sizeof(vpx_codec_stream_info_t
);
312 memcpy(si
, &ctx
->si
, sz
);
319 static vpx_codec_err_t
320 update_error_state(vpx_codec_alg_priv_t
*ctx
,
321 const struct vpx_internal_error_info
*error
)
325 if ((res
= error
->error_code
))
326 ctx
->base
.err_detail
= error
->has_detail
334 static vpx_codec_err_t
vp8_decode(vpx_codec_alg_priv_t
*ctx
,
336 unsigned int data_sz
,
340 vpx_codec_err_t res
= VPX_CODEC_OK
;
344 /* Determine the stream parameters. Note that we rely on peek_si to
345 * validate that we have a buffer that does not wrap around the top
349 res
= ctx
->base
.iface
->dec
.peek_si(data
, data_sz
, &ctx
->si
);
352 /* Perform deferred allocations, if required */
353 if (!res
&& ctx
->defer_alloc
)
357 for (i
= 1; !res
&& i
< NELEMENTS(ctx
->mmaps
); i
++)
359 vpx_codec_dec_cfg_t cfg
;
363 ctx
->mmaps
[i
].id
= vp8_mem_req_segs
[i
].id
;
364 ctx
->mmaps
[i
].sz
= vp8_mem_req_segs
[i
].sz
;
365 ctx
->mmaps
[i
].align
= vp8_mem_req_segs
[i
].align
;
366 ctx
->mmaps
[i
].flags
= vp8_mem_req_segs
[i
].flags
;
368 if (!ctx
->mmaps
[i
].sz
)
369 ctx
->mmaps
[i
].sz
= vp8_mem_req_segs
[i
].calc_sz(&cfg
,
370 ctx
->base
.init_flags
);
372 res
= vp8_mmap_alloc(&ctx
->mmaps
[i
]);
376 vp8_finalize_mmaps(ctx
);
378 ctx
->defer_alloc
= 0;
381 /* Initialize the decoder instance on the first frame*/
382 if (!res
&& !ctx
->decoder_init
)
384 res
= vp8_validate_mmaps(&ctx
->si
, ctx
->mmaps
, ctx
->base
.init_flags
);
393 oxcf
.Width
= ctx
->si
.w
;
394 oxcf
.Height
= ctx
->si
.h
;
396 oxcf
.postprocess
= 0;
397 oxcf
.max_threads
= ctx
->cfg
.threads
;
399 optr
= vp8dx_create_decompressor(&oxcf
);
401 /* If postprocessing was enabled by the application and a
402 * configuration has not been provided, default it.
404 if (!ctx
->postproc_cfg_set
405 && (ctx
->base
.init_flags
& VPX_CODEC_USE_POSTPROC
))
407 ctx
->postproc_cfg
.post_proc_flag
=
408 VP8_DEBLOCK
| VP8_DEMACROBLOCK
;
409 ctx
->postproc_cfg
.deblocking_level
= 4;
410 ctx
->postproc_cfg
.noise_level
= 0;
414 res
= VPX_CODEC_ERROR
;
419 ctx
->decoder_init
= 1;
422 if (!res
&& ctx
->pbi
)
424 YV12_BUFFER_CONFIG sd
;
425 INT64 time_stamp
= 0, time_end_stamp
= 0;
426 vp8_ppflags_t flags
= {0};
428 if (ctx
->base
.init_flags
& VPX_CODEC_USE_POSTPROC
)
430 flags
.post_proc_flag
= ctx
->postproc_cfg
.post_proc_flag
431 #if CONFIG_POSTPROC_VISUALIZER
433 | ((ctx
->dbg_color_ref_frame_flag
!= 0) ? VP8D_DEBUG_CLR_FRM_REF_BLKS
: 0)
434 | ((ctx
->dbg_color_mb_modes_flag
!= 0) ? VP8D_DEBUG_CLR_BLK_MODES
: 0)
435 | ((ctx
->dbg_color_b_modes_flag
!= 0) ? VP8D_DEBUG_CLR_BLK_MODES
: 0)
436 | ((ctx
->dbg_display_mv_flag
!= 0) ? VP8D_DEBUG_DRAW_MV
: 0)
439 flags
.deblocking_level
= ctx
->postproc_cfg
.deblocking_level
;
440 flags
.noise_level
= ctx
->postproc_cfg
.noise_level
;
441 #if CONFIG_POSTPROC_VISUALIZER
442 flags
.display_ref_frame_flag
= ctx
->dbg_color_ref_frame_flag
;
443 flags
.display_mb_modes_flag
= ctx
->dbg_color_mb_modes_flag
;
444 flags
.display_b_modes_flag
= ctx
->dbg_color_b_modes_flag
;
445 flags
.display_mv_flag
= ctx
->dbg_display_mv_flag
;
449 if (vp8dx_receive_compressed_data(ctx
->pbi
, data_sz
, data
, deadline
))
451 VP8D_COMP
*pbi
= (VP8D_COMP
*)ctx
->pbi
;
452 res
= update_error_state(ctx
, &pbi
->common
.error
);
455 if (!res
&& 0 == vp8dx_get_raw_frame(ctx
->pbi
, &sd
, &time_stamp
, &time_end_stamp
, &flags
))
457 /* Align width/height */
458 unsigned int a_w
= (sd
.y_width
+ 15) & ~15;
459 unsigned int a_h
= (sd
.y_height
+ 15) & ~15;
461 vpx_img_wrap(&ctx
->img
, VPX_IMG_FMT_I420
,
462 a_w
+ 2 * VP8BORDERINPIXELS
,
463 a_h
+ 2 * VP8BORDERINPIXELS
,
466 vpx_img_set_rect(&ctx
->img
,
467 VP8BORDERINPIXELS
, VP8BORDERINPIXELS
,
468 sd
.y_width
, sd
.y_height
);
469 ctx
->img
.user_priv
= user_priv
;
478 static vpx_image_t
*vp8_get_frame(vpx_codec_alg_priv_t
*ctx
,
479 vpx_codec_iter_t
*iter
)
481 vpx_image_t
*img
= NULL
;
485 /* iter acts as a flip flop, so an image is only returned on the first
500 vpx_codec_err_t
vp8_xma_get_mmap(const vpx_codec_ctx_t
*ctx
,
501 vpx_codec_mmap_t
*mmap
,
502 vpx_codec_iter_t
*iter
)
505 const mem_req_t
*seg_iter
= *iter
;
507 /* Get address of next segment request */
511 seg_iter
= vp8_mem_req_segs
;
512 else if (seg_iter
->id
!= VP8_SEG_MAX
)
515 *iter
= (vpx_codec_iter_t
)seg_iter
;
517 if (seg_iter
->id
!= VP8_SEG_MAX
)
519 mmap
->id
= seg_iter
->id
;
520 mmap
->sz
= seg_iter
->sz
;
521 mmap
->align
= seg_iter
->align
;
522 mmap
->flags
= seg_iter
->flags
;
525 mmap
->sz
= seg_iter
->calc_sz(ctx
->config
.dec
, ctx
->init_flags
);
530 res
= VPX_CODEC_LIST_END
;
532 while (!mmap
->sz
&& res
!= VPX_CODEC_LIST_END
);
537 static vpx_codec_err_t
vp8_xma_set_mmap(vpx_codec_ctx_t
*ctx
,
538 const vpx_codec_mmap_t
*mmap
)
540 vpx_codec_err_t res
= VPX_CODEC_MEM_ERROR
;
545 if (mmap
->id
== VP8_SEG_ALG_PRIV
)
549 vp8_init_ctx(ctx
, mmap
);
557 if (!res
&& ctx
->priv
->alg_priv
)
559 for (i
= 0; i
< NELEMENTS(vp8_mem_req_segs
); i
++)
561 if (ctx
->priv
->alg_priv
->mmaps
[i
].id
== mmap
->id
)
562 if (!ctx
->priv
->alg_priv
->mmaps
[i
].base
)
564 ctx
->priv
->alg_priv
->mmaps
[i
] = *mmap
;
568 done
&= (ctx
->priv
->alg_priv
->mmaps
[i
].base
!= NULL
);
574 vp8_finalize_mmaps(ctx
->priv
->alg_priv
);
575 res
= ctx
->iface
->init(ctx
);
581 static vpx_codec_err_t
image2yuvconfig(const vpx_image_t
*img
,
582 YV12_BUFFER_CONFIG
*yv12
)
584 vpx_codec_err_t res
= VPX_CODEC_OK
;
585 yv12
->y_buffer
= img
->planes
[VPX_PLANE_Y
];
586 yv12
->u_buffer
= img
->planes
[VPX_PLANE_U
];
587 yv12
->v_buffer
= img
->planes
[VPX_PLANE_V
];
589 yv12
->y_width
= img
->d_w
;
590 yv12
->y_height
= img
->d_h
;
591 yv12
->uv_width
= yv12
->y_width
/ 2;
592 yv12
->uv_height
= yv12
->y_height
/ 2;
594 yv12
->y_stride
= img
->stride
[VPX_PLANE_Y
];
595 yv12
->uv_stride
= img
->stride
[VPX_PLANE_U
];
597 yv12
->border
= (img
->stride
[VPX_PLANE_Y
] - img
->d_w
) / 2;
598 yv12
->clrtype
= (img
->fmt
== VPX_IMG_FMT_VPXI420
|| img
->fmt
== VPX_IMG_FMT_VPXYV12
);
604 static vpx_codec_err_t
vp8_set_reference(vpx_codec_alg_priv_t
*ctx
,
609 vpx_ref_frame_t
*data
= va_arg(args
, vpx_ref_frame_t
*);
613 vpx_ref_frame_t
*frame
= (vpx_ref_frame_t
*)data
;
614 YV12_BUFFER_CONFIG sd
;
616 image2yuvconfig(&frame
->img
, &sd
);
618 vp8dx_set_reference(ctx
->pbi
, frame
->frame_type
, &sd
);
622 return VPX_CODEC_INVALID_PARAM
;
626 static vpx_codec_err_t
vp8_get_reference(vpx_codec_alg_priv_t
*ctx
,
631 vpx_ref_frame_t
*data
= va_arg(args
, vpx_ref_frame_t
*);
635 vpx_ref_frame_t
*frame
= (vpx_ref_frame_t
*)data
;
636 YV12_BUFFER_CONFIG sd
;
638 image2yuvconfig(&frame
->img
, &sd
);
640 vp8dx_get_reference(ctx
->pbi
, frame
->frame_type
, &sd
);
644 return VPX_CODEC_INVALID_PARAM
;
648 static vpx_codec_err_t
vp8_set_postproc(vpx_codec_alg_priv_t
*ctx
,
652 vp8_postproc_cfg_t
*data
= va_arg(args
, vp8_postproc_cfg_t
*);
657 ctx
->postproc_cfg_set
= 1;
658 ctx
->postproc_cfg
= *((vp8_postproc_cfg_t
*)data
);
662 return VPX_CODEC_INVALID_PARAM
;
665 return VPX_CODEC_INCAPABLE
;
669 static vpx_codec_err_t
vp8_set_dbg_options(vpx_codec_alg_priv_t
*ctx
,
673 #if CONFIG_POSTPROC_VISUALIZER && CONFIG_POSTPROC
674 int data
= va_arg(args
, int);
676 #define MAP(id, var) case id: var = data; break;
680 MAP (VP8_SET_DBG_COLOR_REF_FRAME
, ctx
->dbg_color_ref_frame_flag
);
681 MAP (VP8_SET_DBG_COLOR_MB_MODES
, ctx
->dbg_color_mb_modes_flag
);
682 MAP (VP8_SET_DBG_COLOR_B_MODES
, ctx
->dbg_color_b_modes_flag
);
683 MAP (VP8_SET_DBG_DISPLAY_MV
, ctx
->dbg_display_mv_flag
);
688 return VPX_CODEC_INCAPABLE
;
692 static vpx_codec_err_t
vp8_get_last_ref_updates(vpx_codec_alg_priv_t
*ctx
,
696 int *update_info
= va_arg(args
, int *);
697 VP8D_COMP
*pbi
= (VP8D_COMP
*)ctx
->pbi
;
701 *update_info
= pbi
->common
.refresh_alt_ref_frame
* (int) VP8_ALTR_FRAME
702 + pbi
->common
.refresh_golden_frame
* (int) VP8_GOLD_FRAME
703 + pbi
->common
.refresh_last_frame
* (int) VP8_LAST_FRAME
;
708 return VPX_CODEC_INVALID_PARAM
;
712 static vpx_codec_err_t
vp8_get_frame_corrupted(vpx_codec_alg_priv_t
*ctx
,
717 int *corrupted
= va_arg(args
, int *);
721 VP8D_COMP
*pbi
= (VP8D_COMP
*)ctx
->pbi
;
722 *corrupted
= pbi
->common
.frame_to_show
->corrupted
;
727 return VPX_CODEC_INVALID_PARAM
;
731 vpx_codec_ctrl_fn_map_t vp8_ctf_maps
[] =
733 {VP8_SET_REFERENCE
, vp8_set_reference
},
734 {VP8_COPY_REFERENCE
, vp8_get_reference
},
735 {VP8_SET_POSTPROC
, vp8_set_postproc
},
736 {VP8_SET_DBG_COLOR_REF_FRAME
, vp8_set_dbg_options
},
737 {VP8_SET_DBG_COLOR_MB_MODES
, vp8_set_dbg_options
},
738 {VP8_SET_DBG_COLOR_B_MODES
, vp8_set_dbg_options
},
739 {VP8_SET_DBG_DISPLAY_MV
, vp8_set_dbg_options
},
740 {VP8D_GET_LAST_REF_UPDATES
, vp8_get_last_ref_updates
},
741 {VP8D_GET_FRAME_CORRUPTED
, vp8_get_frame_corrupted
},
746 #ifndef VERSION_STRING
747 #define VERSION_STRING
749 CODEC_INTERFACE(vpx_codec_vp8_dx
) =
751 "WebM Project VP8 Decoder" VERSION_STRING
,
752 VPX_CODEC_INTERNAL_ABI_VERSION
,
753 VPX_CODEC_CAP_DECODER
| VP8_CAP_POSTPROC
,
754 /* vpx_codec_caps_t caps; */
755 vp8_init
, /* vpx_codec_init_fn_t init; */
756 vp8_destroy
, /* vpx_codec_destroy_fn_t destroy; */
757 vp8_ctf_maps
, /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
758 vp8_xma_get_mmap
, /* vpx_codec_get_mmap_fn_t get_mmap; */
759 vp8_xma_set_mmap
, /* vpx_codec_set_mmap_fn_t set_mmap; */
761 vp8_peek_si
, /* vpx_codec_peek_si_fn_t peek_si; */
762 vp8_get_si
, /* vpx_codec_get_si_fn_t get_si; */
763 vp8_decode
, /* vpx_codec_decode_fn_t decode; */
764 vp8_get_frame
, /* vpx_codec_frame_get_fn_t frame_get; */
766 { /* encoder functions */
777 * BEGIN BACKWARDS COMPATIBILITY SHIM.
779 vpx_codec_iface_t vpx_codec_vp8_algo
=
781 "WebM Project VP8 Decoder (Deprecated API)" VERSION_STRING
,
782 VPX_CODEC_INTERNAL_ABI_VERSION
,
783 VPX_CODEC_CAP_DECODER
| VP8_CAP_POSTPROC
,
784 /* vpx_codec_caps_t caps; */
785 vp8_init
, /* vpx_codec_init_fn_t init; */
786 vp8_destroy
, /* vpx_codec_destroy_fn_t destroy; */
787 vp8_ctf_maps
, /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
788 vp8_xma_get_mmap
, /* vpx_codec_get_mmap_fn_t get_mmap; */
789 vp8_xma_set_mmap
, /* vpx_codec_set_mmap_fn_t set_mmap; */
791 vp8_peek_si
, /* vpx_codec_peek_si_fn_t peek_si; */
792 vp8_get_si
, /* vpx_codec_get_si_fn_t get_si; */
793 vp8_decode
, /* vpx_codec_decode_fn_t decode; */
794 vp8_get_frame
, /* vpx_codec_frame_get_fn_t frame_get; */
796 { /* encoder functions */