/*
 * Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license and patent
 * grant that can be found in the LICENSE file in the root of the source
 * tree. All contributing project authors may be found in the AUTHORS
 * file in the root of the source tree.
 */


#include <stdlib.h>
#include <string.h>
#include "vpx_codec/vpx_decoder.h"
#include "vp8dx.h"
#include "vpx_codec/internal/vpx_codec_internal.h"
#include "vpx_version.h"
#include "onyxd.h"
#include "onyxd_int.h"

#define VP8_CAP_POSTPROC (CONFIG_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)

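/* The 16/32 bit values parsed out of the frame header below are stored
 * little endian in the bitstream; on big endian targets they must be
 * byte-swapped, on little endian targets these macros are no-ops. */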
#if CONFIG_BIG_ENDIAN
# define swap4(d)\
    ((d&0x000000ff)<<24) | \
    ((d&0x0000ff00)<<8)  | \
    ((d&0x00ff0000)>>8)  | \
    ((d&0xff000000)>>24)
# define swap2(d)\
    ((d&0x000000ff)<<8)  | \
    ((d&0x0000ff00)>>8)
#else
# define swap4(d) d
# define swap2(d) d
#endif
typedef vpx_codec_stream_info_t vp8_stream_info_t;

/* Structures for handling memory allocations */
typedef enum
{
    VP8_SEG_ALG_PRIV = 256,
    VP8_SEG_MAX
} mem_seg_id_t;
#define NELEMENTS(x) (sizeof(x)/sizeof(x[0]))

static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t *si, vpx_codec_flags_t);

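/* Description of a single memory segment the decoder asks to have provided:
 * segment id, fixed size in bytes (0 if it must be computed), required
 * alignment, allocation flags and an optional callback that computes the
 * size from the stream configuration. */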
typedef struct
{
    unsigned int   id;
    unsigned long  sz;
    unsigned int   align;
    unsigned int   flags;
    unsigned long(*calc_sz)(const vpx_codec_dec_cfg_t *, vpx_codec_flags_t);
} mem_req_t;

static const mem_req_t vp8_mem_req_segs[] =
{
    {VP8_SEG_ALG_PRIV, 0, 8, VPX_CODEC_MEM_ZERO, vp8_priv_sz},
    {VP8_SEG_MAX, 0, 0, 0, NULL}
};

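/* Per-instance decoder state; this structure lives in the VP8_SEG_ALG_PRIV
 * memory segment and starts with the common codec private header. */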
struct vpx_codec_alg_priv
{
    vpx_codec_priv_t        base;
    vpx_codec_mmap_t        mmaps[NELEMENTS(vp8_mem_req_segs)-1];
    vpx_codec_dec_cfg_t     cfg;
    vp8_stream_info_t       si;
    int                     defer_alloc;
    int                     decoder_init;
    VP8D_PTR                pbi;
    int                     postproc_cfg_set;
    vp8_postproc_cfg_t      postproc_cfg;
    vpx_image_t             img;
    int                     img_setup;
    int                     img_avail;
};

static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t *si, vpx_codec_flags_t flags)
{
    /* Although this declaration is constant, we can't use it in the requested
     * segments list because we want to define the requested segments list
     * before defining the private type (so that the number of memory maps is
     * known)
     */
    (void)si;
    return sizeof(vpx_codec_alg_priv_t);
}


static void vp8_mmap_dtor(vpx_codec_mmap_t *mmap)
{
    free(mmap->priv);
}

static vpx_codec_err_t vp8_mmap_alloc(vpx_codec_mmap_t *mmap)
{
    vpx_codec_err_t res;
    unsigned int    align;

    align = mmap->align ? mmap->align - 1 : 0;

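    /* Over-allocate by (align - 1) bytes so the base pointer handed back to
     * the codec can be rounded up to the requested alignment below; the raw
     * allocation is kept in mmap->priv so the destructor can free() it. */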
    if (mmap->flags & VPX_CODEC_MEM_ZERO)
        mmap->priv = calloc(1, mmap->sz + align);
    else
        mmap->priv = malloc(mmap->sz + align);

    res = (mmap->priv) ? VPX_CODEC_OK : VPX_CODEC_MEM_ERROR;
    mmap->base = (void *)((((uintptr_t)mmap->priv) + align) & ~(uintptr_t)align);
    mmap->dtor = vp8_mmap_dtor;
    return res;
}

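/* Verify that every required memory segment has been supplied and that any
 * variable-sized segment is large enough for the stream dimensions in si. */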
static vpx_codec_err_t vp8_validate_mmaps(const vp8_stream_info_t *si,
                                          const vpx_codec_mmap_t  *mmaps,
                                          vpx_codec_flags_t        init_flags)
{
    int i;
    vpx_codec_err_t res = VPX_CODEC_OK;

    for (i = 0; i < NELEMENTS(vp8_mem_req_segs) - 1; i++)
    {
        /* Ensure the segment has been allocated */
        if (!mmaps[i].base)
        {
            res = VPX_CODEC_MEM_ERROR;
            break;
        }

        /* Verify variable size segment is big enough for the current si. */
        if (vp8_mem_req_segs[i].calc_sz)
        {
            vpx_codec_dec_cfg_t cfg;

            cfg.w = si->w;
            cfg.h = si->h;

            if (mmaps[i].sz < vp8_mem_req_segs[i].calc_sz(&cfg, init_flags))
            {
                res = VPX_CODEC_MEM_ERROR;
                break;
            }
        }
    }

    return res;
}

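/* Bind the first (algorithm private) segment to the codec context: the codec
 * private header and the algorithm private struct share the same storage, so
 * both ctx->priv and ctx->priv->alg_priv point at mmap->base. */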
static void vp8_init_ctx(vpx_codec_ctx_t *ctx, const vpx_codec_mmap_t *mmap)
{
    int i;

    ctx->priv = mmap->base;
    ctx->priv->sz = sizeof(*ctx->priv);
    ctx->priv->iface = ctx->iface;
    ctx->priv->alg_priv = mmap->base;

    for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++)
        ctx->priv->alg_priv->mmaps[i].id = vp8_mem_req_segs[i].id;

    ctx->priv->alg_priv->mmaps[0] = *mmap;
    ctx->priv->alg_priv->si.sz = sizeof(ctx->priv->alg_priv->si);
    ctx->priv->init_flags = ctx->init_flags;

    if (ctx->config.dec)
    {
        /* Update the reference to the config structure to an internal copy. */
        ctx->priv->alg_priv->cfg = *ctx->config.dec;
        ctx->config.dec = &ctx->priv->alg_priv->cfg;
    }
}

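/* Look up the base address of the memory segment with the given id. */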
static void *mmap_lkup(vpx_codec_alg_priv_t *ctx, int id)
{
    int i;

    for (i = 0; i < NELEMENTS(vp8_mem_req_segs); i++)
        if (ctx->mmaps[i].id == id)
            return ctx->mmaps[i].base;

    return NULL;
}

static void vp8_finalize_mmaps(vpx_codec_alg_priv_t *ctx)
{
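    /* Nothing to wire up for VP8: the only requested segment is the algorithm
     * private data. The commented-out block below is segment wiring carried
     * over from the VP6 decoder. */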
    /*
    ctx->pbi = mmap_lkup(ctx, VP6_SEG_PB_INSTANCE);
    ctx->pbi->mbi.block_dx_info[0].idct_output_ptr = mmap_lkup(ctx, VP6_SEG_IDCT_BUFFER);
    ctx->pbi->loop_filtered_block = mmap_lkup(ctx, VP6_SEG_LF_BLOCK);
    ctx->pbi->huff = mmap_lkup(ctx, VP6_SEG_HUFF);
    ctx->pbi->mbi.coeffs_base_ptr = mmap_lkup(ctx, VP6_SEG_COEFFS);
    ctx->pbi->fc.above_y = mmap_lkup(ctx, VP6_SEG_ABOVEY);
    ctx->pbi->fc.above_u = mmap_lkup(ctx, VP6_SEG_ABOVEU);
    ctx->pbi->fc.above_v = mmap_lkup(ctx, VP6_SEG_ABOVEV);
    ctx->pbi->prediction_mode = mmap_lkup(ctx, VP6_SEG_PRED_MODES);
    ctx->pbi->mbmotion_vector = mmap_lkup(ctx, VP6_SEG_MV_FIELD);
    ctx->pbi->fb_storage_ptr[0] = mmap_lkup(ctx, VP6_SEG_IMG0_STRG);
    ctx->pbi->fb_storage_ptr[1] = mmap_lkup(ctx, VP6_SEG_IMG1_STRG);
    ctx->pbi->fb_storage_ptr[2] = mmap_lkup(ctx, VP6_SEG_IMG2_STRG);
    #if CONFIG_NEW_TOKENS
    ctx->pbi->token_graph = mmap_lkup(ctx, VP6_SEG_TOKEN_GRAPH);
    #endif
    #if CONFIG_POSTPROC
    ctx->pbi->postproc.deblock.fragment_variances = mmap_lkup(ctx, VP6_SEG_DEBLOCKER);
    ctx->pbi->fb_storage_ptr[3] = mmap_lkup(ctx, VP6_SEG_PP_IMG_STRG);
    #endif
    */
}

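/* Typical application-side usage of this decoder, for orientation only
 * (a minimal sketch using the public vpx_decoder.h API; read_frame() and
 * show_frame() stand in for application-supplied demuxing and display, and
 * buf/buf_sz hold one compressed frame):
 *
 *     vpx_codec_ctx_t decoder;
 *     vpx_codec_dec_init(&decoder, &vpx_codec_vp8_dx_algo, NULL, 0);
 *
 *     while (read_frame(&buf, &buf_sz))
 *     {
 *         vpx_codec_iter_t  iter = NULL;
 *         vpx_image_t      *img;
 *
 *         vpx_codec_decode(&decoder, buf, buf_sz, NULL, 0);
 *
 *         while ((img = vpx_codec_get_frame(&decoder, &iter)))
 *             show_frame(img);
 *     }
 *
 *     vpx_codec_destroy(&decoder);
 */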
static vpx_codec_err_t vp8_init(vpx_codec_ctx_t *ctx)
{
    vpx_codec_err_t res = VPX_CODEC_OK;

    /* This function only allocates space for the vpx_codec_alg_priv_t
     * structure. More memory may be required at the time the stream
     * information becomes known.
     */
    if (!ctx->priv)
    {
        vpx_codec_mmap_t mmap;

        mmap.id = vp8_mem_req_segs[0].id;
        mmap.sz = sizeof(vpx_codec_alg_priv_t);
        mmap.align = vp8_mem_req_segs[0].align;
        mmap.flags = vp8_mem_req_segs[0].flags;

        res = vp8_mmap_alloc(&mmap);

        if (!res)
        {
            vp8_init_ctx(ctx, &mmap);

            /* Remaining allocations are deferred until the stream size is
             * known; the post processing level is initialized to do nothing.
             */
            ctx->priv->alg_priv->defer_alloc = 1;
        }
    }

    return res;
}

static vpx_codec_err_t vp8_destroy(vpx_codec_alg_priv_t *ctx)
{
    int i;

    vp8dx_remove_decompressor(ctx->pbi);

    for (i = NELEMENTS(ctx->mmaps) - 1; i >= 0; i--)
    {
        if (ctx->mmaps[i].dtor)
            ctx->mmaps[i].dtor(&ctx->mmaps[i]);
    }

    return VPX_CODEC_OK;
}

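/* Layout of the uncompressed data chunk at the start of a VP8 frame, as
 * parsed below (per the VP8 bitstream format):
 *
 *     bytes 0-2: frame tag; bit 0 of byte 0 is the frame type
 *                (0 = key frame), followed by the version, show_frame flag
 *                and the size of the first data partition
 *     bytes 3-5: start code 0x9d 0x01 0x2a (key frames only)
 *     bytes 6-7: horizontal size code, little endian:
 *                width (14 bits) | horizontal scale (2 bits)
 *     bytes 8-9: vertical size code, little endian:
 *                height (14 bits) | vertical scale (2 bits)
 */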
static vpx_codec_err_t vp8_peek_si(const uint8_t            *data,
                                   unsigned int              data_sz,
                                   vpx_codec_stream_info_t  *si)
{
    vpx_codec_err_t res = VPX_CODEC_OK;
    {
        /* Parse the uncompressed data chunk from the start of the frame;
         * this implies knowledge of the VP8 bitstream: a 3 byte frame tag
         * followed, on key frames, by the start code and 2 bytes each for
         * width and height (14 bits each plus a 2-bit scale factor).
         */
        si->is_kf = 0;

        if (data_sz >= 10 && !(data[0] & 0x01))  /* key frame */
        {
            const uint8_t *c = data + 3;
            si->is_kf = 1;

            /* vet via sync code */
            if (c[0] != 0x9d || c[1] != 0x01 || c[2] != 0x2a)
                res = VPX_CODEC_UNSUP_BITSTREAM;

            si->w = swap2(*(const unsigned short *)(c + 3)) & 0x3fff;
            si->h = swap2(*(const unsigned short *)(c + 5)) & 0x3fff;

            /*printf("w=%d, h=%d\n", si->w, si->h);*/
            if (!(si->h | si->w))
                res = VPX_CODEC_UNSUP_BITSTREAM;
        }
        else
            res = VPX_CODEC_UNSUP_BITSTREAM;
    }

    return res;
}

static vpx_codec_err_t vp8_get_si(vpx_codec_alg_priv_t     *ctx,
                                  vpx_codec_stream_info_t  *si)
{
    unsigned int sz;

    if (si->sz >= sizeof(vp8_stream_info_t))
        sz = sizeof(vp8_stream_info_t);
    else
        sz = sizeof(vpx_codec_stream_info_t);

    memcpy(si, &ctx->si, sz);
    si->sz = sz;

    return VPX_CODEC_OK;
}


static vpx_codec_err_t
update_error_state(vpx_codec_alg_priv_t                 *ctx,
                   const struct vpx_internal_error_info *error)
{
    vpx_codec_err_t res;

    if ((res = error->error_code))
        ctx->base.err_detail = error->has_detail
                               ? error->detail
                               : NULL;

    return res;
}


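/* Decode a single compressed frame. On the first call this also determines
 * the stream dimensions, performs any deferred segment allocations and
 * creates the low-level decoder instance before handing it the data. */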
static vpx_codec_err_t vp8_decode(vpx_codec_alg_priv_t  *ctx,
                                  const uint8_t         *data,
                                  unsigned int           data_sz,
                                  void                  *user_priv,
                                  long                   deadline)
{
    vpx_codec_err_t res = VPX_CODEC_OK;

    ctx->img_avail = 0;

    /* Determine the stream parameters */
    if (!ctx->si.h)
        res = ctx->base.iface->dec.peek_si(data, data_sz, &ctx->si);


    /* Perform deferred allocations, if required */
    if (!res && ctx->defer_alloc)
    {
        int i;

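        /* Segment 0 (the algorithm private data) was allocated in vp8_init(),
         * so only the remaining segments are sized and allocated here. */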
        for (i = 1; !res && i < NELEMENTS(ctx->mmaps); i++)
        {
            vpx_codec_dec_cfg_t cfg;

            cfg.w = ctx->si.w;
            cfg.h = ctx->si.h;
            ctx->mmaps[i].id = vp8_mem_req_segs[i].id;
            ctx->mmaps[i].sz = vp8_mem_req_segs[i].sz;
            ctx->mmaps[i].align = vp8_mem_req_segs[i].align;
            ctx->mmaps[i].flags = vp8_mem_req_segs[i].flags;

            if (!ctx->mmaps[i].sz)
                ctx->mmaps[i].sz = vp8_mem_req_segs[i].calc_sz(&cfg,
                                   ctx->base.init_flags);

            res = vp8_mmap_alloc(&ctx->mmaps[i]);
        }

        if (!res)
            vp8_finalize_mmaps(ctx);

        ctx->defer_alloc = 0;
    }

    /* Initialize the decoder instance on the first frame */
    if (!res && !ctx->decoder_init)
    {
        res = vp8_validate_mmaps(&ctx->si, ctx->mmaps, ctx->base.init_flags);

        if (!res)
        {
            VP8D_CONFIG oxcf;
            VP8D_PTR optr;

            vp8dx_initialize();

            oxcf.Width = ctx->si.w;
            oxcf.Height = ctx->si.h;
            oxcf.Version = 9;
            oxcf.postprocess = 0;
            oxcf.max_threads = ctx->cfg.threads;

            optr = vp8dx_create_decompressor(&oxcf);

            /* If postprocessing was enabled by the application and a
             * configuration has not been provided, default it.
             */
            if (!ctx->postproc_cfg_set
                && (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC))
            {
                ctx->postproc_cfg.post_proc_flag =
                    VP8_DEBLOCK | VP8_DEMACROBLOCK;
                ctx->postproc_cfg.deblocking_level = 4;
                ctx->postproc_cfg.noise_level = 0;
            }

            if (!optr)
                res = VPX_CODEC_ERROR;
            else
                ctx->pbi = optr;
        }

        ctx->decoder_init = 1;
    }

    if (!res && ctx->pbi)
    {
        YV12_BUFFER_CONFIG sd;
        INT64 time_stamp = 0, time_end_stamp = 0;
        int ppflag = 0;
        int ppdeblocking = 0;
        int ppnoise = 0;

        if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)
        {
            ppflag = ctx->postproc_cfg.post_proc_flag;
            ppdeblocking = ctx->postproc_cfg.deblocking_level;
            ppnoise = ctx->postproc_cfg.noise_level;
        }

        if (vp8dx_receive_compressed_data(ctx->pbi, data_sz, data, deadline))
        {
            VP8D_COMP *pbi = (VP8D_COMP *)ctx->pbi;
            res = update_error_state(ctx, &pbi->common.error);
        }

        if (!res && 0 == vp8dx_get_raw_frame(ctx->pbi, &sd, &time_stamp, &time_end_stamp, ppdeblocking, ppnoise, ppflag))
        {
            /* Align width/height */
            unsigned int a_w = (sd.y_width + 15) & ~15;
            unsigned int a_h = (sd.y_height + 15) & ~15;

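            /* The decoder's frame buffers are macroblock aligned and carry a
             * border of VP8BORDERINPIXELS pixels on every side, so wrap the
             * full allocation and then restrict the visible rectangle to the
             * display size. */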
            vpx_img_wrap(&ctx->img, VPX_IMG_FMT_I420,
                         a_w + 2 * VP8BORDERINPIXELS,
                         a_h + 2 * VP8BORDERINPIXELS,
                         1,
                         sd.buffer_alloc);
            vpx_img_set_rect(&ctx->img,
                             VP8BORDERINPIXELS, VP8BORDERINPIXELS,
                             sd.y_width, sd.y_height);
            ctx->img_avail = 1;
        }
    }

    return res;
}

static vpx_image_t *vp8_get_frame(vpx_codec_alg_priv_t  *ctx,
                                  vpx_codec_iter_t      *iter)
{
    vpx_image_t *img = NULL;

    if (ctx->img_avail)
    {
        /* iter acts as a flip flop, so an image is only returned on the first
         * call to get_frame.
         */
        if (!(*iter))
        {
            img = &ctx->img;
            *iter = img;
        }
    }

    return img;
}


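/* Iterate over the decoder's memory segment requests so that an application
 * using external memory allocation (xma) mode can allocate them itself. */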
static
vpx_codec_err_t vp8_xma_get_mmap(const vpx_codec_ctx_t  *ctx,
                                 vpx_codec_mmap_t       *mmap,
                                 vpx_codec_iter_t       *iter)
{
    vpx_codec_err_t  res;
    const mem_req_t *seg_iter = *iter;

    /* Get address of next segment request */
    do
    {
        if (!seg_iter)
            seg_iter = vp8_mem_req_segs;
        else if (seg_iter->id != VP8_SEG_MAX)
            seg_iter++;

        *iter = (vpx_codec_iter_t)seg_iter;

        if (seg_iter->id != VP8_SEG_MAX)
        {
            mmap->id = seg_iter->id;
            mmap->sz = seg_iter->sz;
            mmap->align = seg_iter->align;
            mmap->flags = seg_iter->flags;

            if (!seg_iter->sz)
                mmap->sz = seg_iter->calc_sz(ctx->config.dec, ctx->init_flags);

            res = VPX_CODEC_OK;
        }
        else
            res = VPX_CODEC_LIST_END;
    }
    while (!mmap->sz && res != VPX_CODEC_LIST_END);

    return res;
}

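/* Accept a memory segment allocated by the application; once every segment
 * has been supplied, finalize the maps and run the normal init path. */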
static vpx_codec_err_t vp8_xma_set_mmap(vpx_codec_ctx_t         *ctx,
                                        const vpx_codec_mmap_t  *mmap)
{
    vpx_codec_err_t res = VPX_CODEC_MEM_ERROR;
    int i, done;

    if (!ctx->priv)
    {
        if (mmap->id == VP8_SEG_ALG_PRIV)
        {
            if (!ctx->priv)
            {
                vp8_init_ctx(ctx, mmap);
                res = VPX_CODEC_OK;
            }
        }
    }

    done = 1;

    if (ctx->priv->alg_priv)
    {
        for (i = 0; i < NELEMENTS(vp8_mem_req_segs) - 1; i++)
        {
            if (ctx->priv->alg_priv->mmaps[i].id == mmap->id)
                if (!ctx->priv->alg_priv->mmaps[i].base)
                {
                    ctx->priv->alg_priv->mmaps[i] = *mmap;
                    res = VPX_CODEC_OK;
                }

            done &= (ctx->priv->alg_priv->mmaps[i].base != NULL);
        }
    }

    if (done && !res)
    {
        vp8_finalize_mmaps(ctx->priv->alg_priv);
        res = ctx->iface->init(ctx);
    }

    return res;
}

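/* Translate a vpx_image_t into the YV12_BUFFER_CONFIG layout expected by the
 * low-level decoder; used when getting and setting reference frames. */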
static vpx_codec_err_t image2yuvconfig(const vpx_image_t   *img,
                                       YV12_BUFFER_CONFIG  *yv12)
{
    vpx_codec_err_t res = VPX_CODEC_OK;
    yv12->y_buffer = img->planes[PLANE_Y];
    yv12->u_buffer = img->planes[PLANE_U];
    yv12->v_buffer = img->planes[PLANE_V];

    yv12->y_width  = img->d_w;
    yv12->y_height = img->d_h;
    yv12->uv_width = yv12->y_width / 2;
    yv12->uv_height = yv12->y_height / 2;

    yv12->y_stride = img->stride[PLANE_Y];
    yv12->uv_stride = img->stride[PLANE_U];

    yv12->border  = (img->stride[PLANE_Y] - img->d_w) / 2;
    yv12->clrtype = (img->fmt == VPX_IMG_FMT_VPXI420 || img->fmt == VPX_IMG_FMT_VPXYV12);

    return res;
}


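/* Control functions, dispatched through the vp8_ctf_maps table below. */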
static vpx_codec_err_t vp8_set_reference(vpx_codec_alg_priv_t *ctx,
                                         int ctr_id,
                                         va_list args)
{
    vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);

    if (data)
    {
        vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
        YV12_BUFFER_CONFIG sd;

        image2yuvconfig(&frame->img, &sd);

        vp8dx_set_reference(ctx->pbi, frame->frame_type, &sd);
        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t vp8_get_reference(vpx_codec_alg_priv_t *ctx,
                                         int ctr_id,
                                         va_list args)
{
    vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);

    if (data)
    {
        vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
        YV12_BUFFER_CONFIG sd;

        image2yuvconfig(&frame->img, &sd);

        vp8dx_get_reference(ctx->pbi, frame->frame_type, &sd);
        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;
}

static vpx_codec_err_t vp8_set_postproc(vpx_codec_alg_priv_t *ctx,
                                        int ctr_id,
                                        va_list args)
{
    vp8_postproc_cfg_t *data = va_arg(args, vp8_postproc_cfg_t *);
#if CONFIG_POSTPROC

    if (data)
    {
        ctx->postproc_cfg_set = 1;
        ctx->postproc_cfg = *((vp8_postproc_cfg_t *)data);
        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;

#else
    return VPX_CODEC_INCAPABLE;
#endif
}


vpx_codec_ctrl_fn_map_t vp8_ctf_maps[] =
{
    {VP8_SET_REFERENCE,  vp8_set_reference},
    {VP8_COPY_REFERENCE, vp8_get_reference},
    {VP8_SET_POSTPROC,   vp8_set_postproc},
    { -1, NULL},
};


#ifndef VERSION_STRING
#define VERSION_STRING
#endif
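/* Decoder interface exposed to applications; passed to vpx_codec_dec_init()
 * (or referenced through the deprecated vpx_codec_vp8_algo below). */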
vpx_codec_iface_t vpx_codec_vp8_dx_algo =
{
    "vpx Technologies VP8 Decoder" VERSION_STRING,
    VPX_CODEC_INTERNAL_ABI_VERSION,
    VPX_CODEC_CAP_DECODER | VP8_CAP_POSTPROC,
    /* vpx_codec_caps_t          caps; */
    vp8_init,         /* vpx_codec_init_fn_t       init; */
    vp8_destroy,      /* vpx_codec_destroy_fn_t    destroy; */
    vp8_ctf_maps,     /* vpx_codec_ctrl_fn_map_t  *ctrl_maps; */
    vp8_xma_get_mmap, /* vpx_codec_get_mmap_fn_t   get_mmap; */
    vp8_xma_set_mmap, /* vpx_codec_set_mmap_fn_t   set_mmap; */
    {
        vp8_peek_si,      /* vpx_codec_peek_si_fn_t    peek_si; */
        vp8_get_si,       /* vpx_codec_get_si_fn_t     get_si; */
        vp8_decode,       /* vpx_codec_decode_fn_t     decode; */
        vp8_get_frame,    /* vpx_codec_frame_get_fn_t  frame_get; */
    },
    {NOT_IMPLEMENTED} /* encoder functions */
};

/*
 * BEGIN BACKWARDS COMPATIBILITY SHIM.
 */
vpx_codec_iface_t vpx_codec_vp8_algo =
{
    "vpx Technologies VP8 Decoder (Deprecated API)" VERSION_STRING,
    VPX_CODEC_INTERNAL_ABI_VERSION,
    VPX_CODEC_CAP_DECODER | VP8_CAP_POSTPROC,
    /* vpx_codec_caps_t          caps; */
    vp8_init,         /* vpx_codec_init_fn_t       init; */
    vp8_destroy,      /* vpx_codec_destroy_fn_t    destroy; */
    vp8_ctf_maps,     /* vpx_codec_ctrl_fn_map_t  *ctrl_maps; */
    vp8_xma_get_mmap, /* vpx_codec_get_mmap_fn_t   get_mmap; */
    vp8_xma_set_mmap, /* vpx_codec_set_mmap_fn_t   set_mmap; */
    {
        vp8_peek_si,      /* vpx_codec_peek_si_fn_t    peek_si; */
        vp8_get_si,       /* vpx_codec_get_si_fn_t     get_si; */
        vp8_decode,       /* vpx_codec_decode_fn_t     decode; */
        vp8_get_frame,    /* vpx_codec_frame_get_fn_t  frame_get; */
    },
    {NOT_IMPLEMENTED} /* encoder functions */
};