/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <limits.h>
#include <stdio.h>

#include "./av1_rtcd.h"
#include "./aom_dsp_rtcd.h"
#include "./aom_scale_rtcd.h"

#include "aom_mem/aom_mem.h"
#include "aom_ports/system_state.h"
#include "aom_ports/aom_once.h"
#include "aom_ports/aom_timer.h"
#include "aom_scale/aom_scale.h"
#include "aom_util/aom_thread.h"

#include "av1/common/alloccommon.h"
#include "av1/common/av1_loopfilter.h"
#include "av1/common/onyxc_int.h"
#include "av1/common/quant_common.h"
#include "av1/common/reconinter.h"
#include "av1/common/reconintra.h"

#include "av1/decoder/decodeframe.h"
#include "av1/decoder/decoder.h"

#if !CONFIG_PVQ
#include "av1/decoder/detokenize.h"
#endif

static void initialize_dec(void) {
  static volatile int init_done = 0;

  if (!init_done) {
    av1_rtcd();
    aom_dsp_rtcd();
    aom_scale_rtcd();
    av1_init_intra_predictors();
#if CONFIG_EXT_INTER
    av1_init_wedge_masks();
#endif  // CONFIG_EXT_INTER
    init_done = 1;
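    // Precompute forward and inverse symbol-index tables from the coding
    // trees below (a descriptive note only; the exact mapping is defined by
    // av1_indices_from_tree).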
    av1_indices_from_tree(av1_switchable_interp_ind, av1_switchable_interp_inv,
                          av1_switchable_interp_tree);
#if CONFIG_EXT_TX
    int s;
    for (s = 1; s < EXT_TX_SETS_INTRA; ++s)
      av1_indices_from_tree(av1_ext_tx_intra_ind[s], av1_ext_tx_intra_inv[s],
                            av1_ext_tx_intra_tree[s]);
    for (s = 1; s < EXT_TX_SETS_INTER; ++s)
      av1_indices_from_tree(av1_ext_tx_inter_ind[s], av1_ext_tx_inter_inv[s],
                            av1_ext_tx_inter_tree[s]);
#else
    av1_indices_from_tree(av1_ext_tx_ind, av1_ext_tx_inv, av1_ext_tx_tree);
#endif
  }
}

static void av1_dec_setup_mi(AV1_COMMON *cm) {
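  // Point the visible mode-info grids one stride plus one entry past the start
  // of the allocations; the extra entries act as a border (a note inferred
  // from the offsets used here).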
  cm->mi = cm->mip + cm->mi_stride + 1;
  cm->mi_grid_visible = cm->mi_grid_base + cm->mi_stride + 1;
  memset(cm->mi_grid_base, 0,
         cm->mi_stride * (cm->mi_rows + 1) * sizeof(*cm->mi_grid_base));
}

static int av1_dec_alloc_mi(AV1_COMMON *cm, int mi_size) {
  cm->mip = aom_calloc(mi_size, sizeof(*cm->mip));
  if (!cm->mip) return 1;
  cm->mi_alloc_size = mi_size;
  cm->mi_grid_base = (MODE_INFO **)aom_calloc(mi_size, sizeof(MODE_INFO *));
  if (!cm->mi_grid_base) return 1;
  return 0;
}

static void av1_dec_free_mi(AV1_COMMON *cm) {
  aom_free(cm->mip);
  cm->mip = NULL;
  aom_free(cm->mi_grid_base);
  cm->mi_grid_base = NULL;
}

AV1Decoder *av1_decoder_create(BufferPool *const pool) {
  AV1Decoder *volatile const pbi = aom_memalign(32, sizeof(*pbi));
  AV1_COMMON *volatile const cm = pbi ? &pbi->common : NULL;

  if (!cm) return NULL;

  av1_zero(*pbi);

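  // Any aom_internal_error() raised while cm->error.setjmp is set longjmps
  // back to this point; tear down the partially constructed decoder and fail.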
  if (setjmp(cm->error.jmp)) {
    cm->error.setjmp = 0;
    av1_decoder_remove(pbi);
    return NULL;
  }

  cm->error.setjmp = 1;

  CHECK_MEM_ERROR(cm, cm->fc,
                  (FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->fc)));
  CHECK_MEM_ERROR(cm, cm->frame_contexts,
                  (FRAME_CONTEXT *)aom_memalign(
                      32, FRAME_CONTEXTS * sizeof(*cm->frame_contexts)));
  memset(cm->fc, 0, sizeof(*cm->fc));
  memset(cm->frame_contexts, 0, FRAME_CONTEXTS * sizeof(*cm->frame_contexts));

  pbi->need_resync = 1;
  once(initialize_dec);

  // Initialize the references to not point to any frame buffers.
  memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
  memset(&cm->next_ref_frame_map, -1, sizeof(cm->next_ref_frame_map));

  cm->current_video_frame = 0;
  pbi->ready_for_new_data = 1;
  pbi->common.buffer_pool = pool;

  cm->bit_depth = AOM_BITS_8;
  cm->dequant_bit_depth = AOM_BITS_8;

  cm->alloc_mi = av1_dec_alloc_mi;
  cm->free_mi = av1_dec_free_mi;
  cm->setup_mi = av1_dec_setup_mi;

  av1_loop_filter_init(cm);

#if CONFIG_AOM_QM
  aom_qm_init(cm);
#endif
#if CONFIG_LOOP_RESTORATION
  av1_loop_restoration_precal();
#endif  // CONFIG_LOOP_RESTORATION
#if CONFIG_ACCOUNTING
  pbi->acct_enabled = 1;
  aom_accounting_init(&pbi->accounting);
#endif

  cm->error.setjmp = 0;

  aom_get_worker_interface()->init(&pbi->lf_worker);

  return pbi;
}

void av1_decoder_remove(AV1Decoder *pbi) {
  int i;

  if (!pbi) return;

  aom_get_worker_interface()->end(&pbi->lf_worker);
  aom_free(pbi->lf_worker.data1);
  aom_free(pbi->tile_data);
  for (i = 0; i < pbi->num_tile_workers; ++i) {
    AVxWorker *const worker = &pbi->tile_workers[i];
    aom_get_worker_interface()->end(worker);
  }
  aom_free(pbi->tile_worker_data);
  aom_free(pbi->tile_worker_info);
  aom_free(pbi->tile_workers);

  if (pbi->num_tile_workers > 0) {
    av1_loop_filter_dealloc(&pbi->lf_row_sync);
  }

#if CONFIG_ACCOUNTING
  aom_accounting_clear(&pbi->accounting);
#endif

  aom_free(pbi);
}

static int equal_dimensions(const YV12_BUFFER_CONFIG *a,
                            const YV12_BUFFER_CONFIG *b) {
  return a->y_height == b->y_height && a->y_width == b->y_width &&
         a->uv_height == b->uv_height && a->uv_width == b->uv_width;
}

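// Copies the reference frame at index idx into the caller-supplied buffer sd.
// This is normally reached through the codec wrapper's copy-reference control
// (a descriptive note; the control plumbing lives outside this file).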
aom_codec_err_t av1_copy_reference_dec(AV1Decoder *pbi, int idx,
                                       YV12_BUFFER_CONFIG *sd) {
  AV1_COMMON *cm = &pbi->common;

  const YV12_BUFFER_CONFIG *const cfg = get_ref_frame(cm, idx);
  if (cfg == NULL) {
    aom_internal_error(&cm->error, AOM_CODEC_ERROR, "No reference frame");
    return AOM_CODEC_ERROR;
  }
  if (!equal_dimensions(cfg, sd))
    aom_internal_error(&cm->error, AOM_CODEC_ERROR,
                       "Incorrect buffer dimensions");
  else
    aom_yv12_copy_frame(cfg, sd);

  return cm->error.error_code;
}

aom_codec_err_t av1_set_reference_dec(AV1_COMMON *cm, int idx,
                                      YV12_BUFFER_CONFIG *sd) {
  YV12_BUFFER_CONFIG *ref_buf = NULL;

  // Get the destination reference buffer.
  ref_buf = get_ref_frame(cm, idx);

  if (ref_buf == NULL) {
    aom_internal_error(&cm->error, AOM_CODEC_ERROR, "No reference frame");
    return AOM_CODEC_ERROR;
  }

  if (!equal_dimensions(ref_buf, sd)) {
    aom_internal_error(&cm->error, AOM_CODEC_ERROR,
                       "Incorrect buffer dimensions");
  } else {
    // Overwrite the reference frame buffer.
    aom_yv12_copy_frame(sd, ref_buf);
  }

  return cm->error.error_code;
}

/* If any buffer updating is signaled, it should be done here. */
static void swap_frame_buffers(AV1Decoder *pbi) {
  int ref_index = 0, mask;
  AV1_COMMON *const cm = &pbi->common;
  BufferPool *const pool = cm->buffer_pool;
  RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;

  lock_buffer_pool(pool);
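  // Walk the reference slots covered by refresh_frame_flags: every slot loses
  // the decoding thread's temporary hold, and slots whose bit is set also lose
  // the reference map's hold because the map entry is replaced below.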
  for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
    const int old_idx = cm->ref_frame_map[ref_index];
    // The current thread releases its hold on this reference frame.
    decrease_ref_count(old_idx, frame_bufs, pool);

    // If this slot is being refreshed, also release the hold kept in the
    // reference map before it is updated for decoding the next frame.
    if (mask & 1) decrease_ref_count(old_idx, frame_bufs, pool);
    cm->ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
    ++ref_index;
  }

  // The current thread releases its hold on the remaining reference frames.
  for (; ref_index < REF_FRAMES && !cm->show_existing_frame; ++ref_index) {
    const int old_idx = cm->ref_frame_map[ref_index];
    decrease_ref_count(old_idx, frame_bufs, pool);
    cm->ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
  }

  unlock_buffer_pool(pool);
  pbi->hold_ref_buf = 0;
  cm->frame_to_show = get_frame_new_buffer(cm);

  // TODO(zoeliu): Fix the reference frame buffer update for the scenario of
  // cm->frame_parallel_decode == 1.
  if (!cm->frame_parallel_decode || !cm->show_frame) {
    lock_buffer_pool(pool);
    --frame_bufs[cm->new_fb_idx].ref_count;
    unlock_buffer_pool(pool);
  }

  // Invalidate these references until the next frame starts.
  for (ref_index = 0; ref_index < INTER_REFS_PER_FRAME; ref_index++) {
    cm->frame_refs[ref_index].idx = INVALID_IDX;
    cm->frame_refs[ref_index].buf = NULL;
  }
}

int av1_receive_compressed_data(AV1Decoder *pbi, size_t size,
                                const uint8_t **psource) {
  AV1_COMMON *volatile const cm = &pbi->common;
  BufferPool *volatile const pool = cm->buffer_pool;
  RefCntBuffer *volatile const frame_bufs = cm->buffer_pool->frame_bufs;
  const uint8_t *source = *psource;
  int retcode = 0;
  cm->error.error_code = AOM_CODEC_OK;

  if (size == 0) {
    // This is used to signal that we are missing frames.
    // We do not know whether the missing frame(s) were supposed to update
    // any of the reference buffers, but we act conservatively and
    // mark only the last buffer as corrupted.
    //
    // TODO(jkoleszar): Error concealment is undefined and non-normative
    // at this point, but if it becomes so, [0] may not always be the correct
    // thing to do here.
    if (cm->frame_refs[0].idx > 0) {
      assert(cm->frame_refs[0].buf != NULL);
      cm->frame_refs[0].buf->corrupted = 1;
    }
  }

  pbi->ready_for_new_data = 0;

  // Find a free buffer for the new frame, releasing the reference previously
  // held.

  // Check if the previous frame was a frame without any references to it.
  // Release its frame buffer if not decoding in frame parallel mode.
  if (!cm->frame_parallel_decode && cm->new_fb_idx >= 0 &&
      frame_bufs[cm->new_fb_idx].ref_count == 0)
    pool->release_fb_cb(pool->cb_priv,
                        &frame_bufs[cm->new_fb_idx].raw_frame_buffer);

  // Find a free frame buffer. Return an error if none can be found.
  cm->new_fb_idx = get_free_fb(cm);
  if (cm->new_fb_idx == INVALID_IDX) return AOM_CODEC_MEM_ERROR;

  // Assign an MV array to the frame buffer.
  cm->cur_frame = &pool->frame_bufs[cm->new_fb_idx];

  pbi->hold_ref_buf = 0;
  if (cm->frame_parallel_decode) {
    AVxWorker *const worker = pbi->frame_worker_owner;
    av1_frameworker_lock_stats(worker);
    frame_bufs[cm->new_fb_idx].frame_worker_owner = worker;
    // Reset decoding progress.
    pbi->cur_buf = &frame_bufs[cm->new_fb_idx];
    pbi->cur_buf->row = -1;
    pbi->cur_buf->col = -1;
    av1_frameworker_unlock_stats(worker);
  } else {
    pbi->cur_buf = &frame_bufs[cm->new_fb_idx];
  }

  if (setjmp(cm->error.jmp)) {
    const AVxWorkerInterface *const winterface = aom_get_worker_interface();
    int i;

    cm->error.setjmp = 0;
    pbi->ready_for_new_data = 1;

    // Synchronize all threads immediately as a subsequent decode call may
    // cause a resize invalidating some allocations.
    winterface->sync(&pbi->lf_worker);
    for (i = 0; i < pbi->num_tile_workers; ++i) {
      winterface->sync(&pbi->tile_workers[i]);
    }

    lock_buffer_pool(pool);
    // Release all the reference buffers if the worker thread is holding them.
    if (pbi->hold_ref_buf == 1) {
      int ref_index = 0, mask;
      for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
        const int old_idx = cm->ref_frame_map[ref_index];
        // The current thread releases its hold on this reference frame.
        decrease_ref_count(old_idx, frame_bufs, pool);

        // If this slot is being refreshed, also release the hold kept in the
        // reference map for decoding the next frame.
        if (mask & 1) decrease_ref_count(old_idx, frame_bufs, pool);
        ++ref_index;
      }

      // The current thread releases its hold on the remaining reference
      // frames.
      for (; ref_index < REF_FRAMES && !cm->show_existing_frame; ++ref_index) {
        const int old_idx = cm->ref_frame_map[ref_index];
        decrease_ref_count(old_idx, frame_bufs, pool);
      }
      pbi->hold_ref_buf = 0;
    }
    // Release the current frame.
    decrease_ref_count(cm->new_fb_idx, frame_bufs, pool);
    unlock_buffer_pool(pool);

    aom_clear_system_state();
    return -1;
  }

  cm->error.setjmp = 1;
  av1_decode_frame(pbi, source, source + size, psource);

  swap_frame_buffers(pbi);

#if CONFIG_EXT_TILE
  // For now, we only extend the frame borders when the whole frame is decoded.
  // Later, if needed, extend the border for the decoded tile on the frame
  // border.
  if (pbi->dec_tile_row == -1 && pbi->dec_tile_col == -1)
#endif  // CONFIG_EXT_TILE
    // TODO(debargha): Fix the encoder-side mv range so that we can use the
    // inner border extension. For now, use the larger extension.
    // aom_extend_frame_inner_borders(cm->frame_to_show);
    aom_extend_frame_borders(cm->frame_to_show);

  aom_clear_system_state();

  if (!cm->show_existing_frame) {
    cm->last_show_frame = cm->show_frame;

#if CONFIG_EXT_REFS
    // NOTE: prev_frame must never point to a frame that is not used as a
    // reference.
    if (cm->is_reference_frame)
#endif  // CONFIG_EXT_REFS
      cm->prev_frame = cm->cur_frame;

    if (cm->seg.enabled && !cm->frame_parallel_decode)
      av1_swap_current_and_last_seg_map(cm);
  }

  // Update progress in frame parallel decode.
  if (cm->frame_parallel_decode) {
    // Need to lock the mutex here as another thread may
    // be accessing this buffer.
    AVxWorker *const worker = pbi->frame_worker_owner;
    FrameWorkerData *const frame_worker_data = worker->data1;
    av1_frameworker_lock_stats(worker);

    if (cm->show_frame) {
      cm->current_video_frame++;
    }
    frame_worker_data->frame_decoded = 1;
    frame_worker_data->frame_context_ready = 1;
    av1_frameworker_signal_stats(worker);
    av1_frameworker_unlock_stats(worker);
  } else {
    cm->last_width = cm->width;
    cm->last_height = cm->height;
    cm->last_tile_cols = cm->tile_cols;
    cm->last_tile_rows = cm->tile_rows;
    if (cm->show_frame) {
      cm->current_video_frame++;
    }
  }

  cm->error.setjmp = 0;
  return retcode;
}

int av1_get_raw_frame(AV1Decoder *pbi, YV12_BUFFER_CONFIG *sd) {
  AV1_COMMON *const cm = &pbi->common;
  int ret = -1;
  if (pbi->ready_for_new_data == 1) return ret;

  pbi->ready_for_new_data = 1;

  /* No raw frame to show. */
  if (!cm->show_frame) return ret;

Yaowu Xuc27fc142016-08-22 16:08:15 -0700445 *sd = *cm->frame_to_show;
446 ret = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -0700447 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -0700448 return ret;
449}
450
Yaowu Xuf883b422016-08-30 14:01:10 -0700451int av1_get_frame_to_show(AV1Decoder *pbi, YV12_BUFFER_CONFIG *frame) {
452 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700453
454 if (!cm->show_frame || !cm->frame_to_show) return -1;
455
456 *frame = *cm->frame_to_show;
457 return 0;
458}

aom_codec_err_t av1_parse_superframe_index(const uint8_t *data, size_t data_sz,
                                           uint32_t sizes[8], int *count,
                                           int *index_size,
                                           aom_decrypt_cb decrypt_cb,
                                           void *decrypt_state) {
  // A chunk ending with a byte matching 0xc0 is an invalid chunk unless
  // it is a superframe index. If the last byte of real video compression
  // data is 0xc0, the encoder must add a 0 byte. If we have the marker but
  // not the associated matching marker byte at the front of the index, we have
  // an invalid bitstream and need to return an error.
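  //
  // For example (illustrative only): a marker byte of 0xc1 = 0b11000001 gives
  // frames = (0x1) + 1 = 2 and mag = (0x0) + 1 = 1, so the index occupies
  // 2 + 1 * (2 - 1) = 3 bytes: the marker, one size byte for the first frame,
  // and the trailing copy of the marker.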

  uint8_t marker;
  size_t frame_sz_sum = 0;

  assert(data_sz);
  marker = read_marker(decrypt_cb, decrypt_state, data);
  *count = 0;

  if ((marker & 0xe0) == 0xc0) {
    const uint32_t frames = (marker & 0x7) + 1;
    const uint32_t mag = ((marker >> 3) & 0x3) + 1;
    const size_t index_sz = 2 + mag * (frames - 1);
    *index_size = (int)index_sz;

    // This chunk is marked as having a superframe index but doesn't have
    // enough data for it, thus it's an invalid superframe index.
    if (data_sz < index_sz) return AOM_CODEC_CORRUPT_FRAME;

    {
      const uint8_t marker2 =
          read_marker(decrypt_cb, decrypt_state, data + index_sz - 1);

      // This chunk is marked as having a superframe index but doesn't have
      // the matching marker byte at the front of the index, therefore it's an
      // invalid chunk.
      if (marker != marker2) return AOM_CODEC_CORRUPT_FRAME;
    }

    {
      // Found a valid superframe index.
      uint32_t i, j;
      const uint8_t *x = &data[1];

      // frames has a maximum of 8 and mag has a maximum of 4.
      uint8_t clear_buffer[28];
      assert(sizeof(clear_buffer) >= (frames - 1) * mag);
      if (decrypt_cb) {
        decrypt_cb(decrypt_state, x, clear_buffer, (frames - 1) * mag);
        x = clear_buffer;
      }

      for (i = 0; i < frames - 1; ++i) {
        uint32_t this_sz = 0;

        for (j = 0; j < mag; ++j) this_sz |= (*x++) << (j * 8);
        this_sz += 1;
        sizes[i] = this_sz;
        frame_sz_sum += this_sz;
      }
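      // The last frame's size is not stored in the index; it is whatever
      // remains after the index bytes and the explicitly coded frame sizes.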
      sizes[i] = (uint32_t)(data_sz - index_sz - frame_sz_sum);
      *count = frames;
    }
  }
  return AOM_CODEC_OK;
}