Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1 | /* |
Yaowu Xu | 2ab7ff0 | 2016-09-02 12:04:54 -0700 | [diff] [blame] | 2 | * Copyright (c) 2016, Alliance for Open Media. All rights reserved |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3 | * |
Yaowu Xu | 2ab7ff0 | 2016-09-02 12:04:54 -0700 | [diff] [blame] | 4 | * This source code is subject to the terms of the BSD 2 Clause License and |
| 5 | * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License |
| 6 | * was not distributed with this source code in the LICENSE file, you can |
| 7 | * obtain it at www.aomedia.org/license/software. If the Alliance for Open |
| 8 | * Media Patent License 1.0 was not distributed with this source code in the |
| 9 | * PATENTS file, you can obtain it at www.aomedia.org/license/patent. |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 10 | */ |
| 11 | |
| 12 | #include <assert.h> |
| 13 | #include <limits.h> |
| 14 | #include <stdio.h> |
| 15 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 16 | #include "./av1_rtcd.h" |
| 17 | #include "./aom_dsp_rtcd.h" |
| 18 | #include "./aom_scale_rtcd.h" |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 19 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 20 | #include "aom_mem/aom_mem.h" |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 21 | #include "aom_ports/system_state.h" |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 22 | #include "aom_ports/aom_once.h" |
| 23 | #include "aom_ports/aom_timer.h" |
| 24 | #include "aom_scale/aom_scale.h" |
| 25 | #include "aom_util/aom_thread.h" |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 26 | |
| 27 | #include "av1/common/alloccommon.h" |
Tom Finegan | 17ce8b1 | 2017-02-08 12:46:31 -0800 | [diff] [blame] | 28 | #include "av1/common/av1_loopfilter.h" |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 29 | #include "av1/common/onyxc_int.h" |
| 30 | #include "av1/common/quant_common.h" |
| 31 | #include "av1/common/reconinter.h" |
| 32 | #include "av1/common/reconintra.h" |
| 33 | |
| 34 | #include "av1/decoder/decodeframe.h" |
| 35 | #include "av1/decoder/decoder.h" |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 36 | |
| 37 | #if !CONFIG_PVQ |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 38 | #include "av1/decoder/detokenize.h" |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 39 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 40 | |
| 41 | static void initialize_dec(void) { |
| 42 | static volatile int init_done = 0; |
| 43 | |
| 44 | if (!init_done) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 45 | av1_rtcd(); |
| 46 | aom_dsp_rtcd(); |
| 47 | aom_scale_rtcd(); |
| 48 | av1_init_intra_predictors(); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 49 | #if CONFIG_EXT_INTER |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 50 | av1_init_wedge_masks(); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 51 | #endif // CONFIG_EXT_INTER |
| 52 | init_done = 1; |
Nathan E. Egge | 00b3331 | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 53 | av1_indices_from_tree(av1_switchable_interp_ind, av1_switchable_interp_inv, |
Jingning Han | 8e67c05 | 2017-03-23 15:47:33 -0700 | [diff] [blame] | 54 | av1_switchable_interp_tree); |
Thomas Davies | b1bedf5 | 2017-03-17 14:03:28 +0000 | [diff] [blame] | 55 | #if CONFIG_EXT_TX |
| 56 | int s; |
| 57 | for (s = 1; s < EXT_TX_SETS_INTRA; ++s) |
| 58 | av1_indices_from_tree(av1_ext_tx_intra_ind[s], av1_ext_tx_intra_inv[s], |
Jingning Han | 8e67c05 | 2017-03-23 15:47:33 -0700 | [diff] [blame] | 59 | av1_ext_tx_intra_tree[s]); |
Thomas Davies | b1bedf5 | 2017-03-17 14:03:28 +0000 | [diff] [blame] | 60 | for (s = 1; s < EXT_TX_SETS_INTER; ++s) |
| 61 | av1_indices_from_tree(av1_ext_tx_inter_ind[s], av1_ext_tx_inter_inv[s], |
Jingning Han | 8e67c05 | 2017-03-23 15:47:33 -0700 | [diff] [blame] | 62 | av1_ext_tx_inter_tree[s]); |
Thomas Davies | b1bedf5 | 2017-03-17 14:03:28 +0000 | [diff] [blame] | 63 | #else |
Jingning Han | 8e67c05 | 2017-03-23 15:47:33 -0700 | [diff] [blame] | 64 | av1_indices_from_tree(av1_ext_tx_ind, av1_ext_tx_inv, av1_ext_tx_tree); |
Nathan E. Egge | dfa33f2 | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 65 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 66 | } |
| 67 | } |
| 68 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 69 | static void av1_dec_setup_mi(AV1_COMMON *cm) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 70 | cm->mi = cm->mip + cm->mi_stride + 1; |
| 71 | cm->mi_grid_visible = cm->mi_grid_base + cm->mi_stride + 1; |
| 72 | memset(cm->mi_grid_base, 0, |
| 73 | cm->mi_stride * (cm->mi_rows + 1) * sizeof(*cm->mi_grid_base)); |
| 74 | } |
| 75 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 76 | static int av1_dec_alloc_mi(AV1_COMMON *cm, int mi_size) { |
| 77 | cm->mip = aom_calloc(mi_size, sizeof(*cm->mip)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 78 | if (!cm->mip) return 1; |
| 79 | cm->mi_alloc_size = mi_size; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 80 | cm->mi_grid_base = (MODE_INFO **)aom_calloc(mi_size, sizeof(MODE_INFO *)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 81 | if (!cm->mi_grid_base) return 1; |
| 82 | return 0; |
| 83 | } |
| 84 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 85 | static void av1_dec_free_mi(AV1_COMMON *cm) { |
| 86 | aom_free(cm->mip); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 87 | cm->mip = NULL; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 88 | aom_free(cm->mi_grid_base); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 89 | cm->mi_grid_base = NULL; |
| 90 | } |
| 91 | |
// Allocate and initialize a decoder instance bound to the given frame-buffer
// pool. Returns NULL on allocation failure, or if initialization aborts via
// the decoder's longjmp error path. On success the caller owns the returned
// pointer and must release it with av1_decoder_remove().
AV1Decoder *av1_decoder_create(BufferPool *const pool) {
  // volatile: these locals must survive the longjmp back into the setjmp
  // below (C requires volatile for locals modified between setjmp/longjmp).
  AV1Decoder *volatile const pbi = aom_memalign(32, sizeof(*pbi));
  AV1_COMMON *volatile const cm = pbi ? &pbi->common : NULL;

  if (!cm) return NULL;

  av1_zero(*pbi);

  // Error path: CHECK_MEM_ERROR (and anything else raising via
  // aom_internal_error) longjmps here; tear down and report failure.
  if (setjmp(cm->error.jmp)) {
    cm->error.setjmp = 0;
    av1_decoder_remove(pbi);
    return NULL;
  }

  cm->error.setjmp = 1;

  CHECK_MEM_ERROR(cm, cm->fc,
                  (FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->fc)));
  CHECK_MEM_ERROR(cm, cm->frame_contexts,
                  (FRAME_CONTEXT *)aom_memalign(
                      32, FRAME_CONTEXTS * sizeof(*cm->frame_contexts)));
  memset(cm->fc, 0, sizeof(*cm->fc));
  memset(cm->frame_contexts, 0, FRAME_CONTEXTS * sizeof(*cm->frame_contexts));

  pbi->need_resync = 1;
  // Process-wide one-time init (RTCD tables, predictors, index tables).
  once(initialize_dec);

  // Initialize the references to not point to any frame buffers.
  memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
  memset(&cm->next_ref_frame_map, -1, sizeof(cm->next_ref_frame_map));

  cm->current_video_frame = 0;
  pbi->ready_for_new_data = 1;
  pbi->common.buffer_pool = pool;

  // Default to 8-bit until the first frame header says otherwise.
  cm->bit_depth = AOM_BITS_8;
  cm->dequant_bit_depth = AOM_BITS_8;

  // Install the decoder-side mode-info allocation callbacks.
  cm->alloc_mi = av1_dec_alloc_mi;
  cm->free_mi = av1_dec_free_mi;
  cm->setup_mi = av1_dec_setup_mi;

  av1_loop_filter_init(cm);

#if CONFIG_AOM_QM
  aom_qm_init(cm);
#endif
#if CONFIG_LOOP_RESTORATION
  av1_loop_restoration_precal();
#endif  // CONFIG_LOOP_RESTORATION
#if CONFIG_ACCOUNTING
  pbi->acct_enabled = 1;
  aom_accounting_init(&pbi->accounting);
#endif

  cm->error.setjmp = 0;

  aom_get_worker_interface()->init(&pbi->lf_worker);

  return pbi;
}
| 153 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 154 | void av1_decoder_remove(AV1Decoder *pbi) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 155 | int i; |
| 156 | |
| 157 | if (!pbi) return; |
| 158 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 159 | aom_get_worker_interface()->end(&pbi->lf_worker); |
| 160 | aom_free(pbi->lf_worker.data1); |
| 161 | aom_free(pbi->tile_data); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 162 | for (i = 0; i < pbi->num_tile_workers; ++i) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 163 | AVxWorker *const worker = &pbi->tile_workers[i]; |
| 164 | aom_get_worker_interface()->end(worker); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 165 | } |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 166 | aom_free(pbi->tile_worker_data); |
| 167 | aom_free(pbi->tile_worker_info); |
| 168 | aom_free(pbi->tile_workers); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 169 | |
| 170 | if (pbi->num_tile_workers > 0) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 171 | av1_loop_filter_dealloc(&pbi->lf_row_sync); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 172 | } |
| 173 | |
Michael Bebenita | 6048d05 | 2016-08-25 14:40:54 -0700 | [diff] [blame] | 174 | #if CONFIG_ACCOUNTING |
| 175 | aom_accounting_clear(&pbi->accounting); |
| 176 | #endif |
| 177 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 178 | aom_free(pbi); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 179 | } |
| 180 | |
| 181 | static int equal_dimensions(const YV12_BUFFER_CONFIG *a, |
| 182 | const YV12_BUFFER_CONFIG *b) { |
| 183 | return a->y_height == b->y_height && a->y_width == b->y_width && |
| 184 | a->uv_height == b->uv_height && a->uv_width == b->uv_width; |
| 185 | } |
| 186 | |
Thomas Daede | 497d195 | 2017-08-08 17:33:06 -0700 | [diff] [blame] | 187 | aom_codec_err_t av1_copy_reference_dec(AV1Decoder *pbi, int idx, |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 188 | YV12_BUFFER_CONFIG *sd) { |
| 189 | AV1_COMMON *cm = &pbi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 190 | |
Thomas Daede | 497d195 | 2017-08-08 17:33:06 -0700 | [diff] [blame] | 191 | const YV12_BUFFER_CONFIG *const cfg = get_ref_frame(cm, idx); |
| 192 | if (cfg == NULL) { |
| 193 | aom_internal_error(&cm->error, AOM_CODEC_ERROR, "No reference frame"); |
| 194 | return AOM_CODEC_ERROR; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 195 | } |
Thomas Daede | 497d195 | 2017-08-08 17:33:06 -0700 | [diff] [blame] | 196 | if (!equal_dimensions(cfg, sd)) |
| 197 | aom_internal_error(&cm->error, AOM_CODEC_ERROR, |
| 198 | "Incorrect buffer dimensions"); |
| 199 | else |
| 200 | aom_yv12_copy_frame(cfg, sd); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 201 | |
| 202 | return cm->error.error_code; |
| 203 | } |
| 204 | |
Thomas Daede | 497d195 | 2017-08-08 17:33:06 -0700 | [diff] [blame] | 205 | aom_codec_err_t av1_set_reference_dec(AV1_COMMON *cm, int idx, |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 206 | YV12_BUFFER_CONFIG *sd) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 207 | YV12_BUFFER_CONFIG *ref_buf = NULL; |
| 208 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 209 | // Get the destination reference buffer. |
Thomas Daede | 497d195 | 2017-08-08 17:33:06 -0700 | [diff] [blame] | 210 | ref_buf = get_ref_frame(cm, idx); |
| 211 | |
| 212 | if (ref_buf == NULL) { |
| 213 | aom_internal_error(&cm->error, AOM_CODEC_ERROR, "No reference frame"); |
| 214 | return AOM_CODEC_ERROR; |
| 215 | } |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 216 | |
| 217 | if (!equal_dimensions(ref_buf, sd)) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 218 | aom_internal_error(&cm->error, AOM_CODEC_ERROR, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 219 | "Incorrect buffer dimensions"); |
| 220 | } else { |
| 221 | // Overwrite the reference frame buffer. |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 222 | aom_yv12_copy_frame(sd, ref_buf); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 223 | } |
| 224 | |
| 225 | return cm->error.error_code; |
| 226 | } |
| 227 | |
/* If any buffer updating is signaled it should be done here. */
// Commit the just-decoded frame's reference updates: for each slot flagged in
// refresh_frame_flags, release the old reference and install the entry from
// next_ref_frame_map, then pick the frame to show and invalidate the
// per-frame reference slots until the next frame header sets them again.
// All ref-count changes happen under the buffer-pool lock.
static void swap_frame_buffers(AV1Decoder *pbi) {
  int ref_index = 0, mask;
  AV1_COMMON *const cm = &pbi->common;
  BufferPool *const pool = cm->buffer_pool;
  RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;

  lock_buffer_pool(pool);
  // Walk the refresh bitmask; every iteration consumes one slot regardless
  // of whether its bit is set, so ref_index tracks the slot number.
  for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
    const int old_idx = cm->ref_frame_map[ref_index];
    // Current thread releases the holding of reference frame.
    decrease_ref_count(old_idx, frame_bufs, pool);

    // Release the reference frame holding in the reference map for the decoding
    // of the next frame.
    if (mask & 1) decrease_ref_count(old_idx, frame_bufs, pool);
    cm->ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
    ++ref_index;
  }

  // Current thread releases the holding of reference frame.
  // (Skipped entirely for show_existing_frame, which refreshes nothing.)
  for (; ref_index < REF_FRAMES && !cm->show_existing_frame; ++ref_index) {
    const int old_idx = cm->ref_frame_map[ref_index];
    decrease_ref_count(old_idx, frame_bufs, pool);
    cm->ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
  }

  unlock_buffer_pool(pool);
  pbi->hold_ref_buf = 0;
  cm->frame_to_show = get_frame_new_buffer(cm);

  // TODO(zoeliu): To fix the ref frame buffer update for the scenario of
  // cm->frame_parellel_decode == 1
  if (!cm->frame_parallel_decode || !cm->show_frame) {
    lock_buffer_pool(pool);
    // Drop the decode-time hold on the new frame buffer.
    --frame_bufs[cm->new_fb_idx].ref_count;
    unlock_buffer_pool(pool);
  }

  // Invalidate these references until the next frame starts.
  for (ref_index = 0; ref_index < INTER_REFS_PER_FRAME; ref_index++) {
    cm->frame_refs[ref_index].idx = INVALID_IDX;
    cm->frame_refs[ref_index].buf = NULL;
  }
}
| 273 | |
// Decode one compressed chunk. On success *psource is advanced past the
// consumed data and 0 is returned; on decode error the function recovers via
// longjmp, releases all held buffer references, and returns -1 (details in
// cm->error). size == 0 signals missing input frames.
int av1_receive_compressed_data(AV1Decoder *pbi, size_t size,
                                const uint8_t **psource) {
  // volatile: these locals must stay valid across the longjmp back into the
  // setjmp below.
  AV1_COMMON *volatile const cm = &pbi->common;
  BufferPool *volatile const pool = cm->buffer_pool;
  RefCntBuffer *volatile const frame_bufs = cm->buffer_pool->frame_bufs;
  const uint8_t *source = *psource;
  int retcode = 0;
  cm->error.error_code = AOM_CODEC_OK;

  if (size == 0) {
    // This is used to signal that we are missing frames.
    // We do not know if the missing frame(s) was supposed to update
    // any of the reference buffers, but we act conservative and
    // mark only the last buffer as corrupted.
    //
    // TODO(jkoleszar): Error concealment is undefined and non-normative
    // at this point, but if it becomes so, [0] may not always be the correct
    // thing to do here.
    if (cm->frame_refs[0].idx > 0) {
      assert(cm->frame_refs[0].buf != NULL);
      cm->frame_refs[0].buf->corrupted = 1;
    }
  }

  pbi->ready_for_new_data = 0;

  // Find a free buffer for the new frame, releasing the reference previously
  // held.

  // Check if the previous frame was a frame without any references to it.
  // Release frame buffer if not decoding in frame parallel mode.
  if (!cm->frame_parallel_decode && cm->new_fb_idx >= 0 &&
      frame_bufs[cm->new_fb_idx].ref_count == 0)
    pool->release_fb_cb(pool->cb_priv,
                        &frame_bufs[cm->new_fb_idx].raw_frame_buffer);

  // Find a free frame buffer. Return error if can not find any.
  cm->new_fb_idx = get_free_fb(cm);
  if (cm->new_fb_idx == INVALID_IDX) return AOM_CODEC_MEM_ERROR;

  // Assign a MV array to the frame buffer.
  cm->cur_frame = &pool->frame_bufs[cm->new_fb_idx];

  pbi->hold_ref_buf = 0;
  if (cm->frame_parallel_decode) {
    // Claim the buffer for this frame worker under the stats lock, since
    // other frame workers may be inspecting it.
    AVxWorker *const worker = pbi->frame_worker_owner;
    av1_frameworker_lock_stats(worker);
    frame_bufs[cm->new_fb_idx].frame_worker_owner = worker;
    // Reset decoding progress.
    pbi->cur_buf = &frame_bufs[cm->new_fb_idx];
    pbi->cur_buf->row = -1;
    pbi->cur_buf->col = -1;
    av1_frameworker_unlock_stats(worker);
  } else {
    pbi->cur_buf = &frame_bufs[cm->new_fb_idx];
  }

  // Error-recovery path: aom_internal_error() during decode longjmps here.
  if (setjmp(cm->error.jmp)) {
    const AVxWorkerInterface *const winterface = aom_get_worker_interface();
    int i;

    cm->error.setjmp = 0;
    pbi->ready_for_new_data = 1;

    // Synchronize all threads immediately as a subsequent decode call may
    // cause a resize invalidating some allocations.
    winterface->sync(&pbi->lf_worker);
    for (i = 0; i < pbi->num_tile_workers; ++i) {
      winterface->sync(&pbi->tile_workers[i]);
    }

    lock_buffer_pool(pool);
    // Release all the reference buffers if worker thread is holding them.
    if (pbi->hold_ref_buf == 1) {
      int ref_index = 0, mask;
      for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
        const int old_idx = cm->ref_frame_map[ref_index];
        // Current thread releases the holding of reference frame.
        decrease_ref_count(old_idx, frame_bufs, pool);

        // Release the reference frame holding in the reference map for the
        // decoding of the next frame.
        if (mask & 1) decrease_ref_count(old_idx, frame_bufs, pool);
        ++ref_index;
      }

      // Current thread releases the holding of reference frame.
      for (; ref_index < REF_FRAMES && !cm->show_existing_frame; ++ref_index) {
        const int old_idx = cm->ref_frame_map[ref_index];
        decrease_ref_count(old_idx, frame_bufs, pool);
      }
      pbi->hold_ref_buf = 0;
    }
    // Release current frame.
    decrease_ref_count(cm->new_fb_idx, frame_bufs, pool);
    unlock_buffer_pool(pool);

    aom_clear_system_state();
    return -1;
  }

  cm->error.setjmp = 1;
  av1_decode_frame(pbi, source, source + size, psource);

  // Commit reference-map updates and select the frame to show.
  swap_frame_buffers(pbi);

#if CONFIG_EXT_TILE
  // For now, we only extend the frame borders when the whole frame is decoded.
  // Later, if needed, extend the border for the decoded tile on the frame
  // border.
  if (pbi->dec_tile_row == -1 && pbi->dec_tile_col == -1)
#endif  // CONFIG_EXT_TILE
    // TODO(debargha): Fix encoder side mv range, so that we can use the
    // inner border extension. As of now use the larger extension.
    // aom_extend_frame_inner_borders(cm->frame_to_show);
    aom_extend_frame_borders(cm->frame_to_show);

  aom_clear_system_state();

  if (!cm->show_existing_frame) {
    cm->last_show_frame = cm->show_frame;

#if CONFIG_EXT_REFS
    // NOTE: It is not supposed to ref to any frame not used as reference
    if (cm->is_reference_frame)
#endif  // CONFIG_EXT_REFS
      cm->prev_frame = cm->cur_frame;

    if (cm->seg.enabled && !cm->frame_parallel_decode)
      av1_swap_current_and_last_seg_map(cm);
  }

  // Update progress in frame parallel decode.
  if (cm->frame_parallel_decode) {
    // Need to lock the mutex here as another thread may
    // be accessing this buffer.
    AVxWorker *const worker = pbi->frame_worker_owner;
    FrameWorkerData *const frame_worker_data = worker->data1;
    av1_frameworker_lock_stats(worker);

    if (cm->show_frame) {
      cm->current_video_frame++;
    }
    frame_worker_data->frame_decoded = 1;
    frame_worker_data->frame_context_ready = 1;
    // Wake any worker waiting on this frame's completion.
    av1_frameworker_signal_stats(worker);
    av1_frameworker_unlock_stats(worker);
  } else {
    cm->last_width = cm->width;
    cm->last_height = cm->height;
    cm->last_tile_cols = cm->tile_cols;
    cm->last_tile_rows = cm->tile_rows;
    if (cm->show_frame) {
      cm->current_video_frame++;
    }
  }

  cm->error.setjmp = 0;
  return retcode;
}
| 434 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 435 | int av1_get_raw_frame(AV1Decoder *pbi, YV12_BUFFER_CONFIG *sd) { |
| 436 | AV1_COMMON *const cm = &pbi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 437 | int ret = -1; |
| 438 | if (pbi->ready_for_new_data == 1) return ret; |
| 439 | |
| 440 | pbi->ready_for_new_data = 1; |
| 441 | |
| 442 | /* no raw frame to show!!! */ |
| 443 | if (!cm->show_frame) return ret; |
| 444 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 445 | *sd = *cm->frame_to_show; |
| 446 | ret = 0; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 447 | aom_clear_system_state(); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 448 | return ret; |
| 449 | } |
| 450 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 451 | int av1_get_frame_to_show(AV1Decoder *pbi, YV12_BUFFER_CONFIG *frame) { |
| 452 | AV1_COMMON *const cm = &pbi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 453 | |
| 454 | if (!cm->show_frame || !cm->frame_to_show) return -1; |
| 455 | |
| 456 | *frame = *cm->frame_to_show; |
| 457 | return 0; |
| 458 | } |
| 459 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 460 | aom_codec_err_t av1_parse_superframe_index(const uint8_t *data, size_t data_sz, |
| 461 | uint32_t sizes[8], int *count, |
Sebastien Alaiwan | e4c6fc1 | 2017-06-21 16:43:22 +0200 | [diff] [blame] | 462 | int *index_size, |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 463 | aom_decrypt_cb decrypt_cb, |
| 464 | void *decrypt_state) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 465 | // A chunk ending with a byte matching 0xc0 is an invalid chunk unless |
| 466 | // it is a super frame index. If the last byte of real video compression |
| 467 | // data is 0xc0 the encoder must add a 0 byte. If we have the marker but |
| 468 | // not the associated matching marker byte at the front of the index we have |
| 469 | // an invalid bitstream and need to return an error. |
| 470 | |
| 471 | uint8_t marker; |
| 472 | size_t frame_sz_sum = 0; |
| 473 | |
| 474 | assert(data_sz); |
Sebastien Alaiwan | e4c6fc1 | 2017-06-21 16:43:22 +0200 | [diff] [blame] | 475 | marker = read_marker(decrypt_cb, decrypt_state, data); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 476 | *count = 0; |
| 477 | |
| 478 | if ((marker & 0xe0) == 0xc0) { |
| 479 | const uint32_t frames = (marker & 0x7) + 1; |
| 480 | const uint32_t mag = ((marker >> 3) & 0x3) + 1; |
| 481 | const size_t index_sz = 2 + mag * (frames - 1); |
Sebastien Alaiwan | e4c6fc1 | 2017-06-21 16:43:22 +0200 | [diff] [blame] | 482 | *index_size = (int)index_sz; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 483 | |
| 484 | // This chunk is marked as having a superframe index but doesn't have |
| 485 | // enough data for it, thus it's an invalid superframe index. |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 486 | if (data_sz < index_sz) return AOM_CODEC_CORRUPT_FRAME; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 487 | |
| 488 | { |
| 489 | const uint8_t marker2 = |
Sebastien Alaiwan | e4c6fc1 | 2017-06-21 16:43:22 +0200 | [diff] [blame] | 490 | read_marker(decrypt_cb, decrypt_state, data + index_sz - 1); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 491 | |
| 492 | // This chunk is marked as having a superframe index but doesn't have |
| 493 | // the matching marker byte at the front of the index therefore it's an |
| 494 | // invalid chunk. |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 495 | if (marker != marker2) return AOM_CODEC_CORRUPT_FRAME; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 496 | } |
| 497 | |
| 498 | { |
| 499 | // Found a valid superframe index. |
| 500 | uint32_t i, j; |
Sebastien Alaiwan | e4c6fc1 | 2017-06-21 16:43:22 +0200 | [diff] [blame] | 501 | const uint8_t *x = &data[1]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 502 | |
| 503 | // Frames has a maximum of 8 and mag has a maximum of 4. |
Adrian Grange | a414887a | 2016-11-22 15:47:52 -0800 | [diff] [blame] | 504 | uint8_t clear_buffer[28]; |
| 505 | assert(sizeof(clear_buffer) >= (frames - 1) * mag); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 506 | if (decrypt_cb) { |
Adrian Grange | a414887a | 2016-11-22 15:47:52 -0800 | [diff] [blame] | 507 | decrypt_cb(decrypt_state, x, clear_buffer, (frames - 1) * mag); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 508 | x = clear_buffer; |
| 509 | } |
| 510 | |
| 511 | for (i = 0; i < frames - 1; ++i) { |
| 512 | uint32_t this_sz = 0; |
| 513 | |
| 514 | for (j = 0; j < mag; ++j) this_sz |= (*x++) << (j * 8); |
| 515 | this_sz += 1; |
| 516 | sizes[i] = this_sz; |
| 517 | frame_sz_sum += this_sz; |
| 518 | } |
| 519 | sizes[i] = (uint32_t)(data_sz - index_sz - frame_sz_sum); |
| 520 | *count = frames; |
| 521 | } |
| 522 | } |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 523 | return AOM_CODEC_OK; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 524 | } |