/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <stdlib.h>  // qsort()

#include "./aom_config.h"
#include "./aom_dsp_rtcd.h"
#include "./aom_scale_rtcd.h"
#include "./av1_rtcd.h"

#include "aom/aom_codec.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_dsp/bitreader.h"
#include "aom_dsp/bitreader_buffer.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem.h"
#include "aom_ports/mem_ops.h"
#include "aom_scale/aom_scale.h"
#include "aom_util/aom_thread.h"

#include "av1/common/alloccommon.h"
#if CONFIG_CLPF
#include "aom/aom_image.h"
#include "av1/common/clpf.h"
#endif
#include "av1/common/common.h"
#if CONFIG_DERING
#include "av1/common/dering.h"
#endif  // CONFIG_DERING
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/idct.h"
#include "av1/common/pred_common.h"
#include "av1/common/quant_common.h"
#include "av1/common/reconinter.h"
#include "av1/common/reconintra.h"
#include "av1/common/seg_common.h"
#include "av1/common/thread_common.h"
#include "av1/common/tile_common.h"

#include "av1/decoder/decodeframe.h"
#include "av1/decoder/decodemv.h"
#include "av1/decoder/decoder.h"
#include "av1/decoder/detokenize.h"
#include "av1/decoder/dsubexp.h"

#define MAX_AV1_HEADER_SIZE 80
#define ACCT_STR __func__

#if CONFIG_PVQ
#include "av1/decoder/pvq_decoder.h"
#include "av1/encoder/encodemb.h"

#include "aom_dsp/entdec.h"
#include "av1/common/partition.h"
#include "av1/decoder/decint.h"
#include "av1/encoder/hybrid_fwd_txfm.h"
#endif

static struct aom_read_bit_buffer *init_read_bit_buffer(
    AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
    const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]);
static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
                                  size_t partition_size);
static size_t read_uncompressed_header(AV1Decoder *pbi,
                                       struct aom_read_bit_buffer *rb);

static int is_compound_reference_allowed(const AV1_COMMON *cm) {
  int i;
  if (frame_is_intra_only(cm)) return 0;
  for (i = 1; i < INTER_REFS_PER_FRAME; ++i)
    if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1]) return 1;

  return 0;
}

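// Establishes the compound reference set for the frame. With CONFIG_EXT_REFS
// the forward (LAST/LAST2/LAST3/GOLDEN) and backward (BWDREF/ALTREF) groups
// are fixed; otherwise the single fixed reference is the one whose sign bias
// disagrees with the other two, the sign bias indicating on which temporal
// side of the current frame a reference sits.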
static void setup_compound_reference_mode(AV1_COMMON *cm) {
#if CONFIG_EXT_REFS
  cm->comp_fwd_ref[0] = LAST_FRAME;
  cm->comp_fwd_ref[1] = LAST2_FRAME;
  cm->comp_fwd_ref[2] = LAST3_FRAME;
  cm->comp_fwd_ref[3] = GOLDEN_FRAME;

  cm->comp_bwd_ref[0] = BWDREF_FRAME;
  cm->comp_bwd_ref[1] = ALTREF_FRAME;
#else
  if (cm->ref_frame_sign_bias[LAST_FRAME] ==
      cm->ref_frame_sign_bias[GOLDEN_FRAME]) {
    cm->comp_fixed_ref = ALTREF_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = GOLDEN_FRAME;
  } else if (cm->ref_frame_sign_bias[LAST_FRAME] ==
             cm->ref_frame_sign_bias[ALTREF_FRAME]) {
    cm->comp_fixed_ref = GOLDEN_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  } else {
    cm->comp_fixed_ref = LAST_FRAME;
    cm->comp_var_ref[0] = GOLDEN_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  }
#endif  // CONFIG_EXT_REFS
}

static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return len != 0 && len <= (size_t)(end - start);
}

static int decode_unsigned_max(struct aom_read_bit_buffer *rb, int max) {
  const int data = aom_rb_read_literal(rb, get_unsigned_bits(max));
  return data > max ? max : data;
}

static TX_MODE read_tx_mode(struct aom_read_bit_buffer *rb) {
  return aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
}

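// The read_*_probs() helpers below share one pattern: for each probability in
// the frame context, av1_diff_update_prob() reads an update flag and, when it
// is set, a subexponentially coded difference that is applied to the stored
// probability (see av1/decoder/dsubexp.h).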
static void read_tx_size_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i, j, k;
  for (i = 0; i < MAX_TX_DEPTH; ++i)
    for (j = 0; j < TX_SIZE_CONTEXTS; ++j)
      for (k = 0; k < i + 1; ++k)
        av1_diff_update_prob(r, &fc->tx_size_probs[i][j][k], ACCT_STR);
}

#if !CONFIG_EC_ADAPT
static void read_switchable_interp_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i, j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j) {
    for (i = 0; i < SWITCHABLE_FILTERS - 1; ++i)
      av1_diff_update_prob(r, &fc->switchable_interp_prob[j][i], ACCT_STR);
  }
}
#endif

static void read_inter_mode_probs(FRAME_CONTEXT *fc, aom_reader *r) {
#if CONFIG_REF_MV
  int i;
  for (i = 0; i < NEWMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->newmv_prob[i], ACCT_STR);
  for (i = 0; i < ZEROMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->zeromv_prob[i], ACCT_STR);
  for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->refmv_prob[i], ACCT_STR);
  for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->drl_prob[i], ACCT_STR);
#if CONFIG_EXT_INTER
  av1_diff_update_prob(r, &fc->new2mv_prob, ACCT_STR);
#endif  // CONFIG_EXT_INTER
#else
#if !CONFIG_EC_ADAPT
  int i, j;
  for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
    for (j = 0; j < INTER_MODES - 1; ++j)
      av1_diff_update_prob(r, &fc->inter_mode_probs[i][j], ACCT_STR);
  }
#else
  (void)fc;
  (void)r;
#endif
#endif
}

#if !CONFIG_EC_ADAPT
#if CONFIG_EXT_INTER
static void read_inter_compound_mode_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i, j;
  if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
    for (j = 0; j < INTER_MODE_CONTEXTS; ++j) {
      for (i = 0; i < INTER_COMPOUND_MODES - 1; ++i) {
        av1_diff_update_prob(r, &fc->inter_compound_mode_probs[j][i], ACCT_STR);
      }
    }
  }
}
#endif  // CONFIG_EXT_INTER
#if !CONFIG_EXT_TX
static void read_ext_tx_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i, j, k;
  if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      for (j = 0; j < TX_TYPES; ++j) {
        for (k = 0; k < TX_TYPES - 1; ++k)
          av1_diff_update_prob(r, &fc->intra_ext_tx_prob[i][j][k], ACCT_STR);
      }
    }
  }
  if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      for (k = 0; k < TX_TYPES - 1; ++k)
        av1_diff_update_prob(r, &fc->inter_ext_tx_prob[i][k], ACCT_STR);
    }
  }
}
#endif
#endif

static REFERENCE_MODE read_frame_reference_mode(
    const AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  if (is_compound_reference_allowed(cm)) {
    return aom_rb_read_bit(rb)
               ? REFERENCE_MODE_SELECT
               : (aom_rb_read_bit(rb) ? COMPOUND_REFERENCE : SINGLE_REFERENCE);
  } else {
    return SINGLE_REFERENCE;
  }
}

static void read_frame_reference_mode_probs(AV1_COMMON *cm, aom_reader *r) {
  FRAME_CONTEXT *const fc = cm->fc;
  int i, j;

  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    for (i = 0; i < COMP_INTER_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_inter_prob[i], ACCT_STR);

  if (cm->reference_mode != COMPOUND_REFERENCE) {
    for (i = 0; i < REF_CONTEXTS; ++i) {
      for (j = 0; j < (SINGLE_REFS - 1); ++j) {
        av1_diff_update_prob(r, &fc->single_ref_prob[i][j], ACCT_STR);
      }
    }
  }

  if (cm->reference_mode != SINGLE_REFERENCE) {
    for (i = 0; i < REF_CONTEXTS; ++i) {
#if CONFIG_EXT_REFS
      for (j = 0; j < (FWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_ref_prob[i][j], ACCT_STR);
      for (j = 0; j < (BWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_bwdref_prob[i][j], ACCT_STR);
#else
      for (j = 0; j < (COMP_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_ref_prob[i][j], ACCT_STR);
#endif  // CONFIG_EXT_REFS
    }
  }
}

static void update_mv_probs(aom_prob *p, int n, aom_reader *r) {
  int i;
  for (i = 0; i < n; ++i) av1_diff_update_prob(r, &p[i], ACCT_STR);
}

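// Motion-vector probability updates. Each MV component carries sign, class,
// class0, offset-bit and fractional-pel probabilities; these are only read
// when !CONFIG_EC_ADAPT (with EC_ADAPT the corresponding symbols presumably
// adapt as CDFs instead), while the high-precision (hp) probabilities are
// read whenever allow_hp is set.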
static void read_mv_probs(nmv_context *ctx, int allow_hp, aom_reader *r) {
  int i;

#if !CONFIG_EC_ADAPT
  int j;
  update_mv_probs(ctx->joints, MV_JOINTS - 1, r);

  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    update_mv_probs(&comp_ctx->sign, 1, r);
    update_mv_probs(comp_ctx->classes, MV_CLASSES - 1, r);
    update_mv_probs(comp_ctx->class0, CLASS0_SIZE - 1, r);
    update_mv_probs(comp_ctx->bits, MV_OFFSET_BITS, r);
  }
  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    for (j = 0; j < CLASS0_SIZE; ++j) {
      update_mv_probs(comp_ctx->class0_fp[j], MV_FP_SIZE - 1, r);
    }
    update_mv_probs(comp_ctx->fp, MV_FP_SIZE - 1, r);
  }
#endif  // !CONFIG_EC_ADAPT

  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      nmv_component *const comp_ctx = &ctx->comps[i];
      update_mv_probs(&comp_ctx->class0_hp, 1, r);
      update_mv_probs(&comp_ctx->hp, 1, r);
    }
  }
}

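// Runs the inverse transform for one block and adds the result into dst.
// Afterwards only the first (scan_line + 1) dequantized coefficients need to
// be cleared, since no coefficient beyond the last decoded scan position was
// written.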
static void inverse_transform_block(MACROBLOCKD *xd, int plane,
                                    const TX_TYPE tx_type,
                                    const TX_SIZE tx_size, uint8_t *dst,
                                    int stride, int16_t scan_line, int eob) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  tran_low_t *const dqcoeff = pd->dqcoeff;
  INV_TXFM_PARAM inv_txfm_param;
  inv_txfm_param.tx_type = tx_type;
  inv_txfm_param.tx_size = tx_size;
  inv_txfm_param.eob = eob;
  inv_txfm_param.lossless = xd->lossless[xd->mi[0]->mbmi.segment_id];

#if CONFIG_AOM_HIGHBITDEPTH
  if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
    inv_txfm_param.bd = xd->bd;
    highbd_inv_txfm_add(dqcoeff, dst, stride, &inv_txfm_param);
  } else {
#endif  // CONFIG_AOM_HIGHBITDEPTH
    inv_txfm_add(dqcoeff, dst, stride, &inv_txfm_param);
#if CONFIG_AOM_HIGHBITDEPTH
  }
#endif  // CONFIG_AOM_HIGHBITDEPTH
  memset(dqcoeff, 0, (scan_line + 1) * sizeof(dqcoeff[0]));
}

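// PVQ decode path: rather than reading quantized coefficients directly, the
// decoder works relative to the prediction. av1_pvq_decode_helper() receives
// the transform of the predicted block (ref_coeff), decodes the PVQ symbols
// against it (DC is coded separately via the generic coder), and writes the
// dequantized coefficients back in raster order.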
#if CONFIG_PVQ
static int av1_pvq_decode_helper(od_dec_ctx *dec, int16_t *ref_coeff,
                                 int16_t *dqcoeff, int16_t *quant, int pli,
                                 int bs, TX_TYPE tx_type, int xdec,
                                 int ac_dc_coded) {
  unsigned int flags;  // used for daala's stream analyzer.
  int off;
  const int is_keyframe = 0;
  const int has_dc_skip = 1;
  int quant_shift = bs == TX_32X32 ? 1 : 0;
  // DC quantizer for PVQ
  int pvq_dc_quant;
  int lossless = (quant[0] == 0);
  const int blk_size = tx_size_wide[bs];
  int eob = 0;
  int i;
  // TODO(yushin): To enable activity masking,
  // int use_activity_masking = dec->use_activity_masking;
  int use_activity_masking = 0;

  DECLARE_ALIGNED(16, int16_t, dqcoeff_pvq[OD_TXSIZE_MAX * OD_TXSIZE_MAX]);
  DECLARE_ALIGNED(16, int16_t, ref_coeff_pvq[OD_TXSIZE_MAX * OD_TXSIZE_MAX]);

  od_coeff ref_int32[OD_TXSIZE_MAX * OD_TXSIZE_MAX];
  od_coeff out_int32[OD_TXSIZE_MAX * OD_TXSIZE_MAX];

  od_raster_to_coding_order(ref_coeff_pvq, blk_size, tx_type, ref_coeff,
                            blk_size);

  if (lossless)
    pvq_dc_quant = 1;
  else {
    // TODO(yushin): Enable this for activity masking,
    // when pvq_qm_q4 is available in AOM.
    // pvq_dc_quant = OD_MAXI(1, quant*
    //  dec->state.pvq_qm_q4[pli][od_qm_get_index(bs, 0)] >> 4);
    pvq_dc_quant = OD_MAXI(1, quant[0] >> quant_shift);
  }

  off = od_qm_offset(bs, xdec);

  // copy int16 inputs to int32
  for (i = 0; i < blk_size * blk_size; i++) ref_int32[i] = ref_coeff_pvq[i];

  od_pvq_decode(dec, ref_int32, out_int32, (int)quant[1] >> quant_shift, pli,
                bs, OD_PVQ_BETA[use_activity_masking][pli][bs],
                OD_ROBUST_STREAM, is_keyframe, &flags, ac_dc_coded,
                dec->state.qm + off, dec->state.qm_inv + off);

  // copy int32 result back to int16
  for (i = 0; i < blk_size * blk_size; i++) dqcoeff_pvq[i] = out_int32[i];

  if (!has_dc_skip || dqcoeff_pvq[0]) {
    dqcoeff_pvq[0] =
        has_dc_skip + generic_decode(dec->ec, &dec->state.adapt.model_dc[pli],
                                     -1, &dec->state.adapt.ex_dc[pli][bs][0], 2,
                                     "dc:mag");
    if (dqcoeff_pvq[0])
      dqcoeff_pvq[0] *= od_ec_dec_bits(dec->ec, 1, "dc:sign") ? -1 : 1;
  }
  dqcoeff_pvq[0] = dqcoeff_pvq[0] * pvq_dc_quant + ref_coeff_pvq[0];

  od_coding_order_to_raster(dqcoeff, blk_size, tx_type, dqcoeff_pvq, blk_size);

  eob = blk_size * blk_size;

  return eob;
}

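// Decodes one PVQ-coded block: read the AC/DC coded flags, forward-transform
// the already-predicted pixels to obtain the PVQ reference, decode the
// coefficients, zero the prediction in dst, and reuse
// inverse_transform_block() so its add-to-dst step writes the final
// reconstruction.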
static int av1_pvq_decode_helper2(MACROBLOCKD *const xd,
                                  MB_MODE_INFO *const mbmi, int plane, int row,
                                  int col, TX_SIZE tx_size, TX_TYPE tx_type) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  // transform block size in pixels
  int tx_blk_size = tx_size_wide[tx_size];
  int i, j;
  tran_low_t *pvq_ref_coeff = pd->pvq_ref_coeff;
  const int diff_stride = tx_blk_size;
  int16_t *pred = pd->pred;
  tran_low_t *const dqcoeff = pd->dqcoeff;
  int ac_dc_coded;  // bit0: DC coded, bit1: AC coded
  uint8_t *dst;
  int eob;

  eob = 0;
  dst = &pd->dst.buf[4 * row * pd->dst.stride + 4 * col];

  // Decode the AC/DC coded flag (bit0: DC coded, bit1: AC coded).
  // NOTE: we don't use 5 symbols for luma here in the aom codebase, since
  // block partitioning is taken care of by aom, so only the AC/DC skip info
  // is coded.
  ac_dc_coded = od_decode_cdf_adapt(
      xd->daala_dec.ec,
      xd->daala_dec.state.adapt.skip_cdf[2 * tx_size + (plane != 0)], 4,
      xd->daala_dec.state.adapt.skip_increment, "skip");

  if (ac_dc_coded) {
    int xdec = pd->subsampling_x;
    int seg_id = mbmi->segment_id;
    int16_t *quant;
    FWD_TXFM_PARAM fwd_txfm_param;
    // TODO(yaowu): correct this with optimal number from decoding process.
    const int max_scan_line = tx_size_2d[tx_size];

    for (j = 0; j < tx_blk_size; j++)
      for (i = 0; i < tx_blk_size; i++) {
        pred[diff_stride * j + i] = dst[pd->dst.stride * j + i];
      }

    fwd_txfm_param.tx_type = tx_type;
    fwd_txfm_param.tx_size = tx_size;
    fwd_txfm_param.fwd_txfm_opt = FWD_TXFM_OPT_NORMAL;
    fwd_txfm_param.rd_transform = 0;
    fwd_txfm_param.lossless = xd->lossless[seg_id];

    fwd_txfm(pred, pvq_ref_coeff, diff_stride, &fwd_txfm_param);

    quant = &pd->seg_dequant[seg_id][0];  // aom's quantizer

    eob = av1_pvq_decode_helper(&xd->daala_dec, pvq_ref_coeff, dqcoeff, quant,
                                plane, tx_size, tx_type, xdec, ac_dc_coded);

    // av1's inverse transform has no standalone form; it always adds to the
    // predicted image, so pass a blank image to av1_inv_txfm_add_*x*(), i.e.
    // zero out dst first.
    for (j = 0; j < tx_blk_size; j++)
      for (i = 0; i < tx_blk_size; i++) dst[j * pd->dst.stride + i] = 0;

    inverse_transform_block(xd, plane, tx_type, tx_size, dst, pd->dst.stride,
                            max_scan_line, eob);
  }

  return eob;
}
#endif

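// Intra path: build the spatial prediction first, then, unless the block is
// coded as skip, decode the residual tokens and apply the inverse transform
// in place on top of the prediction.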
static void predict_and_reconstruct_intra_block(AV1_COMMON *cm,
                                                MACROBLOCKD *const xd,
#if CONFIG_ANS
                                                struct AnsDecoder *const r,
#else
                                                aom_reader *r,
#endif  // CONFIG_ANS
                                                MB_MODE_INFO *const mbmi,
                                                int plane, int row, int col,
                                                TX_SIZE tx_size) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  PREDICTION_MODE mode = (plane == 0) ? mbmi->mode : mbmi->uv_mode;
  PLANE_TYPE plane_type = (plane == 0) ? PLANE_TYPE_Y : PLANE_TYPE_UV;
  uint8_t *dst;
  int block_idx = (row << 1) + col;
#if CONFIG_PVQ
  (void)cm;
  (void)r;
#endif
  dst = &pd->dst.buf[4 * row * pd->dst.stride + 4 * col];

  if (mbmi->sb_type < BLOCK_8X8)
    if (plane == 0) mode = xd->mi[0]->bmi[(row << 1) + col].as_mode;

  av1_predict_intra_block(xd, pd->width, pd->height, tx_size, mode, dst,
                          pd->dst.stride, dst, pd->dst.stride, col, row, plane);

  if (!mbmi->skip) {
    TX_TYPE tx_type = get_tx_type(plane_type, xd, block_idx, tx_size);
#if !CONFIG_PVQ
    const SCAN_ORDER *scan_order = get_scan(cm, tx_size, tx_type, 0);
    int16_t max_scan_line = 0;
    const int eob =
        av1_decode_block_tokens(xd, plane, scan_order, col, row, tx_size,
                                tx_type, &max_scan_line, r, mbmi->segment_id);
#if CONFIG_ADAPT_SCAN
    av1_update_scan_count_facade(cm, tx_size, tx_type, pd->dqcoeff, eob);
#endif
    if (eob)
      inverse_transform_block(xd, plane, tx_type, tx_size, dst, pd->dst.stride,
                              max_scan_line, eob);
#else
    av1_pvq_decode_helper2(xd, mbmi, plane, row, col, tx_size, tx_type);
#endif
  }
}

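// With CONFIG_VAR_TX the transform size may vary inside a block: walk the
// transform tree recursively, decoding and reconstructing at each leaf whose
// size matches the signaled inter_tx_size, and otherwise splitting into four
// quadrants of the next smaller transform size.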
#if CONFIG_VAR_TX
static void decode_reconstruct_tx(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                  aom_reader *r, MB_MODE_INFO *const mbmi,
                                  int plane, BLOCK_SIZE plane_bsize,
                                  int blk_row, int blk_col, TX_SIZE tx_size,
                                  int *eob_total) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
  const int tx_row = blk_row >> (1 - pd->subsampling_y);
  const int tx_col = blk_col >> (1 - pd->subsampling_x);
  const TX_SIZE plane_tx_size =
      plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
            : mbmi->inter_tx_size[tx_row][tx_col];
  // Scale to match transform block unit.
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (tx_size == plane_tx_size) {
    PLANE_TYPE plane_type = (plane == 0) ? PLANE_TYPE_Y : PLANE_TYPE_UV;
    int block_idx = (blk_row << 1) + blk_col;
    TX_TYPE tx_type = get_tx_type(plane_type, xd, block_idx, plane_tx_size);
    const SCAN_ORDER *sc = get_scan(cm, plane_tx_size, tx_type, 1);
    int16_t max_scan_line = 0;
    const int eob =
        av1_decode_block_tokens(xd, plane, sc, blk_col, blk_row, plane_tx_size,
                                tx_type, &max_scan_line, r, mbmi->segment_id);
    inverse_transform_block(
        xd, plane, tx_type, plane_tx_size,
        &pd->dst.buf[4 * blk_row * pd->dst.stride + 4 * blk_col],
        pd->dst.stride, max_scan_line, eob);
    *eob_total += eob;
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    const int bsl = tx_size_wide_unit[sub_txs];
    int i;

    assert(bsl > 0);

    for (i = 0; i < 4; ++i) {
      const int offsetr = blk_row + (i >> 1) * bsl;
      const int offsetc = blk_col + (i & 0x01) * bsl;

      if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

      decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize, offsetr,
                            offsetc, sub_txs, eob_total);
    }
  }
}
#endif  // CONFIG_VAR_TX

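// Inter residual for a single transform block: decode the tokens and, when
// any coefficients are present, inverse-transform them onto the existing
// inter prediction (or take the PVQ path when CONFIG_PVQ is enabled).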
#if !CONFIG_VAR_TX || CONFIG_SUPERTX || (CONFIG_EXT_TX && CONFIG_RECT_TX)
static int reconstruct_inter_block(AV1_COMMON *cm, MACROBLOCKD *const xd,
#if CONFIG_ANS
                                   struct AnsDecoder *const r,
#else
                                   aom_reader *r,
#endif
                                   int segment_id, int plane, int row, int col,
                                   TX_SIZE tx_size) {
  PLANE_TYPE plane_type = (plane == 0) ? PLANE_TYPE_Y : PLANE_TYPE_UV;
  int block_idx = (row << 1) + col;
  TX_TYPE tx_type = get_tx_type(plane_type, xd, block_idx, tx_size);
#if CONFIG_PVQ
  int eob;
  (void)cm;
  (void)r;
  (void)segment_id;
#else
  struct macroblockd_plane *const pd = &xd->plane[plane];
#endif

#if !CONFIG_PVQ
  const SCAN_ORDER *scan_order = get_scan(cm, tx_size, tx_type, 1);
  int16_t max_scan_line = 0;
  const int eob =
      av1_decode_block_tokens(xd, plane, scan_order, col, row, tx_size, tx_type,
                              &max_scan_line, r, segment_id);
#if CONFIG_ADAPT_SCAN
  av1_update_scan_count_facade(cm, tx_size, tx_type, pd->dqcoeff, eob);
#endif
  if (eob)
    inverse_transform_block(xd, plane, tx_type, tx_size,
                            &pd->dst.buf[4 * row * pd->dst.stride + 4 * col],
                            pd->dst.stride, max_scan_line, eob);
#else
  eob = av1_pvq_decode_helper2(xd, &xd->mi[0]->mbmi, plane, row, col, tx_size,
                               tx_type);
#endif
  return eob;
}
#endif  // !CONFIG_VAR_TX || CONFIG_SUPERTX || (CONFIG_EXT_TX && CONFIG_RECT_TX)

static INLINE void dec_reset_skip_context(MACROBLOCKD *xd) {
  int i;
  for (i = 0; i < MAX_MB_PLANE; i++) {
    struct macroblockd_plane *const pd = &xd->plane[i];
    memset(pd->above_context, 0, sizeof(ENTROPY_CONTEXT) * pd->n4_w);
    memset(pd->left_context, 0, sizeof(ENTROPY_CONTEXT) * pd->n4_h);
  }
}

static MB_MODE_INFO *set_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                 BLOCK_SIZE bsize, int mi_row, int mi_col,
                                 int bw, int bh, int x_mis, int y_mis, int bwl,
                                 int bhl) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;
  const TileInfo *const tile = &xd->tile;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
  // TODO(slavarnway): Generate sb_type based on bwl and bhl, instead of
  // passing bsize from decode_partition().
  xd->mi[0]->mbmi.sb_type = bsize;
#if CONFIG_RD_DEBUG
  xd->mi[0]->mbmi.mi_row = mi_row;
  xd->mi[0]->mbmi.mi_col = mi_col;
#endif
  for (y = 0; y < y_mis; ++y)
    for (x = !y; x < x_mis; ++x) xd->mi[y * cm->mi_stride + x] = xd->mi[0];

  set_plane_n4(xd, bw, bh, bwl, bhl);
  set_skip_context(xd, mi_row, mi_col);

#if CONFIG_VAR_TX
  xd->max_tx_size = max_txsize_lookup[bsize];
#endif

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);

  av1_setup_dst_planes(xd->plane, get_frame_new_buffer(cm), mi_row, mi_col);
  return &xd->mi[0]->mbmi;
}

#if CONFIG_SUPERTX
static MB_MODE_INFO *set_offsets_extend(AV1_COMMON *const cm,
                                        MACROBLOCKD *const xd,
                                        const TileInfo *const tile,
                                        BLOCK_SIZE bsize_pred, int mi_row_pred,
                                        int mi_col_pred, int mi_row_ori,
                                        int mi_col_ori) {
  // Used in supertx
  // (mi_row_ori, mi_col_ori): location for mv
  // (mi_row_pred, mi_col_pred, bsize_pred): region to predict
  const int bw = num_8x8_blocks_wide_lookup[bsize_pred];
  const int bh = num_8x8_blocks_high_lookup[bsize_pred];
  const int offset = mi_row_ori * cm->mi_stride + mi_col_ori;
  const int bwl = b_width_log2_lookup[bsize_pred];
  const int bhl = b_height_log2_lookup[bsize_pred];
  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;
  set_mi_row_col(xd, tile, mi_row_pred, bh, mi_col_pred, bw, cm->mi_rows,
                 cm->mi_cols);

  xd->up_available = (mi_row_ori > tile->mi_row_start);
  xd->left_available = (mi_col_ori > tile->mi_col_start);

  set_plane_n4(xd, bw, bh, bwl, bhl);

  return &xd->mi[0]->mbmi;
}

static MB_MODE_INFO *set_mb_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                    BLOCK_SIZE bsize, int mi_row, int mi_col,
                                    int bw, int bh, int x_mis, int y_mis) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  const TileInfo *const tile = &xd->tile;
  int x, y;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;
  xd->mi[0]->mbmi.sb_type = bsize;
  for (y = 0; y < y_mis; ++y)
    for (x = !y; x < x_mis; ++x) xd->mi[y * cm->mi_stride + x] = xd->mi[0];

  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);
  return &xd->mi[0]->mbmi;
}

static void set_offsets_topblock(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                 const TileInfo *const tile, BLOCK_SIZE bsize,
                                 int mi_row, int mi_col) {
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  const int offset = mi_row * cm->mi_stride + mi_col;
  const int bwl = b_width_log2_lookup[bsize];
  const int bhl = b_height_log2_lookup[bsize];

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;

  set_plane_n4(xd, bw, bh, bwl, bhl);

  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);

  av1_setup_dst_planes(xd->plane, get_frame_new_buffer(cm), mi_row, mi_col);
}

static void set_param_topblock(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                               BLOCK_SIZE bsize, int mi_row, int mi_col,
                               int txfm, int skip) {
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;

  for (y = 0; y < y_mis; ++y)
    for (x = 0; x < x_mis; ++x) {
      xd->mi[y * cm->mi_stride + x]->mbmi.skip = skip;
      xd->mi[y * cm->mi_stride + x]->mbmi.tx_type = txfm;
    }
#if CONFIG_VAR_TX
  xd->above_txfm_context = cm->above_txfm_context + mi_col;
  xd->left_txfm_context =
      xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
  set_txfm_ctxs(xd->mi[0]->mbmi.tx_size, bw, bh, xd);
#endif
}

static void set_ref(AV1_COMMON *const cm, MACROBLOCKD *const xd, int idx,
                    int mi_row, int mi_col) {
  MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  RefBuffer *ref_buffer = &cm->frame_refs[mbmi->ref_frame[idx] - LAST_FRAME];
  xd->block_refs[idx] = ref_buffer;
  if (!av1_is_valid_scale(&ref_buffer->sf))
    aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                       "Invalid scale factors");
  av1_setup_pre_planes(xd, idx, ref_buffer->buf, mi_row, mi_col,
                       &ref_buffer->sf);
  xd->corrupted |= ref_buffer->buf->corrupted;
}

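// Supertx prediction helpers. A predicted region may extend beyond the block
// that owns the motion vector so that one transform can span several
// prediction blocks; dec_predict_b_extend() bails out when the target region
// falls outside the top partition or the frame, then builds the inter
// predictors into the caller-provided buffers.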
static void dec_predict_b_extend(
    AV1Decoder *const pbi, MACROBLOCKD *const xd, const TileInfo *const tile,
    int block, int mi_row_ori, int mi_col_ori, int mi_row_pred, int mi_col_pred,
    int mi_row_top, int mi_col_top, uint8_t *dst_buf[3], int dst_stride[3],
    BLOCK_SIZE bsize_top, BLOCK_SIZE bsize_pred, int b_sub8x8, int bextend) {
  // Used in supertx
  // (mi_row_ori, mi_col_ori): location for mv
  // (mi_row_pred, mi_col_pred, bsize_pred): region to predict
  // (mi_row_top, mi_col_top, bsize_top): region of the top partition size
  // block: sub location of sub8x8 blocks
  // b_sub8x8: 1: ori is sub8x8; 0: ori is not sub8x8
  // bextend: 1: region to predict is an extension of ori; 0: not
  int r = (mi_row_pred - mi_row_top) * MI_SIZE;
  int c = (mi_col_pred - mi_col_top) * MI_SIZE;
  const int mi_width_top = num_8x8_blocks_wide_lookup[bsize_top];
  const int mi_height_top = num_8x8_blocks_high_lookup[bsize_top];
  MB_MODE_INFO *mbmi;
  AV1_COMMON *const cm = &pbi->common;

  if (mi_row_pred < mi_row_top || mi_col_pred < mi_col_top ||
      mi_row_pred >= mi_row_top + mi_height_top ||
      mi_col_pred >= mi_col_top + mi_width_top || mi_row_pred >= cm->mi_rows ||
      mi_col_pred >= cm->mi_cols)
    return;

  mbmi = set_offsets_extend(cm, xd, tile, bsize_pred, mi_row_pred, mi_col_pred,
                            mi_row_ori, mi_col_ori);
  set_ref(cm, xd, 0, mi_row_pred, mi_col_pred);
  if (has_second_ref(&xd->mi[0]->mbmi))
    set_ref(cm, xd, 1, mi_row_pred, mi_col_pred);

  if (!bextend) {
    mbmi->tx_size = b_width_log2_lookup[bsize_top];
  }

  xd->plane[0].dst.stride = dst_stride[0];
  xd->plane[1].dst.stride = dst_stride[1];
  xd->plane[2].dst.stride = dst_stride[2];
  xd->plane[0].dst.buf = dst_buf[0] +
                         (r >> xd->plane[0].subsampling_y) * dst_stride[0] +
                         (c >> xd->plane[0].subsampling_x);
  xd->plane[1].dst.buf = dst_buf[1] +
                         (r >> xd->plane[1].subsampling_y) * dst_stride[1] +
                         (c >> xd->plane[1].subsampling_x);
  xd->plane[2].dst.buf = dst_buf[2] +
                         (r >> xd->plane[2].subsampling_y) * dst_stride[2] +
                         (c >> xd->plane[2].subsampling_x);

  if (!b_sub8x8)
    av1_build_inter_predictors_sb_extend(xd,
#if CONFIG_EXT_INTER
                                         mi_row_ori, mi_col_ori,
#endif  // CONFIG_EXT_INTER
                                         mi_row_pred, mi_col_pred, bsize_pred);
  else
    av1_build_inter_predictors_sb_sub8x8_extend(xd,
#if CONFIG_EXT_INTER
                                                mi_row_ori, mi_col_ori,
#endif  // CONFIG_EXT_INTER
                                                mi_row_pred, mi_col_pred,
                                                bsize_pred, block);
}

static void dec_extend_dir(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                           const TileInfo *const tile, int block,
                           BLOCK_SIZE bsize, BLOCK_SIZE top_bsize, int mi_row,
                           int mi_col, int mi_row_top, int mi_col_top,
                           uint8_t *dst_buf[3], int dst_stride[3], int dir) {
  // dir: 0-lower, 1-upper, 2-left, 3-right
  //      4-lowerleft, 5-upperleft, 6-lowerright, 7-upperright
  const int mi_width = num_8x8_blocks_wide_lookup[bsize];
  const int mi_height = num_8x8_blocks_high_lookup[bsize];
  int xss = xd->plane[1].subsampling_x;
  int yss = xd->plane[1].subsampling_y;
  int b_sub8x8 = (bsize < BLOCK_8X8) ? 1 : 0;
  BLOCK_SIZE extend_bsize;
  int unit, mi_row_pred, mi_col_pred;

  if (dir == 0 || dir == 1) {
    extend_bsize = (mi_width == 1 || bsize < BLOCK_8X8 || xss < yss)
                       ? BLOCK_8X8
                       : BLOCK_16X8;
    unit = num_8x8_blocks_wide_lookup[extend_bsize];
    mi_row_pred = mi_row + ((dir == 0) ? mi_height : -1);
    mi_col_pred = mi_col;

    dec_predict_b_extend(pbi, xd, tile, block, mi_row, mi_col, mi_row_pred,
                         mi_col_pred, mi_row_top, mi_col_top, dst_buf,
                         dst_stride, top_bsize, extend_bsize, b_sub8x8, 1);

    if (mi_width > unit) {
      int i;
      assert(!b_sub8x8);
      for (i = 0; i < mi_width / unit - 1; i++) {
        mi_col_pred += unit;
        dec_predict_b_extend(pbi, xd, tile, block, mi_row, mi_col, mi_row_pred,
                             mi_col_pred, mi_row_top, mi_col_top, dst_buf,
                             dst_stride, top_bsize, extend_bsize, b_sub8x8, 1);
      }
    }
  } else if (dir == 2 || dir == 3) {
    extend_bsize = (mi_height == 1 || bsize < BLOCK_8X8 || yss < xss)
                       ? BLOCK_8X8
                       : BLOCK_8X16;
    unit = num_8x8_blocks_high_lookup[extend_bsize];
    mi_row_pred = mi_row;
    mi_col_pred = mi_col + ((dir == 3) ? mi_width : -1);

    dec_predict_b_extend(pbi, xd, tile, block, mi_row, mi_col, mi_row_pred,
                         mi_col_pred, mi_row_top, mi_col_top, dst_buf,
                         dst_stride, top_bsize, extend_bsize, b_sub8x8, 1);

    if (mi_height > unit) {
      int i;
      for (i = 0; i < mi_height / unit - 1; i++) {
        mi_row_pred += unit;
        dec_predict_b_extend(pbi, xd, tile, block, mi_row, mi_col, mi_row_pred,
                             mi_col_pred, mi_row_top, mi_col_top, dst_buf,
                             dst_stride, top_bsize, extend_bsize, b_sub8x8, 1);
      }
    }
  } else {
    extend_bsize = BLOCK_8X8;
    mi_row_pred = mi_row + ((dir == 4 || dir == 6) ? mi_height : -1);
    mi_col_pred = mi_col + ((dir == 6 || dir == 7) ? mi_width : -1);
    dec_predict_b_extend(pbi, xd, tile, block, mi_row, mi_col, mi_row_pred,
                         mi_col_pred, mi_row_top, mi_col_top, dst_buf,
                         dst_stride, top_bsize, extend_bsize, b_sub8x8, 1);
  }
}

static void dec_extend_all(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                           const TileInfo *const tile, int block,
                           BLOCK_SIZE bsize, BLOCK_SIZE top_bsize, int mi_row,
                           int mi_col, int mi_row_top, int mi_col_top,
                           uint8_t *dst_buf[3], int dst_stride[3]) {
  dec_extend_dir(pbi, xd, tile, block, bsize, top_bsize, mi_row, mi_col,
                 mi_row_top, mi_col_top, dst_buf, dst_stride, 0);
  dec_extend_dir(pbi, xd, tile, block, bsize, top_bsize, mi_row, mi_col,
                 mi_row_top, mi_col_top, dst_buf, dst_stride, 1);
  dec_extend_dir(pbi, xd, tile, block, bsize, top_bsize, mi_row, mi_col,
                 mi_row_top, mi_col_top, dst_buf, dst_stride, 2);
  dec_extend_dir(pbi, xd, tile, block, bsize, top_bsize, mi_row, mi_col,
                 mi_row_top, mi_col_top, dst_buf, dst_stride, 3);
  dec_extend_dir(pbi, xd, tile, block, bsize, top_bsize, mi_row, mi_col,
                 mi_row_top, mi_col_top, dst_buf, dst_stride, 4);
  dec_extend_dir(pbi, xd, tile, block, bsize, top_bsize, mi_row, mi_col,
                 mi_row_top, mi_col_top, dst_buf, dst_stride, 5);
  dec_extend_dir(pbi, xd, tile, block, bsize, top_bsize, mi_row, mi_col,
                 mi_row_top, mi_col_top, dst_buf, dst_stride, 6);
  dec_extend_dir(pbi, xd, tile, block, bsize, top_bsize, mi_row, mi_col,
                 mi_row_top, mi_col_top, dst_buf, dst_stride, 7);
}

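// Recursively builds the supertx prediction for a superblock: each partition
// arm is predicted into its own temporary buffer (extended past its nominal
// edges by the dec_extend_* helpers above), and neighboring arms are then
// blended with av1_build_masked_inter_predictor_complex() to smooth the
// internal partition boundaries.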
static void dec_predict_sb_complex(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                                   const TileInfo *const tile, int mi_row,
                                   int mi_col, int mi_row_top, int mi_col_top,
                                   BLOCK_SIZE bsize, BLOCK_SIZE top_bsize,
                                   uint8_t *dst_buf[3], int dst_stride[3]) {
  const AV1_COMMON *const cm = &pbi->common;
  const int hbs = num_8x8_blocks_wide_lookup[bsize] / 2;
  const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
  const BLOCK_SIZE subsize = get_subsize(bsize, partition);
#if CONFIG_EXT_PARTITION_TYPES
  const BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
#endif
  int i;
  const int mi_offset = mi_row * cm->mi_stride + mi_col;
  uint8_t *dst_buf1[3], *dst_buf2[3], *dst_buf3[3];

  DECLARE_ALIGNED(16, uint8_t, tmp_buf1[MAX_MB_PLANE * MAX_TX_SQUARE * 2]);
  DECLARE_ALIGNED(16, uint8_t, tmp_buf2[MAX_MB_PLANE * MAX_TX_SQUARE * 2]);
  DECLARE_ALIGNED(16, uint8_t, tmp_buf3[MAX_MB_PLANE * MAX_TX_SQUARE * 2]);
  int dst_stride1[3] = { MAX_TX_SIZE, MAX_TX_SIZE, MAX_TX_SIZE };
  int dst_stride2[3] = { MAX_TX_SIZE, MAX_TX_SIZE, MAX_TX_SIZE };
  int dst_stride3[3] = { MAX_TX_SIZE, MAX_TX_SIZE, MAX_TX_SIZE };

#if CONFIG_AOM_HIGHBITDEPTH
  if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
    int len = sizeof(uint16_t);
    dst_buf1[0] = CONVERT_TO_BYTEPTR(tmp_buf1);
    dst_buf1[1] = CONVERT_TO_BYTEPTR(tmp_buf1 + MAX_TX_SQUARE * len);
    dst_buf1[2] = CONVERT_TO_BYTEPTR(tmp_buf1 + 2 * MAX_TX_SQUARE * len);
    dst_buf2[0] = CONVERT_TO_BYTEPTR(tmp_buf2);
    dst_buf2[1] = CONVERT_TO_BYTEPTR(tmp_buf2 + MAX_TX_SQUARE * len);
    dst_buf2[2] = CONVERT_TO_BYTEPTR(tmp_buf2 + 2 * MAX_TX_SQUARE * len);
    dst_buf3[0] = CONVERT_TO_BYTEPTR(tmp_buf3);
    dst_buf3[1] = CONVERT_TO_BYTEPTR(tmp_buf3 + MAX_TX_SQUARE * len);
    dst_buf3[2] = CONVERT_TO_BYTEPTR(tmp_buf3 + 2 * MAX_TX_SQUARE * len);
  } else {
#endif
    dst_buf1[0] = tmp_buf1;
    dst_buf1[1] = tmp_buf1 + MAX_TX_SQUARE;
    dst_buf1[2] = tmp_buf1 + 2 * MAX_TX_SQUARE;
    dst_buf2[0] = tmp_buf2;
    dst_buf2[1] = tmp_buf2 + MAX_TX_SQUARE;
    dst_buf2[2] = tmp_buf2 + 2 * MAX_TX_SQUARE;
    dst_buf3[0] = tmp_buf3;
    dst_buf3[1] = tmp_buf3 + MAX_TX_SQUARE;
    dst_buf3[2] = tmp_buf3 + 2 * MAX_TX_SQUARE;
#if CONFIG_AOM_HIGHBITDEPTH
  }
#endif

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  xd->mi = cm->mi_grid_visible + mi_offset;
  xd->mi[0] = cm->mi + mi_offset;

  for (i = 0; i < MAX_MB_PLANE; i++) {
    xd->plane[i].dst.buf = dst_buf[i];
    xd->plane[i].dst.stride = dst_stride[i];
  }

  switch (partition) {
    case PARTITION_NONE:
      assert(bsize < top_bsize);
      dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
                           mi_row_top, mi_col_top, dst_buf, dst_stride,
                           top_bsize, bsize, 0, 0);
      dec_extend_all(pbi, xd, tile, 0, bsize, top_bsize, mi_row, mi_col,
                     mi_row_top, mi_col_top, dst_buf, dst_stride);
      break;
    case PARTITION_HORZ:
      if (bsize == BLOCK_8X8) {
        // For sub8x8, predict in 8x8 unit
        // First half
        dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
                             mi_row_top, mi_col_top, dst_buf, dst_stride,
                             top_bsize, BLOCK_8X8, 1, 0);
        if (bsize < top_bsize)
          dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
                         mi_row_top, mi_col_top, dst_buf, dst_stride);

        // Second half
        dec_predict_b_extend(pbi, xd, tile, 2, mi_row, mi_col, mi_row, mi_col,
                             mi_row_top, mi_col_top, dst_buf1, dst_stride1,
                             top_bsize, BLOCK_8X8, 1, 1);
        if (bsize < top_bsize)
          dec_extend_all(pbi, xd, tile, 2, subsize, top_bsize, mi_row, mi_col,
                         mi_row_top, mi_col_top, dst_buf1, dst_stride1);

        // weighted average to smooth the boundary
        xd->plane[0].dst.buf = dst_buf[0];
        xd->plane[0].dst.stride = dst_stride[0];
        av1_build_masked_inter_predictor_complex(
            xd, dst_buf[0], dst_stride[0], dst_buf1[0], dst_stride1[0], mi_row,
            mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
            0);
      } else {
        // First half
        dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
                             mi_row_top, mi_col_top, dst_buf, dst_stride,
                             top_bsize, subsize, 0, 0);
        if (bsize < top_bsize)
          dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
                         mi_row_top, mi_col_top, dst_buf, dst_stride);
        else
          dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
                         mi_row_top, mi_col_top, dst_buf, dst_stride, 0);

        if (mi_row + hbs < cm->mi_rows) {
          // Second half
          dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col,
                               mi_row + hbs, mi_col, mi_row_top, mi_col_top,
                               dst_buf1, dst_stride1, top_bsize, subsize, 0, 0);
          if (bsize < top_bsize)
            dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row + hbs,
                           mi_col, mi_row_top, mi_col_top, dst_buf1,
                           dst_stride1);
          else
            dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row + hbs,
                           mi_col, mi_row_top, mi_col_top, dst_buf1,
                           dst_stride1, 1);

          // weighted average to smooth the boundary
          for (i = 0; i < MAX_MB_PLANE; i++) {
            xd->plane[i].dst.buf = dst_buf[i];
            xd->plane[i].dst.stride = dst_stride[i];
            av1_build_masked_inter_predictor_complex(
                xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i],
                mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
                PARTITION_HORZ, i);
          }
        }
      }
      break;
    case PARTITION_VERT:
      if (bsize == BLOCK_8X8) {
        // First half
        dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
                             mi_row_top, mi_col_top, dst_buf, dst_stride,
                             top_bsize, BLOCK_8X8, 1, 0);
        if (bsize < top_bsize)
          dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
                         mi_row_top, mi_col_top, dst_buf, dst_stride);

        // Second half
        dec_predict_b_extend(pbi, xd, tile, 1, mi_row, mi_col, mi_row, mi_col,
                             mi_row_top, mi_col_top, dst_buf1, dst_stride1,
                             top_bsize, BLOCK_8X8, 1, 1);
        if (bsize < top_bsize)
          dec_extend_all(pbi, xd, tile, 1, subsize, top_bsize, mi_row, mi_col,
                         mi_row_top, mi_col_top, dst_buf1, dst_stride1);

        // Smooth
        xd->plane[0].dst.buf = dst_buf[0];
        xd->plane[0].dst.stride = dst_stride[0];
        av1_build_masked_inter_predictor_complex(
            xd, dst_buf[0], dst_stride[0], dst_buf1[0], dst_stride1[0], mi_row,
            mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
            0);
      } else {
        // First half
        dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
                             mi_row_top, mi_col_top, dst_buf, dst_stride,
                             top_bsize, subsize, 0, 0);
        if (bsize < top_bsize)
          dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
                         mi_row_top, mi_col_top, dst_buf, dst_stride);
        else
          dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
                         mi_row_top, mi_col_top, dst_buf, dst_stride, 3);

        // Second half
        if (mi_col + hbs < cm->mi_cols) {
          dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs, mi_row,
                               mi_col + hbs, mi_row_top, mi_col_top, dst_buf1,
                               dst_stride1, top_bsize, subsize, 0, 0);
          if (bsize < top_bsize)
            dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
                           mi_col + hbs, mi_row_top, mi_col_top, dst_buf1,
                           dst_stride1);
          else
            dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
                           mi_col + hbs, mi_row_top, mi_col_top, dst_buf1,
                           dst_stride1, 2);

          // Smooth
          for (i = 0; i < MAX_MB_PLANE; i++) {
            xd->plane[i].dst.buf = dst_buf[i];
            xd->plane[i].dst.stride = dst_stride[i];
            av1_build_masked_inter_predictor_complex(
                xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i],
                mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
                PARTITION_VERT, i);
          }
        }
      }
      break;
    case PARTITION_SPLIT:
      if (bsize == BLOCK_8X8) {
        dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
                             mi_row_top, mi_col_top, dst_buf, dst_stride,
                             top_bsize, BLOCK_8X8, 1, 0);
        dec_predict_b_extend(pbi, xd, tile, 1, mi_row, mi_col, mi_row, mi_col,
                             mi_row_top, mi_col_top, dst_buf1, dst_stride1,
                             top_bsize, BLOCK_8X8, 1, 1);
        dec_predict_b_extend(pbi, xd, tile, 2, mi_row, mi_col, mi_row, mi_col,
                             mi_row_top, mi_col_top, dst_buf2, dst_stride2,
                             top_bsize, BLOCK_8X8, 1, 1);
        dec_predict_b_extend(pbi, xd, tile, 3, mi_row, mi_col, mi_row, mi_col,
                             mi_row_top, mi_col_top, dst_buf3, dst_stride3,
                             top_bsize, BLOCK_8X8, 1, 1);
        if (bsize < top_bsize) {
          dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
                         mi_row_top, mi_col_top, dst_buf, dst_stride);
          dec_extend_all(pbi, xd, tile, 1, subsize, top_bsize, mi_row, mi_col,
                         mi_row_top, mi_col_top, dst_buf1, dst_stride1);
          dec_extend_all(pbi, xd, tile, 2, subsize, top_bsize, mi_row, mi_col,
                         mi_row_top, mi_col_top, dst_buf2, dst_stride2);
          dec_extend_all(pbi, xd, tile, 3, subsize, top_bsize, mi_row, mi_col,
                         mi_row_top, mi_col_top, dst_buf3, dst_stride3);
        }
      } else {
        dec_predict_sb_complex(pbi, xd, tile, mi_row, mi_col, mi_row_top,
                               mi_col_top, subsize, top_bsize, dst_buf,
                               dst_stride);
        if (mi_row < cm->mi_rows && mi_col + hbs < cm->mi_cols)
          dec_predict_sb_complex(pbi, xd, tile, mi_row, mi_col + hbs,
                                 mi_row_top, mi_col_top, subsize, top_bsize,
                                 dst_buf1, dst_stride1);
        if (mi_row + hbs < cm->mi_rows && mi_col < cm->mi_cols)
          dec_predict_sb_complex(pbi, xd, tile, mi_row + hbs, mi_col,
                                 mi_row_top, mi_col_top, subsize, top_bsize,
                                 dst_buf2, dst_stride2);
        if (mi_row + hbs < cm->mi_rows && mi_col + hbs < cm->mi_cols)
          dec_predict_sb_complex(pbi, xd, tile, mi_row + hbs, mi_col + hbs,
                                 mi_row_top, mi_col_top, subsize, top_bsize,
                                 dst_buf3, dst_stride3);
      }
      for (i = 0; i < MAX_MB_PLANE; i++) {
        if (bsize == BLOCK_8X8 && i != 0)
          continue;  // Skip <4x4 chroma smoothing
        if (mi_row < cm->mi_rows && mi_col + hbs < cm->mi_cols) {
          av1_build_masked_inter_predictor_complex(
              xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i],
              mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
              PARTITION_VERT, i);
          if (mi_row + hbs < cm->mi_rows) {
            av1_build_masked_inter_predictor_complex(
                xd, dst_buf2[i], dst_stride2[i], dst_buf3[i], dst_stride3[i],
                mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
                PARTITION_VERT, i);
            av1_build_masked_inter_predictor_complex(
                xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i],
                mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
                PARTITION_HORZ, i);
          }
        } else if (mi_row + hbs < cm->mi_rows && mi_col < cm->mi_cols) {
          av1_build_masked_inter_predictor_complex(
              xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i],
              mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
              PARTITION_HORZ, i);
        }
      }
      break;
#if CONFIG_EXT_PARTITION_TYPES
    case PARTITION_HORZ_A:
      dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
                           mi_row_top, mi_col_top, dst_buf, dst_stride,
                           top_bsize, bsize2, 0, 0);
      dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col,
                     mi_row_top, mi_col_top, dst_buf, dst_stride);

      dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs, mi_row,
                           mi_col + hbs, mi_row_top, mi_col_top, dst_buf1,
                           dst_stride1, top_bsize, bsize2, 0, 0);
      dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col + hbs,
                     mi_row_top, mi_col_top, dst_buf1, dst_stride1);

      dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col, mi_row + hbs,
                           mi_col, mi_row_top, mi_col_top, dst_buf2,
                           dst_stride2, top_bsize, subsize, 0, 0);
      if (bsize < top_bsize)
        dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row + hbs,
                       mi_col, mi_row_top, mi_col_top, dst_buf2, dst_stride2);
      else
        dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row + hbs,
                       mi_col, mi_row_top, mi_col_top, dst_buf2, dst_stride2,
                       1);

      for (i = 0; i < MAX_MB_PLANE; i++) {
        xd->plane[i].dst.buf = dst_buf[i];
        xd->plane[i].dst.stride = dst_stride[i];
        av1_build_masked_inter_predictor_complex(
            xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
            mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
            i);
      }
      for (i = 0; i < MAX_MB_PLANE; i++) {
        av1_build_masked_inter_predictor_complex(
            xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i], mi_row,
            mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
            i);
      }
      break;
1194 case PARTITION_VERT_A:
1195
1196 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1197 mi_row_top, mi_col_top, dst_buf, dst_stride,
1198 top_bsize, bsize2, 0, 0);
1199 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col,
1200 mi_row_top, mi_col_top, dst_buf, dst_stride);
1201
1202 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col, mi_row + hbs,
1203 mi_col, mi_row_top, mi_col_top, dst_buf1,
1204 dst_stride1, top_bsize, bsize2, 0, 0);
1205 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs, mi_col,
1206 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1207
1208 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs, mi_row,
1209 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1210 dst_stride2, top_bsize, subsize, 0, 0);
1211 if (bsize < top_bsize)
1212 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1213 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1214 dst_stride2);
1215 else
1216 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1217 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1218 dst_stride2, 2);
1219
1220 for (i = 0; i < MAX_MB_PLANE; i++) {
1221 xd->plane[i].dst.buf = dst_buf[i];
1222 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001223 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001224 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1225 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
1226 i);
1227 }
1228 for (i = 0; i < MAX_MB_PLANE; i++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001229 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001230 xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i], mi_row,
1231 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
1232 i);
1233 }
1234 break;
1235 case PARTITION_HORZ_B:
1236 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1237 mi_row_top, mi_col_top, dst_buf, dst_stride,
1238 top_bsize, subsize, 0, 0);
1239 if (bsize < top_bsize)
1240 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1241 mi_row_top, mi_col_top, dst_buf, dst_stride);
1242 else
1243 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1244 mi_row_top, mi_col_top, dst_buf, dst_stride, 0);
1245
1246 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col, mi_row + hbs,
1247 mi_col, mi_row_top, mi_col_top, dst_buf1,
1248 dst_stride1, top_bsize, bsize2, 0, 0);
1249 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs, mi_col,
1250 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1251
1252 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col + hbs,
1253 mi_row + hbs, mi_col + hbs, mi_row_top, mi_col_top,
1254 dst_buf2, dst_stride2, top_bsize, bsize2, 0, 0);
1255 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs,
1256 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1257 dst_stride2);
1258
1259 for (i = 0; i < MAX_MB_PLANE; i++) {
1260 xd->plane[i].dst.buf = dst_buf1[i];
1261 xd->plane[i].dst.stride = dst_stride1[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001262 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001263 xd, dst_buf1[i], dst_stride1[i], dst_buf2[i], dst_stride2[i],
1264 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1265 PARTITION_VERT, i);
1266 }
1267 for (i = 0; i < MAX_MB_PLANE; i++) {
1268 xd->plane[i].dst.buf = dst_buf[i];
1269 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001270 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001271 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1272 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
1273 i);
1274 }
1275 break;
1276 case PARTITION_VERT_B:
1277 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1278 mi_row_top, mi_col_top, dst_buf, dst_stride,
1279 top_bsize, subsize, 0, 0);
1280 if (bsize < top_bsize)
1281 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1282 mi_row_top, mi_col_top, dst_buf, dst_stride);
1283 else
1284 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1285 mi_row_top, mi_col_top, dst_buf, dst_stride, 3);
1286
1287 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs, mi_row,
1288 mi_col + hbs, mi_row_top, mi_col_top, dst_buf1,
1289 dst_stride1, top_bsize, bsize2, 0, 0);
1290 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col + hbs,
1291 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1292
1293 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col + hbs,
1294 mi_row + hbs, mi_col + hbs, mi_row_top, mi_col_top,
1295 dst_buf2, dst_stride2, top_bsize, bsize2, 0, 0);
1296 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs,
1297 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1298 dst_stride2);
1299
1300 for (i = 0; i < MAX_MB_PLANE; i++) {
1301 xd->plane[i].dst.buf = dst_buf1[i];
1302 xd->plane[i].dst.stride = dst_stride1[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001303 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001304 xd, dst_buf1[i], dst_stride1[i], dst_buf2[i], dst_stride2[i],
1305 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1306 PARTITION_HORZ, i);
1307 }
1308 for (i = 0; i < MAX_MB_PLANE; i++) {
1309 xd->plane[i].dst.buf = dst_buf[i];
1310 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001311 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001312 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1313 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
1314 i);
1315 }
1316 break;
1317#endif // CONFIG_EXT_PARTITION_TYPES
1318 default: assert(0);
1319 }
1320}
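
// SUPERTX signals one segment id for the whole supertx block rather than one
// per sub-block: take the minimum segment_id over the covered mi area and
// write it into every covered MODE_INFO as segment_id_supertx.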
static void set_segment_id_supertx(const AV1_COMMON *const cm, const int mi_row,
                                   const int mi_col, const BLOCK_SIZE bsize) {
  const struct segmentation *seg = &cm->seg;
  const int miw =
      AOMMIN(num_8x8_blocks_wide_lookup[bsize], cm->mi_cols - mi_col);
  const int mih =
      AOMMIN(num_8x8_blocks_high_lookup[bsize], cm->mi_rows - mi_row);
  const int mi_offset = mi_row * cm->mi_stride + mi_col;
  MODE_INFO **const mip = cm->mi_grid_visible + mi_offset;
  int r, c;
  int seg_id_supertx = MAX_SEGMENTS;

  if (!seg->enabled) {
    seg_id_supertx = 0;
  } else {
    // Find the minimum segment_id
    for (r = 0; r < mih; r++)
      for (c = 0; c < miw; c++)
        seg_id_supertx =
            AOMMIN(mip[r * cm->mi_stride + c]->mbmi.segment_id, seg_id_supertx);
    assert(0 <= seg_id_supertx && seg_id_supertx < MAX_SEGMENTS);
  }

  // Assign the segment_id back to segment_id_supertx
  for (r = 0; r < mih; r++)
    for (c = 0; c < miw; c++)
      mip[r * cm->mi_stride + c]->mbmi.segment_id_supertx = seg_id_supertx;
}
#endif  // CONFIG_SUPERTX

static void decode_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
#if CONFIG_SUPERTX
                         int supertx_enabled,
#endif  // CONFIG_SUPERTX
                         int mi_row, int mi_col, aom_reader *r,
#if CONFIG_EXT_PARTITION_TYPES
                         PARTITION_TYPE partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                         BLOCK_SIZE bsize, int bwl, int bhl) {
  AV1_COMMON *const cm = &pbi->common;
  const int bw = 1 << (bwl - 1);
  const int bh = 1 << (bhl - 1);
  const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);
  MB_MODE_INFO *mbmi;

#if CONFIG_ACCOUNTING
  aom_accounting_set_context(&pbi->accounting, mi_col, mi_row);
#endif
#if CONFIG_SUPERTX
  if (supertx_enabled) {
    mbmi = set_mb_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
  } else {
    mbmi = set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis, bwl,
                       bhl);
  }
#if CONFIG_EXT_PARTITION_TYPES
  xd->mi[0]->mbmi.partition = partition;
#endif
  av1_read_mode_info(pbi, xd, supertx_enabled, mi_row, mi_col, r, x_mis, y_mis);
#else
  mbmi = set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis, bwl,
                     bhl);
#if CONFIG_EXT_PARTITION_TYPES
  xd->mi[0]->mbmi.partition = partition;
#endif
  av1_read_mode_info(pbi, xd, mi_row, mi_col, r, x_mis, y_mis);
#endif  // CONFIG_SUPERTX

  if (bsize >= BLOCK_8X8 && (cm->subsampling_x || cm->subsampling_y)) {
    const BLOCK_SIZE uv_subsize =
        ss_size_lookup[bsize][cm->subsampling_x][cm->subsampling_y];
    if (uv_subsize == BLOCK_INVALID)
      aom_internal_error(xd->error_info, AOM_CODEC_CORRUPT_FRAME,
                         "Invalid block size.");
  }

#if CONFIG_SUPERTX
  mbmi->segment_id_supertx = MAX_SEGMENTS;

  if (supertx_enabled) {
    xd->corrupted |= aom_reader_has_error(r);
    return;
  }
#endif  // CONFIG_SUPERTX

#if CONFIG_DELTA_Q
  if (cm->delta_q_present_flag) {
    int i;
    for (i = 0; i < MAX_SEGMENTS; i++) {
      xd->plane[0].seg_dequant[i][0] =
          av1_dc_quant(xd->current_qindex, cm->y_dc_delta_q, cm->bit_depth);
      xd->plane[0].seg_dequant[i][1] =
          av1_ac_quant(xd->current_qindex, 0, cm->bit_depth);
      xd->plane[1].seg_dequant[i][0] =
          av1_dc_quant(xd->current_qindex, cm->uv_dc_delta_q, cm->bit_depth);
      xd->plane[1].seg_dequant[i][1] =
          av1_ac_quant(xd->current_qindex, cm->uv_ac_delta_q, cm->bit_depth);
      xd->plane[2].seg_dequant[i][0] =
          av1_dc_quant(xd->current_qindex, cm->uv_dc_delta_q, cm->bit_depth);
      xd->plane[2].seg_dequant[i][1] =
          av1_ac_quant(xd->current_qindex, cm->uv_ac_delta_q, cm->bit_depth);
    }
  }
#endif

  if (mbmi->skip) {
    dec_reset_skip_context(xd);
  }
  if (!is_inter_block(mbmi)) {
    int plane;
#if CONFIG_PALETTE
    for (plane = 0; plane <= 1; ++plane) {
      if (mbmi->palette_mode_info.palette_size[plane])
        av1_decode_palette_tokens(xd, plane, r);
    }
#endif  // CONFIG_PALETTE
    for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
      const struct macroblockd_plane *const pd = &xd->plane[plane];
      const TX_SIZE tx_size = plane ? get_uv_tx_size(mbmi, pd) : mbmi->tx_size;
      const int num_4x4_w = pd->n4_w;
      const int num_4x4_h = pd->n4_h;
      const int stepr = tx_size_high_unit[tx_size];
      const int stepc = tx_size_wide_unit[tx_size];
      int row, col;
      const int max_blocks_wide =
          num_4x4_w + (xd->mb_to_right_edge >= 0
                           ? 0
                           : xd->mb_to_right_edge >> (5 + pd->subsampling_x));
      const int max_blocks_high =
          num_4x4_h + (xd->mb_to_bottom_edge >= 0
                           ? 0
                           : xd->mb_to_bottom_edge >> (5 + pd->subsampling_y));

      for (row = 0; row < max_blocks_high; row += stepr)
        for (col = 0; col < max_blocks_wide; col += stepc)
          predict_and_reconstruct_intra_block(cm, xd, r, mbmi, plane, row, col,
                                              tx_size);
    }
  } else {
    // Prediction
    av1_build_inter_predictors_sb(xd, mi_row, mi_col, AOMMAX(bsize, BLOCK_8X8));
#if CONFIG_MOTION_VAR
    if (mbmi->motion_mode == OBMC_CAUSAL) {
      av1_build_obmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
    }
#endif  // CONFIG_MOTION_VAR

    // Reconstruction
    if (!mbmi->skip) {
      int eobtotal = 0;
      int plane;

      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        const struct macroblockd_plane *const pd = &xd->plane[plane];
        int block_width = pd->width;
        int block_height = pd->height;
        int row, col;
#if CONFIG_VAR_TX
        // TODO(jingning): This can be simplified for decoder performance.
        const BLOCK_SIZE plane_bsize =
            get_plane_block_size(AOMMAX(bsize, BLOCK_8X8), pd);
        const TX_SIZE max_tx_size = max_txsize_rect_lookup[plane_bsize];
        const int bh_var_tx = tx_size_high_unit[max_tx_size];
        const int bw_var_tx = tx_size_wide_unit[max_tx_size];
#if CONFIG_EXT_TX && CONFIG_RECT_TX
        if (is_rect_tx(mbmi->tx_size)) {
          const TX_SIZE tx_size =
              plane ? get_uv_tx_size(mbmi, pd) : mbmi->tx_size;
          const int stepr = tx_size_high_unit[tx_size];
          const int stepc = tx_size_wide_unit[tx_size];
          int max_blocks_wide =
              block_width +
              (xd->mb_to_right_edge >= 0 ? 0 : xd->mb_to_right_edge >>
                                                   (3 + pd->subsampling_x));
          int max_blocks_high =
              block_height +
              (xd->mb_to_bottom_edge >= 0 ? 0 : xd->mb_to_bottom_edge >>
                                                    (3 + pd->subsampling_y));
          max_blocks_wide >>= tx_size_wide_log2[0];
          max_blocks_high >>= tx_size_wide_log2[0];
          for (row = 0; row < max_blocks_high; row += stepr)
            for (col = 0; col < max_blocks_wide; col += stepc)
              eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id,
                                                  plane, row, col, tx_size);
        } else {
#endif
          block_width >>= tx_size_wide_log2[0];
          block_height >>= tx_size_wide_log2[0];
          for (row = 0; row < block_height; row += bh_var_tx)
            for (col = 0; col < block_width; col += bw_var_tx)
              decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize, row,
                                    col, max_tx_size, &eobtotal);
#if CONFIG_EXT_TX && CONFIG_RECT_TX
        }
#endif
#else
        const TX_SIZE tx_size =
            plane ? get_uv_tx_size(mbmi, pd) : mbmi->tx_size;
        const int stepr = tx_size_high_unit[tx_size];
        const int stepc = tx_size_wide_unit[tx_size];
        int max_blocks_wide =
            block_width +
            (xd->mb_to_right_edge >= 0 ? 0 : xd->mb_to_right_edge >>
                                                 (3 + pd->subsampling_x));
        int max_blocks_high =
            block_height +
            (xd->mb_to_bottom_edge >= 0 ? 0 : xd->mb_to_bottom_edge >>
                                                  (3 + pd->subsampling_y));
        max_blocks_wide >>= tx_size_wide_log2[0];
        max_blocks_high >>= tx_size_wide_log2[0];
        for (row = 0; row < max_blocks_high; row += stepr)
          for (col = 0; col < max_blocks_wide; col += stepc)
            eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id,
                                                plane, row, col, tx_size);
#endif
      }
    }
  }

  xd->corrupted |= aom_reader_has_error(r);
}
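
// Partition context: combine the above/left partition flags at this
// block-size level into an index into the partition probability tables.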
static INLINE int dec_partition_plane_context(const MACROBLOCKD *xd, int mi_row,
                                              int mi_col, int bsl) {
  const PARTITION_CONTEXT *above_ctx = xd->above_seg_context + mi_col;
  const PARTITION_CONTEXT *left_ctx =
      xd->left_seg_context + (mi_row & MAX_MIB_MASK);
  int above = (*above_ctx >> bsl) & 1, left = (*left_ctx >> bsl) & 1;

  // assert(bsl >= 0);

  return (left * 2 + above) + bsl * PARTITION_PLOFFSET;
}

#if !CONFIG_EXT_PARTITION_TYPES
static INLINE void dec_update_partition_context(MACROBLOCKD *xd, int mi_row,
                                                int mi_col, BLOCK_SIZE subsize,
                                                int bw) {
  PARTITION_CONTEXT *const above_ctx = xd->above_seg_context + mi_col;
  PARTITION_CONTEXT *const left_ctx =
      xd->left_seg_context + (mi_row & MAX_MIB_MASK);

  // Update the partition context at the end nodes: set partition bits of
  // block sizes larger than the current one to one, and partition bits of
  // smaller block sizes to zero.
  memset(above_ctx, partition_context_lookup[subsize].above, bw);
  memset(left_ctx, partition_context_lookup[subsize].left, bw);
}
#endif  // !CONFIG_EXT_PARTITION_TYPES

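// Read the partition type for a block. When the block sticks out past the
// bottom or right frame edge on one side only, a single bit chooses between
// PARTITION_SPLIT and the one partition that still fits; past both edges the
// partition must be SPLIT.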
static PARTITION_TYPE read_partition(AV1_COMMON *cm, MACROBLOCKD *xd,
                                     int mi_row, int mi_col, aom_reader *r,
                                     int has_rows, int has_cols,
#if CONFIG_EXT_PARTITION_TYPES
                                     BLOCK_SIZE bsize,
#endif
                                     int bsl) {
  const int ctx = dec_partition_plane_context(xd, mi_row, mi_col, bsl);
  const aom_prob *const probs = cm->fc->partition_prob[ctx];
  FRAME_COUNTS *counts = xd->counts;
  PARTITION_TYPE p;

  if (has_rows && has_cols)
#if CONFIG_EXT_PARTITION_TYPES
    if (bsize <= BLOCK_8X8)
      p = (PARTITION_TYPE)aom_read_tree(r, av1_partition_tree, probs, ACCT_STR);
    else
      p = (PARTITION_TYPE)aom_read_tree(r, av1_ext_partition_tree, probs,
                                        ACCT_STR);
#else
#if CONFIG_DAALA_EC
    p = (PARTITION_TYPE)aom_read_symbol(r, cm->fc->partition_cdf[ctx],
                                        PARTITION_TYPES, ACCT_STR);
#else
    p = (PARTITION_TYPE)aom_read_tree(r, av1_partition_tree, probs, ACCT_STR);
#endif
#endif  // CONFIG_EXT_PARTITION_TYPES
  else if (!has_rows && has_cols)
    p = aom_read(r, probs[1], ACCT_STR) ? PARTITION_SPLIT : PARTITION_HORZ;
  else if (has_rows && !has_cols)
    p = aom_read(r, probs[2], ACCT_STR) ? PARTITION_SPLIT : PARTITION_VERT;
  else
    p = PARTITION_SPLIT;

  if (counts) ++counts->partition[ctx][p];

  return p;
}

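// SUPERTX reads one skip flag for the whole supertx block using the usual
// skip context; segments with SEG_LVL_SKIP active are implicitly skipped.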
#if CONFIG_SUPERTX
static int read_skip(AV1_COMMON *cm, const MACROBLOCKD *xd, int segment_id,
                     aom_reader *r) {
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int ctx = av1_get_skip_context(xd);
    const int skip = aom_read(r, cm->fc->skip_probs[ctx], ACCT_STR);
    FRAME_COUNTS *counts = xd->counts;
    if (counts) ++counts->skip[ctx][skip];
    return skip;
  }
}
#endif  // CONFIG_SUPERTX
#if CONFIG_CLPF
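// Returns 1 if every mi unit in the size x size area at (mi_row, mi_col) is
// coded as skip; no CLPF bit is signalled for all-skip areas.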
static int clpf_all_skip(const AV1_COMMON *cm, int mi_col, int mi_row,
                         int size) {
  int r, c;
  int skip = 1;
  const int maxc = AOMMIN(size, cm->mi_cols - mi_col);
  const int maxr = AOMMIN(size, cm->mi_rows - mi_row);
  for (r = 0; r < maxr && skip; r++) {
    for (c = 0; c < maxc && skip; c++) {
      skip &= !!cm->mi_grid_visible[(mi_row + r) * cm->mi_stride + mi_col + c]
                  ->mbmi.skip;
    }
  }
  return skip;
}
#endif

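// Recursively decode one partition: read the partition type, decode each
// sub-block (recursing on PARTITION_SPLIT), then handle the SUPERTX
// reconstruction and the per-superblock dering/CLPF syntax.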
// TODO(slavarnway): eliminate bsize and subsize in future commits
static void decode_partition(AV1Decoder *const pbi, MACROBLOCKD *const xd,
#if CONFIG_SUPERTX
                             int supertx_enabled,
#endif
                             int mi_row, int mi_col, aom_reader *r,
                             BLOCK_SIZE bsize, int n4x4_l2) {
  AV1_COMMON *const cm = &pbi->common;
  const int n8x8_l2 = n4x4_l2 - 1;
  const int num_8x8_wh = 1 << n8x8_l2;
  const int hbs = num_8x8_wh >> 1;
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
#if CONFIG_EXT_PARTITION_TYPES
  BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
#endif
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;
#if CONFIG_SUPERTX
  const int read_token = !supertx_enabled;
  int skip = 0;
  TX_SIZE supertx_size = b_width_log2_lookup[bsize];
  const TileInfo *const tile = &xd->tile;
  int txfm = DCT_DCT;
#endif  // CONFIG_SUPERTX

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  partition = read_partition(cm, xd, mi_row, mi_col, r, has_rows, has_cols,
#if CONFIG_EXT_PARTITION_TYPES
                             bsize,
#endif
                             n8x8_l2);
  subsize = subsize_lookup[partition][bsize];  // get_subsize(bsize, partition);

#if CONFIG_PVQ
  assert(partition < PARTITION_TYPES);
  assert(subsize < BLOCK_SIZES);
#endif
#if CONFIG_SUPERTX
  if (!frame_is_intra_only(cm) && partition != PARTITION_NONE &&
      bsize <= MAX_SUPERTX_BLOCK_SIZE && !supertx_enabled && !xd->lossless[0]) {
    const int supertx_context = partition_supertx_context_lookup[partition];
    supertx_enabled = aom_read(
        r, cm->fc->supertx_prob[supertx_context][supertx_size], ACCT_STR);
    if (xd->counts)
      xd->counts->supertx[supertx_context][supertx_size][supertx_enabled]++;
#if CONFIG_VAR_TX
    if (supertx_enabled) xd->supertx_size = supertx_size;
#endif
  }
#endif  // CONFIG_SUPERTX
  if (!hbs) {
    // calculate bmode block dimensions (log 2)
    xd->bmode_blocks_wl = 1 >> !!(partition & PARTITION_VERT);
    xd->bmode_blocks_hl = 1 >> !!(partition & PARTITION_HORZ);
    decode_block(pbi, xd,
#if CONFIG_SUPERTX
                 supertx_enabled,
#endif  // CONFIG_SUPERTX
                 mi_row, mi_col, r,
#if CONFIG_EXT_PARTITION_TYPES
                 partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                 subsize, 1, 1);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif  // CONFIG_SUPERTX
                     mi_row, mi_col, r,
#if CONFIG_EXT_PARTITION_TYPES
                     partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                     subsize, n4x4_l2, n4x4_l2);
        break;
      case PARTITION_HORZ:
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif  // CONFIG_SUPERTX
                     mi_row, mi_col, r,
#if CONFIG_EXT_PARTITION_TYPES
                     partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                     subsize, n4x4_l2, n8x8_l2);
        if (has_rows)
          decode_block(pbi, xd,
#if CONFIG_SUPERTX
                       supertx_enabled,
#endif  // CONFIG_SUPERTX
                       mi_row + hbs, mi_col, r,
#if CONFIG_EXT_PARTITION_TYPES
                       partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                       subsize, n4x4_l2, n8x8_l2);
        break;
      case PARTITION_VERT:
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif  // CONFIG_SUPERTX
                     mi_row, mi_col, r,
#if CONFIG_EXT_PARTITION_TYPES
                     partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                     subsize, n8x8_l2, n4x4_l2);
        if (has_cols)
          decode_block(pbi, xd,
#if CONFIG_SUPERTX
                       supertx_enabled,
#endif  // CONFIG_SUPERTX
                       mi_row, mi_col + hbs, r,
#if CONFIG_EXT_PARTITION_TYPES
                       partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                       subsize, n8x8_l2, n4x4_l2);
        break;
      case PARTITION_SPLIT:
        decode_partition(pbi, xd,
#if CONFIG_SUPERTX
                         supertx_enabled,
#endif  // CONFIG_SUPERTX
                         mi_row, mi_col, r, subsize, n8x8_l2);
        decode_partition(pbi, xd,
#if CONFIG_SUPERTX
                         supertx_enabled,
#endif  // CONFIG_SUPERTX
                         mi_row, mi_col + hbs, r, subsize, n8x8_l2);
        decode_partition(pbi, xd,
#if CONFIG_SUPERTX
                         supertx_enabled,
#endif  // CONFIG_SUPERTX
                         mi_row + hbs, mi_col, r, subsize, n8x8_l2);
        decode_partition(pbi, xd,
#if CONFIG_SUPERTX
                         supertx_enabled,
#endif  // CONFIG_SUPERTX
                         mi_row + hbs, mi_col + hbs, r, subsize, n8x8_l2);
        break;
#if CONFIG_EXT_PARTITION_TYPES
      case PARTITION_HORZ_A:
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif
                     mi_row, mi_col, r, partition, bsize2, n8x8_l2, n8x8_l2);
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif
                     mi_row, mi_col + hbs, r, partition, bsize2, n8x8_l2,
                     n8x8_l2);
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif
                     mi_row + hbs, mi_col, r, partition, subsize, n4x4_l2,
                     n8x8_l2);
        break;
      case PARTITION_HORZ_B:
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif
                     mi_row, mi_col, r, partition, subsize, n4x4_l2, n8x8_l2);
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif
                     mi_row + hbs, mi_col, r, partition, bsize2, n8x8_l2,
                     n8x8_l2);
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif
                     mi_row + hbs, mi_col + hbs, r, partition, bsize2, n8x8_l2,
                     n8x8_l2);
        break;
      case PARTITION_VERT_A:
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif
                     mi_row, mi_col, r, partition, bsize2, n8x8_l2, n8x8_l2);
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif
                     mi_row + hbs, mi_col, r, partition, bsize2, n8x8_l2,
                     n8x8_l2);
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif
                     mi_row, mi_col + hbs, r, partition, subsize, n8x8_l2,
                     n4x4_l2);
        break;
      case PARTITION_VERT_B:
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif
                     mi_row, mi_col, r, partition, subsize, n8x8_l2, n4x4_l2);
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif
                     mi_row, mi_col + hbs, r, partition, bsize2, n8x8_l2,
                     n8x8_l2);
        decode_block(pbi, xd,
#if CONFIG_SUPERTX
                     supertx_enabled,
#endif
                     mi_row + hbs, mi_col + hbs, r, partition, bsize2, n8x8_l2,
                     n8x8_l2);
        break;
#endif
      default: assert(0 && "Invalid partition type");
    }
  }

#if CONFIG_SUPERTX
  if (supertx_enabled && read_token) {
    uint8_t *dst_buf[3];
    int dst_stride[3], i;
    int offset = mi_row * cm->mi_stride + mi_col;

    set_segment_id_supertx(cm, mi_row, mi_col, bsize);

    xd->mi = cm->mi_grid_visible + offset;
    xd->mi[0] = cm->mi + offset;
    set_mi_row_col(xd, tile, mi_row, num_8x8_blocks_high_lookup[bsize], mi_col,
                   num_8x8_blocks_wide_lookup[bsize], cm->mi_rows, cm->mi_cols);
    set_skip_context(xd, mi_row, mi_col);
    skip = read_skip(cm, xd, xd->mi[0]->mbmi.segment_id_supertx, r);
    if (skip) {
      reset_skip_context(xd, bsize);
    } else {
#if CONFIG_EXT_TX
      if (get_ext_tx_types(supertx_size, bsize, 1) > 1) {
        int eset = get_ext_tx_set(supertx_size, bsize, 1);
        if (eset > 0) {
          txfm = aom_read_tree(r, av1_ext_tx_inter_tree[eset],
                               cm->fc->inter_ext_tx_prob[eset][supertx_size],
                               ACCT_STR);
          if (xd->counts) ++xd->counts->inter_ext_tx[eset][supertx_size][txfm];
        }
      }
#else
      if (supertx_size < TX_32X32) {
        txfm = aom_read_tree(r, av1_ext_tx_tree,
                             cm->fc->inter_ext_tx_prob[supertx_size], ACCT_STR);
        if (xd->counts) ++xd->counts->inter_ext_tx[supertx_size][txfm];
      }
#endif  // CONFIG_EXT_TX
    }

    av1_setup_dst_planes(xd->plane, get_frame_new_buffer(cm), mi_row, mi_col);
    for (i = 0; i < MAX_MB_PLANE; i++) {
      dst_buf[i] = xd->plane[i].dst.buf;
      dst_stride[i] = xd->plane[i].dst.stride;
    }
    dec_predict_sb_complex(pbi, xd, tile, mi_row, mi_col, mi_row, mi_col, bsize,
                           bsize, dst_buf, dst_stride);

    if (!skip) {
      int eobtotal = 0;
      MB_MODE_INFO *mbmi;
      set_offsets_topblock(cm, xd, tile, bsize, mi_row, mi_col);
      mbmi = &xd->mi[0]->mbmi;
      mbmi->tx_type = txfm;
      assert(mbmi->segment_id_supertx != MAX_SEGMENTS);
      for (i = 0; i < MAX_MB_PLANE; ++i) {
        const struct macroblockd_plane *const pd = &xd->plane[i];
        int row, col;
        const TX_SIZE tx_size = i ? get_uv_tx_size(mbmi, pd) : mbmi->tx_size;
        const int stepr = tx_size_high_unit[tx_size];
        const int stepc = tx_size_wide_unit[tx_size];
        int max_blocks_wide =
            pd->width + (xd->mb_to_right_edge >= 0
                             ? 0
                             : xd->mb_to_right_edge >> (3 + pd->subsampling_x));
        int max_blocks_high =
            pd->height +
            (xd->mb_to_bottom_edge >= 0 ? 0 : xd->mb_to_bottom_edge >>
                                                  (3 + pd->subsampling_y));

        max_blocks_wide >>= tx_size_wide_log2[0];
        max_blocks_high >>= tx_size_wide_log2[0];

        for (row = 0; row < max_blocks_high; row += stepr)
          for (col = 0; col < max_blocks_wide; col += stepc)
            eobtotal += reconstruct_inter_block(
                cm, xd, r, mbmi->segment_id_supertx, i, row, col, tx_size);
      }
      if (!(subsize < BLOCK_8X8) && eobtotal == 0) skip = 1;
    }
    set_param_topblock(cm, xd, bsize, mi_row, mi_col, txfm, skip);
  }
#endif  // CONFIG_SUPERTX

#if CONFIG_EXT_PARTITION_TYPES
  if (bsize >= BLOCK_8X8) {
    switch (partition) {
      case PARTITION_SPLIT:
        if (bsize > BLOCK_8X8) break;
      case PARTITION_NONE:
      case PARTITION_HORZ:
      case PARTITION_VERT:
        update_partition_context(xd, mi_row, mi_col, subsize, bsize);
        break;
      case PARTITION_HORZ_A:
        update_partition_context(xd, mi_row, mi_col, bsize2, subsize);
        update_partition_context(xd, mi_row + hbs, mi_col, subsize, subsize);
        break;
      case PARTITION_HORZ_B:
        update_partition_context(xd, mi_row, mi_col, subsize, subsize);
        update_partition_context(xd, mi_row + hbs, mi_col, bsize2, subsize);
        break;
      case PARTITION_VERT_A:
        update_partition_context(xd, mi_row, mi_col, bsize2, subsize);
        update_partition_context(xd, mi_row, mi_col + hbs, subsize, subsize);
        break;
      case PARTITION_VERT_B:
        update_partition_context(xd, mi_row, mi_col, subsize, subsize);
        update_partition_context(xd, mi_row, mi_col + hbs, bsize2, subsize);
        break;
      default: assert(0 && "Invalid partition type");
    }
  }
#else
  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    dec_update_partition_context(xd, mi_row, mi_col, subsize, num_8x8_wh);
#endif  // CONFIG_EXT_PARTITION_TYPES

#if CONFIG_DERING
#if CONFIG_EXT_PARTITION
  if (cm->sb_size == BLOCK_128X128 && bsize == BLOCK_128X128) {
    if (cm->dering_level != 0 && !sb_all_skip(cm, mi_row, mi_col)) {
      cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi.dering_gain =
          aom_read_literal(r, DERING_REFINEMENT_BITS, ACCT_STR);
    } else {
      cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi.dering_gain =
          0;
    }
  } else if (cm->sb_size == BLOCK_64X64 && bsize == BLOCK_64X64) {
#else
  if (bsize == BLOCK_64X64) {
#endif
    if (cm->dering_level != 0 && !sb_all_skip(cm, mi_row, mi_col)) {
      cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi.dering_gain =
          aom_read_literal(r, DERING_REFINEMENT_BITS, ACCT_STR);
    } else {
      cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi.dering_gain =
          0;
    }
  }
#endif

#if CONFIG_CLPF
#if CONFIG_EXT_PARTITION
  if (cm->sb_size == BLOCK_128X128 && bsize == BLOCK_128X128 &&
      cm->clpf_strength_y && cm->clpf_size != CLPF_NOSIZE) {
    const int tl = mi_row * MI_SIZE / MIN_FB_SIZE * cm->clpf_stride +
                   mi_col * MI_SIZE / MIN_FB_SIZE;
    if (cm->clpf_size == CLPF_128X128) {
      cm->clpf_blocks[tl] = aom_read_literal(r, 1, ACCT_STR);
    } else if (cm->clpf_size == CLPF_64X64) {
      const int tr = tl + 2;
      const int bl = tl + 2 * cm->clpf_stride;
      const int br = tr + 2 * cm->clpf_stride;
      const int size = 64 / MI_SIZE;

      // Up to four bits per SB
      if (!clpf_all_skip(cm, mi_col, mi_row, size))
        cm->clpf_blocks[tl] = aom_read_literal(r, 1, ACCT_STR);

      if (mi_col + size < cm->mi_cols &&
          !clpf_all_skip(cm, mi_col + size, mi_row, size))
        cm->clpf_blocks[tr] = aom_read_literal(r, 1, ACCT_STR);

      if (mi_row + size < cm->mi_rows &&
          !clpf_all_skip(cm, mi_col, mi_row + size, size))
        cm->clpf_blocks[bl] = aom_read_literal(r, 1, ACCT_STR);

      if (mi_col + size < cm->mi_cols && mi_row + size < cm->mi_rows &&
          !clpf_all_skip(cm, mi_col + size, mi_row + size, size))
        cm->clpf_blocks[br] = aom_read_literal(r, 1, ACCT_STR);
    } else if (cm->clpf_size == CLPF_32X32) {
      int i, j;
      const int size = 32 / MI_SIZE;
      for (i = 0; i < 4; ++i)
        for (j = 0; j < 4; ++j) {
          const int index = tl + i * cm->clpf_stride + j;
          if (mi_row + i * size < cm->mi_rows &&
              mi_col + j * size < cm->mi_cols &&
              !clpf_all_skip(cm, mi_col + j * size, mi_row + i * size, size))
            cm->clpf_blocks[index] = aom_read_literal(r, 1, ACCT_STR);
        }
    }
  } else if (cm->sb_size == BLOCK_64X64 && bsize == BLOCK_64X64 &&
#else
  if (bsize == BLOCK_64X64 &&
#endif  // CONFIG_EXT_PARTITION
      cm->clpf_strength_y && cm->clpf_size != CLPF_NOSIZE) {
    const int tl = mi_row * MI_SIZE / MIN_FB_SIZE * cm->clpf_stride +
                   mi_col * MI_SIZE / MIN_FB_SIZE;

    if (!((mi_row * MI_SIZE) & 127) && !((mi_col * MI_SIZE) & 127) &&
        cm->clpf_size == CLPF_128X128) {
      cm->clpf_blocks[tl] = aom_read_literal(r, 1, ACCT_STR);
    } else if (cm->clpf_size == CLPF_64X64 &&
               !clpf_all_skip(cm, mi_col, mi_row, 64 / MI_SIZE)) {
      cm->clpf_blocks[tl] = aom_read_literal(r, 1, ACCT_STR);
    } else if (cm->clpf_size == CLPF_32X32) {
      const int tr = tl + 1;
      const int bl = tl + cm->clpf_stride;
      const int br = tr + cm->clpf_stride;
      const int size = 32 / MI_SIZE;

      // Up to four bits per SB
      if (!clpf_all_skip(cm, mi_col, mi_row, size))
        cm->clpf_blocks[tl] = aom_read_literal(r, 1, ACCT_STR);

      if (mi_col + size < cm->mi_cols &&
          !clpf_all_skip(cm, mi_col + size, mi_row, size))
        cm->clpf_blocks[tr] = aom_read_literal(r, 1, ACCT_STR);

      if (mi_row + size < cm->mi_rows &&
          !clpf_all_skip(cm, mi_col, mi_row + size, size))
        cm->clpf_blocks[bl] = aom_read_literal(r, 1, ACCT_STR);

      if (mi_col + size < cm->mi_cols && mi_row + size < cm->mi_rows &&
          !clpf_all_skip(cm, mi_col + size, mi_row + size, size))
        cm->clpf_blocks[br] = aom_read_literal(r, 1, ACCT_STR);
    }
  }
#endif  // CONFIG_CLPF
}

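// Initialize the entropy decoder over one tile's byte range. With CONFIG_ANS
// the token data is read with an ANS decoder instead of the default bool
// decoder; both paths validate the partition length first.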
#if !CONFIG_ANS
static void setup_bool_decoder(const uint8_t *data, const uint8_t *data_end,
                               const size_t read_size,
                               struct aom_internal_error_info *error_info,
                               aom_reader *r, aom_decrypt_cb decrypt_cb,
                               void *decrypt_state) {
  // Validate the calculated partition length. If the buffer
  // described by the partition can't be fully read, then restrict
  // it to the portion that can be (for EC mode) or throw an error.
  if (!read_is_valid(data, read_size, data_end))
    aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt tile length");

  if (aom_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
    aom_internal_error(error_info, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate bool decoder %d", 1);
}
#else
static void setup_token_decoder(const uint8_t *data, const uint8_t *data_end,
                                const size_t read_size,
                                struct aom_internal_error_info *error_info,
                                struct AnsDecoder *const ans,
                                aom_decrypt_cb decrypt_cb,
                                void *decrypt_state) {
  (void)decrypt_cb;
  (void)decrypt_state;
  // Validate the calculated partition length. If the buffer
  // described by the partition can't be fully read, then restrict
  // it to the portion that can be (for EC mode) or throw an error.
  if (!read_is_valid(data, read_size, data_end))
    aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt tile length");

  if (read_size > INT_MAX || ans_read_init(ans, data, (int)read_size))
    aom_internal_error(error_info, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate token decoder %d", 1);
}
#endif

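// Coefficient probability updates: a single bit signals whether any of the
// model's unconstrained nodes are updated; each update is differentially
// coded. Not compiled in PVQ builds (hence the !CONFIG_PVQ guard).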
#if !CONFIG_PVQ
static void read_coef_probs_common(av1_coeff_probs_model *coef_probs,
                                   aom_reader *r) {
  int i, j, k, l, m;
#if CONFIG_EC_ADAPT
  const int node_limit = UNCONSTRAINED_NODES - 1;
#else
  const int node_limit = UNCONSTRAINED_NODES;
#endif

  if (aom_read_bit(r, ACCT_STR))
    for (i = 0; i < PLANE_TYPES; ++i)
      for (j = 0; j < REF_TYPES; ++j)
        for (k = 0; k < COEF_BANDS; ++k)
          for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l)
            for (m = 0; m < node_limit; ++m)
              av1_diff_update_prob(r, &coef_probs[i][j][k][l][m], ACCT_STR);
}

static void read_coef_probs(FRAME_CONTEXT *fc, TX_MODE tx_mode, aom_reader *r) {
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;
  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    read_coef_probs_common(fc->coef_probs[tx_size], r);
}
#endif

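// Frame-header segmentation syntax: enable flag, map and temporal update
// flags, then optional per-segment feature data (absolute or delta coded,
// per the abs_delta bit).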
static void setup_segmentation(AV1_COMMON *const cm,
                               struct aom_read_bit_buffer *rb) {
  struct segmentation *const seg = &cm->seg;
  int i, j;

  seg->update_map = 0;
  seg->update_data = 0;

  seg->enabled = aom_rb_read_bit(rb);
  if (!seg->enabled) return;

  // Segmentation map update
  if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
    seg->update_map = 1;
  } else {
    seg->update_map = aom_rb_read_bit(rb);
  }
  if (seg->update_map) {
    if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
      seg->temporal_update = 0;
    } else {
      seg->temporal_update = aom_rb_read_bit(rb);
    }
  }

  // Segmentation data update
  seg->update_data = aom_rb_read_bit(rb);
  if (seg->update_data) {
    seg->abs_delta = aom_rb_read_bit(rb);

    av1_clearall_segfeatures(seg);

    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        int data = 0;
        const int feature_enabled = aom_rb_read_bit(rb);
        if (feature_enabled) {
          av1_enable_segfeature(seg, i, j);
          data = decode_unsigned_max(rb, av1_seg_feature_data_max(j));
          if (av1_is_segfeature_signed(j))
            data = aom_rb_read_bit(rb) ? -data : data;
        }
        av1_set_segdata(seg, i, j, data);
      }
    }
  }
}

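// Loop restoration: the frame header first selects a frame-level restoration
// mode (none, switchable per tile, or a single filter type); the per-tile
// filter parameters are then read by decode_restoration() below.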
#if CONFIG_LOOP_RESTORATION
static void decode_restoration_mode(AV1_COMMON *cm,
                                    struct aom_read_bit_buffer *rb) {
  RestorationInfo *rsi = &cm->rst_info;
  if (aom_rb_read_bit(rb)) {
    if (aom_rb_read_bit(rb))
      rsi->frame_restoration_type =
          (aom_rb_read_bit(rb) ? RESTORE_WIENER : RESTORE_BILATERAL);
    else
      rsi->frame_restoration_type = RESTORE_SGRPROJ;
  } else {
    rsi->frame_restoration_type =
        aom_rb_read_bit(rb) ? RESTORE_SWITCHABLE : RESTORE_NONE;
  }
}

static void read_wiener_filter(WienerInfo *wiener_info, aom_reader *rb) {
  wiener_info->vfilter[0] =
      aom_read_literal(rb, WIENER_FILT_TAP0_BITS, ACCT_STR) +
      WIENER_FILT_TAP0_MINV;
  wiener_info->vfilter[1] =
      aom_read_literal(rb, WIENER_FILT_TAP1_BITS, ACCT_STR) +
      WIENER_FILT_TAP1_MINV;
  wiener_info->vfilter[2] =
      aom_read_literal(rb, WIENER_FILT_TAP2_BITS, ACCT_STR) +
      WIENER_FILT_TAP2_MINV;
  wiener_info->hfilter[0] =
      aom_read_literal(rb, WIENER_FILT_TAP0_BITS, ACCT_STR) +
      WIENER_FILT_TAP0_MINV;
  wiener_info->hfilter[1] =
      aom_read_literal(rb, WIENER_FILT_TAP1_BITS, ACCT_STR) +
      WIENER_FILT_TAP1_MINV;
  wiener_info->hfilter[2] =
      aom_read_literal(rb, WIENER_FILT_TAP2_BITS, ACCT_STR) +
      WIENER_FILT_TAP2_MINV;
}

static void read_sgrproj_filter(SgrprojInfo *sgrproj_info, aom_reader *rb) {
  sgrproj_info->ep = aom_read_literal(rb, SGRPROJ_PARAMS_BITS, ACCT_STR);
  sgrproj_info->xqd[0] =
      aom_read_literal(rb, SGRPROJ_PRJ_BITS, ACCT_STR) + SGRPROJ_PRJ_MIN0;
  sgrproj_info->xqd[1] =
      aom_read_literal(rb, SGRPROJ_PRJ_BITS, ACCT_STR) + SGRPROJ_PRJ_MIN1;
}

static void read_bilateral_filter(const AV1_COMMON *cm,
                                  BilateralInfo *bilateral_info,
                                  aom_reader *rb) {
  int s;
  for (s = 0; s < BILATERAL_SUBTILES; ++s) {
    if (aom_read(rb, RESTORE_NONE_BILATERAL_PROB, ACCT_STR)) {
      bilateral_info->level[s] =
          aom_read_literal(rb, av1_bilateral_level_bits(cm), ACCT_STR);
    } else {
      bilateral_info->level[s] = -1;
    }
  }
}

static void decode_restoration(AV1_COMMON *cm, aom_reader *rb) {
  int i;
  RestorationInfo *rsi = &cm->rst_info;
  const int ntiles =
      av1_get_rest_ntiles(cm->width, cm->height, NULL, NULL, NULL, NULL);
  if (rsi->frame_restoration_type != RESTORE_NONE) {
    rsi->restoration_type = (RestorationType *)aom_realloc(
        rsi->restoration_type, sizeof(*rsi->restoration_type) * ntiles);
    if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) {
      rsi->bilateral_info = (BilateralInfo *)aom_realloc(
          rsi->bilateral_info, sizeof(*rsi->bilateral_info) * ntiles);
      assert(rsi->bilateral_info != NULL);
      rsi->wiener_info = (WienerInfo *)aom_realloc(
          rsi->wiener_info, sizeof(*rsi->wiener_info) * ntiles);
      assert(rsi->wiener_info != NULL);
      rsi->sgrproj_info = (SgrprojInfo *)aom_realloc(
          rsi->sgrproj_info, sizeof(*rsi->sgrproj_info) * ntiles);
      assert(rsi->sgrproj_info != NULL);
      for (i = 0; i < ntiles; ++i) {
        rsi->restoration_type[i] =
            aom_read_tree(rb, av1_switchable_restore_tree,
                          cm->fc->switchable_restore_prob, ACCT_STR);
        if (rsi->restoration_type[i] == RESTORE_WIENER) {
          rsi->wiener_info[i].level = 1;
          read_wiener_filter(&rsi->wiener_info[i], rb);
        } else if (rsi->restoration_type[i] == RESTORE_BILATERAL) {
#if BILATERAL_SUBTILES == 0
          rsi->bilateral_info[i].level[0] =
              aom_read_literal(rb, av1_bilateral_level_bits(cm), ACCT_STR);
#else
          read_bilateral_filter(cm, &rsi->bilateral_info[i], rb);
#endif
        } else if (rsi->restoration_type[i] == RESTORE_SGRPROJ) {
          rsi->sgrproj_info[i].level = 1;
          read_sgrproj_filter(&rsi->sgrproj_info[i], rb);
        }
      }
    } else if (rsi->frame_restoration_type == RESTORE_WIENER) {
      rsi->wiener_info = (WienerInfo *)aom_realloc(
          rsi->wiener_info, sizeof(*rsi->wiener_info) * ntiles);
      assert(rsi->wiener_info != NULL);
      for (i = 0; i < ntiles; ++i) {
        if (aom_read(rb, RESTORE_NONE_WIENER_PROB, ACCT_STR)) {
          rsi->restoration_type[i] = RESTORE_WIENER;
          rsi->wiener_info[i].level = 1;
          read_wiener_filter(&rsi->wiener_info[i], rb);
        } else {
          rsi->wiener_info[i].level = 0;
          rsi->restoration_type[i] = RESTORE_NONE;
        }
      }
    } else if (rsi->frame_restoration_type == RESTORE_BILATERAL) {
      rsi->bilateral_info = (BilateralInfo *)aom_realloc(
          rsi->bilateral_info, sizeof(*rsi->bilateral_info) * ntiles);
      assert(rsi->bilateral_info != NULL);
      for (i = 0; i < ntiles; ++i) {
        rsi->restoration_type[i] = RESTORE_BILATERAL;
        read_bilateral_filter(cm, &rsi->bilateral_info[i], rb);
      }
    } else if (rsi->frame_restoration_type == RESTORE_SGRPROJ) {
      rsi->sgrproj_info = (SgrprojInfo *)aom_realloc(
          rsi->sgrproj_info, sizeof(*rsi->sgrproj_info) * ntiles);
      assert(rsi->sgrproj_info != NULL);
      for (i = 0; i < ntiles; ++i) {
        if (aom_read(rb, RESTORE_NONE_SGRPROJ_PROB, ACCT_STR)) {
          rsi->restoration_type[i] = RESTORE_SGRPROJ;
          rsi->sgrproj_info[i].level = 1;
          read_sgrproj_filter(&rsi->sgrproj_info[i], rb);
        } else {
          rsi->sgrproj_info[i].level = 0;
          rsi->restoration_type[i] = RESTORE_NONE;
        }
      }
    }
  }
}
#endif  // CONFIG_LOOP_RESTORATION

static void setup_loopfilter(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  struct loopfilter *lf = &cm->lf;
  lf->filter_level = aom_rb_read_literal(rb, 6);
  lf->sharpness_level = aom_rb_read_literal(rb, 3);

  // Read in loop filter deltas applied at the MB level based on mode or ref
  // frame.
  lf->mode_ref_delta_update = 0;

  lf->mode_ref_delta_enabled = aom_rb_read_bit(rb);
  if (lf->mode_ref_delta_enabled) {
    lf->mode_ref_delta_update = aom_rb_read_bit(rb);
    if (lf->mode_ref_delta_update) {
      int i;

      for (i = 0; i < TOTAL_REFS_PER_FRAME; i++)
        if (aom_rb_read_bit(rb))
          lf->ref_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
        if (aom_rb_read_bit(rb))
          lf->mode_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);
    }
  }
}

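// CLPF frame syntax: per-plane strengths, and for luma a filter block size;
// when a block size is signalled, allocate the per-block on/off flag array.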
#if CONFIG_CLPF
static void setup_clpf(AV1Decoder *pbi, struct aom_read_bit_buffer *rb) {
  AV1_COMMON *const cm = &pbi->common;
  const int width = pbi->cur_buf->buf.y_crop_width;
  const int height = pbi->cur_buf->buf.y_crop_height;

  cm->clpf_blocks = 0;
  cm->clpf_strength_y = aom_rb_read_literal(rb, 2);
  cm->clpf_strength_u = aom_rb_read_literal(rb, 2);
  cm->clpf_strength_v = aom_rb_read_literal(rb, 2);
  if (cm->clpf_strength_y) {
    cm->clpf_size = aom_rb_read_literal(rb, 2);
    if (cm->clpf_size != CLPF_NOSIZE) {
      int size;
      cm->clpf_stride =
          ((width + MIN_FB_SIZE - 1) & ~(MIN_FB_SIZE - 1)) >> MIN_FB_SIZE_LOG2;
      size =
          cm->clpf_stride * ((height + MIN_FB_SIZE - 1) & ~(MIN_FB_SIZE - 1)) >>
          MIN_FB_SIZE_LOG2;
      CHECK_MEM_ERROR(cm, cm->clpf_blocks, aom_malloc(size));
      memset(cm->clpf_blocks, -1, size);
    }
  }
}

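// Decoder-side CLPF decision function: the per-block filter bit was already
// decoded from the bitstream, so this simply returns the stored bit. The
// UNUSED parameters appear to keep the signature shared with the
// encoder-side decision search.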
static int clpf_bit(UNUSED int k, UNUSED int l,
                    UNUSED const YV12_BUFFER_CONFIG *rec,
                    UNUSED const YV12_BUFFER_CONFIG *org,
                    UNUSED const AV1_COMMON *cm, UNUSED int block_size,
                    UNUSED int w, UNUSED int h, UNUSED unsigned int strength,
                    UNUSED unsigned int fb_size_log2, int8_t *bit) {
  return *bit;
}
#endif

#if CONFIG_DERING
static void setup_dering(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  cm->dering_level = aom_rb_read_literal(rb, DERING_LEVEL_BITS);
}
#endif  // CONFIG_DERING

static INLINE int read_delta_q(struct aom_read_bit_buffer *rb) {
  return aom_rb_read_bit(rb) ? aom_rb_read_inv_signed_literal(rb, 6) : 0;
}

static void setup_quantization(AV1_COMMON *const cm,
                               struct aom_read_bit_buffer *rb) {
  cm->base_qindex = aom_rb_read_literal(rb, QINDEX_BITS);
  cm->y_dc_delta_q = read_delta_q(rb);
  cm->uv_dc_delta_q = read_delta_q(rb);
  cm->uv_ac_delta_q = read_delta_q(rb);
  cm->dequant_bit_depth = cm->bit_depth;
#if CONFIG_AOM_QM
  cm->using_qmatrix = aom_rb_read_bit(rb);
  if (cm->using_qmatrix) {
    cm->min_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
    cm->max_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
  } else {
    cm->min_qmlevel = 0;
    cm->max_qmlevel = 0;
  }
#endif
}
2428
Yaowu Xuf883b422016-08-30 14:01:10 -07002429static void setup_segmentation_dequant(AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002430 // Build y/uv dequant values based on segmentation.
2431 int i = 0;
2432#if CONFIG_AOM_QM
2433 int lossless;
2434 int j = 0;
2435 int qmlevel;
2436 int using_qm = cm->using_qmatrix;
2437 int minqm = cm->min_qmlevel;
2438 int maxqm = cm->max_qmlevel;
2439#endif
2440#if CONFIG_NEW_QUANT
2441 int b;
2442 int dq;
2443#endif // CONFIG_NEW_QUANT
2444 if (cm->seg.enabled) {
2445 for (i = 0; i < MAX_SEGMENTS; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002446 const int qindex = av1_get_qindex(&cm->seg, i, cm->base_qindex);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002447 cm->y_dequant[i][0] =
Yaowu Xuf883b422016-08-30 14:01:10 -07002448 av1_dc_quant(qindex, cm->y_dc_delta_q, cm->bit_depth);
2449 cm->y_dequant[i][1] = av1_ac_quant(qindex, 0, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002450 cm->uv_dequant[i][0] =
Yaowu Xuf883b422016-08-30 14:01:10 -07002451 av1_dc_quant(qindex, cm->uv_dc_delta_q, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002452 cm->uv_dequant[i][1] =
Yaowu Xuf883b422016-08-30 14:01:10 -07002453 av1_ac_quant(qindex, cm->uv_ac_delta_q, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002454#if CONFIG_AOM_QM
2455 lossless = qindex == 0 && cm->y_dc_delta_q == 0 &&
2456 cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
2457 // NB: depends on base index so there is only 1 set per frame
2458 // No quant weighting when lossless or signalled not using QM
2459 qmlevel = (lossless || using_qm == 0)
2460 ? NUM_QM_LEVELS - 1
2461 : aom_get_qmlevel(cm->base_qindex, minqm, maxqm);
2462 for (j = 0; j < TX_SIZES; ++j) {
2463 cm->y_iqmatrix[i][1][j] = aom_iqmatrix(cm, qmlevel, 0, j, 1);
2464 cm->y_iqmatrix[i][0][j] = aom_iqmatrix(cm, qmlevel, 0, j, 0);
2465 cm->uv_iqmatrix[i][1][j] = aom_iqmatrix(cm, qmlevel, 1, j, 1);
2466 cm->uv_iqmatrix[i][0][j] = aom_iqmatrix(cm, qmlevel, 1, j, 0);
2467 }
2468#endif // CONFIG_AOM_QM
2469#if CONFIG_NEW_QUANT
2470 for (dq = 0; dq < QUANT_PROFILES; dq++) {
2471 for (b = 0; b < COEF_BANDS; ++b) {
Debargha Mukherjee3c42c092016-09-29 09:17:36 -07002472 av1_get_dequant_val_nuq(cm->y_dequant[i][b != 0], b,
Yaowu Xuf883b422016-08-30 14:01:10 -07002473 cm->y_dequant_nuq[i][dq][b], NULL, dq);
Debargha Mukherjee3c42c092016-09-29 09:17:36 -07002474 av1_get_dequant_val_nuq(cm->uv_dequant[i][b != 0], b,
Yaowu Xuf883b422016-08-30 14:01:10 -07002475 cm->uv_dequant_nuq[i][dq][b], NULL, dq);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002476 }
2477 }
2478#endif // CONFIG_NEW_QUANT
2479 }
2480 } else {
2481 const int qindex = cm->base_qindex;
2482 // When segmentation is disabled, only the first value is used. The
2483 // remaining are don't cares.
Yaowu Xuf883b422016-08-30 14:01:10 -07002484 cm->y_dequant[0][0] = av1_dc_quant(qindex, cm->y_dc_delta_q, cm->bit_depth);
2485 cm->y_dequant[0][1] = av1_ac_quant(qindex, 0, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002486 cm->uv_dequant[0][0] =
Yaowu Xuf883b422016-08-30 14:01:10 -07002487 av1_dc_quant(qindex, cm->uv_dc_delta_q, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002488 cm->uv_dequant[0][1] =
Yaowu Xuf883b422016-08-30 14:01:10 -07002489 av1_ac_quant(qindex, cm->uv_ac_delta_q, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002490#if CONFIG_AOM_QM
2491 lossless = qindex == 0 && cm->y_dc_delta_q == 0 && cm->uv_dc_delta_q == 0 &&
2492 cm->uv_ac_delta_q == 0;
2493 // No quant weighting when lossless or signalled not using QM
2494 qmlevel = (lossless || using_qm == 0)
2495 ? NUM_QM_LEVELS - 1
2496 : aom_get_qmlevel(cm->base_qindex, minqm, maxqm);
2497 for (j = 0; j < TX_SIZES; ++j) {
2498 cm->y_iqmatrix[i][1][j] = aom_iqmatrix(cm, qmlevel, 0, j, 1);
2499 cm->y_iqmatrix[i][0][j] = aom_iqmatrix(cm, qmlevel, 0, j, 0);
2500 cm->uv_iqmatrix[i][1][j] = aom_iqmatrix(cm, qmlevel, 1, j, 1);
2501 cm->uv_iqmatrix[i][0][j] = aom_iqmatrix(cm, qmlevel, 1, j, 0);
2502 }
2503#endif
2504#if CONFIG_NEW_QUANT
2505 for (dq = 0; dq < QUANT_PROFILES; dq++) {
2506 for (b = 0; b < COEF_BANDS; ++b) {
Debargha Mukherjee3c42c092016-09-29 09:17:36 -07002507 av1_get_dequant_val_nuq(cm->y_dequant[0][b != 0], b,
Yaowu Xuf883b422016-08-30 14:01:10 -07002508 cm->y_dequant_nuq[0][dq][b], NULL, dq);
Debargha Mukherjee3c42c092016-09-29 09:17:36 -07002509 av1_get_dequant_val_nuq(cm->uv_dequant[0][b != 0], b,
Yaowu Xuf883b422016-08-30 14:01:10 -07002510 cm->uv_dequant_nuq[0][dq][b], NULL, dq);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002511 }
2512 }
2513#endif // CONFIG_NEW_QUANT
2514 }
2515}
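
/* How the per-segment qindex above is chosen -- an outline of the
 * av1_get_qindex() contract as used here.  This is a sketch under assumed
 * semantics (the real helper also consults the segment feature flags),
 * not a drop-in implementation: */
#if 0
static int get_qindex_sketch(int seg_enabled, int q_feature_active,
                             int q_feature_data, int abs_delta,
                             int base_qindex) {
  if (!seg_enabled || !q_feature_active) return base_qindex;
  /* A segment carries either an absolute qindex or a delta on the base. */
  const int q = abs_delta ? q_feature_data : base_qindex + q_feature_data;
  return q < 0 ? 0 : (q > 255 ? 255 : q); /* clamp to [0, MAXQ] */
}
#endif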

static InterpFilter read_interp_filter(struct aom_read_bit_buffer *rb) {
  return aom_rb_read_bit(rb) ? SWITCHABLE
                             : aom_rb_read_literal(rb, LOG_SWITCHABLE_FILTERS);
}

static void setup_render_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  cm->render_width = cm->width;
  cm->render_height = cm->height;
  if (aom_rb_read_bit(rb))
    av1_read_frame_size(rb, &cm->render_width, &cm->render_height);
}

static void resize_mv_buffer(AV1_COMMON *cm) {
  aom_free(cm->cur_frame->mvs);
  cm->cur_frame->mi_rows = cm->mi_rows;
  cm->cur_frame->mi_cols = cm->mi_cols;
  CHECK_MEM_ERROR(cm, cm->cur_frame->mvs,
                  (MV_REF *)aom_calloc(cm->mi_rows * cm->mi_cols,
                                       sizeof(*cm->cur_frame->mvs)));
}

static void resize_context_buffers(AV1_COMMON *cm, int width, int height) {
#if CONFIG_SIZE_LIMIT
  if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Dimensions of %dx%d beyond allowed size of %dx%d.",
                       width, height, DECODE_WIDTH_LIMIT, DECODE_HEIGHT_LIMIT);
#endif
  if (cm->width != width || cm->height != height) {
    const int new_mi_rows =
        ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
    const int new_mi_cols =
        ALIGN_POWER_OF_TWO(width, MI_SIZE_LOG2) >> MI_SIZE_LOG2;

    // Allocations in av1_alloc_context_buffers() depend on individual
    // dimensions as well as the overall size.
    if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
      if (av1_alloc_context_buffers(cm, width, height))
        aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                           "Failed to allocate context buffers");
    } else {
      av1_set_mb_mi(cm, width, height);
    }
    av1_init_context_buffers(cm);
    cm->width = width;
    cm->height = height;
  }
  if (cm->cur_frame->mvs == NULL || cm->mi_rows > cm->cur_frame->mi_rows ||
      cm->mi_cols > cm->cur_frame->mi_cols) {
    resize_mv_buffer(cm);
  }
}
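
/* The mode-info (MI) grid math above, with concrete numbers.  A sketch
 * assuming MI_SIZE_LOG2 == 3 (8x8-luma MI units); the constant is an
 * assumption for the example only: */
#if 0
static int mi_units(int pixels) {
  const int mi_size_log2 = 3; /* assumed */
  /* ALIGN_POWER_OF_TWO(v, n) rounds v up to a multiple of 1 << n. */
  const int aligned =
      (pixels + (1 << mi_size_log2) - 1) & ~((1 << mi_size_log2) - 1);
  return aligned >> mi_size_log2; /* e.g. 1920 -> 240 cols, 1080 -> 135 rows */
}
#endif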

static void setup_frame_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  int width, height;
  BufferPool *const pool = cm->buffer_pool;
  av1_read_frame_size(rb, &width, &height);
  resize_context_buffers(cm, width, height);
  setup_render_size(cm, rb);

  lock_buffer_pool(pool);
  if (aom_realloc_frame_buffer(
          get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
          cm->subsampling_y,
#if CONFIG_AOM_HIGHBITDEPTH
          cm->use_highbitdepth,
#endif
          AOM_BORDER_IN_PIXELS, cm->byte_alignment,
          &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
          pool->cb_priv)) {
    unlock_buffer_pool(pool);
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate frame buffer");
  }
  unlock_buffer_pool(pool);

  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
  pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
  pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
  pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
  pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
  pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
}

static INLINE int valid_ref_frame_img_fmt(aom_bit_depth_t ref_bit_depth,
                                          int ref_xss, int ref_yss,
                                          aom_bit_depth_t this_bit_depth,
                                          int this_xss, int this_yss) {
  return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
         ref_yss == this_yss;
}

static void setup_frame_size_with_refs(AV1_COMMON *cm,
                                       struct aom_read_bit_buffer *rb) {
  int width, height;
  int found = 0, i;
  int has_valid_ref_frame = 0;
  BufferPool *const pool = cm->buffer_pool;
  for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
    if (aom_rb_read_bit(rb)) {
      YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf;
      width = buf->y_crop_width;
      height = buf->y_crop_height;
      cm->render_width = buf->render_width;
      cm->render_height = buf->render_height;
      found = 1;
      break;
    }
  }

  if (!found) {
    av1_read_frame_size(rb, &width, &height);
    setup_render_size(cm, rb);
  }

  if (width <= 0 || height <= 0)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Invalid frame size");

  // Check that at least one of the frames referenced by this frame has
  // valid dimensions.
  for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
    RefBuffer *const ref_frame = &cm->frame_refs[i];
    has_valid_ref_frame |=
        valid_ref_frame_size(ref_frame->buf->y_crop_width,
                             ref_frame->buf->y_crop_height, width, height);
  }
  if (!has_valid_ref_frame)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Referenced frame has invalid size");
  for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
    RefBuffer *const ref_frame = &cm->frame_refs[i];
    if (!valid_ref_frame_img_fmt(ref_frame->buf->bit_depth,
                                 ref_frame->buf->subsampling_x,
                                 ref_frame->buf->subsampling_y, cm->bit_depth,
                                 cm->subsampling_x, cm->subsampling_y))
      aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                         "Referenced frame has incompatible color format");
  }

  resize_context_buffers(cm, width, height);

  lock_buffer_pool(pool);
  if (aom_realloc_frame_buffer(
          get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
          cm->subsampling_y,
#if CONFIG_AOM_HIGHBITDEPTH
          cm->use_highbitdepth,
#endif
          AOM_BORDER_IN_PIXELS, cm->byte_alignment,
          &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
          pool->cb_priv)) {
    unlock_buffer_pool(pool);
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate frame buffer");
  }
  unlock_buffer_pool(pool);

  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
  pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
  pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
  pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
  pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
  pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
}

static void read_tile_info(AV1Decoder *const pbi,
                           struct aom_read_bit_buffer *const rb) {
  AV1_COMMON *const cm = &pbi->common;
#if CONFIG_EXT_TILE
// Read the tile width/height
#if CONFIG_EXT_PARTITION
  if (cm->sb_size == BLOCK_128X128) {
    cm->tile_width = aom_rb_read_literal(rb, 5) + 1;
    cm->tile_height = aom_rb_read_literal(rb, 5) + 1;
  } else
#endif  // CONFIG_EXT_PARTITION
  {
    cm->tile_width = aom_rb_read_literal(rb, 6) + 1;
    cm->tile_height = aom_rb_read_literal(rb, 6) + 1;
  }

  cm->tile_width <<= cm->mib_size_log2;
  cm->tile_height <<= cm->mib_size_log2;

  cm->tile_width = AOMMIN(cm->tile_width, cm->mi_cols);
  cm->tile_height = AOMMIN(cm->tile_height, cm->mi_rows);

  // Get the number of tiles
  cm->tile_cols = 1;
  while (cm->tile_cols * cm->tile_width < cm->mi_cols) ++cm->tile_cols;

  cm->tile_rows = 1;
  while (cm->tile_rows * cm->tile_height < cm->mi_rows) ++cm->tile_rows;

  if (cm->tile_cols * cm->tile_rows > 1) {
    // Read the number of bytes used to store tile size
    pbi->tile_col_size_bytes = aom_rb_read_literal(rb, 2) + 1;
    pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
  }
#else
  int min_log2_tile_cols, max_log2_tile_cols, max_ones;
  av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);

  // columns
  max_ones = max_log2_tile_cols - min_log2_tile_cols;
  cm->log2_tile_cols = min_log2_tile_cols;
  while (max_ones-- && aom_rb_read_bit(rb)) cm->log2_tile_cols++;

  if (cm->log2_tile_cols > 6)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Invalid number of tile columns");

  // rows
  cm->log2_tile_rows = aom_rb_read_bit(rb);
  if (cm->log2_tile_rows) cm->log2_tile_rows += aom_rb_read_bit(rb);

  cm->tile_cols = 1 << cm->log2_tile_cols;
  cm->tile_rows = 1 << cm->log2_tile_rows;

  cm->tile_width = ALIGN_POWER_OF_TWO(cm->mi_cols, MAX_MIB_SIZE_LOG2);
  cm->tile_width >>= cm->log2_tile_cols;
  cm->tile_height = ALIGN_POWER_OF_TWO(cm->mi_rows, MAX_MIB_SIZE_LOG2);
  cm->tile_height >>= cm->log2_tile_rows;

  // round to integer multiples of superblock size
  cm->tile_width = ALIGN_POWER_OF_TWO(cm->tile_width, MAX_MIB_SIZE_LOG2);
  cm->tile_height = ALIGN_POWER_OF_TWO(cm->tile_height, MAX_MIB_SIZE_LOG2);

// tile size magnitude
#if !CONFIG_TILE_GROUPS
  if (cm->tile_rows > 1 || cm->tile_cols > 1)
#endif
    pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
#endif  // CONFIG_EXT_TILE

#if CONFIG_TILE_GROUPS
  // Store an index to the location of the tile group information
  pbi->tg_size_bit_offset = rb->bit_offset;
  pbi->tg_size = 1 << (cm->log2_tile_rows + cm->log2_tile_cols);
  if (cm->log2_tile_rows + cm->log2_tile_cols > 0) {
    pbi->tg_start =
        aom_rb_read_literal(rb, cm->log2_tile_rows + cm->log2_tile_cols);
    pbi->tg_size =
        1 + aom_rb_read_literal(rb, cm->log2_tile_rows + cm->log2_tile_cols);
  }
#endif
}
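
/* Worked example for the non-EXT_TILE sizing above, under assumed values
 * (mi_cols == 240 for a 1920-wide frame, MAX_MIB_SIZE_LOG2 == 4, i.e.
 * 16 MI units per superblock, and log2_tile_cols == 2):
 *   ALIGN_POWER_OF_TWO(240, 4) = 240   aligned frame width in MI units
 *   240 >> 2                   = 60    provisional tile width
 *   ALIGN_POWER_OF_TWO(60, 4)  = 64    rounded up to whole superblocks
 * giving tile columns of 64, 64, 64 MI units and a final, narrower
 * column of 48. */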

static int mem_get_varsize(const uint8_t *src, const int sz) {
  switch (sz) {
    case 1: return src[0];
    case 2: return mem_get_le16(src);
    case 3: return mem_get_le24(src);
    case 4: return mem_get_le32(src);
    default: assert("Invalid size" && 0); return -1;
  }
}
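
/* Usage sketch for mem_get_varsize(): tile sizes are little-endian
 * integers of pbi->tile_size_bytes bytes.  The byte values here are made
 * up for illustration: */
#if 0
static void mem_get_varsize_example(void) {
  const uint8_t buf[3] = { 0x34, 0x12, 0x00 };
  assert(mem_get_varsize(buf, 2) == 0x1234);   /* le16 */
  assert(mem_get_varsize(buf, 3) == 0x001234); /* le24 */
}
#endif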

#if CONFIG_EXT_TILE
// Reads the next tile returning its size and adjusting '*data' accordingly.
static void get_tile_buffer(const uint8_t *const data_end,
                            struct aom_internal_error_info *error_info,
                            const uint8_t **data, aom_decrypt_cb decrypt_cb,
                            void *decrypt_state,
                            TileBufferDec (*const tile_buffers)[MAX_TILE_COLS],
                            int tile_size_bytes, int col, int row) {
  size_t size;

  size_t copy_size = 0;
  const uint8_t *copy_data = NULL;

  if (!read_is_valid(*data, tile_size_bytes, data_end))
    aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt tile length");
  if (decrypt_cb) {
    uint8_t be_data[4];
    decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);

    // Only read number of bytes in cm->tile_size_bytes.
    size = mem_get_varsize(be_data, tile_size_bytes);
  } else {
    size = mem_get_varsize(*data, tile_size_bytes);
  }

  // The top bit indicates copy mode
  if ((size >> (tile_size_bytes * 8 - 1)) == 1) {
    // The remaining bits in the top byte signal the row offset
    int offset = (size >> (tile_size_bytes - 1) * 8) & 0x7f;

    // Currently, only tiles in the same column may be used as reference
    // tiles.
    copy_data = tile_buffers[row - offset][col].data;
    copy_size = tile_buffers[row - offset][col].size;
    size = 0;
  }

  *data += tile_size_bytes;

  if (size > (size_t)(data_end - *data))
    aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt tile size");

  if (size > 0) {
    tile_buffers[row][col].data = *data;
    tile_buffers[row][col].size = size;
  } else {
    tile_buffers[row][col].data = copy_data;
    tile_buffers[row][col].size = copy_size;
  }

  *data += size;

  tile_buffers[row][col].raw_data_end = *data;
}
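
/* Layout of the copy-mode escape decoded above.  For an N-byte size field,
 * the most significant bit flags a copy and the remaining seven bits of
 * the top byte give the row offset of the tile (in the same column) whose
 * coded data is reused.  A sketch of the unpacking: */
#if 0
static void split_tile_size(size_t size, int tile_size_bytes, int *is_copy,
                            int *row_offset) {
  *is_copy = (int)(size >> (tile_size_bytes * 8 - 1)) & 1;
  *row_offset = (int)(size >> ((tile_size_bytes - 1) * 8)) & 0x7f;
}
#endif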

static void get_tile_buffers(
    AV1Decoder *pbi, const uint8_t *data, const uint8_t *data_end,
    TileBufferDec (*const tile_buffers)[MAX_TILE_COLS]) {
  AV1_COMMON *const cm = &pbi->common;
  const int tile_cols = cm->tile_cols;
  const int tile_rows = cm->tile_rows;
  const int have_tiles = tile_cols * tile_rows > 1;

  if (!have_tiles) {
    const uint32_t tile_size = data_end - data;
    tile_buffers[0][0].data = data;
    tile_buffers[0][0].size = tile_size;
    tile_buffers[0][0].raw_data_end = NULL;
  } else {
    // We locate only the tile buffers that are required, which are the ones
    // specified by pbi->dec_tile_col and pbi->dec_tile_row. Also, we always
    // need the last (bottom right) tile buffer, as we need to know where the
    // end of the compressed frame buffer is for proper superframe decoding.

    const uint8_t *tile_col_data_end[MAX_TILE_COLS];
    const uint8_t *const data_start = data;

    const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
    const int single_row = pbi->dec_tile_row >= 0;
    const int tile_rows_start = single_row ? dec_tile_row : 0;
    const int tile_rows_end = single_row ? tile_rows_start + 1 : tile_rows;
    const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
    const int single_col = pbi->dec_tile_col >= 0;
    const int tile_cols_start = single_col ? dec_tile_col : 0;
    const int tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;

    const int tile_col_size_bytes = pbi->tile_col_size_bytes;
    const int tile_size_bytes = pbi->tile_size_bytes;

    size_t tile_col_size;
    int r, c;

    // Read tile column sizes for all columns (we need the last tile buffer)
    for (c = 0; c < tile_cols; ++c) {
      const int is_last = c == tile_cols - 1;
      if (!is_last) {
        tile_col_size = mem_get_varsize(data, tile_col_size_bytes);
        data += tile_col_size_bytes;
        tile_col_data_end[c] = data + tile_col_size;
      } else {
        tile_col_size = data_end - data;
        tile_col_data_end[c] = data_end;
      }
      data += tile_col_size;
    }

    data = data_start;

    // Read the required tile sizes.
    for (c = tile_cols_start; c < tile_cols_end; ++c) {
      const int is_last = c == tile_cols - 1;

      if (c > 0) data = tile_col_data_end[c - 1];

      if (!is_last) data += tile_col_size_bytes;

      // Get the whole of the last column, otherwise stop at the required tile.
      for (r = 0; r < (is_last ? tile_rows : tile_rows_end); ++r) {
        tile_buffers[r][c].col = c;

        get_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
                        pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
                        tile_size_bytes, c, r);
      }
    }

    // If we have not read the last column, then read it to get the last tile.
    if (tile_cols_end != tile_cols) {
      c = tile_cols - 1;

      data = tile_col_data_end[c - 1];

      for (r = 0; r < tile_rows; ++r) {
        tile_buffers[r][c].col = c;

        get_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
                        pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
                        tile_size_bytes, c, r);
      }
    }
  }
}
#else
// Reads the next tile returning its size and adjusting '*data' accordingly
// based on 'is_last'.
static void get_tile_buffer(const uint8_t *const data_end,
                            const int tile_size_bytes, int is_last,
                            struct aom_internal_error_info *error_info,
                            const uint8_t **data, aom_decrypt_cb decrypt_cb,
                            void *decrypt_state, TileBufferDec *const buf) {
  size_t size;

  if (!is_last) {
    if (!read_is_valid(*data, 4, data_end))
      aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
                         "Truncated packet or corrupt tile length");

    if (decrypt_cb) {
      uint8_t be_data[4];
      decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
      size = mem_get_varsize(be_data, tile_size_bytes);
    } else {
      size = mem_get_varsize(*data, tile_size_bytes);
    }
    *data += tile_size_bytes;

    if (size > (size_t)(data_end - *data))
      aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
                         "Truncated packet or corrupt tile size");
  } else {
    size = data_end - *data;
  }

  buf->data = *data;
  buf->size = size;

  *data += size;
}

static void get_tile_buffers(
    AV1Decoder *pbi, const uint8_t *data, const uint8_t *data_end,
    TileBufferDec (*const tile_buffers)[MAX_TILE_COLS]) {
  AV1_COMMON *const cm = &pbi->common;
#if CONFIG_TILE_GROUPS
  int r, c;
  const int tile_cols = cm->tile_cols;
  const int tile_rows = cm->tile_rows;
  int tc = 0;
  int first_tile_in_tg = 0;
  int hdr_offset;
  struct aom_read_bit_buffer rb_tg_hdr;
  uint8_t clear_data[MAX_AV1_HEADER_SIZE];
  const int num_tiles = tile_rows * tile_cols;
  const int num_bits = OD_ILOG(num_tiles) - 1;
  const int hdr_size = pbi->uncomp_hdr_size + pbi->first_partition_size;
  const int tg_size_bit_offset = pbi->tg_size_bit_offset;

  for (r = 0; r < tile_rows; ++r) {
    for (c = 0; c < tile_cols; ++c, ++tc) {
      TileBufferDec *const buf = &tile_buffers[r][c];
      hdr_offset = (tc && tc == first_tile_in_tg) ? hdr_size : 0;

      buf->col = c;
      if (hdr_offset) {
        init_read_bit_buffer(pbi, &rb_tg_hdr, data, data_end, clear_data);
        rb_tg_hdr.bit_offset = tg_size_bit_offset;
        if (num_tiles) {
          pbi->tg_start = aom_rb_read_literal(&rb_tg_hdr, num_bits);
          pbi->tg_size = 1 + aom_rb_read_literal(&rb_tg_hdr, num_bits);
        }
      }
      first_tile_in_tg += tc == first_tile_in_tg ? pbi->tg_size : 0;
      data += hdr_offset;
      get_tile_buffer(data_end, pbi->tile_size_bytes, 0, &pbi->common.error,
                      &data, pbi->decrypt_cb, pbi->decrypt_state, buf);
    }
  }
#else
  int r, c;
  const int tile_cols = cm->tile_cols;
  const int tile_rows = cm->tile_rows;

  for (r = 0; r < tile_rows; ++r) {
    for (c = 0; c < tile_cols; ++c) {
      const int is_last = (r == tile_rows - 1) && (c == tile_cols - 1);
      TileBufferDec *const buf = &tile_buffers[r][c];
      buf->col = c;
      get_tile_buffer(data_end, pbi->tile_size_bytes, is_last, &cm->error,
                      &data, pbi->decrypt_cb, pbi->decrypt_state, buf);
    }
  }
#endif
}
#endif  // CONFIG_EXT_TILE
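
/* Width of the tile-group fields read in the CONFIG_TILE_GROUPS path above:
 * tg_start and tg_size are each coded with OD_ILOG(num_tiles) - 1 bits,
 * which matches log2_tile_rows + log2_tile_cols when the tile counts are
 * powers of two, as they are on this path.  A sketch of the computation
 * (OD_ILOG(n) is 1 + floor(log2(n)) for n > 0): */
#if 0
static int tg_field_bits(int num_tiles) {
  int ilog = 0;
  while (num_tiles >> ilog) ++ilog; /* OD_ILOG */
  return ilog - 1;                  /* e.g. 8 tiles -> 4 - 1 = 3 bits */
}
#endif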

#if CONFIG_PVQ
static void daala_dec_init(daala_dec_ctx *daala_dec, od_ec_dec *ec) {
  daala_dec->ec = ec;
  od_adapt_ctx_reset(&daala_dec->state.adapt, 0);

  daala_dec->qm = OD_FLAT_QM;

  od_init_qm(daala_dec->state.qm, daala_dec->state.qm_inv,
             daala_dec->qm == OD_HVS_QM ? OD_QM8_Q4_HVS : OD_QM8_Q4_FLAT);
}
#endif

static const uint8_t *decode_tiles(AV1Decoder *pbi, const uint8_t *data,
                                   const uint8_t *data_end) {
  AV1_COMMON *const cm = &pbi->common;
  const AVxWorkerInterface *const winterface = aom_get_worker_interface();
  const int tile_cols = cm->tile_cols;
  const int tile_rows = cm->tile_rows;
  const int n_tiles = tile_cols * tile_rows;
  TileBufferDec(*const tile_buffers)[MAX_TILE_COLS] = pbi->tile_buffers;
#if CONFIG_EXT_TILE
  const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
  const int single_row = pbi->dec_tile_row >= 0;
  const int tile_rows_start = single_row ? dec_tile_row : 0;
  const int tile_rows_end = single_row ? dec_tile_row + 1 : tile_rows;
  const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
  const int single_col = pbi->dec_tile_col >= 0;
  const int tile_cols_start = single_col ? dec_tile_col : 0;
  const int tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
  const int inv_col_order = pbi->inv_tile_order && !single_col;
  const int inv_row_order = pbi->inv_tile_order && !single_row;
#else
  const int tile_rows_start = 0;
  const int tile_rows_end = tile_rows;
  const int tile_cols_start = 0;
  const int tile_cols_end = tile_cols;
  const int inv_col_order = pbi->inv_tile_order;
  const int inv_row_order = pbi->inv_tile_order;
#endif  // CONFIG_EXT_TILE
  int tile_row, tile_col;

#if CONFIG_ENTROPY
  cm->do_subframe_update = n_tiles == 1;
#endif  // CONFIG_ENTROPY

  if (cm->lf.filter_level && !cm->skip_loop_filter &&
      pbi->lf_worker.data1 == NULL) {
    CHECK_MEM_ERROR(cm, pbi->lf_worker.data1,
                    aom_memalign(32, sizeof(LFWorkerData)));
    pbi->lf_worker.hook = (AVxWorkerHook)av1_loop_filter_worker;
    if (pbi->max_threads > 1 && !winterface->reset(&pbi->lf_worker)) {
      aom_internal_error(&cm->error, AOM_CODEC_ERROR,
                         "Loop filter thread creation failed");
    }
  }

  if (cm->lf.filter_level && !cm->skip_loop_filter) {
    LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
    // Be sure to sync as we might be resuming after a failed frame decode.
    winterface->sync(&pbi->lf_worker);
    av1_loop_filter_data_reset(lf_data, get_frame_new_buffer(cm), cm,
                               pbi->mb.plane);
  }

  assert(tile_rows <= MAX_TILE_ROWS);
  assert(tile_cols <= MAX_TILE_COLS);

  get_tile_buffers(pbi, data, data_end, tile_buffers);

  if (pbi->tile_data == NULL || n_tiles != pbi->allocated_tiles) {
    aom_free(pbi->tile_data);
    CHECK_MEM_ERROR(cm, pbi->tile_data,
                    aom_memalign(32, n_tiles * (sizeof(*pbi->tile_data))));
    pbi->allocated_tiles = n_tiles;
  }
#if CONFIG_ACCOUNTING
  if (pbi->acct_enabled) {
    aom_accounting_reset(&pbi->accounting);
  }
#endif
  // Load all tile information into tile_data.
  for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
    for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
      const TileBufferDec *const buf = &tile_buffers[tile_row][tile_col];
      TileData *const td = pbi->tile_data + tile_cols * tile_row + tile_col;

      td->cm = cm;
      td->xd = pbi->mb;
      td->xd.corrupted = 0;
      td->xd.counts =
          cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD
              ? &cm->counts
              : NULL;
      av1_zero(td->dqcoeff);
#if CONFIG_PVQ
      av1_zero(td->pvq_ref_coeff);
#endif
      av1_tile_init(&td->xd.tile, td->cm, tile_row, tile_col);
#if !CONFIG_ANS
      setup_bool_decoder(buf->data, data_end, buf->size, &cm->error,
                         &td->bit_reader, pbi->decrypt_cb, pbi->decrypt_state);
#else
      setup_token_decoder(buf->data, data_end, buf->size, &cm->error,
                          &td->bit_reader, pbi->decrypt_cb, pbi->decrypt_state);
#endif
#if CONFIG_ACCOUNTING
      if (pbi->acct_enabled) {
        td->bit_reader.accounting = &pbi->accounting;
      } else {
        td->bit_reader.accounting = NULL;
      }
#endif
      av1_init_macroblockd(cm, &td->xd,
#if CONFIG_PVQ
                           td->pvq_ref_coeff,
#endif
                           td->dqcoeff);
#if CONFIG_PVQ
      daala_dec_init(&td->xd.daala_dec, &td->bit_reader.ec);
#endif
#if CONFIG_PALETTE
      td->xd.plane[0].color_index_map = td->color_index_map[0];
      td->xd.plane[1].color_index_map = td->color_index_map[1];
#endif  // CONFIG_PALETTE
    }
  }

  for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
    const int row = inv_row_order ? tile_rows - 1 - tile_row : tile_row;
    int mi_row = 0;
    TileInfo tile_info;

    av1_tile_set_row(&tile_info, cm, row);

    for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
      const int col = inv_col_order ? tile_cols - 1 - tile_col : tile_col;
      TileData *const td = pbi->tile_data + tile_cols * row + col;
#if CONFIG_ACCOUNTING
      if (pbi->acct_enabled) {
        td->bit_reader.accounting->last_tell_frac =
            aom_reader_tell_frac(&td->bit_reader);
      }
#endif

      av1_tile_set_col(&tile_info, cm, col);

      av1_zero_above_context(cm, tile_info.mi_col_start, tile_info.mi_col_end);

      for (mi_row = tile_info.mi_row_start; mi_row < tile_info.mi_row_end;
           mi_row += cm->mib_size) {
        int mi_col;

        av1_zero_left_context(&td->xd);

        for (mi_col = tile_info.mi_col_start; mi_col < tile_info.mi_col_end;
             mi_col += cm->mib_size) {
          decode_partition(pbi, &td->xd,
#if CONFIG_SUPERTX
                           0,
#endif  // CONFIG_SUPERTX
                           mi_row, mi_col, &td->bit_reader, cm->sb_size,
                           b_width_log2_lookup[cm->sb_size]);
        }
        pbi->mb.corrupted |= td->xd.corrupted;
        if (pbi->mb.corrupted)
          aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                             "Failed to decode tile data");
#if CONFIG_ENTROPY
        if (cm->do_subframe_update &&
            cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
          if ((mi_row + MI_SIZE) %
                      (MI_SIZE *
                       AOMMAX(cm->mi_rows / MI_SIZE / COEF_PROBS_BUFS, 1)) ==
                  0 &&
              mi_row + MI_SIZE < cm->mi_rows &&
              cm->coef_probs_update_idx < COEF_PROBS_BUFS - 1) {
            av1_partial_adapt_probs(cm, mi_row, mi_col);
            ++cm->coef_probs_update_idx;
          }
        }
#endif  // CONFIG_ENTROPY
      }
    }

    assert(mi_row > 0);

// When parallel deblocking is enabled, deblocking should not
// be interleaved with decoding. Instead, deblocking should be done
// after the entire frame is decoded.
#if !CONFIG_VAR_TX && !CONFIG_PARALLEL_DEBLOCKING
    // Loopfilter one tile row.
    if (cm->lf.filter_level && !cm->skip_loop_filter) {
      LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
      const int lf_start = AOMMAX(0, tile_info.mi_row_start - cm->mib_size);
      const int lf_end = tile_info.mi_row_end - cm->mib_size;

      // Delay the loopfilter if the first tile row is only
      // a single superblock high.
      if (lf_end <= 0) continue;

      // Decoding has completed. Finish up the loop filter in this thread.
      if (tile_info.mi_row_end >= cm->mi_rows) continue;

      winterface->sync(&pbi->lf_worker);
      lf_data->start = lf_start;
      lf_data->stop = lf_end;
      if (pbi->max_threads > 1) {
        winterface->launch(&pbi->lf_worker);
      } else {
        winterface->execute(&pbi->lf_worker);
      }
    }
#endif  // !CONFIG_VAR_TX && !CONFIG_PARALLEL_DEBLOCKING

    // After loopfiltering, the last 7 rows of pixels in each superblock row
    // may still be changed by the longest loopfilter of the next superblock
    // row.
    if (cm->frame_parallel_decode)
      av1_frameworker_broadcast(pbi->cur_buf, mi_row << cm->mib_size_log2);
  }

#if CONFIG_VAR_TX
  // Loopfilter the whole frame.
  av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
                        cm->lf.filter_level, 0, 0);
#else
#if CONFIG_PARALLEL_DEBLOCKING
  // Loopfilter all rows in the frame.
  if (cm->lf.filter_level && !cm->skip_loop_filter) {
    LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
    winterface->sync(&pbi->lf_worker);
    lf_data->start = 0;
    lf_data->stop = cm->mi_rows;
    winterface->execute(&pbi->lf_worker);
  }
#else
  // Loopfilter remaining rows in the frame.
  if (cm->lf.filter_level && !cm->skip_loop_filter) {
    LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
    winterface->sync(&pbi->lf_worker);
    lf_data->start = lf_data->stop;
    lf_data->stop = cm->mi_rows;
    winterface->execute(&pbi->lf_worker);
  }
#endif  // CONFIG_PARALLEL_DEBLOCKING
#endif  // CONFIG_VAR_TX
  if (cm->frame_parallel_decode)
    av1_frameworker_broadcast(pbi->cur_buf, INT_MAX);

#if CONFIG_EXT_TILE
  if (n_tiles == 1) {
#if CONFIG_ANS
    return data_end;
#else
    // Find the end of the single tile buffer
    return aom_reader_find_end(&pbi->tile_data->bit_reader);
#endif  // CONFIG_ANS
  } else {
    // Return the end of the last tile buffer
    return tile_buffers[tile_rows - 1][tile_cols - 1].raw_data_end;
  }
#else
#if CONFIG_ANS
  return data_end;
#else
  {
    // Get last tile data.
    TileData *const td = pbi->tile_data + tile_cols * tile_rows - 1;
    return aom_reader_find_end(&td->bit_reader);
  }
#endif  // CONFIG_ANS
#endif  // CONFIG_EXT_TILE
}

static int tile_worker_hook(TileWorkerData *const tile_data,
                            const TileInfo *const tile) {
  AV1Decoder *const pbi = tile_data->pbi;
  const AV1_COMMON *const cm = &pbi->common;
  int mi_row, mi_col;

  if (setjmp(tile_data->error_info.jmp)) {
    tile_data->error_info.setjmp = 0;
    tile_data->xd.corrupted = 1;
    return 0;
  }

  tile_data->error_info.setjmp = 1;
  tile_data->xd.error_info = &tile_data->error_info;

  av1_zero_above_context(&pbi->common, tile->mi_col_start, tile->mi_col_end);

  for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
       mi_row += cm->mib_size) {
    av1_zero_left_context(&tile_data->xd);

    for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
         mi_col += cm->mib_size) {
      decode_partition(pbi, &tile_data->xd,
#if CONFIG_SUPERTX
                       0,
#endif
                       mi_row, mi_col, &tile_data->bit_reader, cm->sb_size,
                       b_width_log2_lookup[cm->sb_size]);
    }
  }
  return !tile_data->xd.corrupted;
}

// sorts in descending order
static int compare_tile_buffers(const void *a, const void *b) {
  const TileBufferDec *const buf1 = (const TileBufferDec *)a;
  const TileBufferDec *const buf2 = (const TileBufferDec *)b;
  return (int)(buf2->size - buf1->size);
}
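
/* compare_tile_buffers() feeds the qsort in decode_tiles_mt(): each tile
 * row is sorted largest-first, then every group of num_workers tiles is
 * rotated so the largest tile lands in the last slot, which is executed
 * synchronously on the main thread.  With assumed sizes:
 *   after qsort:       { 900, 700, 400, 100 }   (num_workers == 4)
 *   after the memmove: { 700, 400, 100, 900 }
 * so the main thread takes the biggest tile instead of idling while a
 * worker finishes it.  Note the comparator truncates a size_t difference
 * to int; with realistic tile sizes this stays in range. */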

static const uint8_t *decode_tiles_mt(AV1Decoder *pbi, const uint8_t *data,
                                      const uint8_t *data_end) {
  AV1_COMMON *const cm = &pbi->common;
  const AVxWorkerInterface *const winterface = aom_get_worker_interface();
  const int tile_cols = cm->tile_cols;
  const int tile_rows = cm->tile_rows;
  const int num_workers = AOMMIN(pbi->max_threads & ~1, tile_cols);
  TileBufferDec(*const tile_buffers)[MAX_TILE_COLS] = pbi->tile_buffers;
#if CONFIG_EXT_TILE
  const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
  const int single_row = pbi->dec_tile_row >= 0;
  const int tile_rows_start = single_row ? dec_tile_row : 0;
  const int tile_rows_end = single_row ? dec_tile_row + 1 : tile_rows;
  const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
  const int single_col = pbi->dec_tile_col >= 0;
  const int tile_cols_start = single_col ? dec_tile_col : 0;
  const int tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
#else
  const int tile_rows_start = 0;
  const int tile_rows_end = tile_rows;
  const int tile_cols_start = 0;
  const int tile_cols_end = tile_cols;
#endif  // CONFIG_EXT_TILE
  int tile_row, tile_col;
  int i;

#if !(CONFIG_ANS || CONFIG_EXT_TILE)
  int final_worker = -1;
#endif  // !(CONFIG_ANS || CONFIG_EXT_TILE)

  assert(tile_rows <= MAX_TILE_ROWS);
  assert(tile_cols <= MAX_TILE_COLS);

  assert(tile_cols * tile_rows > 1);

#if CONFIG_ANS
  // TODO(any): This might just work now. Needs to be tested.
  abort();  // FIXME: Tile parsing broken
#endif  // CONFIG_ANS

  // TODO(jzern): See if we can remove the restriction of passing in max
  // threads to the decoder.
  if (pbi->num_tile_workers == 0) {
    const int num_threads = pbi->max_threads & ~1;
    CHECK_MEM_ERROR(cm, pbi->tile_workers,
                    aom_malloc(num_threads * sizeof(*pbi->tile_workers)));
    // Ensure tile data offsets will be properly aligned. This may fail on
    // platforms without DECLARE_ALIGNED().
    assert((sizeof(*pbi->tile_worker_data) % 16) == 0);
    CHECK_MEM_ERROR(
        cm, pbi->tile_worker_data,
        aom_memalign(32, num_threads * sizeof(*pbi->tile_worker_data)));
    CHECK_MEM_ERROR(cm, pbi->tile_worker_info,
                    aom_malloc(num_threads * sizeof(*pbi->tile_worker_info)));
    for (i = 0; i < num_threads; ++i) {
      AVxWorker *const worker = &pbi->tile_workers[i];
      ++pbi->num_tile_workers;

      winterface->init(worker);
      if (i < num_threads - 1 && !winterface->reset(worker)) {
        aom_internal_error(&cm->error, AOM_CODEC_ERROR,
                           "Tile decoder thread creation failed");
      }
    }
  }

  // Reset tile decoding hook
  for (i = 0; i < num_workers; ++i) {
    AVxWorker *const worker = &pbi->tile_workers[i];
    winterface->sync(worker);
    worker->hook = (AVxWorkerHook)tile_worker_hook;
    worker->data1 = &pbi->tile_worker_data[i];
    worker->data2 = &pbi->tile_worker_info[i];
  }

  // Initialize thread frame counts.
  if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
    for (i = 0; i < num_workers; ++i) {
      TileWorkerData *const twd = (TileWorkerData *)pbi->tile_workers[i].data1;
      av1_zero(twd->counts);
    }
  }

  // Load tile data into tile_buffers
  get_tile_buffers(pbi, data, data_end, tile_buffers);

  for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
    // Sort the buffers in this tile row based on size in descending order.
    qsort(&tile_buffers[tile_row][tile_cols_start],
          tile_cols_end - tile_cols_start, sizeof(tile_buffers[0][0]),
          compare_tile_buffers);

    // Rearrange the tile buffers in this tile row so that, per tile group,
    // the largest (and presumably the most difficult) tile is decoded in
    // the main thread. This should help minimize the number of instances
    // where the main thread is waiting for a worker to complete.
    {
      int group_start;
      for (group_start = tile_cols_start; group_start < tile_cols_end;
           group_start += num_workers) {
        const int group_end = AOMMIN(group_start + num_workers, tile_cols);
        const TileBufferDec largest = tile_buffers[tile_row][group_start];
        memmove(&tile_buffers[tile_row][group_start],
                &tile_buffers[tile_row][group_start + 1],
                (group_end - group_start - 1) * sizeof(tile_buffers[0][0]));
        tile_buffers[tile_row][group_end - 1] = largest;
      }
    }

    for (tile_col = tile_cols_start; tile_col < tile_cols_end;) {
      // Launch workers for individual columns
      for (i = 0; i < num_workers && tile_col < tile_cols_end;
           ++i, ++tile_col) {
        TileBufferDec *const buf = &tile_buffers[tile_row][tile_col];
        AVxWorker *const worker = &pbi->tile_workers[i];
        TileWorkerData *const twd = (TileWorkerData *)worker->data1;
        TileInfo *const tile_info = (TileInfo *)worker->data2;

        twd->pbi = pbi;
        twd->xd = pbi->mb;
        twd->xd.corrupted = 0;
        twd->xd.counts =
            cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD
                ? &twd->counts
                : NULL;
        av1_zero(twd->dqcoeff);
        av1_tile_init(tile_info, cm, tile_row, buf->col);
        av1_tile_init(&twd->xd.tile, cm, tile_row, buf->col);
#if !CONFIG_ANS
        setup_bool_decoder(buf->data, data_end, buf->size, &cm->error,
                           &twd->bit_reader, pbi->decrypt_cb,
                           pbi->decrypt_state);
#else
        setup_token_decoder(buf->data, data_end, buf->size, &cm->error,
                            &twd->bit_reader, pbi->decrypt_cb,
                            pbi->decrypt_state);
#endif  // CONFIG_ANS
        av1_init_macroblockd(cm, &twd->xd,
#if CONFIG_PVQ
                             twd->pvq_ref_coeff,
#endif
                             twd->dqcoeff);
#if CONFIG_PVQ
        daala_dec_init(&twd->xd.daala_dec, &twd->bit_reader.ec);
#endif
#if CONFIG_PALETTE
        twd->xd.plane[0].color_index_map = twd->color_index_map[0];
        twd->xd.plane[1].color_index_map = twd->color_index_map[1];
#endif  // CONFIG_PALETTE

        worker->had_error = 0;
        if (i == num_workers - 1 || tile_col == tile_cols_end - 1) {
          winterface->execute(worker);
        } else {
          winterface->launch(worker);
        }

#if !(CONFIG_ANS || CONFIG_EXT_TILE)
        if (tile_row == tile_rows - 1 && buf->col == tile_cols - 1) {
          final_worker = i;
        }
#endif  // !(CONFIG_ANS || CONFIG_EXT_TILE)
      }

      // Sync all workers
      for (; i > 0; --i) {
        AVxWorker *const worker = &pbi->tile_workers[i - 1];
        // TODO(jzern): The tile may have specific error data associated with
        // its aom_internal_error_info which could be propagated to the main
        // info in cm. Additionally once the threads have been synced and an
        // error is detected, there's no point in continuing to decode tiles.
        pbi->mb.corrupted |= !winterface->sync(worker);
      }
    }
  }

  // Accumulate thread frame counts.
  if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
    for (i = 0; i < num_workers; ++i) {
      TileWorkerData *const twd = (TileWorkerData *)pbi->tile_workers[i].data1;
      av1_accumulate_frame_counts(cm, &twd->counts);
    }
  }

#if CONFIG_EXT_TILE
  // Return the end of the last tile buffer
  return tile_buffers[tile_rows - 1][tile_cols - 1].raw_data_end;
#else
#if CONFIG_ANS
  return data_end;
#else
  assert(final_worker != -1);
  {
    TileWorkerData *const twd =
        (TileWorkerData *)pbi->tile_workers[final_worker].data1;
    return aom_reader_find_end(&twd->bit_reader);
  }
#endif  // CONFIG_ANS
#endif  // CONFIG_EXT_TILE
}

static void error_handler(void *data) {
  AV1_COMMON *const cm = (AV1_COMMON *)data;
  aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME, "Truncated packet");
}

static void read_bitdepth_colorspace_sampling(AV1_COMMON *cm,
                                              struct aom_read_bit_buffer *rb) {
  if (cm->profile >= PROFILE_2) {
    cm->bit_depth = aom_rb_read_bit(rb) ? AOM_BITS_12 : AOM_BITS_10;
#if CONFIG_AOM_HIGHBITDEPTH
    cm->use_highbitdepth = 1;
#endif
  } else {
    cm->bit_depth = AOM_BITS_8;
#if CONFIG_AOM_HIGHBITDEPTH
    cm->use_highbitdepth = 0;
#endif
  }
  cm->color_space = aom_rb_read_literal(rb, 3);
  if (cm->color_space != AOM_CS_SRGB) {
    // [16,235] (including xvycc) vs [0,255] range
    cm->color_range = aom_rb_read_bit(rb);
    if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
      cm->subsampling_x = aom_rb_read_bit(rb);
      cm->subsampling_y = aom_rb_read_bit(rb);
      if (cm->subsampling_x == 1 && cm->subsampling_y == 1)
        aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                           "4:2:0 color not supported in profile 1 or 3");
      if (aom_rb_read_bit(rb))
        aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                           "Reserved bit set");
    } else {
      cm->subsampling_y = cm->subsampling_x = 1;
    }
  } else {
    if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
      // Note if colorspace is SRGB then 4:4:4 chroma sampling is assumed.
      // 4:2:2 or 4:4:0 chroma sampling is not allowed.
      cm->subsampling_y = cm->subsampling_x = 0;
      if (aom_rb_read_bit(rb))
        aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                           "Reserved bit set");
    } else {
      aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                         "4:4:4 color not supported in profile 0 or 2");
    }
  }
}
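
/* Profile combinations accepted by the branches above, summarized for
 * reference (derived from the control flow, not an authoritative table):
 *   PROFILE_0:  8-bit,     4:2:0 only
 *   PROFILE_1:  8-bit,     4:4:4 / 4:2:2 / 4:4:0 via subsampling bits
 *                          (4:2:0 rejected), or SRGB 4:4:4
 *   PROFILE_2:  10/12-bit, 4:2:0 only
 *   PROFILE_3:  10/12-bit, 4:4:4 / 4:2:2 / 4:4:0 via subsampling bits
 *                          (4:2:0 rejected), or SRGB 4:4:4 */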
3578
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003579#if CONFIG_REFERENCE_BUFFER
3580void read_sequence_header(SequenceHeader *seq_params) {
3581 /* Placeholder for actually reading from the bitstream */
3582 seq_params->frame_id_numbers_present_flag = FRAME_ID_NUMBERS_PRESENT_FLAG;
3583 seq_params->frame_id_length_minus7 = FRAME_ID_LENGTH_MINUS7;
3584 seq_params->delta_frame_id_length_minus2 = DELTA_FRAME_ID_LENGTH_MINUS2;
3585}
3586#endif
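
/* Field widths implied by the sequence header above, as used later in
 * read_uncompressed_header():
 *   FidLen  = frame_id_length_minus7 + 7        bits of current_frame_id
 *   DiffLen = delta_frame_id_length_minus2 + 2  bits of delta frame ids
 * Frame ids live in [0, 1 << FidLen) and differences wrap modulo
 * 1 << FidLen; e.g. with FidLen == 8, prev == 250 and cur == 3 give
 * DiffFrameID == 256 + 3 - 250 == 9, which must be nonzero and below
 * 1 << (FidLen - 1) to be conformant. */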
3587
Yaowu Xuf883b422016-08-30 14:01:10 -07003588static size_t read_uncompressed_header(AV1Decoder *pbi,
3589 struct aom_read_bit_buffer *rb) {
3590 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003591 MACROBLOCKD *const xd = &pbi->mb;
3592 BufferPool *const pool = cm->buffer_pool;
3593 RefCntBuffer *const frame_bufs = pool->frame_bufs;
3594 int i, mask, ref_index = 0;
3595 size_t sz;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003596
3597#if CONFIG_REFERENCE_BUFFER
3598 /* TODO: Move outside frame loop or inside key-frame branch */
3599 read_sequence_header(&pbi->seq_params);
3600#endif
3601
Yaowu Xuc27fc142016-08-22 16:08:15 -07003602 cm->last_frame_type = cm->frame_type;
3603 cm->last_intra_only = cm->intra_only;
3604
3605#if CONFIG_EXT_REFS
3606 // NOTE: By default all coded frames to be used as a reference
3607 cm->is_reference_frame = 1;
3608#endif // CONFIG_EXT_REFS
3609
Yaowu Xuf883b422016-08-30 14:01:10 -07003610 if (aom_rb_read_literal(rb, 2) != AOM_FRAME_MARKER)
3611 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003612 "Invalid frame marker");
3613
Yaowu Xuf883b422016-08-30 14:01:10 -07003614 cm->profile = av1_read_profile(rb);
3615#if CONFIG_AOM_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07003616 if (cm->profile >= MAX_PROFILES)
Yaowu Xuf883b422016-08-30 14:01:10 -07003617 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003618 "Unsupported bitstream profile");
3619#else
3620 if (cm->profile >= PROFILE_2)
Yaowu Xuf883b422016-08-30 14:01:10 -07003621 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003622 "Unsupported bitstream profile");
3623#endif
3624
Yaowu Xuf883b422016-08-30 14:01:10 -07003625 cm->show_existing_frame = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003626
3627 if (cm->show_existing_frame) {
3628 // Show an existing frame directly.
Yaowu Xuf883b422016-08-30 14:01:10 -07003629 const int frame_to_show = cm->ref_frame_map[aom_rb_read_literal(rb, 3)];
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003630#if CONFIG_REFERENCE_BUFFER
3631 if (pbi->seq_params.frame_id_numbers_present_flag) {
3632 int FidLen = pbi->seq_params.frame_id_length_minus7 + 7;
3633 int display_frame_id = aom_rb_read_literal(rb, FidLen);
3634 /* Compare display_frame_id with ref_frame_id and check valid for
3635 * referencing */
3636 if (display_frame_id != cm->ref_frame_id[frame_to_show] ||
3637 cm->valid_for_referencing[frame_to_show] == 0)
3638 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3639 "Reference buffer frame ID mismatch");
3640 }
3641#endif
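    // The frame ID checked out (or frame ID numbers are disabled): show the
    // referenced buffer directly without decoding a new frame.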
    lock_buffer_pool(pool);
    if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
      unlock_buffer_pool(pool);
      aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                         "Buffer %d does not contain a decoded frame",
                         frame_to_show);
    }
    ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
    unlock_buffer_pool(pool);

    cm->lf.filter_level = 0;
    cm->show_frame = 1;
    pbi->refresh_frame_flags = 0;

    if (cm->frame_parallel_decode) {
      for (i = 0; i < REF_FRAMES; ++i)
        cm->next_ref_frame_map[i] = cm->ref_frame_map[i];
    }

    return 0;
  }

  cm->frame_type = (FRAME_TYPE)aom_rb_read_bit(rb);
  cm->show_frame = aom_rb_read_bit(rb);
  cm->error_resilient_mode = aom_rb_read_bit(rb);
#if CONFIG_REFERENCE_BUFFER
  if (pbi->seq_params.frame_id_numbers_present_flag) {
    int FidLen = pbi->seq_params.frame_id_length_minus7 + 7;
    int DiffLen = pbi->seq_params.delta_frame_id_length_minus2 + 2;
    int PrevFrameId = 0;
    if (cm->frame_type != KEY_FRAME) {
      PrevFrameId = cm->current_frame_id;
    }
    cm->current_frame_id = aom_rb_read_literal(rb, FidLen);

    if (cm->frame_type != KEY_FRAME) {
      int DiffFrameID;
      if (cm->current_frame_id > PrevFrameId) {
        DiffFrameID = cm->current_frame_id - PrevFrameId;
      } else {
        DiffFrameID = (1 << FidLen) + cm->current_frame_id - PrevFrameId;
      }
      /* Check current_frame_id for conformance */
      if (PrevFrameId == cm->current_frame_id ||
          DiffFrameID >= (1 << (FidLen - 1))) {
        aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                           "Invalid value of current_frame_id");
      }
    }
    /* Check if some frames need to be marked as not valid for referencing */
    for (i = 0; i < REF_FRAMES; i++) {
      if (cm->frame_type == KEY_FRAME) {
        cm->valid_for_referencing[i] = 0;
      } else if (cm->current_frame_id - (1 << DiffLen) > 0) {
        if (cm->ref_frame_id[i] > cm->current_frame_id ||
            cm->ref_frame_id[i] < cm->current_frame_id - (1 << DiffLen))
          cm->valid_for_referencing[i] = 0;
      } else {
        if (cm->ref_frame_id[i] > cm->current_frame_id &&
            cm->ref_frame_id[i] <
                (1 << FidLen) + cm->current_frame_id - (1 << DiffLen))
          cm->valid_for_referencing[i] = 0;
      }
    }
  }
#endif
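  // Frame IDs live in a modular space of size 1 << FidLen, so the distance
  // from the previous frame above is computed with wrap-around and must stay
  // below half the space. For example, with FidLen == 8 (a space of 256),
  // going from PrevFrameId == 250 to current_frame_id == 3 gives
  // DiffFrameID == 256 + 3 - 250 == 9, which is valid; a repeated ID or a
  // jump of 128 or more is rejected as corrupt.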
  if (cm->frame_type == KEY_FRAME) {
    if (!av1_read_sync_code(rb))
      aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                         "Invalid frame sync code");

    read_bitdepth_colorspace_sampling(cm, rb);
    pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1;

    for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
      cm->frame_refs[i].idx = INVALID_IDX;
      cm->frame_refs[i].buf = NULL;
    }

    setup_frame_size(cm, rb);
    if (pbi->need_resync) {
      memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
      pbi->need_resync = 0;
    }
#if CONFIG_PALETTE
    cm->allow_screen_content_tools = aom_rb_read_bit(rb);
#endif  // CONFIG_PALETTE
  } else {
    cm->intra_only = cm->show_frame ? 0 : aom_rb_read_bit(rb);
#if CONFIG_PALETTE
    if (cm->intra_only) cm->allow_screen_content_tools = aom_rb_read_bit(rb);
#endif  // CONFIG_PALETTE
    if (cm->error_resilient_mode) {
      cm->reset_frame_context = RESET_FRAME_CONTEXT_ALL;
    } else {
      if (cm->intra_only) {
        cm->reset_frame_context = aom_rb_read_bit(rb)
                                      ? RESET_FRAME_CONTEXT_ALL
                                      : RESET_FRAME_CONTEXT_CURRENT;
      } else {
        cm->reset_frame_context = aom_rb_read_bit(rb)
                                      ? RESET_FRAME_CONTEXT_CURRENT
                                      : RESET_FRAME_CONTEXT_NONE;
        if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT)
          cm->reset_frame_context = aom_rb_read_bit(rb)
                                        ? RESET_FRAME_CONTEXT_ALL
                                        : RESET_FRAME_CONTEXT_CURRENT;
      }
    }

    if (cm->intra_only) {
      if (!av1_read_sync_code(rb))
        aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                           "Invalid frame sync code");

      read_bitdepth_colorspace_sampling(cm, rb);

      pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
      setup_frame_size(cm, rb);
      if (pbi->need_resync) {
        memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
        pbi->need_resync = 0;
      }
    } else if (pbi->need_resync != 1) { /* Skip if need resync */
      pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);

#if CONFIG_EXT_REFS
      if (!pbi->refresh_frame_flags) {
        // NOTE: "pbi->refresh_frame_flags == 0" indicates that the coded frame
        //       will not be used as a reference
        cm->is_reference_frame = 0;
      }
#endif  // CONFIG_EXT_REFS

      for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
        const int ref = aom_rb_read_literal(rb, REF_FRAMES_LOG2);
        const int idx = cm->ref_frame_map[ref];
        RefBuffer *const ref_frame = &cm->frame_refs[i];
        ref_frame->idx = idx;
        ref_frame->buf = &frame_bufs[idx].buf;
        cm->ref_frame_sign_bias[LAST_FRAME + i] = aom_rb_read_bit(rb);
#if CONFIG_REFERENCE_BUFFER
        if (pbi->seq_params.frame_id_numbers_present_flag) {
          int FidLen = pbi->seq_params.frame_id_length_minus7 + 7;
          int DiffLen = pbi->seq_params.delta_frame_id_length_minus2 + 2;
          int delta_frame_id_minus1 = aom_rb_read_literal(rb, DiffLen);
          int refFrameId = ((cm->current_frame_id -
                             (delta_frame_id_minus1 + 1) + (1 << FidLen)) %
                            (1 << FidLen));
          /* Compare values derived from delta_frame_id_minus1 and
           * refresh_frame_flags. Also, check valid for referencing */
          if (refFrameId != cm->ref_frame_id[ref] ||
              cm->valid_for_referencing[ref] == 0)
            aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                               "Reference buffer frame ID mismatch");
        }
#endif
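        // refFrameId above reconstructs the reference's frame ID from the
        // coded delta, again modulo 1 << FidLen. E.g. with FidLen == 8,
        // current_frame_id == 3 and delta_frame_id_minus1 == 4, the expected
        // ID is (3 - 5 + 256) % 256 == 254.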
      }

#if CONFIG_FRAME_SIZE
      if (cm->error_resilient_mode == 0) {
        setup_frame_size_with_refs(cm, rb);
      } else {
        setup_frame_size(cm, rb);
      }
#else
      setup_frame_size_with_refs(cm, rb);
#endif

      cm->allow_high_precision_mv = aom_rb_read_bit(rb);
      cm->interp_filter = read_interp_filter(rb);

      for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
        RefBuffer *const ref_buf = &cm->frame_refs[i];
#if CONFIG_AOM_HIGHBITDEPTH
        av1_setup_scale_factors_for_frame(
            &ref_buf->sf, ref_buf->buf->y_crop_width,
            ref_buf->buf->y_crop_height, cm->width, cm->height,
            cm->use_highbitdepth);
#else
        av1_setup_scale_factors_for_frame(
            &ref_buf->sf, ref_buf->buf->y_crop_width,
            ref_buf->buf->y_crop_height, cm->width, cm->height);
#endif
      }
    }
  }

#if CONFIG_REFERENCE_BUFFER
  if (pbi->seq_params.frame_id_numbers_present_flag) {
    /* If bitmask is set, update reference frame id values and
       mark frames as valid for reference */
    int refresh_frame_flags =
        cm->frame_type == KEY_FRAME ? 0xFF : pbi->refresh_frame_flags;
    for (i = 0; i < REF_FRAMES; i++) {
      if ((refresh_frame_flags >> i) & 1) {
        cm->ref_frame_id[i] = cm->current_frame_id;
        cm->valid_for_referencing[i] = 1;
      }
    }
  }
#endif
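  // On key frames every slot is refreshed; with REF_FRAMES == 8 the
  // (1 << REF_FRAMES) - 1 mask set earlier equals the 0xFF used above.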

#if CONFIG_AOM_HIGHBITDEPTH
  get_frame_new_buffer(cm)->bit_depth = cm->bit_depth;
#endif
  get_frame_new_buffer(cm)->color_space = cm->color_space;
  get_frame_new_buffer(cm)->color_range = cm->color_range;
  get_frame_new_buffer(cm)->render_width = cm->render_width;
  get_frame_new_buffer(cm)->render_height = cm->render_height;

  if (pbi->need_resync) {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Keyframe / intra-only frame required to reset decoder"
                       " state");
  }

  if (!cm->error_resilient_mode) {
    cm->refresh_frame_context = aom_rb_read_bit(rb)
                                    ? REFRESH_FRAME_CONTEXT_FORWARD
                                    : REFRESH_FRAME_CONTEXT_BACKWARD;
  } else {
    cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_FORWARD;
  }

  // This flag will be overridden by the call to av1_setup_past_independence
  // below, forcing the use of context 0 for those frame types.
  cm->frame_context_idx = aom_rb_read_literal(rb, FRAME_CONTEXTS_LOG2);

  // Generate next_ref_frame_map.
  lock_buffer_pool(pool);
  for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
    if (mask & 1) {
      cm->next_ref_frame_map[ref_index] = cm->new_fb_idx;
      ++frame_bufs[cm->new_fb_idx].ref_count;
    } else {
      cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
    }
    // Current thread holds the reference frame.
    if (cm->ref_frame_map[ref_index] >= 0)
      ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
    ++ref_index;
  }

  for (; ref_index < REF_FRAMES; ++ref_index) {
    cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];

    // Current thread holds the reference frame.
    if (cm->ref_frame_map[ref_index] >= 0)
      ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
  }
  unlock_buffer_pool(pool);
  pbi->hold_ref_buf = 1;

  if (frame_is_intra_only(cm) || cm->error_resilient_mode)
    av1_setup_past_independence(cm);

#if CONFIG_EXT_PARTITION
  set_sb_size(cm, aom_rb_read_bit(rb) ? BLOCK_128X128 : BLOCK_64X64);
#else
  set_sb_size(cm, BLOCK_64X64);
#endif  // CONFIG_EXT_PARTITION

  setup_loopfilter(cm, rb);
#if CONFIG_DERING
  setup_dering(cm, rb);
#endif
#if CONFIG_CLPF
  setup_clpf(pbi, rb);
#endif
#if CONFIG_LOOP_RESTORATION
  decode_restoration_mode(cm, rb);
#endif  // CONFIG_LOOP_RESTORATION
  setup_quantization(cm, rb);
#if CONFIG_AOM_HIGHBITDEPTH
  xd->bd = (int)cm->bit_depth;
#endif

#if CONFIG_ENTROPY
  av1_default_coef_probs(cm);
  if (cm->frame_type == KEY_FRAME || cm->error_resilient_mode ||
      cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL) {
    for (i = 0; i < FRAME_CONTEXTS; ++i) cm->frame_contexts[i] = *cm->fc;
  } else if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT) {
    cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
  }
#endif  // CONFIG_ENTROPY

  setup_segmentation(cm, rb);

#if CONFIG_DELTA_Q
  {
    struct segmentation *const seg = &cm->seg;
    int segment_quantizer_active = 0;
    for (i = 0; i < MAX_SEGMENTS; i++) {
      if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
        segment_quantizer_active = 1;
      }
    }

    cm->delta_q_res = 1;
    if (segment_quantizer_active == 0) {
      cm->delta_q_present_flag = aom_rb_read_bit(rb);
    } else {
      cm->delta_q_present_flag = 0;
    }
    if (cm->delta_q_present_flag) {
      xd->prev_qindex = cm->base_qindex;
      cm->delta_q_res = 1 << aom_rb_read_literal(rb, 2);
    }
  }
#endif
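  // When present, the delta-q step is signaled as a 2-bit log2, so
  // delta_q_res takes one of {1, 2, 4, 8}; segment-level quantizers and
  // block-level delta q are mutually exclusive above.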

  for (i = 0; i < MAX_SEGMENTS; ++i) {
    const int qindex = cm->seg.enabled
                           ? av1_get_qindex(&cm->seg, i, cm->base_qindex)
                           : cm->base_qindex;
    xd->lossless[i] = qindex == 0 && cm->y_dc_delta_q == 0 &&
                      cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
    xd->qindex[i] = qindex;
  }

  setup_segmentation_dequant(cm);
  cm->tx_mode =
      (!cm->seg.enabled && xd->lossless[0]) ? ONLY_4X4 : read_tx_mode(rb);
  cm->reference_mode = read_frame_reference_mode(cm, rb);

  read_tile_info(pbi, rb);
  sz = aom_rb_read_literal(rb, 16);

  if (sz == 0)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Invalid header size");
  return sz;
}

#if CONFIG_EXT_TX
#if !CONFIG_EC_ADAPT || !CONFIG_DAALA_EC
static void read_ext_tx_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i, j, k;
  int s;
  for (s = 1; s < EXT_TX_SETS_INTER; ++s) {
    if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
      for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
        if (!use_inter_ext_tx_for_txsize[s][i]) continue;
        for (j = 0; j < num_ext_tx_set_inter[s] - 1; ++j)
          av1_diff_update_prob(r, &fc->inter_ext_tx_prob[s][i][j], ACCT_STR);
      }
    }
  }

  for (s = 1; s < EXT_TX_SETS_INTRA; ++s) {
    if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
      for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
        if (!use_intra_ext_tx_for_txsize[s][i]) continue;
        for (j = 0; j < INTRA_MODES; ++j)
          for (k = 0; k < num_ext_tx_set_intra[s] - 1; ++k)
            av1_diff_update_prob(r, &fc->intra_ext_tx_prob[s][i][j][k],
                                 ACCT_STR);
      }
    }
  }
}
#endif  // !CONFIG_EC_ADAPT || !CONFIG_DAALA_EC
#else

#endif  // CONFIG_EXT_TX
#if CONFIG_SUPERTX
static void read_supertx_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i, j;
  if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
    for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
      for (j = 1; j < TX_SIZES; ++j) {
        av1_diff_update_prob(r, &fc->supertx_prob[i][j], ACCT_STR);
      }
    }
  }
}
#endif  // CONFIG_SUPERTX

#if CONFIG_GLOBAL_MOTION
static void read_global_motion_params(WarpedMotionParams *params,
                                      aom_prob *probs, aom_reader *r) {
  TransformationType type =
      aom_read_tree(r, av1_global_motion_types_tree, probs, ACCT_STR);
  set_default_gmparams(params);
  params->wmtype = type;
  switch (type) {
    case HOMOGRAPHY:
      params->wmmat[6] = aom_read_primitive_symmetric(r, GM_ABS_ROW3HOMO_BITS) *
                         GM_ROW3HOMO_DECODE_FACTOR;
      params->wmmat[7] = aom_read_primitive_symmetric(r, GM_ABS_ROW3HOMO_BITS) *
                         GM_ROW3HOMO_DECODE_FACTOR;
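      // fallthrough intended: a homography also carries the affine params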
    case AFFINE:
    case ROTZOOM:
      params->wmmat[2] = aom_read_primitive_symmetric(r, GM_ABS_ALPHA_BITS) *
                             GM_ALPHA_DECODE_FACTOR +
                         (1 << WARPEDMODEL_PREC_BITS);
      params->wmmat[3] = aom_read_primitive_symmetric(r, GM_ABS_ALPHA_BITS) *
                         GM_ALPHA_DECODE_FACTOR;
      if (type == AFFINE || type == HOMOGRAPHY) {
        params->wmmat[4] = aom_read_primitive_symmetric(r, GM_ABS_ALPHA_BITS) *
                           GM_ALPHA_DECODE_FACTOR;
        params->wmmat[5] = aom_read_primitive_symmetric(r, GM_ABS_ALPHA_BITS) *
                               GM_ALPHA_DECODE_FACTOR +
                           (1 << WARPEDMODEL_PREC_BITS);
      } else {
        params->wmmat[4] = -params->wmmat[3];
        params->wmmat[5] = params->wmmat[2];
      }
      // fallthrough intended
    case TRANSLATION:
      params->wmmat[0] = aom_read_primitive_symmetric(r, GM_ABS_TRANS_BITS) *
                         GM_TRANS_DECODE_FACTOR;
      params->wmmat[1] = aom_read_primitive_symmetric(r, GM_ABS_TRANS_BITS) *
                         GM_TRANS_DECODE_FACTOR;
      break;
    case IDENTITY: break;
    default: assert(0);
  }
}
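// A sketch of the wmmat layout as read above (assuming the usual row-major
// 3x3 warp model): wmmat[0..1] hold the translation, wmmat[2..5] the 2x2
// affine part (diagonal offset by 1 << WARPEDMODEL_PREC_BITS), and
// wmmat[6..7] the third-row homography terms. ROTZOOM derives wmmat[4..5]
// from wmmat[2..3] instead of coding them.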

static void read_global_motion(AV1_COMMON *cm, aom_reader *r) {
  int frame;
  for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
    read_global_motion_params(&cm->global_motion[frame],
                              cm->fc->global_motion_types_prob, r);
    /*
    printf("Dec Ref %d [%d/%d]: %d %d %d %d\n",
           frame, cm->current_video_frame, cm->show_frame,
           cm->global_motion[frame].wmmat[0],
           cm->global_motion[frame].wmmat[1],
           cm->global_motion[frame].wmmat[2],
           cm->global_motion[frame].wmmat[3]);
    */
  }
}
#endif  // CONFIG_GLOBAL_MOTION

static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
                                  size_t partition_size) {
  AV1_COMMON *const cm = &pbi->common;
#if CONFIG_SUPERTX
  MACROBLOCKD *const xd = &pbi->mb;
#endif
  FRAME_CONTEXT *const fc = cm->fc;
  aom_reader r;
  int k, i;
#if !CONFIG_EC_ADAPT
  int j;
#endif

#if !CONFIG_ANS
  if (aom_reader_init(&r, data, partition_size, pbi->decrypt_cb,
                      pbi->decrypt_state))
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate bool decoder 0");
#else
  if (ans_read_init(&r, data, (int)partition_size))
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate compressed header ANS decoder");
#endif  // !CONFIG_ANS

#if CONFIG_LOOP_RESTORATION
  decode_restoration(cm, &r);
#endif

  if (cm->tx_mode == TX_MODE_SELECT) read_tx_size_probs(fc, &r);

#if !CONFIG_PVQ
  read_coef_probs(fc, cm->tx_mode, &r);

#if CONFIG_VAR_TX
  for (k = 0; k < TXFM_PARTITION_CONTEXTS; ++k)
    av1_diff_update_prob(&r, &fc->txfm_partition_prob[k], ACCT_STR);
#if CONFIG_EXT_TX && CONFIG_RECT_TX
  if (cm->tx_mode == TX_MODE_SELECT) {
    for (i = 1; i < MAX_TX_DEPTH; ++i)
      av1_diff_update_prob(&r, &fc->rect_tx_prob[i], ACCT_STR);
  }
#endif  // CONFIG_EXT_TX && CONFIG_RECT_TX
#endif  // CONFIG_VAR_TX
#endif  // !CONFIG_PVQ
  for (k = 0; k < SKIP_CONTEXTS; ++k)
    av1_diff_update_prob(&r, &fc->skip_probs[k], ACCT_STR);

#if CONFIG_DELTA_Q
  for (k = 0; k < DELTA_Q_CONTEXTS; ++k)
    av1_diff_update_prob(&r, &fc->delta_q_prob[k], ACCT_STR);
#endif

#if !CONFIG_EC_ADAPT
  if (cm->seg.enabled && cm->seg.update_map) {
    if (cm->seg.temporal_update) {
      for (k = 0; k < PREDICTION_PROBS; k++)
        av1_diff_update_prob(&r, &cm->fc->seg.pred_probs[k], ACCT_STR);
    }
    for (k = 0; k < MAX_SEGMENTS - 1; k++)
      av1_diff_update_prob(&r, &cm->fc->seg.tree_probs[k], ACCT_STR);
  }

  for (j = 0; j < INTRA_MODES; j++) {
    for (i = 0; i < INTRA_MODES - 1; ++i)
      av1_diff_update_prob(&r, &fc->uv_mode_prob[j][i], ACCT_STR);
  }

#if CONFIG_EXT_PARTITION_TYPES
  for (i = 0; i < PARTITION_TYPES - 1; ++i)
    av1_diff_update_prob(&r, &fc->partition_prob[0][i], ACCT_STR);
  for (j = 1; j < PARTITION_CONTEXTS; ++j)
    for (i = 0; i < EXT_PARTITION_TYPES - 1; ++i)
      av1_diff_update_prob(&r, &fc->partition_prob[j][i], ACCT_STR);
#else
  for (j = 0; j < PARTITION_CONTEXTS; ++j)
    for (i = 0; i < PARTITION_TYPES - 1; ++i)
      av1_diff_update_prob(&r, &fc->partition_prob[j][i], ACCT_STR);
#endif  // CONFIG_EXT_PARTITION_TYPES
#endif  // !CONFIG_EC_ADAPT
#if CONFIG_EXT_INTRA
  for (i = 0; i < INTRA_FILTERS + 1; ++i)
    for (j = 0; j < INTRA_FILTERS - 1; ++j)
      av1_diff_update_prob(&r, &fc->intra_filter_probs[i][j], ACCT_STR);
#endif  // CONFIG_EXT_INTRA

  if (frame_is_intra_only(cm)) {
    av1_copy(cm->kf_y_prob, av1_kf_y_mode_prob);
#if CONFIG_DAALA_EC
    av1_copy(cm->kf_y_cdf, av1_kf_y_mode_cdf);
#endif
#if !CONFIG_EC_ADAPT
    for (k = 0; k < INTRA_MODES; k++)
      for (j = 0; j < INTRA_MODES; j++)
        for (i = 0; i < INTRA_MODES - 1; ++i)
          av1_diff_update_prob(&r, &cm->kf_y_prob[k][j][i], ACCT_STR);
#endif
  } else {
#if !CONFIG_REF_MV
    nmv_context *const nmvc = &fc->nmvc;
#endif
    read_inter_mode_probs(fc, &r);

#if CONFIG_EXT_INTER
    read_inter_compound_mode_probs(fc, &r);
    if (cm->reference_mode != COMPOUND_REFERENCE) {
      for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
        if (is_interintra_allowed_bsize_group(i)) {
          av1_diff_update_prob(&r, &fc->interintra_prob[i], ACCT_STR);
        }
      }
      for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
        for (j = 0; j < INTERINTRA_MODES - 1; j++)
          av1_diff_update_prob(&r, &fc->interintra_mode_prob[i][j], ACCT_STR);
      }
      for (i = 0; i < BLOCK_SIZES; i++) {
        if (is_interintra_allowed_bsize(i) && is_interintra_wedge_used(i)) {
          av1_diff_update_prob(&r, &fc->wedge_interintra_prob[i], ACCT_STR);
        }
      }
    }
    if (cm->reference_mode != SINGLE_REFERENCE) {
      for (i = 0; i < BLOCK_SIZES; i++) {
        if (is_interinter_wedge_used(i)) {
          for (j = 0; j < COMPOUND_TYPES - 1; j++) {
            av1_diff_update_prob(&r, &fc->compound_type_prob[i][j], ACCT_STR);
          }
        }
      }
    }
#endif  // CONFIG_EXT_INTER

#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
    for (i = BLOCK_8X8; i < BLOCK_SIZES; ++i) {
      for (j = 0; j < MOTION_MODES - 1; ++j)
        av1_diff_update_prob(&r, &fc->motion_mode_prob[i][j], ACCT_STR);
    }
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION

#if !CONFIG_EC_ADAPT
    if (cm->interp_filter == SWITCHABLE) read_switchable_interp_probs(fc, &r);
#endif

    for (i = 0; i < INTRA_INTER_CONTEXTS; i++)
      av1_diff_update_prob(&r, &fc->intra_inter_prob[i], ACCT_STR);

    if (cm->reference_mode != SINGLE_REFERENCE)
      setup_compound_reference_mode(cm);
    read_frame_reference_mode_probs(cm, &r);

#if !CONFIG_EC_ADAPT
    for (j = 0; j < BLOCK_SIZE_GROUPS; j++) {
      for (i = 0; i < INTRA_MODES - 1; ++i)
        av1_diff_update_prob(&r, &fc->y_mode_prob[j][i], ACCT_STR);
    }
#endif

#if CONFIG_REF_MV
    for (i = 0; i < NMV_CONTEXTS; ++i)
      read_mv_probs(&fc->nmvc[i], cm->allow_high_precision_mv, &r);
#else
    read_mv_probs(nmvc, cm->allow_high_precision_mv, &r);
#endif
#if !CONFIG_EC_ADAPT
    read_ext_tx_probs(fc, &r);
#endif  // !CONFIG_EC_ADAPT
#if CONFIG_SUPERTX
    if (!xd->lossless[0]) read_supertx_probs(fc, &r);
#endif
#if CONFIG_GLOBAL_MOTION
    read_global_motion(cm, &r);
#endif  // CONFIG_GLOBAL_MOTION
  }
#if CONFIG_EC_MULTISYMBOL
  av1_coef_pareto_cdfs(fc);
#if CONFIG_REF_MV
  for (i = 0; i < NMV_CONTEXTS; ++i) av1_set_mv_cdfs(&fc->nmvc[i]);
#else
  av1_set_mv_cdfs(&fc->nmvc);
#endif
#if CONFIG_DAALA_EC
  av1_set_mode_cdfs(cm);
#endif
#endif  // CONFIG_EC_MULTISYMBOL

  return aom_reader_has_error(&r);
}

#ifdef NDEBUG
#define debug_check_frame_counts(cm) (void)0
#else  // !NDEBUG
// Counts should only be incremented when frame_parallel_decoding_mode and
// error_resilient_mode are disabled.
static void debug_check_frame_counts(const AV1_COMMON *const cm) {
  FRAME_COUNTS zero_counts;
  av1_zero(zero_counts);
  assert(cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD ||
         cm->error_resilient_mode);
  assert(!memcmp(cm->counts.y_mode, zero_counts.y_mode,
                 sizeof(cm->counts.y_mode)));
  assert(!memcmp(cm->counts.uv_mode, zero_counts.uv_mode,
                 sizeof(cm->counts.uv_mode)));
  assert(!memcmp(cm->counts.partition, zero_counts.partition,
                 sizeof(cm->counts.partition)));
  assert(!memcmp(cm->counts.coef, zero_counts.coef, sizeof(cm->counts.coef)));
  assert(!memcmp(cm->counts.eob_branch, zero_counts.eob_branch,
                 sizeof(cm->counts.eob_branch)));
  assert(!memcmp(cm->counts.switchable_interp, zero_counts.switchable_interp,
                 sizeof(cm->counts.switchable_interp)));
  assert(!memcmp(cm->counts.inter_mode, zero_counts.inter_mode,
                 sizeof(cm->counts.inter_mode)));
#if CONFIG_EXT_INTER
  assert(!memcmp(cm->counts.inter_compound_mode,
                 zero_counts.inter_compound_mode,
                 sizeof(cm->counts.inter_compound_mode)));
  assert(!memcmp(cm->counts.interintra, zero_counts.interintra,
                 sizeof(cm->counts.interintra)));
  assert(!memcmp(cm->counts.wedge_interintra, zero_counts.wedge_interintra,
                 sizeof(cm->counts.wedge_interintra)));
  assert(!memcmp(cm->counts.compound_interinter,
                 zero_counts.compound_interinter,
                 sizeof(cm->counts.compound_interinter)));
#endif  // CONFIG_EXT_INTER
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
  assert(!memcmp(cm->counts.motion_mode, zero_counts.motion_mode,
                 sizeof(cm->counts.motion_mode)));
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
  assert(!memcmp(cm->counts.intra_inter, zero_counts.intra_inter,
                 sizeof(cm->counts.intra_inter)));
  assert(!memcmp(cm->counts.comp_inter, zero_counts.comp_inter,
                 sizeof(cm->counts.comp_inter)));
  assert(!memcmp(cm->counts.single_ref, zero_counts.single_ref,
                 sizeof(cm->counts.single_ref)));
  assert(!memcmp(cm->counts.comp_ref, zero_counts.comp_ref,
                 sizeof(cm->counts.comp_ref)));
#if CONFIG_EXT_REFS
  assert(!memcmp(cm->counts.comp_bwdref, zero_counts.comp_bwdref,
                 sizeof(cm->counts.comp_bwdref)));
#endif  // CONFIG_EXT_REFS
  assert(!memcmp(&cm->counts.tx_size, &zero_counts.tx_size,
                 sizeof(cm->counts.tx_size)));
  assert(!memcmp(cm->counts.skip, zero_counts.skip, sizeof(cm->counts.skip)));
#if CONFIG_REF_MV
  assert(
      !memcmp(&cm->counts.mv[0], &zero_counts.mv[0], sizeof(cm->counts.mv[0])));
  assert(
      !memcmp(&cm->counts.mv[1], &zero_counts.mv[1], sizeof(cm->counts.mv[0])));
#else
  assert(!memcmp(&cm->counts.mv, &zero_counts.mv, sizeof(cm->counts.mv)));
#endif
  assert(!memcmp(cm->counts.inter_ext_tx, zero_counts.inter_ext_tx,
                 sizeof(cm->counts.inter_ext_tx)));
  assert(!memcmp(cm->counts.intra_ext_tx, zero_counts.intra_ext_tx,
                 sizeof(cm->counts.intra_ext_tx)));
}
#endif  // NDEBUG

static struct aom_read_bit_buffer *init_read_bit_buffer(
    AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
    const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]) {
  rb->bit_offset = 0;
  rb->error_handler = error_handler;
  rb->error_handler_data = &pbi->common;
  if (pbi->decrypt_cb) {
    const int n = (int)AOMMIN(MAX_AV1_HEADER_SIZE, data_end - data);
    pbi->decrypt_cb(pbi->decrypt_state, data, clear_data, n);
    rb->bit_buffer = clear_data;
    rb->bit_buffer_end = clear_data + n;
  } else {
    rb->bit_buffer = data;
    rb->bit_buffer_end = data_end;
  }
  return rb;
}

//------------------------------------------------------------------------------

int av1_read_sync_code(struct aom_read_bit_buffer *const rb) {
  return aom_rb_read_literal(rb, 8) == AV1_SYNC_CODE_0 &&
         aom_rb_read_literal(rb, 8) == AV1_SYNC_CODE_1 &&
         aom_rb_read_literal(rb, 8) == AV1_SYNC_CODE_2;
}

void av1_read_frame_size(struct aom_read_bit_buffer *rb, int *width,
                         int *height) {
  *width = aom_rb_read_literal(rb, 16) + 1;
  *height = aom_rb_read_literal(rb, 16) + 1;
}
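// The profile is coded LSB-first in two bits, with a third bit appended for
// values above 2: e.g. bits 1,0 give profile 1, while bits 1,1 followed by a
// 0 give profile 3.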

BITSTREAM_PROFILE av1_read_profile(struct aom_read_bit_buffer *rb) {
  int profile = aom_rb_read_bit(rb);
  profile |= aom_rb_read_bit(rb) << 1;
  if (profile > 2) profile += aom_rb_read_bit(rb);
  return (BITSTREAM_PROFILE)profile;
}

void av1_decode_frame(AV1Decoder *pbi, const uint8_t *data,
                      const uint8_t *data_end, const uint8_t **p_data_end) {
  AV1_COMMON *const cm = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;
  struct aom_read_bit_buffer rb;
  int context_updated = 0;
  uint8_t clear_data[MAX_AV1_HEADER_SIZE];
  size_t first_partition_size;
  YV12_BUFFER_CONFIG *new_fb;

#if CONFIG_BITSTREAM_DEBUG
  bitstream_queue_set_frame_read(cm->current_video_frame * 2 + cm->show_frame);
#endif

  first_partition_size = read_uncompressed_header(
      pbi, init_read_bit_buffer(pbi, &rb, data, data_end, clear_data));
#if CONFIG_TILE_GROUPS
  pbi->first_partition_size = first_partition_size;
  pbi->uncomp_hdr_size = aom_rb_bytes_read(&rb);
#endif
  new_fb = get_frame_new_buffer(cm);
  xd->cur_buf = new_fb;
#if CONFIG_GLOBAL_MOTION
  xd->global_motion = cm->global_motion;
#endif  // CONFIG_GLOBAL_MOTION

  if (!first_partition_size) {
    // Showing a frame directly.
#if CONFIG_EXT_REFS
    if (cm->show_existing_frame)
      *p_data_end = data + aom_rb_bytes_read(&rb);
    else
#endif  // CONFIG_EXT_REFS
      *p_data_end = data + (cm->profile <= PROFILE_2 ? 1 : 2);

    return;
  }

  data += aom_rb_bytes_read(&rb);
  if (!read_is_valid(data, first_partition_size, data_end))
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt header length");

#if CONFIG_SIMP_MV_PRED
  cm->setup_mi(cm);
#endif

  cm->use_prev_frame_mvs =
      !cm->error_resilient_mode && cm->width == cm->last_width &&
      cm->height == cm->last_height && !cm->last_intra_only &&
      cm->last_show_frame && (cm->last_frame_type != KEY_FRAME);
#if CONFIG_EXT_REFS
  // NOTE(zoeliu): As cm->prev_frame can be neither a frame with
  //               show_existing_frame=1 nor a frame that is not used as a
  //               reference, it is probable that by the time it is being
  //               referred to, the frame buffer it originally points to may
  //               already have expired and been reassigned to the current
  //               newly coded frame. Hence, we need to check whether this is
  //               the case, and if yes, we have 2 choices:
  //               (1) Simply disable the use of previous frame mvs; or
  //               (2) Have cm->prev_frame point to one reference frame buffer,
  //                   e.g. LAST_FRAME.
  if (cm->use_prev_frame_mvs && !dec_is_ref_frame_buf(pbi, cm->prev_frame)) {
    // Reassign the LAST_FRAME buffer to cm->prev_frame.
    RefBuffer *last_fb_ref_buf = &cm->frame_refs[LAST_FRAME - LAST_FRAME];
    cm->prev_frame = &cm->buffer_pool->frame_bufs[last_fb_ref_buf->idx];
  }
#endif  // CONFIG_EXT_REFS

  av1_setup_block_planes(xd, cm->subsampling_x, cm->subsampling_y);

  *cm->fc = cm->frame_contexts[cm->frame_context_idx];
  if (!cm->fc->initialized)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Uninitialized entropy context.");

  av1_zero(cm->counts);

  xd->corrupted = 0;
  new_fb->corrupted = read_compressed_header(pbi, data, first_partition_size);
  if (new_fb->corrupted)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Decode failed. Frame data header is corrupted.");

  if (cm->lf.filter_level && !cm->skip_loop_filter) {
    av1_loop_filter_frame_init(cm, cm->lf.filter_level);
  }

  // If encoded in frame parallel mode, frame context is ready after decoding
  // the frame header.
  if (cm->frame_parallel_decode &&
      cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD) {
    AVxWorker *const worker = pbi->frame_worker_owner;
    FrameWorkerData *const frame_worker_data = worker->data1;
    if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD) {
      context_updated = 1;
      cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
    }
    av1_frameworker_lock_stats(worker);
    pbi->cur_buf->row = -1;
    pbi->cur_buf->col = -1;
    frame_worker_data->frame_context_ready = 1;
    // Signal the main thread that context is ready.
    av1_frameworker_signal_stats(worker);
    av1_frameworker_unlock_stats(worker);
  }

#if CONFIG_ENTROPY
  av1_copy(cm->starting_coef_probs, cm->fc->coef_probs);
  cm->coef_probs_update_idx = 0;
#endif  // CONFIG_ENTROPY

  if (pbi->max_threads > 1
#if CONFIG_EXT_TILE
      && pbi->dec_tile_col < 0  // Decoding all columns
#endif                          // CONFIG_EXT_TILE
      && cm->tile_cols > 1) {
    // Multi-threaded tile decoder
    *p_data_end = decode_tiles_mt(pbi, data + first_partition_size, data_end);
    if (!xd->corrupted) {
      if (!cm->skip_loop_filter) {
        // If multiple threads are used to decode tiles, then we use those
        // threads to do parallel loopfiltering.
        av1_loop_filter_frame_mt(new_fb, cm, pbi->mb.plane, cm->lf.filter_level,
                                 0, 0, pbi->tile_workers, pbi->num_tile_workers,
                                 &pbi->lf_row_sync);
      }
    } else {
      aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                         "Decode failed. Frame data is corrupted.");
    }
  } else {
    *p_data_end = decode_tiles(pbi, data + first_partition_size, data_end);
  }
#if CONFIG_LOOP_RESTORATION
  if (cm->rst_info.restoration_type != RESTORE_NONE) {
    av1_loop_restoration_init(&cm->rst_internal, &cm->rst_info,
                              cm->frame_type == KEY_FRAME, cm->width,
                              cm->height);
    av1_loop_restoration_rows(new_fb, cm, 0, cm->mi_rows, 0);
  }
#endif  // CONFIG_LOOP_RESTORATION

#if CONFIG_DERING
  if (cm->dering_level && !cm->skip_loop_filter) {
    av1_dering_frame(&pbi->cur_buf->buf, cm, &pbi->mb, cm->dering_level);
  }
#endif  // CONFIG_DERING

#if CONFIG_CLPF
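  // The signaled CLPF strength is remapped so that coded values {1, 2, 3}
  // select filter strengths {1, 2, 4}: the `s + (s == 3)` expressions below.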
  if (!cm->skip_loop_filter) {
    const YV12_BUFFER_CONFIG *const frame = &pbi->cur_buf->buf;
    if (cm->clpf_strength_y) {
      av1_clpf_frame(frame, NULL, cm, cm->clpf_size != CLPF_NOSIZE,
                     cm->clpf_strength_y + (cm->clpf_strength_y == 3),
                     4 + cm->clpf_size, AOM_PLANE_Y, clpf_bit);
    }
    if (cm->clpf_strength_u) {
      av1_clpf_frame(frame, NULL, cm, 0,  // No block signals for chroma
                     cm->clpf_strength_u + (cm->clpf_strength_u == 3), 4,
                     AOM_PLANE_U, NULL);
    }
    if (cm->clpf_strength_v) {
      av1_clpf_frame(frame, NULL, cm, 0,  // No block signals for chroma
                     cm->clpf_strength_v + (cm->clpf_strength_v == 3), 4,
                     AOM_PLANE_V, NULL);
    }
  }
  if (cm->clpf_blocks) aom_free(cm->clpf_blocks);
#endif  // CONFIG_CLPF

  if (!xd->corrupted) {
    if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
#if CONFIG_ENTROPY
      cm->partial_prob_update = 0;
#endif  // CONFIG_ENTROPY
      av1_adapt_coef_probs(cm);
      av1_adapt_intra_frame_probs(cm);

      if (!frame_is_intra_only(cm)) {
        av1_adapt_inter_frame_probs(cm);
        av1_adapt_mv_probs(cm, cm->allow_high_precision_mv);
      }
    } else {
      debug_check_frame_counts(cm);
    }
  } else {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Decode failed. Frame data is corrupted.");
  }

  // For non-frame-parallel decodes, update the saved frame context here.
  if (!cm->error_resilient_mode && !context_updated)
    cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
}