/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <stdlib.h>  // qsort()

#include "./aom_config.h"
#include "./aom_dsp_rtcd.h"
#include "./aom_scale_rtcd.h"
#include "./av1_rtcd.h"

#include "aom/aom_codec.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_dsp/bitreader.h"
#include "aom_dsp/bitreader_buffer.h"
#include "aom_dsp/binary_codes_reader.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem.h"
#include "aom_ports/mem_ops.h"
#include "aom_scale/aom_scale.h"
#include "aom_util/aom_thread.h"

#if CONFIG_BITSTREAM_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_BITSTREAM_DEBUG

#include "av1/common/alloccommon.h"
#if CONFIG_CDEF
#include "av1/common/cdef.h"
#include "av1/common/clpf.h"
#endif
#if CONFIG_INSPECTION
#include "av1/decoder/inspection.h"
#endif
#include "av1/common/common.h"
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/idct.h"
#include "av1/common/pred_common.h"
#include "av1/common/quant_common.h"
#include "av1/common/reconinter.h"
#include "av1/common/reconintra.h"
#if CONFIG_FRAME_SUPERRES
#include "av1/common/resize.h"
#endif  // CONFIG_FRAME_SUPERRES
#include "av1/common/seg_common.h"
#include "av1/common/thread_common.h"
#include "av1/common/tile_common.h"

#include "av1/decoder/decodeframe.h"
#include "av1/decoder/decodemv.h"
#include "av1/decoder/decoder.h"
#if CONFIG_LV_MAP
#include "av1/decoder/decodetxb.h"
#endif
#include "av1/decoder/detokenize.h"
#include "av1/decoder/dsubexp.h"

#if CONFIG_WARPED_MOTION || CONFIG_GLOBAL_MOTION
#include "av1/common/warped_motion.h"
#endif  // CONFIG_WARPED_MOTION || CONFIG_GLOBAL_MOTION

#define MAX_AV1_HEADER_SIZE 80
#define ACCT_STR __func__

#if CONFIG_PVQ
#include "av1/common/partition.h"
#include "av1/common/pvq.h"
#include "av1/common/scan.h"
#include "av1/decoder/decint.h"
#include "av1/decoder/pvq_decoder.h"
#include "av1/encoder/encodemb.h"
#include "av1/encoder/hybrid_fwd_txfm.h"
#endif

#if CONFIG_CFL
#include "av1/common/cfl.h"
#endif

static struct aom_read_bit_buffer *init_read_bit_buffer(
    AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
    const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]);
static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
                                  size_t partition_size);
static size_t read_uncompressed_header(AV1Decoder *pbi,
                                       struct aom_read_bit_buffer *rb);

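// Compound (two-reference) prediction is only allowed for inter frames; with
// ONE_SIDED_COMPOUND or EXT_COMP_REFS it no longer depends on the reference
// sign biases.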
static int is_compound_reference_allowed(const AV1_COMMON *cm) {
#if CONFIG_ONE_SIDED_COMPOUND || CONFIG_EXT_COMP_REFS  // Normative in decoder
  return !frame_is_intra_only(cm);
#else
  int i;
  if (frame_is_intra_only(cm)) return 0;
  for (i = 1; i < INTER_REFS_PER_FRAME; ++i)
    if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1]) return 1;

  return 0;
#endif  // CONFIG_ONE_SIDED_COMPOUND || CONFIG_EXT_COMP_REFS
}

static void setup_compound_reference_mode(AV1_COMMON *cm) {
#if CONFIG_EXT_REFS
  cm->comp_fwd_ref[0] = LAST_FRAME;
  cm->comp_fwd_ref[1] = LAST2_FRAME;
  cm->comp_fwd_ref[2] = LAST3_FRAME;
  cm->comp_fwd_ref[3] = GOLDEN_FRAME;

  cm->comp_bwd_ref[0] = BWDREF_FRAME;
  cm->comp_bwd_ref[1] = ALTREF_FRAME;
#else
  if (cm->ref_frame_sign_bias[LAST_FRAME] ==
      cm->ref_frame_sign_bias[GOLDEN_FRAME]) {
    cm->comp_fixed_ref = ALTREF_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = GOLDEN_FRAME;
  } else if (cm->ref_frame_sign_bias[LAST_FRAME] ==
             cm->ref_frame_sign_bias[ALTREF_FRAME]) {
    cm->comp_fixed_ref = GOLDEN_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  } else {
    cm->comp_fixed_ref = LAST_FRAME;
    cm->comp_var_ref[0] = GOLDEN_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  }
#endif  // CONFIG_EXT_REFS
}

static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return len != 0 && len <= (size_t)(end - start);
}

static int decode_unsigned_max(struct aom_read_bit_buffer *rb, int max) {
  const int data = aom_rb_read_literal(rb, get_unsigned_bits(max));
  return data > max ? max : data;
}

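// Reads the frame-level transform mode from the uncompressed header. When
// every active segment is lossless, only ONLY_4X4 is permitted and no bits
// are read.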
static TX_MODE read_tx_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
                            struct aom_read_bit_buffer *rb) {
  int i, all_lossless = 1;
#if CONFIG_TX64X64
  TX_MODE tx_mode;
#endif

  if (cm->seg.enabled) {
    for (i = 0; i < MAX_SEGMENTS; ++i) {
      if (!xd->lossless[i]) {
        all_lossless = 0;
        break;
      }
    }
  } else {
    all_lossless = xd->lossless[0];
  }

  if (all_lossless) return ONLY_4X4;
#if CONFIG_TX64X64
  tx_mode = aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
  if (tx_mode == ALLOW_32X32) tx_mode += aom_rb_read_bit(rb);
  return tx_mode;
#else
  return aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
#endif  // CONFIG_TX64X64
}

#if !CONFIG_EC_ADAPT
static void read_tx_size_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i, j, k;
  for (i = 0; i < MAX_TX_DEPTH; ++i)
    for (j = 0; j < TX_SIZE_CONTEXTS; ++j)
      for (k = 0; k < i + 1; ++k)
        av1_diff_update_prob(r, &fc->tx_size_probs[i][j][k], ACCT_STR);
}
#endif

#if !CONFIG_EC_ADAPT
static void read_switchable_interp_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i, j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j) {
    for (i = 0; i < SWITCHABLE_FILTERS - 1; ++i)
      av1_diff_update_prob(r, &fc->switchable_interp_prob[j][i], ACCT_STR);
  }
}
#endif

#if !CONFIG_NEW_MULTISYMBOL
static void read_inter_mode_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i;
  for (i = 0; i < NEWMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->newmv_prob[i], ACCT_STR);
  for (i = 0; i < ZEROMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->zeromv_prob[i], ACCT_STR);
  for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->refmv_prob[i], ACCT_STR);
  for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->drl_prob[i], ACCT_STR);
}
#endif

#if CONFIG_EXT_INTER
static void read_inter_compound_mode_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i, j;
  if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
    for (j = 0; j < INTER_MODE_CONTEXTS; ++j) {
      for (i = 0; i < INTER_COMPOUND_MODES - 1; ++i) {
        av1_diff_update_prob(r, &fc->inter_compound_mode_probs[j][i], ACCT_STR);
      }
    }
  }
}

#if CONFIG_COMPOUND_SINGLEREF
static void read_inter_singleref_comp_mode_probs(FRAME_CONTEXT *fc,
                                                 aom_reader *r) {
  int i, j;
  if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
    for (j = 0; j < INTER_MODE_CONTEXTS; ++j) {
      for (i = 0; i < INTER_SINGLEREF_COMP_MODES - 1; ++i) {
        av1_diff_update_prob(r, &fc->inter_singleref_comp_mode_probs[j][i],
                             ACCT_STR);
      }
    }
  }
}
#endif  // CONFIG_COMPOUND_SINGLEREF
#endif  // CONFIG_EXT_INTER

#if !CONFIG_EC_ADAPT
#if !CONFIG_EXT_TX
static void read_ext_tx_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i, j, k;
  if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      for (j = 0; j < TX_TYPES; ++j) {
        for (k = 0; k < TX_TYPES - 1; ++k)
          av1_diff_update_prob(r, &fc->intra_ext_tx_prob[i][j][k], ACCT_STR);
      }
    }
  }
  if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      for (k = 0; k < TX_TYPES - 1; ++k)
        av1_diff_update_prob(r, &fc->inter_ext_tx_prob[i][k], ACCT_STR);
    }
  }
}
#endif
#endif

static REFERENCE_MODE read_frame_reference_mode(
    const AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  if (is_compound_reference_allowed(cm)) {
#if CONFIG_REF_ADAPT
    return aom_rb_read_bit(rb) ? REFERENCE_MODE_SELECT : SINGLE_REFERENCE;
#else
    return aom_rb_read_bit(rb)
               ? REFERENCE_MODE_SELECT
               : (aom_rb_read_bit(rb) ? COMPOUND_REFERENCE : SINGLE_REFERENCE);
#endif  // CONFIG_REF_ADAPT
  } else {
    return SINGLE_REFERENCE;
  }
}

static void read_frame_reference_mode_probs(AV1_COMMON *cm, aom_reader *r) {
  FRAME_CONTEXT *const fc = cm->fc;
  int i, j;

#if !CONFIG_NEW_MULTISYMBOL
  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    for (i = 0; i < COMP_INTER_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_inter_prob[i], ACCT_STR);

  if (cm->reference_mode != COMPOUND_REFERENCE) {
    for (i = 0; i < REF_CONTEXTS; ++i) {
      for (j = 0; j < (SINGLE_REFS - 1); ++j) {
        av1_diff_update_prob(r, &fc->single_ref_prob[i][j], ACCT_STR);
      }
    }
  }
#endif

  if (cm->reference_mode != SINGLE_REFERENCE) {
#if CONFIG_EXT_COMP_REFS
    for (i = 0; i < COMP_REF_TYPE_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_ref_type_prob[i], ACCT_STR);

    for (i = 0; i < UNI_COMP_REF_CONTEXTS; ++i)
      for (j = 0; j < (UNIDIR_COMP_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->uni_comp_ref_prob[i][j], ACCT_STR);
#endif  // CONFIG_EXT_COMP_REFS

    for (i = 0; i < REF_CONTEXTS; ++i) {
#if CONFIG_EXT_REFS
      for (j = 0; j < (FWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_ref_prob[i][j], ACCT_STR);
      for (j = 0; j < (BWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_bwdref_prob[i][j], ACCT_STR);
#else
      for (j = 0; j < (COMP_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_ref_prob[i][j], ACCT_STR);
#endif  // CONFIG_EXT_REFS
    }
  }
}

static void update_mv_probs(aom_prob *p, int n, aom_reader *r) {
  int i;
  for (i = 0; i < n; ++i) av1_diff_update_prob(r, &p[i], ACCT_STR);
}

static void read_mv_probs(nmv_context *ctx, int allow_hp, aom_reader *r) {
  int i;

#if !CONFIG_EC_ADAPT
  int j;
  update_mv_probs(ctx->joints, MV_JOINTS - 1, r);

  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    update_mv_probs(&comp_ctx->sign, 1, r);
    update_mv_probs(comp_ctx->classes, MV_CLASSES - 1, r);
    update_mv_probs(comp_ctx->class0, CLASS0_SIZE - 1, r);
    update_mv_probs(comp_ctx->bits, MV_OFFSET_BITS, r);
  }
  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    for (j = 0; j < CLASS0_SIZE; ++j) {
      update_mv_probs(comp_ctx->class0_fp[j], MV_FP_SIZE - 1, r);
    }
    update_mv_probs(comp_ctx->fp, MV_FP_SIZE - 1, r);
  }
#endif  // !CONFIG_EC_ADAPT

  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      nmv_component *const comp_ctx = &ctx->comps[i];
      update_mv_probs(&comp_ctx->class0_hp, 1, r);
      update_mv_probs(&comp_ctx->hp, 1, r);
    }
  }
}

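// Applies the inverse transform to reconstruct one block, then clears the
// dequantized coefficients that were consumed (up to and including scan_line).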
static void inverse_transform_block(MACROBLOCKD *xd, int plane,
                                    const TX_TYPE tx_type,
                                    const TX_SIZE tx_size, uint8_t *dst,
                                    int stride, int16_t scan_line, int eob) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  tran_low_t *const dqcoeff = pd->dqcoeff;
  av1_inverse_transform_block(xd, dqcoeff, tx_type, tx_size, dst, stride, eob);
  memset(dqcoeff, 0, (scan_line + 1) * sizeof(dqcoeff[0]));
}

#if CONFIG_PVQ
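// Decodes a single PVQ-coded transform block: the AC bands via od_pvq_decode()
// and the DC coefficient via generic_decode(), writing the result back to
// dqcoeff in raster order and returning the block's eob.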
static int av1_pvq_decode_helper(MACROBLOCKD *xd, tran_low_t *ref_coeff,
                                 tran_low_t *dqcoeff, int16_t *quant, int pli,
                                 int bs, TX_TYPE tx_type, int xdec,
                                 PVQ_SKIP_TYPE ac_dc_coded) {
  unsigned int flags;  // used for daala's stream analyzer.
  int off;
  const int is_keyframe = 0;
  const int has_dc_skip = 1;
  int coeff_shift = 3 - av1_get_tx_scale(bs);
  int hbd_downshift = 0;
  int rounding_mask;
  // DC quantizer for PVQ
  int pvq_dc_quant;
  int lossless = (quant[0] == 0);
  const int blk_size = tx_size_wide[bs];
  int eob = 0;
  int i;
  od_dec_ctx *dec = &xd->daala_dec;
  int use_activity_masking = dec->use_activity_masking;
  DECLARE_ALIGNED(16, tran_low_t, dqcoeff_pvq[OD_TXSIZE_MAX * OD_TXSIZE_MAX]);
  DECLARE_ALIGNED(16, tran_low_t, ref_coeff_pvq[OD_TXSIZE_MAX * OD_TXSIZE_MAX]);

  od_coeff ref_int32[OD_TXSIZE_MAX * OD_TXSIZE_MAX];
  od_coeff out_int32[OD_TXSIZE_MAX * OD_TXSIZE_MAX];

  hbd_downshift = xd->bd - 8;

  od_raster_to_coding_order(ref_coeff_pvq, blk_size, tx_type, ref_coeff,
                            blk_size);

  assert(OD_COEFF_SHIFT >= 4);
  if (lossless)
    pvq_dc_quant = 1;
  else {
    if (use_activity_masking)
      pvq_dc_quant = OD_MAXI(
          1, (quant[0] << (OD_COEFF_SHIFT - 3) >> hbd_downshift) *
                     dec->state.pvq_qm_q4[pli][od_qm_get_index(bs, 0)] >>
                 4);
    else
      pvq_dc_quant =
          OD_MAXI(1, quant[0] << (OD_COEFF_SHIFT - 3) >> hbd_downshift);
  }

  off = od_qm_offset(bs, xdec);

  // copy int16 inputs to int32
  for (i = 0; i < blk_size * blk_size; i++) {
    ref_int32[i] =
        AOM_SIGNED_SHL(ref_coeff_pvq[i], OD_COEFF_SHIFT - coeff_shift) >>
        hbd_downshift;
  }

  od_pvq_decode(dec, ref_int32, out_int32,
                OD_MAXI(1, quant[1] << (OD_COEFF_SHIFT - 3) >> hbd_downshift),
                pli, bs, OD_PVQ_BETA[use_activity_masking][pli][bs],
                is_keyframe, &flags, ac_dc_coded, dec->state.qm + off,
                dec->state.qm_inv + off);

  if (!has_dc_skip || out_int32[0]) {
    out_int32[0] =
        has_dc_skip + generic_decode(dec->r, &dec->state.adapt->model_dc[pli],
                                     &dec->state.adapt->ex_dc[pli][bs][0], 2,
                                     "dc:mag");
    if (out_int32[0]) out_int32[0] *= aom_read_bit(dec->r, "dc:sign") ? -1 : 1;
  }
  out_int32[0] = out_int32[0] * pvq_dc_quant + ref_int32[0];

  // copy int32 result back to int16
  assert(OD_COEFF_SHIFT > coeff_shift);
  rounding_mask = (1 << (OD_COEFF_SHIFT - coeff_shift - 1)) - 1;
  for (i = 0; i < blk_size * blk_size; i++) {
    out_int32[i] = AOM_SIGNED_SHL(out_int32[i], hbd_downshift);
    dqcoeff_pvq[i] = (out_int32[i] + (out_int32[i] < 0) + rounding_mask) >>
                     (OD_COEFF_SHIFT - coeff_shift);
  }

  od_coding_order_to_raster(dqcoeff, blk_size, tx_type, dqcoeff_pvq, blk_size);

  eob = blk_size * blk_size;

  return eob;
}

static PVQ_SKIP_TYPE read_pvq_skip(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                   int plane, TX_SIZE tx_size) {
  // decode ac/dc coded flag. bit0: DC coded, bit1 : AC coded
  // NOTE : we don't use 5 symbols for luma here in aom codebase,
  // since block partition is taken care of by aom.
  // So, only AC/DC skip info is coded
  const int ac_dc_coded = aom_read_symbol(
      xd->daala_dec.r,
      xd->daala_dec.state.adapt->skip_cdf[2 * tx_size + (plane != 0)], 4,
      "skip");
  if (ac_dc_coded < 0 || ac_dc_coded > 3) {
    aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
                       "Invalid PVQ Skip Type");
  }
  return ac_dc_coded;
}

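// PVQ reconstruction for one block: forward-transform the prediction to get
// the reference coefficients, decode the block with av1_pvq_decode_helper(),
// then apply the inverse transform to produce the reconstructed pixels.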
static int av1_pvq_decode_helper2(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                  MB_MODE_INFO *const mbmi, int plane, int row,
                                  int col, TX_SIZE tx_size, TX_TYPE tx_type) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  // transform block size in pixels
  int tx_blk_size = tx_size_wide[tx_size];
  int i, j;
  tran_low_t *pvq_ref_coeff = pd->pvq_ref_coeff;
  const int diff_stride = tx_blk_size;
  int16_t *pred = pd->pred;
  tran_low_t *const dqcoeff = pd->dqcoeff;
  uint8_t *dst;
  int eob;
  const PVQ_SKIP_TYPE ac_dc_coded = read_pvq_skip(cm, xd, plane, tx_size);

  eob = 0;
  dst = &pd->dst.buf[4 * row * pd->dst.stride + 4 * col];

  if (ac_dc_coded) {
    int xdec = pd->subsampling_x;
    int seg_id = mbmi->segment_id;
    int16_t *quant;
    FWD_TXFM_PARAM fwd_txfm_param;
    // ToDo(yaowu): correct this with optimal number from decoding process.
    const int max_scan_line = tx_size_2d[tx_size];
#if CONFIG_HIGHBITDEPTH
    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
      for (j = 0; j < tx_blk_size; j++)
        for (i = 0; i < tx_blk_size; i++)
          pred[diff_stride * j + i] =
              CONVERT_TO_SHORTPTR(dst)[pd->dst.stride * j + i];
    } else {
#endif
      for (j = 0; j < tx_blk_size; j++)
        for (i = 0; i < tx_blk_size; i++)
          pred[diff_stride * j + i] = dst[pd->dst.stride * j + i];
#if CONFIG_HIGHBITDEPTH
    }
#endif

    fwd_txfm_param.tx_type = tx_type;
    fwd_txfm_param.tx_size = tx_size;
    fwd_txfm_param.lossless = xd->lossless[seg_id];

#if CONFIG_HIGHBITDEPTH
    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
      fwd_txfm_param.bd = xd->bd;
      av1_highbd_fwd_txfm(pred, pvq_ref_coeff, diff_stride, &fwd_txfm_param);
    } else {
#endif  // CONFIG_HIGHBITDEPTH
      av1_fwd_txfm(pred, pvq_ref_coeff, diff_stride, &fwd_txfm_param);
#if CONFIG_HIGHBITDEPTH
    }
#endif  // CONFIG_HIGHBITDEPTH

    quant = &pd->seg_dequant[seg_id][0];  // aom's quantizer

    eob = av1_pvq_decode_helper(xd, pvq_ref_coeff, dqcoeff, quant, plane,
                                tx_size, tx_type, xdec, ac_dc_coded);

    inverse_transform_block(xd, plane, tx_type, tx_size, dst, pd->dst.stride,
                            max_scan_line, eob);
  }

  return eob;
}
#endif

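// Returns the coefficient-block index of the transform block at (row, col)
// within the current mode-info unit for the given plane.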
static int get_block_idx(const MACROBLOCKD *xd, int plane, int row, int col) {
  const int bsize = xd->mi[0]->mbmi.sb_type;
  const struct macroblockd_plane *pd = &xd->plane[plane];
#if CONFIG_CB4X4
#if CONFIG_CHROMA_2X2
  const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
#else
  const BLOCK_SIZE plane_bsize =
      AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
#endif  // CONFIG_CHROMA_2X2
#else
  const BLOCK_SIZE plane_bsize =
      get_plane_block_size(AOMMAX(BLOCK_8X8, bsize), pd);
#endif
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
  const TX_SIZE tx_size = get_tx_size(plane, xd);
  const uint8_t txh_unit = tx_size_high_unit[tx_size];
  return row * max_blocks_wide + col * txh_unit;
}

#if CONFIG_DPCM_INTRA
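// DPCM intra reconstruction: each row (vertical prediction) or column
// (horizontal prediction) is predicted from the previously reconstructed
// row/column before its 1-D inverse transform is added.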
static void process_block_dpcm_vert(TX_SIZE tx_size, TX_TYPE_1D tx_type_1d,
                                    const tran_low_t *dqcoeff, uint8_t *dst,
                                    int dst_stride) {
  const int tx1d_width = tx_size_wide[tx_size];
  const int tx1d_height = tx_size_high[tx_size];
  dpcm_inv_txfm_add_func inverse_tx =
      av1_get_dpcm_inv_txfm_add_func(tx1d_width);
  for (int r = 0; r < tx1d_height; ++r) {
    if (r > 0) memcpy(dst, dst - dst_stride, tx1d_width * sizeof(dst[0]));
    inverse_tx(dqcoeff, 1, tx_type_1d, dst);
    dqcoeff += tx1d_width;
    dst += dst_stride;
  }
}

static void process_block_dpcm_horz(TX_SIZE tx_size, TX_TYPE_1D tx_type_1d,
                                    const tran_low_t *dqcoeff, uint8_t *dst,
                                    int dst_stride) {
  const int tx1d_width = tx_size_wide[tx_size];
  const int tx1d_height = tx_size_high[tx_size];
  dpcm_inv_txfm_add_func inverse_tx =
      av1_get_dpcm_inv_txfm_add_func(tx1d_height);
  tran_low_t tx_buff[64];
  for (int c = 0; c < tx1d_width; ++c, ++dqcoeff, ++dst) {
    for (int r = 0; r < tx1d_height; ++r) {
      if (c > 0) dst[r * dst_stride] = dst[r * dst_stride - 1];
      tx_buff[r] = dqcoeff[r * tx1d_width];
    }
    inverse_tx(tx_buff, dst_stride, tx_type_1d, dst);
  }
}

#if CONFIG_HIGHBITDEPTH
static void hbd_process_block_dpcm_vert(TX_SIZE tx_size, TX_TYPE_1D tx_type_1d,
                                        int bd, const tran_low_t *dqcoeff,
                                        uint8_t *dst8, int dst_stride) {
  uint16_t *dst = CONVERT_TO_SHORTPTR(dst8);
  const int tx1d_width = tx_size_wide[tx_size];
  const int tx1d_height = tx_size_high[tx_size];
  hbd_dpcm_inv_txfm_add_func inverse_tx =
      av1_get_hbd_dpcm_inv_txfm_add_func(tx1d_width);
  for (int r = 0; r < tx1d_height; ++r) {
    if (r > 0) memcpy(dst, dst - dst_stride, tx1d_width * sizeof(dst[0]));
    inverse_tx(dqcoeff, 1, tx_type_1d, bd, dst, 1);
    dqcoeff += tx1d_width;
    dst += dst_stride;
  }
}

static void hbd_process_block_dpcm_horz(TX_SIZE tx_size, TX_TYPE_1D tx_type_1d,
                                        int bd, const tran_low_t *dqcoeff,
                                        uint8_t *dst8, int dst_stride) {
  uint16_t *dst = CONVERT_TO_SHORTPTR(dst8);
  const int tx1d_width = tx_size_wide[tx_size];
  const int tx1d_height = tx_size_high[tx_size];
  hbd_dpcm_inv_txfm_add_func inverse_tx =
      av1_get_hbd_dpcm_inv_txfm_add_func(tx1d_height);
  tran_low_t tx_buff[64];
  switch (tx1d_height) {
    case 4: inverse_tx = av1_hbd_dpcm_inv_txfm_add_4_c; break;
    case 8: inverse_tx = av1_hbd_dpcm_inv_txfm_add_8_c; break;
    case 16: inverse_tx = av1_hbd_dpcm_inv_txfm_add_16_c; break;
    case 32: inverse_tx = av1_hbd_dpcm_inv_txfm_add_32_c; break;
    default: assert(0);
  }

  for (int c = 0; c < tx1d_width; ++c, ++dqcoeff, ++dst) {
    for (int r = 0; r < tx1d_height; ++r) {
      if (c > 0) dst[r * dst_stride] = dst[r * dst_stride - 1];
      tx_buff[r] = dqcoeff[r * tx1d_width];
    }
    inverse_tx(tx_buff, dst_stride, tx_type_1d, bd, dst, 0);
  }
}
#endif  // CONFIG_HIGHBITDEPTH

static void inverse_transform_block_dpcm(MACROBLOCKD *xd, int plane,
                                         PREDICTION_MODE mode, TX_SIZE tx_size,
                                         TX_TYPE tx_type, uint8_t *dst,
                                         int dst_stride, int16_t scan_line) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  tran_low_t *const dqcoeff = pd->dqcoeff;
  TX_TYPE_1D tx_type_1d = DCT_1D;
  switch (tx_type) {
    case IDTX: tx_type_1d = IDTX_1D; break;
    case V_DCT:
      assert(mode == H_PRED);
      tx_type_1d = DCT_1D;
      break;
    case H_DCT:
      assert(mode == V_PRED);
      tx_type_1d = DCT_1D;
      break;
    default: assert(0);
  }
  switch (mode) {
    case V_PRED:
#if CONFIG_HIGHBITDEPTH
      if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
        hbd_process_block_dpcm_vert(tx_size, tx_type_1d, xd->bd, dqcoeff, dst,
                                    dst_stride);
      } else {
#endif  // CONFIG_HIGHBITDEPTH
        process_block_dpcm_vert(tx_size, tx_type_1d, dqcoeff, dst, dst_stride);
#if CONFIG_HIGHBITDEPTH
      }
#endif  // CONFIG_HIGHBITDEPTH
      break;
    case H_PRED:
#if CONFIG_HIGHBITDEPTH
      if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
        hbd_process_block_dpcm_horz(tx_size, tx_type_1d, xd->bd, dqcoeff, dst,
                                    dst_stride);
      } else {
#endif  // CONFIG_HIGHBITDEPTH
        process_block_dpcm_horz(tx_size, tx_type_1d, dqcoeff, dst, dst_stride);
#if CONFIG_HIGHBITDEPTH
      }
#endif  // CONFIG_HIGHBITDEPTH
      break;
    default: assert(0);
  }
  memset(dqcoeff, 0, (scan_line + 1) * sizeof(dqcoeff[0]));
}
#endif  // CONFIG_DPCM_INTRA

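// Decodes one intra transform block: runs the intra predictor, then, unless
// the block is skipped, reads the coefficients and adds the inverse transform
// to the prediction.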
static void predict_and_reconstruct_intra_block(
    AV1_COMMON *cm, MACROBLOCKD *const xd, aom_reader *const r,
    MB_MODE_INFO *const mbmi, int plane, int row, int col, TX_SIZE tx_size) {
  PLANE_TYPE plane_type = get_plane_type(plane);
  const int block_idx = get_block_idx(xd, plane, row, col);
#if CONFIG_PVQ
  (void)r;
#endif
  av1_predict_intra_block_facade(xd, plane, block_idx, col, row, tx_size);

  if (!mbmi->skip) {
#if !CONFIG_PVQ
    struct macroblockd_plane *const pd = &xd->plane[plane];
#if CONFIG_LV_MAP
    int16_t max_scan_line = 0;
    int eob;
    av1_read_coeffs_txb_facade(cm, xd, r, row, col, block_idx, plane,
                               pd->dqcoeff, tx_size, &max_scan_line, &eob);
    // tx_type will be read out in av1_read_coeffs_txb_facade
    TX_TYPE tx_type = get_tx_type(plane_type, xd, block_idx, tx_size);
#else   // CONFIG_LV_MAP
    TX_TYPE tx_type = get_tx_type(plane_type, xd, block_idx, tx_size);
    const SCAN_ORDER *scan_order = get_scan(cm, tx_size, tx_type, 0);
    int16_t max_scan_line = 0;
    const int eob =
        av1_decode_block_tokens(cm, xd, plane, scan_order, col, row, tx_size,
                                tx_type, &max_scan_line, r, mbmi->segment_id);
#endif  // CONFIG_LV_MAP
    if (eob) {
      uint8_t *dst =
          &pd->dst.buf[(row * pd->dst.stride + col) << tx_size_wide_log2[0]];
#if CONFIG_DPCM_INTRA
      const int block_raster_idx =
          av1_block_index_to_raster_order(tx_size, block_idx);
      const PREDICTION_MODE mode = (plane == 0)
                                       ? get_y_mode(xd->mi[0], block_raster_idx)
                                       : mbmi->uv_mode;
      if (av1_use_dpcm_intra(plane, mode, tx_type, mbmi)) {
        inverse_transform_block_dpcm(xd, plane, mode, tx_size, tx_type, dst,
                                     pd->dst.stride, max_scan_line);
      } else {
#endif  // CONFIG_DPCM_INTRA
        inverse_transform_block(xd, plane, tx_type, tx_size, dst,
                                pd->dst.stride, max_scan_line, eob);
#if CONFIG_DPCM_INTRA
      }
#endif  // CONFIG_DPCM_INTRA
    }
#else
    TX_TYPE tx_type = get_tx_type(plane_type, xd, block_idx, tx_size);
    av1_pvq_decode_helper2(cm, xd, mbmi, plane, row, col, tx_size, tx_type);
#endif
  }
#if CONFIG_CFL
  if (plane == AOM_PLANE_Y) {
    struct macroblockd_plane *const pd = &xd->plane[plane];
    uint8_t *dst =
        &pd->dst.buf[(row * pd->dst.stride + col) << tx_size_wide_log2[0]];
    cfl_store(xd->cfl, dst, pd->dst.stride, row, col, tx_size);
  }
#endif
}

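// Recursive transform-block decode used with VAR_TX: when the recursive
// transform partitioning stops at this size the block is decoded and
// reconstructed; otherwise the four quadrants are visited at the next smaller
// transform size.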
#if CONFIG_VAR_TX && !CONFIG_COEF_INTERLEAVE
static void decode_reconstruct_tx(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                  aom_reader *r, MB_MODE_INFO *const mbmi,
                                  int plane, BLOCK_SIZE plane_bsize,
                                  int blk_row, int blk_col, TX_SIZE tx_size,
                                  int *eob_total) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
  const int tx_row = blk_row >> (1 - pd->subsampling_y);
  const int tx_col = blk_col >> (1 - pd->subsampling_x);
  const TX_SIZE plane_tx_size =
      plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
            : mbmi->inter_tx_size[tx_row][tx_col];
  // Scale to match transform block unit.
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (tx_size == plane_tx_size) {
    PLANE_TYPE plane_type = get_plane_type(plane);
    int block_idx = get_block_idx(xd, plane, blk_row, blk_col);
#if CONFIG_LV_MAP
    int16_t max_scan_line = 0;
    int eob;
    av1_read_coeffs_txb_facade(cm, xd, r, blk_row, blk_col, block_idx, plane,
                               pd->dqcoeff, tx_size, &max_scan_line, &eob);
    // tx_type will be read out in av1_read_coeffs_txb_facade
    TX_TYPE tx_type = get_tx_type(plane_type, xd, block_idx, plane_tx_size);
#else   // CONFIG_LV_MAP
    TX_TYPE tx_type = get_tx_type(plane_type, xd, block_idx, plane_tx_size);
    const SCAN_ORDER *sc = get_scan(cm, plane_tx_size, tx_type, 1);
    int16_t max_scan_line = 0;
    const int eob = av1_decode_block_tokens(
        cm, xd, plane, sc, blk_col, blk_row, plane_tx_size, tx_type,
        &max_scan_line, r, mbmi->segment_id);
#endif  // CONFIG_LV_MAP
    inverse_transform_block(xd, plane, tx_type, plane_tx_size,
                            &pd->dst.buf[(blk_row * pd->dst.stride + blk_col)
                                         << tx_size_wide_log2[0]],
                            pd->dst.stride, max_scan_line, eob);
    *eob_total += eob;
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    const int bsl = tx_size_wide_unit[sub_txs];
    assert(sub_txs < tx_size);
    int i;

    assert(bsl > 0);

    for (i = 0; i < 4; ++i) {
      const int offsetr = blk_row + (i >> 1) * bsl;
      const int offsetc = blk_col + (i & 0x01) * bsl;

      if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

      decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize, offsetr,
                            offsetc, sub_txs, eob_total);
    }
  }
}
#endif  // CONFIG_VAR_TX

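// Decodes and reconstructs a single inter transform block (non-recursive
// path) and returns its eob.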
#if !CONFIG_VAR_TX || CONFIG_SUPERTX || CONFIG_COEF_INTERLEAVE || \
    (!CONFIG_VAR_TX && CONFIG_EXT_TX && CONFIG_RECT_TX)
static int reconstruct_inter_block(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                   aom_reader *const r, int segment_id,
                                   int plane, int row, int col,
                                   TX_SIZE tx_size) {
  PLANE_TYPE plane_type = get_plane_type(plane);
  int block_idx = get_block_idx(xd, plane, row, col);
#if CONFIG_PVQ
  int eob;
  (void)r;
  (void)segment_id;
#else
  struct macroblockd_plane *const pd = &xd->plane[plane];
#endif

#if !CONFIG_PVQ
#if CONFIG_LV_MAP
  (void)segment_id;
  int16_t max_scan_line = 0;
  int eob;
  av1_read_coeffs_txb_facade(cm, xd, r, row, col, block_idx, plane, pd->dqcoeff,
                             tx_size, &max_scan_line, &eob);
  // tx_type will be read out in av1_read_coeffs_txb_facade
  TX_TYPE tx_type = get_tx_type(plane_type, xd, block_idx, tx_size);
#else   // CONFIG_LV_MAP
  int16_t max_scan_line = 0;
  TX_TYPE tx_type = get_tx_type(plane_type, xd, block_idx, tx_size);
  const SCAN_ORDER *scan_order = get_scan(cm, tx_size, tx_type, 1);
  const int eob =
      av1_decode_block_tokens(cm, xd, plane, scan_order, col, row, tx_size,
                              tx_type, &max_scan_line, r, segment_id);
#endif  // CONFIG_LV_MAP
  uint8_t *dst =
      &pd->dst.buf[(row * pd->dst.stride + col) << tx_size_wide_log2[0]];
  if (eob)
    inverse_transform_block(xd, plane, tx_type, tx_size, dst, pd->dst.stride,
                            max_scan_line, eob);
#else
  TX_TYPE tx_type = get_tx_type(plane_type, xd, block_idx, tx_size);
  eob = av1_pvq_decode_helper2(cm, xd, &xd->mi[0]->mbmi, plane, row, col,
                               tx_size, tx_type);
#endif
  return eob;
}
#endif  // !CONFIG_VAR_TX || CONFIG_SUPERTX

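// Points xd->mi at the mode info for this block, propagates it across the
// covered mi grid, and sets up plane sizes, skip context, edge distances and
// destination buffers.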
static void set_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                        BLOCK_SIZE bsize, int mi_row, int mi_col, int bw,
                        int bh, int x_mis, int y_mis) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;
  const TileInfo *const tile = &xd->tile;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
  // TODO(slavarnway): Generate sb_type based on bwl and bhl, instead of
  // passing bsize from decode_partition().
  xd->mi[0]->mbmi.sb_type = bsize;
#if CONFIG_RD_DEBUG
  xd->mi[0]->mbmi.mi_row = mi_row;
  xd->mi[0]->mbmi.mi_col = mi_col;
#endif
  for (y = 0; y < y_mis; ++y)
    for (x = !y; x < x_mis; ++x) xd->mi[y * cm->mi_stride + x] = xd->mi[0];

  set_plane_n4(xd, bw, bh);
  set_skip_context(xd, mi_row, mi_col);

#if CONFIG_VAR_TX
  xd->max_tx_size = max_txsize_lookup[bsize];
#endif

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);

  av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
                       mi_col);
}

#if CONFIG_SUPERTX
static MB_MODE_INFO *set_offsets_extend(AV1_COMMON *const cm,
                                        MACROBLOCKD *const xd,
                                        const TileInfo *const tile,
                                        BLOCK_SIZE bsize_pred, int mi_row_pred,
                                        int mi_col_pred, int mi_row_ori,
                                        int mi_col_ori) {
  // Used in supertx
  // (mi_row_ori, mi_col_ori): location for mv
  // (mi_row_pred, mi_col_pred, bsize_pred): region to predict
  const int bw = mi_size_wide[bsize_pred];
  const int bh = mi_size_high[bsize_pred];
  const int offset = mi_row_ori * cm->mi_stride + mi_col_ori;
  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;
  set_mi_row_col(xd, tile, mi_row_pred, bh, mi_col_pred, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);

  xd->up_available = (mi_row_ori > tile->mi_row_start);
  xd->left_available = (mi_col_ori > tile->mi_col_start);

  set_plane_n4(xd, bw, bh);

  return &xd->mi[0]->mbmi;
}

#if CONFIG_SUPERTX
static MB_MODE_INFO *set_mb_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                    BLOCK_SIZE bsize, int mi_row, int mi_col,
                                    int bw, int bh, int x_mis, int y_mis) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  const TileInfo *const tile = &xd->tile;
  int x, y;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;
  xd->mi[0]->mbmi.sb_type = bsize;
  for (y = 0; y < y_mis; ++y)
    for (x = !y; x < x_mis; ++x) xd->mi[y * cm->mi_stride + x] = xd->mi[0];

  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);
  return &xd->mi[0]->mbmi;
}
#endif

static void set_offsets_topblock(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                 const TileInfo *const tile, BLOCK_SIZE bsize,
                                 int mi_row, int mi_col) {
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int offset = mi_row * cm->mi_stride + mi_col;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;

  set_plane_n4(xd, bw, bh);

  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);

  av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
                       mi_col);
}

static void set_param_topblock(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                               BLOCK_SIZE bsize, int mi_row, int mi_col,
                               int txfm, int skip) {
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;

  for (y = 0; y < y_mis; ++y)
    for (x = 0; x < x_mis; ++x) {
      xd->mi[y * cm->mi_stride + x]->mbmi.skip = skip;
      xd->mi[y * cm->mi_stride + x]->mbmi.tx_type = txfm;
    }
#if CONFIG_VAR_TX
  xd->above_txfm_context = cm->above_txfm_context + mi_col;
  xd->left_txfm_context =
      xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
  set_txfm_ctxs(xd->mi[0]->mbmi.tx_size, bw, bh, skip, xd);
#endif
}

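// Sets up the idx-th reference for the current block; reports an error for
// invalid scale factors and propagates corruption from the reference buffer.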
static void set_ref(AV1_COMMON *const cm, MACROBLOCKD *const xd, int idx,
                    int mi_row, int mi_col) {
  MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
  RefBuffer *ref_buffer =
      has_second_ref(mbmi) ? &cm->frame_refs[mbmi->ref_frame[idx] - LAST_FRAME]
                           : &cm->frame_refs[mbmi->ref_frame[0] - LAST_FRAME];
#else
  RefBuffer *ref_buffer = &cm->frame_refs[mbmi->ref_frame[idx] - LAST_FRAME];
#endif  // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
  xd->block_refs[idx] = ref_buffer;
  if (!av1_is_valid_scale(&ref_buffer->sf))
    aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                       "Invalid scale factors");
  av1_setup_pre_planes(xd, idx, ref_buffer->buf, mi_row, mi_col,
                       &ref_buffer->sf);
  aom_merge_corrupted_flag(&xd->corrupted, ref_buffer->buf->corrupted);
}

static void dec_predict_b_extend(
    AV1Decoder *const pbi, MACROBLOCKD *const xd, const TileInfo *const tile,
    int block, int mi_row_ori, int mi_col_ori, int mi_row_pred, int mi_col_pred,
    int mi_row_top, int mi_col_top, int plane, uint8_t *dst_buf, int dst_stride,
    BLOCK_SIZE bsize_top, BLOCK_SIZE bsize_pred, int b_sub8x8, int bextend) {
  // Used in supertx
  // (mi_row_ori, mi_col_ori): location for mv
  // (mi_row_pred, mi_col_pred, bsize_pred): region to predict
  // (mi_row_top, mi_col_top, bsize_top): region of the top partition size
  // block: sub location of sub8x8 blocks
  // b_sub8x8: 1: ori is sub8x8; 0: ori is not sub8x8
  // bextend: 1: region to predict is an extension of ori; 0: not
  int r = (mi_row_pred - mi_row_top) * MI_SIZE;
  int c = (mi_col_pred - mi_col_top) * MI_SIZE;
  const int mi_width_top = mi_size_wide[bsize_top];
  const int mi_height_top = mi_size_high[bsize_top];
  MB_MODE_INFO *mbmi;
  AV1_COMMON *const cm = &pbi->common;

  if (mi_row_pred < mi_row_top || mi_col_pred < mi_col_top ||
      mi_row_pred >= mi_row_top + mi_height_top ||
      mi_col_pred >= mi_col_top + mi_width_top || mi_row_pred >= cm->mi_rows ||
      mi_col_pred >= cm->mi_cols)
    return;

  mbmi = set_offsets_extend(cm, xd, tile, bsize_pred, mi_row_pred, mi_col_pred,
                            mi_row_ori, mi_col_ori);
  set_ref(cm, xd, 0, mi_row_pred, mi_col_pred);
  if (has_second_ref(&xd->mi[0]->mbmi)
#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
      || is_inter_singleref_comp_mode(xd->mi[0]->mbmi.mode)
#endif  // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
      )
    set_ref(cm, xd, 1, mi_row_pred, mi_col_pred);
  if (!bextend) mbmi->tx_size = max_txsize_lookup[bsize_top];

  xd->plane[plane].dst.stride = dst_stride;
  xd->plane[plane].dst.buf =
      dst_buf + (r >> xd->plane[plane].subsampling_y) * dst_stride +
      (c >> xd->plane[plane].subsampling_x);

  if (!b_sub8x8)
    av1_build_inter_predictor_sb_extend(&pbi->common, xd,
#if CONFIG_EXT_INTER
                                        mi_row_ori, mi_col_ori,
#endif  // CONFIG_EXT_INTER
                                        mi_row_pred, mi_col_pred, plane,
                                        bsize_pred);
  else
    av1_build_inter_predictor_sb_sub8x8_extend(&pbi->common, xd,
#if CONFIG_EXT_INTER
                                               mi_row_ori, mi_col_ori,
#endif  // CONFIG_EXT_INTER
                                               mi_row_pred, mi_col_pred, plane,
                                               bsize_pred, block);
}

static void dec_extend_dir(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                           const TileInfo *const tile, int block,
                           BLOCK_SIZE bsize, BLOCK_SIZE top_bsize,
                           int mi_row_ori, int mi_col_ori, int mi_row,
                           int mi_col, int mi_row_top, int mi_col_top,
                           int plane, uint8_t *dst_buf, int dst_stride,
                           int dir) {
  // dir: 0-lower, 1-upper, 2-left, 3-right
  // 4-lowerleft, 5-upperleft, 6-lowerright, 7-upperright
  const int mi_width = mi_size_wide[bsize];
  const int mi_height = mi_size_high[bsize];
  int xss = xd->plane[1].subsampling_x;
  int yss = xd->plane[1].subsampling_y;
#if CONFIG_CB4X4
  const int unify_bsize = 1;
#else
  const int unify_bsize = 0;
#endif
  int b_sub8x8 = (bsize < BLOCK_8X8) && !unify_bsize ? 1 : 0;
  BLOCK_SIZE extend_bsize;
  int mi_row_pred, mi_col_pred;

  int wide_unit, high_unit;
  int i, j;
  int ext_offset = 0;

  if (dir == 0 || dir == 1) {
    extend_bsize =
        (mi_width == mi_size_wide[BLOCK_8X8] || bsize < BLOCK_8X8 || xss < yss)
            ? BLOCK_8X8
            : BLOCK_16X8;
#if CONFIG_CB4X4
    if (bsize < BLOCK_8X8) {
      extend_bsize = BLOCK_4X4;
      ext_offset = mi_size_wide[BLOCK_8X8];
    }
#endif

    wide_unit = mi_size_wide[extend_bsize];
    high_unit = mi_size_high[extend_bsize];

    mi_row_pred = mi_row + ((dir == 0) ? mi_height : -(mi_height + ext_offset));
    mi_col_pred = mi_col;

    for (j = 0; j < mi_height + ext_offset; j += high_unit)
      for (i = 0; i < mi_width + ext_offset; i += wide_unit)
        dec_predict_b_extend(pbi, xd, tile, block, mi_row_ori, mi_col_ori,
                             mi_row_pred + j, mi_col_pred + i, mi_row_top,
                             mi_col_top, plane, dst_buf, dst_stride, top_bsize,
                             extend_bsize, b_sub8x8, 1);
  } else if (dir == 2 || dir == 3) {
    extend_bsize =
        (mi_height == mi_size_high[BLOCK_8X8] || bsize < BLOCK_8X8 || yss < xss)
            ? BLOCK_8X8
            : BLOCK_8X16;
#if CONFIG_CB4X4
    if (bsize < BLOCK_8X8) {
      extend_bsize = BLOCK_4X4;
      ext_offset = mi_size_wide[BLOCK_8X8];
    }
#endif

    wide_unit = mi_size_wide[extend_bsize];
    high_unit = mi_size_high[extend_bsize];

    mi_row_pred = mi_row;
    mi_col_pred = mi_col + ((dir == 3) ? mi_width : -(mi_width + ext_offset));

    for (j = 0; j < mi_height + ext_offset; j += high_unit)
      for (i = 0; i < mi_width + ext_offset; i += wide_unit)
        dec_predict_b_extend(pbi, xd, tile, block, mi_row_ori, mi_col_ori,
                             mi_row_pred + j, mi_col_pred + i, mi_row_top,
                             mi_col_top, plane, dst_buf, dst_stride, top_bsize,
                             extend_bsize, b_sub8x8, 1);
  } else {
    extend_bsize = BLOCK_8X8;
#if CONFIG_CB4X4
    if (bsize < BLOCK_8X8) {
      extend_bsize = BLOCK_4X4;
      ext_offset = mi_size_wide[BLOCK_8X8];
    }
#endif
    wide_unit = mi_size_wide[extend_bsize];
    high_unit = mi_size_high[extend_bsize];

    mi_row_pred = mi_row + ((dir == 4 || dir == 6) ? mi_height
                                                   : -(mi_height + ext_offset));
    mi_col_pred =
        mi_col + ((dir == 6 || dir == 7) ? mi_width : -(mi_width + ext_offset));
1156
1157 for (j = 0; j < mi_height + ext_offset; j += high_unit)
1158 for (i = 0; i < mi_width + ext_offset; i += wide_unit)
Yue Chen8e689e42017-06-02 10:56:10 -07001159 dec_predict_b_extend(pbi, xd, tile, block, mi_row_ori, mi_col_ori,
Jingning Han24f24a52016-12-27 10:13:28 -08001160 mi_row_pred + j, mi_col_pred + i, mi_row_top,
Yue Chen8e689e42017-06-02 10:56:10 -07001161 mi_col_top, plane, dst_buf, dst_stride, top_bsize,
Jingning Han24f24a52016-12-27 10:13:28 -08001162 extend_bsize, b_sub8x8, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001163 }
1164}
1165
Yaowu Xuf883b422016-08-30 14:01:10 -07001166static void dec_extend_all(AV1Decoder *const pbi, MACROBLOCKD *const xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001167 const TileInfo *const tile, int block,
Yue Chen8e689e42017-06-02 10:56:10 -07001168 BLOCK_SIZE bsize, BLOCK_SIZE top_bsize,
1169 int mi_row_ori, int mi_col_ori, int mi_row,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001170 int mi_col, int mi_row_top, int mi_col_top,
Yue Chen8e689e42017-06-02 10:56:10 -07001171 int plane, uint8_t *dst_buf, int dst_stride) {
Sarah Parkerfb9e6652017-04-25 16:32:06 -07001172 for (int i = 0; i < 8; ++i) {
Yue Chen8e689e42017-06-02 10:56:10 -07001173 dec_extend_dir(pbi, xd, tile, block, bsize, top_bsize, mi_row_ori,
1174 mi_col_ori, mi_row, mi_col, mi_row_top, mi_col_top, plane,
1175 dst_buf, dst_stride, i);
Sarah Parkerfb9e6652017-04-25 16:32:06 -07001176 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001177}
1178
Yaowu Xuf883b422016-08-30 14:01:10 -07001179static void dec_predict_sb_complex(AV1Decoder *const pbi, MACROBLOCKD *const xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001180 const TileInfo *const tile, int mi_row,
1181 int mi_col, int mi_row_top, int mi_col_top,
1182 BLOCK_SIZE bsize, BLOCK_SIZE top_bsize,
1183 uint8_t *dst_buf[3], int dst_stride[3]) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001184 const AV1_COMMON *const cm = &pbi->common;
Jingning Han5b7706a2016-12-21 09:55:10 -08001185 const int hbs = mi_size_wide[bsize] / 2;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001186 const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
1187 const BLOCK_SIZE subsize = get_subsize(bsize, partition);
1188#if CONFIG_EXT_PARTITION_TYPES
1189 const BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
1190#endif
1191 int i;
1192 const int mi_offset = mi_row * cm->mi_stride + mi_col;
1193 uint8_t *dst_buf1[3], *dst_buf2[3], *dst_buf3[3];
Jingning Hanfeb517c2016-12-21 16:02:07 -08001194#if CONFIG_CB4X4
1195 const int unify_bsize = 1;
1196#else
1197 const int unify_bsize = 0;
1198#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001199
1200 DECLARE_ALIGNED(16, uint8_t, tmp_buf1[MAX_MB_PLANE * MAX_TX_SQUARE * 2]);
1201 DECLARE_ALIGNED(16, uint8_t, tmp_buf2[MAX_MB_PLANE * MAX_TX_SQUARE * 2]);
1202 DECLARE_ALIGNED(16, uint8_t, tmp_buf3[MAX_MB_PLANE * MAX_TX_SQUARE * 2]);
1203 int dst_stride1[3] = { MAX_TX_SIZE, MAX_TX_SIZE, MAX_TX_SIZE };
1204 int dst_stride2[3] = { MAX_TX_SIZE, MAX_TX_SIZE, MAX_TX_SIZE };
1205 int dst_stride3[3] = { MAX_TX_SIZE, MAX_TX_SIZE, MAX_TX_SIZE };
1206
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02001207#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07001208 if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
1209 int len = sizeof(uint16_t);
1210 dst_buf1[0] = CONVERT_TO_BYTEPTR(tmp_buf1);
1211 dst_buf1[1] = CONVERT_TO_BYTEPTR(tmp_buf1 + MAX_TX_SQUARE * len);
1212 dst_buf1[2] = CONVERT_TO_BYTEPTR(tmp_buf1 + 2 * MAX_TX_SQUARE * len);
1213 dst_buf2[0] = CONVERT_TO_BYTEPTR(tmp_buf2);
1214 dst_buf2[1] = CONVERT_TO_BYTEPTR(tmp_buf2 + MAX_TX_SQUARE * len);
1215 dst_buf2[2] = CONVERT_TO_BYTEPTR(tmp_buf2 + 2 * MAX_TX_SQUARE * len);
1216 dst_buf3[0] = CONVERT_TO_BYTEPTR(tmp_buf3);
1217 dst_buf3[1] = CONVERT_TO_BYTEPTR(tmp_buf3 + MAX_TX_SQUARE * len);
1218 dst_buf3[2] = CONVERT_TO_BYTEPTR(tmp_buf3 + 2 * MAX_TX_SQUARE * len);
1219 } else {
1220#endif
1221 dst_buf1[0] = tmp_buf1;
1222 dst_buf1[1] = tmp_buf1 + MAX_TX_SQUARE;
1223 dst_buf1[2] = tmp_buf1 + 2 * MAX_TX_SQUARE;
1224 dst_buf2[0] = tmp_buf2;
1225 dst_buf2[1] = tmp_buf2 + MAX_TX_SQUARE;
1226 dst_buf2[2] = tmp_buf2 + 2 * MAX_TX_SQUARE;
1227 dst_buf3[0] = tmp_buf3;
1228 dst_buf3[1] = tmp_buf3 + MAX_TX_SQUARE;
1229 dst_buf3[2] = tmp_buf3 + 2 * MAX_TX_SQUARE;
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02001230#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07001231 }
1232#endif
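  // Scratch-buffer layout set up above (a sketch; sizes follow the
  // DECLARE_ALIGNED declarations): each tmp_bufN holds MAX_MB_PLANE planes of
  // MAX_TX_SQUARE samples, doubled so 16-bit high-bit-depth samples also fit:
  //   dst_bufN[p] = tmp_bufN + p * MAX_TX_SQUARE * (hbd ? sizeof(uint16_t) : 1)
  // and every plane is addressed with stride MAX_TX_SIZE via dst_strideN.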
1233
1234 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
1235
1236 xd->mi = cm->mi_grid_visible + mi_offset;
1237 xd->mi[0] = cm->mi + mi_offset;
1238
1239 for (i = 0; i < MAX_MB_PLANE; i++) {
1240 xd->plane[i].dst.buf = dst_buf[i];
1241 xd->plane[i].dst.stride = dst_stride[i];
1242 }
1243
1244 switch (partition) {
1245 case PARTITION_NONE:
1246 assert(bsize < top_bsize);
Yue Chen8e689e42017-06-02 10:56:10 -07001247 for (i = 0; i < MAX_MB_PLANE; i++) {
1248 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1249 mi_row_top, mi_col_top, i, dst_buf[i],
1250 dst_stride[i], top_bsize, bsize, 0, 0);
1251 dec_extend_all(pbi, xd, tile, 0, bsize, top_bsize, mi_row, mi_col,
1252 mi_row, mi_col, mi_row_top, mi_col_top, i, dst_buf[i],
1253 dst_stride[i]);
1254 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001255 break;
1256 case PARTITION_HORZ:
Jingning Hanfeb517c2016-12-21 16:02:07 -08001257 if (bsize == BLOCK_8X8 && !unify_bsize) {
Yue Chen8e689e42017-06-02 10:56:10 -07001258 for (i = 0; i < MAX_MB_PLANE; i++) {
 1259          // For sub8x8, predict in 8x8 units
1260 // First half
1261 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1262 mi_row_top, mi_col_top, i, dst_buf[i],
1263 dst_stride[i], top_bsize, BLOCK_8X8, 1, 0);
1264 if (bsize < top_bsize)
1265 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1266 mi_row, mi_col, mi_row_top, mi_col_top, i,
1267 dst_buf[i], dst_stride[i]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001268
Yue Chen8e689e42017-06-02 10:56:10 -07001269 // Second half
1270 dec_predict_b_extend(pbi, xd, tile, 2, mi_row, mi_col, mi_row, mi_col,
1271 mi_row_top, mi_col_top, i, dst_buf1[i],
1272 dst_stride1[i], top_bsize, BLOCK_8X8, 1, 1);
1273 if (bsize < top_bsize)
1274 dec_extend_all(pbi, xd, tile, 2, subsize, top_bsize, mi_row, mi_col,
1275 mi_row, mi_col, mi_row_top, mi_col_top, i,
1276 dst_buf1[i], dst_stride1[i]);
1277 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001278
1279 // weighted average to smooth the boundary
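        // (Illustrative only, not the exact kernel:) the two predictions are
        // cross-faded along the horizontal split line, roughly
        //   out = (w * pred_top + (64 - w) * pred_bottom + 32) >> 6
        // with w ramping across a few rows around the boundary; the exact
        // mask and rounding live in av1_build_masked_inter_predictor_complex().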
1280 xd->plane[0].dst.buf = dst_buf[0];
1281 xd->plane[0].dst.stride = dst_stride[0];
Yaowu Xuf883b422016-08-30 14:01:10 -07001282 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001283 xd, dst_buf[0], dst_stride[0], dst_buf1[0], dst_stride1[0], mi_row,
1284 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
1285 0);
1286 } else {
Yue Chen8e689e42017-06-02 10:56:10 -07001287 for (i = 0; i < MAX_MB_PLANE; i++) {
1288#if CONFIG_CB4X4
1289 const struct macroblockd_plane *pd = &xd->plane[i];
1290 int handle_chroma_sub8x8 = need_handle_chroma_sub8x8(
1291 subsize, pd->subsampling_x, pd->subsampling_y);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001292
Yue Chen8e689e42017-06-02 10:56:10 -07001293 if (handle_chroma_sub8x8) {
1294 int mode_offset_row = CONFIG_CHROMA_SUB8X8 ? hbs : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001295
Yue Chen8e689e42017-06-02 10:56:10 -07001296 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + mode_offset_row,
1297 mi_col, mi_row, mi_col, mi_row_top, mi_col_top,
1298 i, dst_buf[i], dst_stride[i], top_bsize, bsize,
1299 0, 0);
1300 if (bsize < top_bsize)
1301 dec_extend_all(pbi, xd, tile, 0, bsize, top_bsize,
1302 mi_row + mode_offset_row, mi_col, mi_row, mi_col,
1303 mi_row_top, mi_col_top, i, dst_buf[i],
1304 dst_stride[i]);
1305 } else {
1306#endif
1307 // First half
1308 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row,
1309 mi_col, mi_row_top, mi_col_top, i, dst_buf[i],
1310 dst_stride[i], top_bsize, subsize, 0, 0);
1311 if (bsize < top_bsize)
1312 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1313 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1314 dst_buf[i], dst_stride[i]);
1315 else
1316 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1317 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1318 dst_buf[i], dst_stride[i], 0);
1319
1320 if (mi_row + hbs < cm->mi_rows) {
1321 // Second half
1322 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col,
1323 mi_row + hbs, mi_col, mi_row_top, mi_col_top,
1324 i, dst_buf1[i], dst_stride1[i], top_bsize,
1325 subsize, 0, 0);
1326 if (bsize < top_bsize)
1327 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize,
1328 mi_row + hbs, mi_col, mi_row + hbs, mi_col,
1329 mi_row_top, mi_col_top, i, dst_buf1[i],
1330 dst_stride1[i]);
1331 else
1332 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize,
1333 mi_row + hbs, mi_col, mi_row + hbs, mi_col,
1334 mi_row_top, mi_col_top, i, dst_buf1[i],
1335 dst_stride1[i], 1);
1336
1337 // weighted average to smooth the boundary
1338 xd->plane[i].dst.buf = dst_buf[i];
1339 xd->plane[i].dst.stride = dst_stride[i];
1340 av1_build_masked_inter_predictor_complex(
1341 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i],
1342 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1343 PARTITION_HORZ, i);
1344 }
1345#if CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07001346 }
Yue Chen8e689e42017-06-02 10:56:10 -07001347#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001348 }
1349 }
1350 break;
1351 case PARTITION_VERT:
Jingning Hanfeb517c2016-12-21 16:02:07 -08001352 if (bsize == BLOCK_8X8 && !unify_bsize) {
Yue Chen8e689e42017-06-02 10:56:10 -07001353 for (i = 0; i < MAX_MB_PLANE; i++) {
1354 // First half
1355 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1356 mi_row_top, mi_col_top, i, dst_buf[i],
1357 dst_stride[i], top_bsize, BLOCK_8X8, 1, 0);
1358 if (bsize < top_bsize)
1359 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1360 mi_row, mi_col, mi_row_top, mi_col_top, i,
1361 dst_buf[i], dst_stride[i]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001362
Yue Chen8e689e42017-06-02 10:56:10 -07001363 // Second half
1364 dec_predict_b_extend(pbi, xd, tile, 1, mi_row, mi_col, mi_row, mi_col,
1365 mi_row_top, mi_col_top, i, dst_buf1[i],
1366 dst_stride1[i], top_bsize, BLOCK_8X8, 1, 1);
1367 if (bsize < top_bsize)
1368 dec_extend_all(pbi, xd, tile, 1, subsize, top_bsize, mi_row, mi_col,
1369 mi_row, mi_col, mi_row_top, mi_col_top, i,
1370 dst_buf1[i], dst_stride1[i]);
1371 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001372
1373 // Smooth
1374 xd->plane[0].dst.buf = dst_buf[0];
1375 xd->plane[0].dst.stride = dst_stride[0];
Yaowu Xuf883b422016-08-30 14:01:10 -07001376 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001377 xd, dst_buf[0], dst_stride[0], dst_buf1[0], dst_stride1[0], mi_row,
1378 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
1379 0);
1380 } else {
Yue Chen8e689e42017-06-02 10:56:10 -07001381 for (i = 0; i < MAX_MB_PLANE; i++) {
1382#if CONFIG_CB4X4
1383 const struct macroblockd_plane *pd = &xd->plane[i];
1384 int handle_chroma_sub8x8 = need_handle_chroma_sub8x8(
1385 subsize, pd->subsampling_x, pd->subsampling_y);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001386
Yue Chen8e689e42017-06-02 10:56:10 -07001387 if (handle_chroma_sub8x8) {
1388 int mode_offset_col = CONFIG_CHROMA_SUB8X8 ? hbs : 0;
1389 assert(i > 0 && bsize == BLOCK_8X8);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001390
Yue Chen8e689e42017-06-02 10:56:10 -07001391 dec_predict_b_extend(pbi, xd, tile, 0, mi_row,
1392 mi_col + mode_offset_col, mi_row, mi_col,
1393 mi_row_top, mi_col_top, i, dst_buf[i],
1394 dst_stride[i], top_bsize, bsize, 0, 0);
1395 if (bsize < top_bsize)
1396 dec_extend_all(pbi, xd, tile, 0, bsize, top_bsize, mi_row,
1397 mi_col + mode_offset_col, mi_row, mi_col,
1398 mi_row_top, mi_col_top, i, dst_buf[i],
1399 dst_stride[i]);
1400 } else {
1401#endif
1402 // First half
1403 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row,
1404 mi_col, mi_row_top, mi_col_top, i, dst_buf[i],
1405 dst_stride[i], top_bsize, subsize, 0, 0);
1406 if (bsize < top_bsize)
1407 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1408 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1409 dst_buf[i], dst_stride[i]);
1410 else
1411 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1412 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1413 dst_buf[i], dst_stride[i], 3);
1414
1415 // Second half
1416 if (mi_col + hbs < cm->mi_cols) {
1417 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs,
1418 mi_row, mi_col + hbs, mi_row_top, mi_col_top,
1419 i, dst_buf1[i], dst_stride1[i], top_bsize,
1420 subsize, 0, 0);
1421 if (bsize < top_bsize)
1422 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1423 mi_col + hbs, mi_row, mi_col + hbs, mi_row_top,
1424 mi_col_top, i, dst_buf1[i], dst_stride1[i]);
1425 else
1426 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1427 mi_col + hbs, mi_row, mi_col + hbs, mi_row_top,
1428 mi_col_top, i, dst_buf1[i], dst_stride1[i], 2);
1429
1430 // Smooth
1431 xd->plane[i].dst.buf = dst_buf[i];
1432 xd->plane[i].dst.stride = dst_stride[i];
1433 av1_build_masked_inter_predictor_complex(
1434 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i],
1435 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1436 PARTITION_VERT, i);
1437 }
1438#if CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07001439 }
Yue Chen8e689e42017-06-02 10:56:10 -07001440#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001441 }
1442 }
1443 break;
1444 case PARTITION_SPLIT:
Jingning Hanfeb517c2016-12-21 16:02:07 -08001445 if (bsize == BLOCK_8X8 && !unify_bsize) {
Yue Chen8e689e42017-06-02 10:56:10 -07001446 for (i = 0; i < MAX_MB_PLANE; i++) {
1447 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1448 mi_row_top, mi_col_top, i, dst_buf[i],
1449 dst_stride[i], top_bsize, BLOCK_8X8, 1, 0);
1450 dec_predict_b_extend(pbi, xd, tile, 1, mi_row, mi_col, mi_row, mi_col,
1451 mi_row_top, mi_col_top, i, dst_buf1[i],
1452 dst_stride1[i], top_bsize, BLOCK_8X8, 1, 1);
1453 dec_predict_b_extend(pbi, xd, tile, 2, mi_row, mi_col, mi_row, mi_col,
1454 mi_row_top, mi_col_top, i, dst_buf2[i],
1455 dst_stride2[i], top_bsize, BLOCK_8X8, 1, 1);
1456 dec_predict_b_extend(pbi, xd, tile, 3, mi_row, mi_col, mi_row, mi_col,
1457 mi_row_top, mi_col_top, i, dst_buf3[i],
1458 dst_stride3[i], top_bsize, BLOCK_8X8, 1, 1);
1459 if (bsize < top_bsize) {
1460 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1461 mi_row, mi_col, mi_row_top, mi_col_top, i,
1462 dst_buf[i], dst_stride[i]);
1463 dec_extend_all(pbi, xd, tile, 1, subsize, top_bsize, mi_row, mi_col,
1464 mi_row, mi_col, mi_row_top, mi_col_top, i,
1465 dst_buf1[i], dst_stride1[i]);
1466 dec_extend_all(pbi, xd, tile, 2, subsize, top_bsize, mi_row, mi_col,
1467 mi_row, mi_col, mi_row_top, mi_col_top, i,
1468 dst_buf2[i], dst_stride2[i]);
1469 dec_extend_all(pbi, xd, tile, 3, subsize, top_bsize, mi_row, mi_col,
1470 mi_row, mi_col, mi_row_top, mi_col_top, i,
1471 dst_buf3[i], dst_stride3[i]);
1472 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001473 }
Yue Chen8e689e42017-06-02 10:56:10 -07001474#if CONFIG_CB4X4
1475 } else if (bsize == BLOCK_8X8) {
1476 for (i = 0; i < MAX_MB_PLANE; i++) {
1477 const struct macroblockd_plane *pd = &xd->plane[i];
1478 int handle_chroma_sub8x8 = need_handle_chroma_sub8x8(
1479 subsize, pd->subsampling_x, pd->subsampling_y);
1480
1481 if (handle_chroma_sub8x8) {
1482 int mode_offset_row =
1483 CONFIG_CHROMA_SUB8X8 && mi_row + hbs < cm->mi_rows ? hbs : 0;
1484 int mode_offset_col =
1485 CONFIG_CHROMA_SUB8X8 && mi_col + hbs < cm->mi_cols ? hbs : 0;
1486
1487 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + mode_offset_row,
1488 mi_col + mode_offset_col, mi_row, mi_col,
1489 mi_row_top, mi_col_top, i, dst_buf[i],
1490 dst_stride[i], top_bsize, BLOCK_8X8, 0, 0);
1491 if (bsize < top_bsize)
1492 dec_extend_all(pbi, xd, tile, 0, BLOCK_8X8, top_bsize,
1493 mi_row + mode_offset_row, mi_col + mode_offset_col,
1494 mi_row, mi_col, mi_row_top, mi_col_top, i,
1495 dst_buf[i], dst_stride[i]);
1496 } else {
1497 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row,
1498 mi_col, mi_row_top, mi_col_top, i, dst_buf[i],
1499 dst_stride[i], top_bsize, subsize, 0, 0);
1500 if (mi_row < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1501 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs,
1502 mi_row, mi_col + hbs, mi_row_top, mi_col_top,
1503 i, dst_buf1[i], dst_stride1[i], top_bsize,
1504 subsize, 0, 0);
1505 if (mi_row + hbs < cm->mi_rows && mi_col < cm->mi_cols)
1506 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col,
1507 mi_row + hbs, mi_col, mi_row_top, mi_col_top,
1508 i, dst_buf2[i], dst_stride2[i], top_bsize,
1509 subsize, 0, 0);
1510 if (mi_row + hbs < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1511 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col + hbs,
1512 mi_row + hbs, mi_col + hbs, mi_row_top,
1513 mi_col_top, i, dst_buf3[i], dst_stride3[i],
1514 top_bsize, subsize, 0, 0);
1515
1516 if (bsize < top_bsize) {
1517 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1518 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1519 dst_buf[i], dst_stride[i]);
1520 if (mi_row < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1521 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1522 mi_col + hbs, mi_row, mi_col + hbs, mi_row_top,
1523 mi_col_top, i, dst_buf1[i], dst_stride1[i]);
1524 if (mi_row + hbs < cm->mi_rows && mi_col < cm->mi_cols)
1525 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize,
1526 mi_row + hbs, mi_col, mi_row + hbs, mi_col,
1527 mi_row_top, mi_col_top, i, dst_buf2[i],
1528 dst_stride2[i]);
1529 if (mi_row + hbs < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1530 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize,
1531 mi_row + hbs, mi_col + hbs, mi_row + hbs,
1532 mi_col + hbs, mi_row_top, mi_col_top, i,
1533 dst_buf3[i], dst_stride3[i]);
1534 }
1535 }
1536 }
1537#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001538 } else {
1539 dec_predict_sb_complex(pbi, xd, tile, mi_row, mi_col, mi_row_top,
1540 mi_col_top, subsize, top_bsize, dst_buf,
1541 dst_stride);
1542 if (mi_row < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1543 dec_predict_sb_complex(pbi, xd, tile, mi_row, mi_col + hbs,
1544 mi_row_top, mi_col_top, subsize, top_bsize,
1545 dst_buf1, dst_stride1);
1546 if (mi_row + hbs < cm->mi_rows && mi_col < cm->mi_cols)
1547 dec_predict_sb_complex(pbi, xd, tile, mi_row + hbs, mi_col,
1548 mi_row_top, mi_col_top, subsize, top_bsize,
1549 dst_buf2, dst_stride2);
1550 if (mi_row + hbs < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1551 dec_predict_sb_complex(pbi, xd, tile, mi_row + hbs, mi_col + hbs,
1552 mi_row_top, mi_col_top, subsize, top_bsize,
1553 dst_buf3, dst_stride3);
1554 }
1555 for (i = 0; i < MAX_MB_PLANE; i++) {
Yue Chen8e689e42017-06-02 10:56:10 -07001556#if CONFIG_CB4X4
1557 const struct macroblockd_plane *pd = &xd->plane[i];
1558 int handle_chroma_sub8x8 = need_handle_chroma_sub8x8(
1559 subsize, pd->subsampling_x, pd->subsampling_y);
1560 if (handle_chroma_sub8x8) continue; // Skip <4x4 chroma smoothing
1561#else
Jingning Han24f24a52016-12-27 10:13:28 -08001562 if (bsize == BLOCK_8X8 && i != 0)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001563 continue; // Skip <4x4 chroma smoothing
Jingning Han9e0976a2016-12-27 17:52:42 -08001564#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001565 if (mi_row < cm->mi_rows && mi_col + hbs < cm->mi_cols) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001566 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001567 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i],
1568 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1569 PARTITION_VERT, i);
1570 if (mi_row + hbs < cm->mi_rows) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001571 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001572 xd, dst_buf2[i], dst_stride2[i], dst_buf3[i], dst_stride3[i],
1573 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1574 PARTITION_VERT, i);
Yaowu Xuf883b422016-08-30 14:01:10 -07001575 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001576 xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i],
1577 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1578 PARTITION_HORZ, i);
1579 }
1580 } else if (mi_row + hbs < cm->mi_rows && mi_col < cm->mi_cols) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001581 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001582 xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i],
1583 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1584 PARTITION_HORZ, i);
1585 }
1586 }
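      // Merge order for the four SPLIT quadrants (0=TL, 1=TR, 2=BL, 3=BR), as
      // implemented above: TL|TR are blended across the vertical edge, then
      // BL|BR, and finally the merged top half is blended with the merged
      // bottom half across the horizontal edge, skipping any quadrant that
      // lies outside the frame.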
1587 break;
1588#if CONFIG_EXT_PARTITION_TYPES
1589 case PARTITION_HORZ_A:
1590 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1591 mi_row_top, mi_col_top, dst_buf, dst_stride,
1592 top_bsize, bsize2, 0, 0);
1593 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col,
1594 mi_row_top, mi_col_top, dst_buf, dst_stride);
1595
1596 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs, mi_row,
1597 mi_col + hbs, mi_row_top, mi_col_top, dst_buf1,
1598 dst_stride1, top_bsize, bsize2, 0, 0);
1599 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col + hbs,
1600 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1601
1602 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col, mi_row + hbs,
1603 mi_col, mi_row_top, mi_col_top, dst_buf2,
1604 dst_stride2, top_bsize, subsize, 0, 0);
1605 if (bsize < top_bsize)
1606 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row + hbs,
1607 mi_col, mi_row_top, mi_col_top, dst_buf2, dst_stride2);
1608 else
1609 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row + hbs,
1610 mi_col, mi_row_top, mi_col_top, dst_buf2, dst_stride2,
1611 1);
1612
1613 for (i = 0; i < MAX_MB_PLANE; i++) {
1614 xd->plane[i].dst.buf = dst_buf[i];
1615 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001616 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001617 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1618 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
1619 i);
1620 }
1621 for (i = 0; i < MAX_MB_PLANE; i++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001622 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001623 xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i], mi_row,
1624 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
1625 i);
1626 }
1627 break;
1628 case PARTITION_VERT_A:
1629
1630 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1631 mi_row_top, mi_col_top, dst_buf, dst_stride,
1632 top_bsize, bsize2, 0, 0);
1633 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col,
1634 mi_row_top, mi_col_top, dst_buf, dst_stride);
1635
1636 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col, mi_row + hbs,
1637 mi_col, mi_row_top, mi_col_top, dst_buf1,
1638 dst_stride1, top_bsize, bsize2, 0, 0);
1639 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs, mi_col,
1640 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1641
1642 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs, mi_row,
1643 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1644 dst_stride2, top_bsize, subsize, 0, 0);
1645 if (bsize < top_bsize)
1646 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1647 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1648 dst_stride2);
1649 else
1650 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1651 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1652 dst_stride2, 2);
1653
1654 for (i = 0; i < MAX_MB_PLANE; i++) {
1655 xd->plane[i].dst.buf = dst_buf[i];
1656 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001657 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001658 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1659 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
1660 i);
1661 }
1662 for (i = 0; i < MAX_MB_PLANE; i++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001663 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001664 xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i], mi_row,
1665 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
1666 i);
1667 }
1668 break;
1669 case PARTITION_HORZ_B:
1670 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1671 mi_row_top, mi_col_top, dst_buf, dst_stride,
1672 top_bsize, subsize, 0, 0);
1673 if (bsize < top_bsize)
1674 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1675 mi_row_top, mi_col_top, dst_buf, dst_stride);
1676 else
1677 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1678 mi_row_top, mi_col_top, dst_buf, dst_stride, 0);
1679
1680 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col, mi_row + hbs,
1681 mi_col, mi_row_top, mi_col_top, dst_buf1,
1682 dst_stride1, top_bsize, bsize2, 0, 0);
1683 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs, mi_col,
1684 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1685
1686 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col + hbs,
1687 mi_row + hbs, mi_col + hbs, mi_row_top, mi_col_top,
1688 dst_buf2, dst_stride2, top_bsize, bsize2, 0, 0);
1689 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs,
1690 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1691 dst_stride2);
1692
1693 for (i = 0; i < MAX_MB_PLANE; i++) {
1694 xd->plane[i].dst.buf = dst_buf1[i];
1695 xd->plane[i].dst.stride = dst_stride1[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001696 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001697 xd, dst_buf1[i], dst_stride1[i], dst_buf2[i], dst_stride2[i],
1698 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1699 PARTITION_VERT, i);
1700 }
1701 for (i = 0; i < MAX_MB_PLANE; i++) {
1702 xd->plane[i].dst.buf = dst_buf[i];
1703 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001704 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001705 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1706 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
1707 i);
1708 }
1709 break;
1710 case PARTITION_VERT_B:
1711 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1712 mi_row_top, mi_col_top, dst_buf, dst_stride,
1713 top_bsize, subsize, 0, 0);
1714 if (bsize < top_bsize)
1715 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1716 mi_row_top, mi_col_top, dst_buf, dst_stride);
1717 else
1718 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1719 mi_row_top, mi_col_top, dst_buf, dst_stride, 3);
1720
1721 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs, mi_row,
1722 mi_col + hbs, mi_row_top, mi_col_top, dst_buf1,
1723 dst_stride1, top_bsize, bsize2, 0, 0);
1724 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col + hbs,
1725 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1726
1727 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col + hbs,
1728 mi_row + hbs, mi_col + hbs, mi_row_top, mi_col_top,
1729 dst_buf2, dst_stride2, top_bsize, bsize2, 0, 0);
1730 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs,
1731 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1732 dst_stride2);
1733
1734 for (i = 0; i < MAX_MB_PLANE; i++) {
1735 xd->plane[i].dst.buf = dst_buf1[i];
1736 xd->plane[i].dst.stride = dst_stride1[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001737 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001738 xd, dst_buf1[i], dst_stride1[i], dst_buf2[i], dst_stride2[i],
1739 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1740 PARTITION_HORZ, i);
1741 }
1742 for (i = 0; i < MAX_MB_PLANE; i++) {
1743 xd->plane[i].dst.buf = dst_buf[i];
1744 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001745 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001746 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1747 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
1748 i);
1749 }
1750 break;
1751#endif // CONFIG_EXT_PARTITION_TYPES
1752 default: assert(0);
1753 }
1754}
1755
Yaowu Xu4ff59b52017-04-24 12:41:56 -07001756static void set_segment_id_supertx(const AV1_COMMON *const cm, int mi_row,
1757 int mi_col, BLOCK_SIZE bsize) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001758 const struct segmentation *seg = &cm->seg;
Jingning Han5b7706a2016-12-21 09:55:10 -08001759 const int miw = AOMMIN(mi_size_wide[bsize], cm->mi_cols - mi_col);
1760 const int mih = AOMMIN(mi_size_high[bsize], cm->mi_rows - mi_row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001761 const int mi_offset = mi_row * cm->mi_stride + mi_col;
1762 MODE_INFO **const mip = cm->mi_grid_visible + mi_offset;
1763 int r, c;
1764 int seg_id_supertx = MAX_SEGMENTS;
1765
1766 if (!seg->enabled) {
1767 seg_id_supertx = 0;
1768 } else {
1769 // Find the minimum segment_id
1770 for (r = 0; r < mih; r++)
1771 for (c = 0; c < miw; c++)
1772 seg_id_supertx =
Yaowu Xuf883b422016-08-30 14:01:10 -07001773 AOMMIN(mip[r * cm->mi_stride + c]->mbmi.segment_id, seg_id_supertx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001774 assert(0 <= seg_id_supertx && seg_id_supertx < MAX_SEGMENTS);
1775 }
1776
 1777  // Assign the segment_id back to segment_id_supertx
1778 for (r = 0; r < mih; r++)
1779 for (c = 0; c < miw; c++)
1780 mip[r * cm->mi_stride + c]->mbmi.segment_id_supertx = seg_id_supertx;
1781}
1782#endif // CONFIG_SUPERTX
1783
Yue Chen64550b62017-01-12 12:18:22 -08001784static void decode_mbmi_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001785#if CONFIG_SUPERTX
Yue Chen64550b62017-01-12 12:18:22 -08001786 int supertx_enabled,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001787#endif // CONFIG_SUPERTX
Yue Chen64550b62017-01-12 12:18:22 -08001788 int mi_row, int mi_col, aom_reader *r,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001789#if CONFIG_EXT_PARTITION_TYPES
Yue Chen64550b62017-01-12 12:18:22 -08001790 PARTITION_TYPE partition,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001791#endif // CONFIG_EXT_PARTITION_TYPES
Yue Chen64550b62017-01-12 12:18:22 -08001792 BLOCK_SIZE bsize) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001793 AV1_COMMON *const cm = &pbi->common;
Jingning Han85dc03f2016-12-06 16:03:10 -08001794 const int bw = mi_size_wide[bsize];
1795 const int bh = mi_size_high[bsize];
Yaowu Xuf883b422016-08-30 14:01:10 -07001796 const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
1797 const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);
Nathan E. Eggeebbd4792016-10-05 19:30:15 -04001798
Michael Bebenita6048d052016-08-25 14:40:54 -07001799#if CONFIG_ACCOUNTING
1800 aom_accounting_set_context(&pbi->accounting, mi_col, mi_row);
1801#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001802#if CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07001803 if (supertx_enabled) {
Yue Chen64550b62017-01-12 12:18:22 -08001804 set_mb_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001805 } else {
Yue Chen64550b62017-01-12 12:18:22 -08001806 set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001807 }
1808#if CONFIG_EXT_PARTITION_TYPES
1809 xd->mi[0]->mbmi.partition = partition;
1810#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001811 av1_read_mode_info(pbi, xd, supertx_enabled, mi_row, mi_col, r, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001812#else
Yue Chen64550b62017-01-12 12:18:22 -08001813 set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001814#if CONFIG_EXT_PARTITION_TYPES
1815 xd->mi[0]->mbmi.partition = partition;
1816#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001817 av1_read_mode_info(pbi, xd, mi_row, mi_col, r, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001818#endif // CONFIG_SUPERTX
1819
1820 if (bsize >= BLOCK_8X8 && (cm->subsampling_x || cm->subsampling_y)) {
1821 const BLOCK_SIZE uv_subsize =
1822 ss_size_lookup[bsize][cm->subsampling_x][cm->subsampling_y];
1823 if (uv_subsize == BLOCK_INVALID)
Yaowu Xuf883b422016-08-30 14:01:10 -07001824 aom_internal_error(xd->error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001825 "Invalid block size.");
1826 }
1827
1828#if CONFIG_SUPERTX
Yue Chen64550b62017-01-12 12:18:22 -08001829 xd->mi[0]->mbmi.segment_id_supertx = MAX_SEGMENTS;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001830#endif // CONFIG_SUPERTX
1831
Angie Chiangd0916d92017-03-10 17:54:18 -08001832 int reader_corrupted_flag = aom_reader_has_error(r);
1833 aom_merge_corrupted_flag(&xd->corrupted, reader_corrupted_flag);
Yue Chen64550b62017-01-12 12:18:22 -08001834}
1835
1836static void decode_token_and_recon_block(AV1Decoder *const pbi,
1837 MACROBLOCKD *const xd, int mi_row,
1838 int mi_col, aom_reader *r,
1839 BLOCK_SIZE bsize) {
1840 AV1_COMMON *const cm = &pbi->common;
1841 const int bw = mi_size_wide[bsize];
1842 const int bh = mi_size_high[bsize];
1843 const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
1844 const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);
Yue Chen64550b62017-01-12 12:18:22 -08001845
Angie Chiang44701f22017-02-27 10:36:44 -08001846 set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
1847 MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
Yue Chen19e7aa82016-11-30 14:05:39 -08001848
Arild Fuldseth07441162016-08-15 15:07:52 +02001849#if CONFIG_DELTA_Q
1850 if (cm->delta_q_present_flag) {
1851 int i;
1852 for (i = 0; i < MAX_SEGMENTS; i++) {
Fangwen Fu6160df22017-04-24 09:45:51 -07001853#if CONFIG_EXT_DELTA_Q
1854 xd->plane[0].seg_dequant[i][0] =
1855 av1_dc_quant(av1_get_qindex(&cm->seg, i, xd->current_qindex),
1856 cm->y_dc_delta_q, cm->bit_depth);
1857 xd->plane[0].seg_dequant[i][1] = av1_ac_quant(
1858 av1_get_qindex(&cm->seg, i, xd->current_qindex), 0, cm->bit_depth);
1859 xd->plane[1].seg_dequant[i][0] =
1860 av1_dc_quant(av1_get_qindex(&cm->seg, i, xd->current_qindex),
1861 cm->uv_dc_delta_q, cm->bit_depth);
1862 xd->plane[1].seg_dequant[i][1] =
1863 av1_ac_quant(av1_get_qindex(&cm->seg, i, xd->current_qindex),
1864 cm->uv_ac_delta_q, cm->bit_depth);
1865 xd->plane[2].seg_dequant[i][0] =
1866 av1_dc_quant(av1_get_qindex(&cm->seg, i, xd->current_qindex),
1867 cm->uv_dc_delta_q, cm->bit_depth);
1868 xd->plane[2].seg_dequant[i][1] =
1869 av1_ac_quant(av1_get_qindex(&cm->seg, i, xd->current_qindex),
1870 cm->uv_ac_delta_q, cm->bit_depth);
1871#else
Arild Fuldseth07441162016-08-15 15:07:52 +02001872 xd->plane[0].seg_dequant[i][0] =
1873 av1_dc_quant(xd->current_qindex, cm->y_dc_delta_q, cm->bit_depth);
1874 xd->plane[0].seg_dequant[i][1] =
1875 av1_ac_quant(xd->current_qindex, 0, cm->bit_depth);
1876 xd->plane[1].seg_dequant[i][0] =
1877 av1_dc_quant(xd->current_qindex, cm->uv_dc_delta_q, cm->bit_depth);
1878 xd->plane[1].seg_dequant[i][1] =
1879 av1_ac_quant(xd->current_qindex, cm->uv_ac_delta_q, cm->bit_depth);
1880 xd->plane[2].seg_dequant[i][0] =
1881 av1_dc_quant(xd->current_qindex, cm->uv_dc_delta_q, cm->bit_depth);
1882 xd->plane[2].seg_dequant[i][1] =
1883 av1_ac_quant(xd->current_qindex, cm->uv_ac_delta_q, cm->bit_depth);
Fangwen Fu6160df22017-04-24 09:45:51 -07001884#endif
Arild Fuldseth07441162016-08-15 15:07:52 +02001885 }
1886 }
1887#endif
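  // Sketch of the delta-q handling above: when delta_q_present_flag is set,
  // the per-segment dequantizers are refreshed from the running
  // xd->current_qindex (optionally mapped through av1_get_qindex() per
  // segment under CONFIG_EXT_DELTA_Q), i.e. conceptually
  //   dequant[seg][0] = av1_dc_quant(qindex, dc_delta_q, bit_depth)
  //   dequant[seg][1] = av1_ac_quant(qindex, ac_delta_q, bit_depth)
  // so the effective quantizer can change below frame granularity.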
1888
Jingning Han41bb3392016-12-14 10:46:48 -08001889#if CONFIG_CB4X4
Timothy B. Terriberrya2d5cde2017-05-10 18:33:50 -07001890 if (mbmi->skip) av1_reset_skip_context(xd, mi_row, mi_col, bsize);
Jingning Han41bb3392016-12-14 10:46:48 -08001891#else
Timothy B. Terriberrya2d5cde2017-05-10 18:33:50 -07001892 if (mbmi->skip) {
1893 av1_reset_skip_context(xd, mi_row, mi_col, AOMMAX(BLOCK_8X8, bsize));
1894 }
Jingning Han41bb3392016-12-14 10:46:48 -08001895#endif
Jingning Hand39cc722016-12-02 14:03:26 -08001896
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001897#if CONFIG_COEF_INTERLEAVE
1898 {
1899 const struct macroblockd_plane *const pd_y = &xd->plane[0];
1900 const struct macroblockd_plane *const pd_c = &xd->plane[1];
1901 const TX_SIZE tx_log2_y = mbmi->tx_size;
1902 const TX_SIZE tx_log2_c = get_uv_tx_size(mbmi, pd_c);
1903 const int tx_sz_y = (1 << tx_log2_y);
1904 const int tx_sz_c = (1 << tx_log2_c);
1905 const int num_4x4_w_y = pd_y->n4_w;
1906 const int num_4x4_h_y = pd_y->n4_h;
1907 const int num_4x4_w_c = pd_c->n4_w;
1908 const int num_4x4_h_c = pd_c->n4_h;
1909 const int max_4x4_w_y = get_max_4x4_size(num_4x4_w_y, xd->mb_to_right_edge,
1910 pd_y->subsampling_x);
1911 const int max_4x4_h_y = get_max_4x4_size(num_4x4_h_y, xd->mb_to_bottom_edge,
1912 pd_y->subsampling_y);
1913 const int max_4x4_w_c = get_max_4x4_size(num_4x4_w_c, xd->mb_to_right_edge,
1914 pd_c->subsampling_x);
1915 const int max_4x4_h_c = get_max_4x4_size(num_4x4_h_c, xd->mb_to_bottom_edge,
1916 pd_c->subsampling_y);
1917
1918 // The max_4x4_w/h may be smaller than tx_sz under some corner cases,
1919 // i.e. when the SB is splitted by tile boundaries.
1920 const int tu_num_w_y = (max_4x4_w_y + tx_sz_y - 1) / tx_sz_y;
1921 const int tu_num_h_y = (max_4x4_h_y + tx_sz_y - 1) / tx_sz_y;
1922 const int tu_num_w_c = (max_4x4_w_c + tx_sz_c - 1) / tx_sz_c;
1923 const int tu_num_h_c = (max_4x4_h_c + tx_sz_c - 1) / tx_sz_c;
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001924 const int tu_num_c = tu_num_w_c * tu_num_h_c;
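    // Worked example for the ceil-divisions above (illustrative numbers):
    // with tx_sz_y = 8 (in 4x4 units) and only 12x8 4x4 luma blocks left at a
    // tile edge, tu_num_w_y = (12 + 8 - 1) / 8 = 2 and
    // tu_num_h_y = (8 + 8 - 1) / 8 = 1, so the partial right-hand column
    // still gets its own transform-unit pass.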
1925
1926 if (!is_inter_block(mbmi)) {
1927 int tu_idx_c = 0;
1928 int row_y, col_y, row_c, col_c;
1929 int plane;
1930
1931#if CONFIG_PALETTE
1932 for (plane = 0; plane <= 1; ++plane) {
1933 if (mbmi->palette_mode_info.palette_size[plane])
1934 av1_decode_palette_tokens(xd, plane, r);
1935 }
1936#endif
1937
1938 for (row_y = 0; row_y < tu_num_h_y; row_y++) {
1939 for (col_y = 0; col_y < tu_num_w_y; col_y++) {
1940 // luma
1941 predict_and_reconstruct_intra_block(
1942 cm, xd, r, mbmi, 0, row_y * tx_sz_y, col_y * tx_sz_y, tx_log2_y);
1943 // chroma
1944 if (tu_idx_c < tu_num_c) {
1945 row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
1946 col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
1947 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 1, row_c,
1948 col_c, tx_log2_c);
1949 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 2, row_c,
1950 col_c, tx_log2_c);
1951 tu_idx_c++;
1952 }
1953 }
1954 }
1955
 1956       // In the 4:2:2 case, it's possible that chroma has more TUs than luma
1957 while (tu_idx_c < tu_num_c) {
1958 row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
1959 col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
1960 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 1, row_c, col_c,
1961 tx_log2_c);
1962 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 2, row_c, col_c,
1963 tx_log2_c);
1964 tu_idx_c++;
1965 }
1966 } else {
1967 // Prediction
Jingning Hanc44009c2017-05-06 11:36:49 -07001968 av1_build_inter_predictors_sb(cm, xd, mi_row, mi_col, NULL,
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001969 AOMMAX(bsize, BLOCK_8X8));
1970
1971 // Reconstruction
1972 if (!mbmi->skip) {
1973 int eobtotal = 0;
1974 int tu_idx_c = 0;
1975 int row_y, col_y, row_c, col_c;
1976
1977 for (row_y = 0; row_y < tu_num_h_y; row_y++) {
1978 for (col_y = 0; col_y < tu_num_w_y; col_y++) {
1979 // luma
1980 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id, 0,
1981 row_y * tx_sz_y,
1982 col_y * tx_sz_y, tx_log2_y);
1983 // chroma
1984 if (tu_idx_c < tu_num_c) {
1985 row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
1986 col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
1987 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id,
1988 1, row_c, col_c, tx_log2_c);
1989 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id,
1990 2, row_c, col_c, tx_log2_c);
1991 tu_idx_c++;
1992 }
1993 }
1994 }
1995
1996 // In 422 case, it's possilbe that Chroma has more TUs than Luma
1997 while (tu_idx_c < tu_num_c) {
1998 row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
1999 col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
2000 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id, 1,
2001 row_c, col_c, tx_log2_c);
2002 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id, 2,
2003 row_c, col_c, tx_log2_c);
2004 tu_idx_c++;
2005 }
2006
Alex Converse64d7ef62017-03-22 18:09:16 -07002007 // TODO(CONFIG_COEF_INTERLEAVE owners): bring eob == 0 corner case
 2008       // into line with the default configuration
2009 if (bsize >= BLOCK_8X8 && eobtotal == 0) mbmi->skip = 1;
iole moccagattaf25a4cf2016-11-11 23:57:57 -08002010 }
2011 }
2012 }
Angie Chiang133733c2017-03-17 12:50:20 -07002013#else // CONFIG_COEF_INTERLEAVE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002014 if (!is_inter_block(mbmi)) {
2015 int plane;
Fangwen Fub3be9262017-03-06 15:34:28 -08002016#if CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002017 for (plane = 0; plane <= 1; ++plane) {
2018 if (mbmi->palette_mode_info.palette_size[plane])
Yaowu Xuf883b422016-08-30 14:01:10 -07002019 av1_decode_palette_tokens(xd, plane, r);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002020 }
Fangwen Fub3be9262017-03-06 15:34:28 -08002021#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002022 for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
2023 const struct macroblockd_plane *const pd = &xd->plane[plane];
Angie Chiang7fcfee42017-02-24 15:51:03 -08002024 const TX_SIZE tx_size = get_tx_size(plane, xd);
Jingning Han2d64f122016-10-21 12:44:29 -07002025 const int stepr = tx_size_high_unit[tx_size];
2026 const int stepc = tx_size_wide_unit[tx_size];
Jingning Han41bb3392016-12-14 10:46:48 -08002027#if CONFIG_CB4X4
Jingning Han31b6a4f2017-02-23 11:05:53 -08002028#if CONFIG_CHROMA_2X2
2029 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
2030#else
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002031 const BLOCK_SIZE plane_bsize =
2032 AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
Jingning Han31b6a4f2017-02-23 11:05:53 -08002033#endif // CONFIG_CHROMA_2X2
Jingning Han41bb3392016-12-14 10:46:48 -08002034#else
Jingning Hanbafee8d2016-12-02 10:25:03 -08002035 const BLOCK_SIZE plane_bsize =
2036 get_plane_block_size(AOMMAX(BLOCK_8X8, bsize), pd);
Jingning Han41bb3392016-12-14 10:46:48 -08002037#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002038 int row, col;
Jingning Hanbafee8d2016-12-02 10:25:03 -08002039 const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
2040 const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002041#if CONFIG_CB4X4
Jingning Hand3a64432017-04-06 17:04:17 -07002042 if (!is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
2043 pd->subsampling_y))
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002044 continue;
2045#endif
2046
Yaowu Xuc27fc142016-08-22 16:08:15 -07002047 for (row = 0; row < max_blocks_high; row += stepr)
2048 for (col = 0; col < max_blocks_wide; col += stepc)
Angie Chiangff6d8902016-10-21 11:02:09 -07002049 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, plane, row, col,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002050 tx_size);
2051 }
2052 } else {
Yue Chen9ab6d712017-01-12 15:50:46 -08002053 int ref;
2054
Zoe Liu85b66462017-04-20 14:28:19 -07002055#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
2056 for (ref = 0; ref < 1 + is_inter_anyref_comp_mode(mbmi->mode); ++ref) {
2057 const MV_REFERENCE_FRAME frame =
2058 has_second_ref(mbmi) ? mbmi->ref_frame[ref] : mbmi->ref_frame[0];
2059#else
Yue Chen9ab6d712017-01-12 15:50:46 -08002060 for (ref = 0; ref < 1 + has_second_ref(mbmi); ++ref) {
2061 const MV_REFERENCE_FRAME frame = mbmi->ref_frame[ref];
Zoe Liu85b66462017-04-20 14:28:19 -07002062#endif // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
Alex Converse28744302017-04-13 14:46:22 -07002063 if (frame < LAST_FRAME) {
2064#if CONFIG_INTRABC
2065 assert(is_intrabc_block(mbmi));
2066 assert(frame == INTRA_FRAME);
2067 assert(ref == 0);
2068#else
2069 assert(0);
2070#endif // CONFIG_INTRABC
2071 } else {
2072 RefBuffer *ref_buf = &cm->frame_refs[frame - LAST_FRAME];
Yue Chen9ab6d712017-01-12 15:50:46 -08002073
Alex Converse28744302017-04-13 14:46:22 -07002074 xd->block_refs[ref] = ref_buf;
2075 if ((!av1_is_valid_scale(&ref_buf->sf)))
2076 aom_internal_error(xd->error_info, AOM_CODEC_UNSUP_BITSTREAM,
2077 "Reference frame has invalid dimensions");
2078 av1_setup_pre_planes(xd, ref, ref_buf->buf, mi_row, mi_col,
2079 &ref_buf->sf);
2080 }
Yue Chen9ab6d712017-01-12 15:50:46 -08002081 }
Yue Chen69f18e12016-09-08 14:48:15 -07002082
Jingning Han41bb3392016-12-14 10:46:48 -08002083#if CONFIG_CB4X4
Jingning Hanc44009c2017-05-06 11:36:49 -07002084 av1_build_inter_predictors_sb(cm, xd, mi_row, mi_col, NULL, bsize);
Jingning Han41bb3392016-12-14 10:46:48 -08002085#else
Jingning Hanc44009c2017-05-06 11:36:49 -07002086 av1_build_inter_predictors_sb(cm, xd, mi_row, mi_col, NULL,
Jingning Han41bb3392016-12-14 10:46:48 -08002087 AOMMAX(bsize, BLOCK_8X8));
2088#endif
Sarah Parker4c10a3c2017-04-10 19:37:59 -07002089
Yue Chencb60b182016-10-13 15:18:22 -07002090#if CONFIG_MOTION_VAR
2091 if (mbmi->motion_mode == OBMC_CAUSAL) {
Yue Chenf27b1602017-01-13 11:11:43 -08002092#if CONFIG_NCOBMC
2093 av1_build_ncobmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
2094#else
Yue Chen894fcce2016-10-21 16:50:52 -07002095 av1_build_obmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
Yue Chenf27b1602017-01-13 11:11:43 -08002096#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002097 }
Yue Chencb60b182016-10-13 15:18:22 -07002098#endif // CONFIG_MOTION_VAR
Yaowu Xuc27fc142016-08-22 16:08:15 -07002099
2100 // Reconstruction
2101 if (!mbmi->skip) {
2102 int eobtotal = 0;
2103 int plane;
2104
2105 for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
2106 const struct macroblockd_plane *const pd = &xd->plane[plane];
Jingning Han41bb3392016-12-14 10:46:48 -08002107#if CONFIG_CB4X4
Jingning Han31b6a4f2017-02-23 11:05:53 -08002108#if CONFIG_CHROMA_2X2
2109 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
2110#else
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002111 const BLOCK_SIZE plane_bsize =
2112 AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
Jingning Han31b6a4f2017-02-23 11:05:53 -08002113#endif // CONFIG_CHROMA_2X2
Jingning Han41bb3392016-12-14 10:46:48 -08002114#else
Jingning Hanbafee8d2016-12-02 10:25:03 -08002115 const BLOCK_SIZE plane_bsize =
2116 get_plane_block_size(AOMMAX(BLOCK_8X8, bsize), pd);
Jingning Han41bb3392016-12-14 10:46:48 -08002117#endif
Jingning Hanbafee8d2016-12-02 10:25:03 -08002118 const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
2119 const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002120 int row, col;
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002121
2122#if CONFIG_CB4X4
Jingning Hand3a64432017-04-06 17:04:17 -07002123 if (!is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
2124 pd->subsampling_y))
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002125 continue;
2126#endif
2127
Yaowu Xuc27fc142016-08-22 16:08:15 -07002128#if CONFIG_VAR_TX
Sarah Parker106b3cb2017-04-21 12:13:37 -07002129 const TX_SIZE max_tx_size = get_vartx_max_txsize(mbmi, plane_bsize);
Jingning Hanf64062f2016-11-02 16:22:18 -07002130 const int bh_var_tx = tx_size_high_unit[max_tx_size];
2131 const int bw_var_tx = tx_size_wide_unit[max_tx_size];
Jingning Hanbafee8d2016-12-02 10:25:03 -08002132 for (row = 0; row < max_blocks_high; row += bh_var_tx)
2133 for (col = 0; col < max_blocks_wide; col += bw_var_tx)
Jingning Hanfe45b212016-11-22 10:30:23 -08002134 decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize, row, col,
2135 max_tx_size, &eobtotal);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002136#else
Angie Chiang7fcfee42017-02-24 15:51:03 -08002137 const TX_SIZE tx_size = get_tx_size(plane, xd);
Jingning Han2d64f122016-10-21 12:44:29 -07002138 const int stepr = tx_size_high_unit[tx_size];
2139 const int stepc = tx_size_wide_unit[tx_size];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002140 for (row = 0; row < max_blocks_high; row += stepr)
2141 for (col = 0; col < max_blocks_wide; col += stepc)
Angie Chiangff6d8902016-10-21 11:02:09 -07002142 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id,
2143 plane, row, col, tx_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002144#endif
2145 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002146 }
2147 }
Angie Chiang133733c2017-03-17 12:50:20 -07002148#endif // CONFIG_COEF_INTERLEAVE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002149
Angie Chiangd0916d92017-03-10 17:54:18 -08002150 int reader_corrupted_flag = aom_reader_has_error(r);
2151 aom_merge_corrupted_flag(&xd->corrupted, reader_corrupted_flag);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002152}
2153
Yue Chen9ab6d712017-01-12 15:50:46 -08002154#if CONFIG_NCOBMC && CONFIG_MOTION_VAR
2155static void detoken_and_recon_sb(AV1Decoder *const pbi, MACROBLOCKD *const xd,
2156 int mi_row, int mi_col, aom_reader *r,
2157 BLOCK_SIZE bsize) {
2158 AV1_COMMON *const cm = &pbi->common;
2159 const int hbs = mi_size_wide[bsize] >> 1;
2160#if CONFIG_CB4X4
2161 const int unify_bsize = 1;
2162#else
2163 const int unify_bsize = 0;
2164#endif
2165#if CONFIG_EXT_PARTITION_TYPES
2166 BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
2167#endif
2168 PARTITION_TYPE partition;
2169 BLOCK_SIZE subsize;
2170 const int has_rows = (mi_row + hbs) < cm->mi_rows;
2171 const int has_cols = (mi_col + hbs) < cm->mi_cols;
2172
2173 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
2174
2175 partition = get_partition(cm, mi_row, mi_col, bsize);
2176 subsize = subsize_lookup[partition][bsize];
2177
2178 if (!hbs && !unify_bsize) {
2179 xd->bmode_blocks_wl = 1 >> !!(partition & PARTITION_VERT);
2180 xd->bmode_blocks_hl = 1 >> !!(partition & PARTITION_HORZ);
2181 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2182 } else {
2183 switch (partition) {
2184 case PARTITION_NONE:
2185 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize);
2186 break;
2187 case PARTITION_HORZ:
2188 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2189 if (has_rows)
2190 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r,
2191 subsize);
2192 break;
2193 case PARTITION_VERT:
2194 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2195 if (has_cols)
2196 decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r,
2197 subsize);
2198 break;
2199 case PARTITION_SPLIT:
2200 detoken_and_recon_sb(pbi, xd, mi_row, mi_col, r, subsize);
2201 detoken_and_recon_sb(pbi, xd, mi_row, mi_col + hbs, r, subsize);
2202 detoken_and_recon_sb(pbi, xd, mi_row + hbs, mi_col, r, subsize);
2203 detoken_and_recon_sb(pbi, xd, mi_row + hbs, mi_col + hbs, r, subsize);
2204 break;
2205#if CONFIG_EXT_PARTITION_TYPES
2206 case PARTITION_HORZ_A:
2207 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize2);
2208 decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, bsize2);
2209 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, subsize);
2210 break;
2211 case PARTITION_HORZ_B:
2212 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2213 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, bsize2);
2214 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col + hbs, r,
2215 bsize2);
2216 break;
2217 case PARTITION_VERT_A:
2218 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize2);
2219 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, bsize2);
2220 decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, subsize);
2221 break;
2222 case PARTITION_VERT_B:
2223 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2224 decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, bsize2);
2225 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col + hbs, r,
2226 bsize2);
2227 break;
2228#endif
2229 default: assert(0 && "Invalid partition type");
2230 }
2231 }
2232}
2233#endif
2234
Yue Chen64550b62017-01-12 12:18:22 -08002235static void decode_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
2236#if CONFIG_SUPERTX
2237 int supertx_enabled,
2238#endif // CONFIG_SUPERTX
2239 int mi_row, int mi_col, aom_reader *r,
2240#if CONFIG_EXT_PARTITION_TYPES
2241 PARTITION_TYPE partition,
2242#endif // CONFIG_EXT_PARTITION_TYPES
2243 BLOCK_SIZE bsize) {
2244 decode_mbmi_block(pbi, xd,
2245#if CONFIG_SUPERTX
2246 supertx_enabled,
2247#endif
2248 mi_row, mi_col, r,
2249#if CONFIG_EXT_PARTITION_TYPES
2250 partition,
2251#endif
2252 bsize);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07002253
Yue Chen9ab6d712017-01-12 15:50:46 -08002254#if !(CONFIG_MOTION_VAR && CONFIG_NCOBMC)
Yue Chen64550b62017-01-12 12:18:22 -08002255#if CONFIG_SUPERTX
2256 if (!supertx_enabled)
2257#endif // CONFIG_SUPERTX
2258 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize);
Yue Chen9ab6d712017-01-12 15:50:46 -08002259#endif
Yue Chen64550b62017-01-12 12:18:22 -08002260}
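// Note on the two-pass structure (descriptive): with CONFIG_MOTION_VAR and
// CONFIG_NCOBMC, reconstruction needs prediction samples from neighbours on
// every side, so decode_block() only parses mode info here and the
// token/reconstruction pass runs later over the whole superblock through
// detoken_and_recon_sb(), once all modes are available; in every other
// configuration the block is reconstructed immediately above.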
2261
Yaowu Xuf883b422016-08-30 14:01:10 -07002262static PARTITION_TYPE read_partition(AV1_COMMON *cm, MACROBLOCKD *xd,
2263 int mi_row, int mi_col, aom_reader *r,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002264 int has_rows, int has_cols,
Jingning Han1beb0102016-12-07 11:08:30 -08002265 BLOCK_SIZE bsize) {
Alex Converse55c6bde2017-01-12 15:55:31 -08002266#if CONFIG_UNPOISON_PARTITION_CTX
2267 const int ctx =
2268 partition_plane_context(xd, mi_row, mi_col, has_rows, has_cols, bsize);
Alex Converse2b9d19d2017-04-03 11:11:17 -07002269 const aom_prob *const probs =
2270 ctx < PARTITION_CONTEXTS ? cm->fc->partition_prob[ctx] : NULL;
2271 FRAME_COUNTS *const counts = ctx < PARTITION_CONTEXTS ? xd->counts : NULL;
Alex Converse55c6bde2017-01-12 15:55:31 -08002272#else
Jingning Han1beb0102016-12-07 11:08:30 -08002273 const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
Yaowu Xuf883b422016-08-30 14:01:10 -07002274 const aom_prob *const probs = cm->fc->partition_prob[ctx];
Alex Converse55c6bde2017-01-12 15:55:31 -08002275 FRAME_COUNTS *const counts = xd->counts;
2276#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002277 PARTITION_TYPE p;
Thomas Daviesc2ec0e42017-01-11 16:27:27 +00002278#if CONFIG_EC_ADAPT
2279 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
2280 (void)cm;
Nathan E. Egge476c63c2017-05-18 18:35:16 -04002281#else
Thomas Daviesc2ec0e42017-01-11 16:27:27 +00002282 FRAME_CONTEXT *ec_ctx = cm->fc;
2283#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002284
Jingning Han5fe79db2017-03-27 15:10:30 -07002285 aom_cdf_prob *partition_cdf = (ctx >= 0) ? ec_ctx->partition_cdf[ctx] : NULL;
Jingning Han5fe79db2017-03-27 15:10:30 -07002286
Yaowu Xuc27fc142016-08-22 16:08:15 -07002287 if (has_rows && has_cols)
2288#if CONFIG_EXT_PARTITION_TYPES
2289 if (bsize <= BLOCK_8X8)
Jingning Han5fe79db2017-03-27 15:10:30 -07002290 p = (PARTITION_TYPE)aom_read_symbol(r, partition_cdf, PARTITION_TYPES,
2291 ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002292 else
Jingning Han5fe79db2017-03-27 15:10:30 -07002293 p = (PARTITION_TYPE)aom_read_symbol(r, partition_cdf, EXT_PARTITION_TYPES,
2294 ACCT_STR);
Alex Converse57795a42017-03-14 12:18:25 -07002295#else
Jingning Han5fe79db2017-03-27 15:10:30 -07002296 p = (PARTITION_TYPE)aom_read_symbol(r, partition_cdf, PARTITION_TYPES,
2297 ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002298#endif // CONFIG_EXT_PARTITION_TYPES
2299 else if (!has_rows && has_cols)
Michael Bebenita6048d052016-08-25 14:40:54 -07002300 p = aom_read(r, probs[1], ACCT_STR) ? PARTITION_SPLIT : PARTITION_HORZ;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002301 else if (has_rows && !has_cols)
Michael Bebenita6048d052016-08-25 14:40:54 -07002302 p = aom_read(r, probs[2], ACCT_STR) ? PARTITION_SPLIT : PARTITION_VERT;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002303 else
2304 p = PARTITION_SPLIT;
2305
2306 if (counts) ++counts->partition[ctx][p];
2307
2308 return p;
2309}
2310
2311#if CONFIG_SUPERTX
Yaowu Xuf883b422016-08-30 14:01:10 -07002312static int read_skip(AV1_COMMON *cm, const MACROBLOCKD *xd, int segment_id,
2313 aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002314 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
2315 return 1;
2316 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002317 const int ctx = av1_get_skip_context(xd);
Thomas Davies61e3e372017-04-04 16:10:23 +01002318#if CONFIG_NEW_MULTISYMBOL
Thomas Davies61e3e372017-04-04 16:10:23 +01002319 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Thomas Davies61e3e372017-04-04 16:10:23 +01002320 const int skip = aom_read_symbol(r, ec_ctx->skip_cdfs[ctx], 2, ACCT_STR);
2321#else
Michael Bebenita6048d052016-08-25 14:40:54 -07002322 const int skip = aom_read(r, cm->fc->skip_probs[ctx], ACCT_STR);
Thomas Davies61e3e372017-04-04 16:10:23 +01002323#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002324 FRAME_COUNTS *counts = xd->counts;
2325 if (counts) ++counts->skip[ctx][skip];
2326 return skip;
2327 }
2328}
2329#endif // CONFIG_SUPERTX
2330
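// Recursively decodes one node of the partition tree: reads the partition
// type, decodes each sub-block (recursing on PARTITION_SPLIT), then handles
// SUPERTX reconstruction, the partition context update and, at superblock
// granularity, the CDEF strength for the superblock.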
2331// TODO(slavarnway): eliminate bsize and subsize in future commits
Yaowu Xuf883b422016-08-30 14:01:10 -07002332static void decode_partition(AV1Decoder *const pbi, MACROBLOCKD *const xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002333#if CONFIG_SUPERTX
2334 int supertx_enabled,
2335#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07002336 int mi_row, int mi_col, aom_reader *r,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002337 BLOCK_SIZE bsize, int n4x4_l2) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002338 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002339 const int n8x8_l2 = n4x4_l2 - 1;
Jingning Hanff17e162016-12-07 17:58:18 -08002340 const int num_8x8_wh = mi_size_wide[bsize];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002341 const int hbs = num_8x8_wh >> 1;
Jingning Han41bb3392016-12-14 10:46:48 -08002342#if CONFIG_CB4X4
2343 const int unify_bsize = 1;
2344#else
2345 const int unify_bsize = 0;
2346#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002347 PARTITION_TYPE partition;
2348 BLOCK_SIZE subsize;
2349#if CONFIG_EXT_PARTITION_TYPES
2350 BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
2351#endif
2352 const int has_rows = (mi_row + hbs) < cm->mi_rows;
2353 const int has_cols = (mi_col + hbs) < cm->mi_cols;
2354#if CONFIG_SUPERTX
2355 const int read_token = !supertx_enabled;
2356 int skip = 0;
Jingning Han2511c662016-12-22 11:57:34 -08002357 TX_SIZE supertx_size = max_txsize_lookup[bsize];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002358 const TileInfo *const tile = &xd->tile;
2359 int txfm = DCT_DCT;
2360#endif // CONFIG_SUPERTX
2361
2362 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
2363
Jingning Hancd959762017-03-27 14:49:59 -07002364 partition = (bsize < BLOCK_8X8) ? PARTITION_NONE
2365 : read_partition(cm, xd, mi_row, mi_col, r,
2366 has_rows, has_cols, bsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002367 subsize = subsize_lookup[partition][bsize]; // get_subsize(bsize, partition);
Yushin Cho77bba8d2016-11-04 16:36:56 -07002368
2369#if CONFIG_PVQ
2370 assert(partition < PARTITION_TYPES);
2371 assert(subsize < BLOCK_SIZES);
2372#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002373#if CONFIG_SUPERTX
2374 if (!frame_is_intra_only(cm) && partition != PARTITION_NONE &&
2375 bsize <= MAX_SUPERTX_BLOCK_SIZE && !supertx_enabled && !xd->lossless[0]) {
2376 const int supertx_context = partition_supertx_context_lookup[partition];
Michael Bebenita6048d052016-08-25 14:40:54 -07002377 supertx_enabled = aom_read(
2378 r, cm->fc->supertx_prob[supertx_context][supertx_size], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002379 if (xd->counts)
2380 xd->counts->supertx[supertx_context][supertx_size][supertx_enabled]++;
2381#if CONFIG_VAR_TX
2382 if (supertx_enabled) xd->supertx_size = supertx_size;
2383#endif
2384 }
2385#endif // CONFIG_SUPERTX
Jingning Han41bb3392016-12-14 10:46:48 -08002386 if (!hbs && !unify_bsize) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002387 // calculate bmode block dimensions (log 2)
2388 xd->bmode_blocks_wl = 1 >> !!(partition & PARTITION_VERT);
2389 xd->bmode_blocks_hl = 1 >> !!(partition & PARTITION_HORZ);
2390 decode_block(pbi, xd,
2391#if CONFIG_SUPERTX
2392 supertx_enabled,
2393#endif // CONFIG_SUPERTX
2394 mi_row, mi_col, r,
2395#if CONFIG_EXT_PARTITION_TYPES
2396 partition,
2397#endif // CONFIG_EXT_PARTITION_TYPES
Jingning Hanfaad0e12016-12-07 10:54:57 -08002398 subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002399 } else {
2400 switch (partition) {
2401 case PARTITION_NONE:
2402 decode_block(pbi, xd,
2403#if CONFIG_SUPERTX
2404 supertx_enabled,
2405#endif // CONFIG_SUPERTX
2406 mi_row, mi_col, r,
2407#if CONFIG_EXT_PARTITION_TYPES
2408 partition,
2409#endif // CONFIG_EXT_PARTITION_TYPES
Jingning Hanfaad0e12016-12-07 10:54:57 -08002410 subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002411 break;
2412 case PARTITION_HORZ:
2413 decode_block(pbi, xd,
2414#if CONFIG_SUPERTX
2415 supertx_enabled,
2416#endif // CONFIG_SUPERTX
2417 mi_row, mi_col, r,
2418#if CONFIG_EXT_PARTITION_TYPES
2419 partition,
2420#endif // CONFIG_EXT_PARTITION_TYPES
Jingning Hanfaad0e12016-12-07 10:54:57 -08002421 subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002422 if (has_rows)
2423 decode_block(pbi, xd,
2424#if CONFIG_SUPERTX
2425 supertx_enabled,
2426#endif // CONFIG_SUPERTX
2427 mi_row + hbs, mi_col, r,
2428#if CONFIG_EXT_PARTITION_TYPES
2429 partition,
2430#endif // CONFIG_EXT_PARTITION_TYPES
Jingning Hanfaad0e12016-12-07 10:54:57 -08002431 subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002432 break;
2433 case PARTITION_VERT:
2434 decode_block(pbi, xd,
2435#if CONFIG_SUPERTX
2436 supertx_enabled,
2437#endif // CONFIG_SUPERTX
2438 mi_row, mi_col, r,
2439#if CONFIG_EXT_PARTITION_TYPES
2440 partition,
2441#endif // CONFIG_EXT_PARTITION_TYPES
Jingning Hanfaad0e12016-12-07 10:54:57 -08002442 subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002443 if (has_cols)
2444 decode_block(pbi, xd,
2445#if CONFIG_SUPERTX
2446 supertx_enabled,
2447#endif // CONFIG_SUPERTX
2448 mi_row, mi_col + hbs, r,
2449#if CONFIG_EXT_PARTITION_TYPES
2450 partition,
2451#endif // CONFIG_EXT_PARTITION_TYPES
Jingning Hanfaad0e12016-12-07 10:54:57 -08002452 subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002453 break;
2454 case PARTITION_SPLIT:
2455 decode_partition(pbi, xd,
2456#if CONFIG_SUPERTX
2457 supertx_enabled,
2458#endif // CONFIG_SUPERTX
2459 mi_row, mi_col, r, subsize, n8x8_l2);
2460 decode_partition(pbi, xd,
2461#if CONFIG_SUPERTX
2462 supertx_enabled,
2463#endif // CONFIG_SUPERTX
2464 mi_row, mi_col + hbs, r, subsize, n8x8_l2);
2465 decode_partition(pbi, xd,
2466#if CONFIG_SUPERTX
2467 supertx_enabled,
2468#endif // CONFIG_SUPERTX
2469 mi_row + hbs, mi_col, r, subsize, n8x8_l2);
2470 decode_partition(pbi, xd,
2471#if CONFIG_SUPERTX
2472 supertx_enabled,
2473#endif // CONFIG_SUPERTX
2474 mi_row + hbs, mi_col + hbs, r, subsize, n8x8_l2);
2475 break;
2476#if CONFIG_EXT_PARTITION_TYPES
2477 case PARTITION_HORZ_A:
2478 decode_block(pbi, xd,
2479#if CONFIG_SUPERTX
2480 supertx_enabled,
2481#endif
Jingning Hanfaad0e12016-12-07 10:54:57 -08002482 mi_row, mi_col, r, partition, bsize2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002483 decode_block(pbi, xd,
2484#if CONFIG_SUPERTX
2485 supertx_enabled,
2486#endif
Jingning Hanfaad0e12016-12-07 10:54:57 -08002487 mi_row, mi_col + hbs, r, partition, bsize2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002488 decode_block(pbi, xd,
2489#if CONFIG_SUPERTX
2490 supertx_enabled,
2491#endif
Jingning Hanfaad0e12016-12-07 10:54:57 -08002492 mi_row + hbs, mi_col, r, partition, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002493 break;
2494 case PARTITION_HORZ_B:
2495 decode_block(pbi, xd,
2496#if CONFIG_SUPERTX
2497 supertx_enabled,
2498#endif
Jingning Hanfaad0e12016-12-07 10:54:57 -08002499 mi_row, mi_col, r, partition, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002500 decode_block(pbi, xd,
2501#if CONFIG_SUPERTX
2502 supertx_enabled,
2503#endif
Jingning Hanfaad0e12016-12-07 10:54:57 -08002504 mi_row + hbs, mi_col, r, partition, bsize2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002505 decode_block(pbi, xd,
2506#if CONFIG_SUPERTX
2507 supertx_enabled,
2508#endif
Jingning Hanfaad0e12016-12-07 10:54:57 -08002509 mi_row + hbs, mi_col + hbs, r, partition, bsize2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002510 break;
2511 case PARTITION_VERT_A:
2512 decode_block(pbi, xd,
2513#if CONFIG_SUPERTX
2514 supertx_enabled,
2515#endif
Jingning Hanfaad0e12016-12-07 10:54:57 -08002516 mi_row, mi_col, r, partition, bsize2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002517 decode_block(pbi, xd,
2518#if CONFIG_SUPERTX
2519 supertx_enabled,
2520#endif
Jingning Hanfaad0e12016-12-07 10:54:57 -08002521 mi_row + hbs, mi_col, r, partition, bsize2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002522 decode_block(pbi, xd,
2523#if CONFIG_SUPERTX
2524 supertx_enabled,
2525#endif
Jingning Hanfaad0e12016-12-07 10:54:57 -08002526 mi_row, mi_col + hbs, r, partition, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002527 break;
2528 case PARTITION_VERT_B:
2529 decode_block(pbi, xd,
2530#if CONFIG_SUPERTX
2531 supertx_enabled,
2532#endif
Jingning Hanfaad0e12016-12-07 10:54:57 -08002533 mi_row, mi_col, r, partition, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002534 decode_block(pbi, xd,
2535#if CONFIG_SUPERTX
2536 supertx_enabled,
2537#endif
Jingning Hanfaad0e12016-12-07 10:54:57 -08002538 mi_row, mi_col + hbs, r, partition, bsize2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002539 decode_block(pbi, xd,
2540#if CONFIG_SUPERTX
2541 supertx_enabled,
2542#endif
Jingning Hanfaad0e12016-12-07 10:54:57 -08002543 mi_row + hbs, mi_col + hbs, r, partition, bsize2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002544 break;
2545#endif
2546 default: assert(0 && "Invalid partition type");
2547 }
2548 }
2549
2550#if CONFIG_SUPERTX
2551 if (supertx_enabled && read_token) {
2552 uint8_t *dst_buf[3];
2553 int dst_stride[3], i;
2554 int offset = mi_row * cm->mi_stride + mi_col;
2555
2556 set_segment_id_supertx(cm, mi_row, mi_col, bsize);
2557
David Barker3aec8d62017-01-31 14:55:32 +00002558#if CONFIG_DELTA_Q
2559 if (cm->delta_q_present_flag) {
2560 for (i = 0; i < MAX_SEGMENTS; i++) {
2561 xd->plane[0].seg_dequant[i][0] =
2562 av1_dc_quant(xd->current_qindex, cm->y_dc_delta_q, cm->bit_depth);
2563 xd->plane[0].seg_dequant[i][1] =
2564 av1_ac_quant(xd->current_qindex, 0, cm->bit_depth);
2565 xd->plane[1].seg_dequant[i][0] =
2566 av1_dc_quant(xd->current_qindex, cm->uv_dc_delta_q, cm->bit_depth);
2567 xd->plane[1].seg_dequant[i][1] =
2568 av1_ac_quant(xd->current_qindex, cm->uv_ac_delta_q, cm->bit_depth);
2569 xd->plane[2].seg_dequant[i][0] =
2570 av1_dc_quant(xd->current_qindex, cm->uv_dc_delta_q, cm->bit_depth);
2571 xd->plane[2].seg_dequant[i][1] =
2572 av1_ac_quant(xd->current_qindex, cm->uv_ac_delta_q, cm->bit_depth);
2573 }
2574 }
2575#endif
2576
Yaowu Xuc27fc142016-08-22 16:08:15 -07002577 xd->mi = cm->mi_grid_visible + offset;
2578 xd->mi[0] = cm->mi + offset;
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002579 set_mi_row_col(xd, tile, mi_row, mi_size_high[bsize], mi_col,
2580 mi_size_wide[bsize],
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002581#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002582 cm->dependent_horz_tiles,
2583#endif // CONFIG_DEPENDENT_HORZTILES
2584 cm->mi_rows, cm->mi_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002585 set_skip_context(xd, mi_row, mi_col);
2586 skip = read_skip(cm, xd, xd->mi[0]->mbmi.segment_id_supertx, r);
2587 if (skip) {
Timothy B. Terriberrya2d5cde2017-05-10 18:33:50 -07002588 av1_reset_skip_context(xd, mi_row, mi_col, bsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002589 } else {
2590#if CONFIG_EXT_TX
Sarah Parkere68a3e42017-02-16 14:03:24 -08002591 if (get_ext_tx_types(supertx_size, bsize, 1, cm->reduced_tx_set_used) >
2592 1) {
2593 const int eset =
2594 get_ext_tx_set(supertx_size, bsize, 1, cm->reduced_tx_set_used);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002595 if (eset > 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002596 txfm = aom_read_tree(r, av1_ext_tx_inter_tree[eset],
Michael Bebenita6048d052016-08-25 14:40:54 -07002597 cm->fc->inter_ext_tx_prob[eset][supertx_size],
2598 ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002599 if (xd->counts) ++xd->counts->inter_ext_tx[eset][supertx_size][txfm];
2600 }
2601 }
2602#else
2603 if (supertx_size < TX_32X32) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002604 txfm = aom_read_tree(r, av1_ext_tx_tree,
Jingning Han8f6eb182016-10-19 13:48:57 -07002605 cm->fc->inter_ext_tx_prob[supertx_size], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002606 if (xd->counts) ++xd->counts->inter_ext_tx[supertx_size][txfm];
2607 }
2608#endif // CONFIG_EXT_TX
2609 }
2610
Jingning Han91d9a792017-04-18 12:01:52 -07002611 av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
2612 mi_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002613 for (i = 0; i < MAX_MB_PLANE; i++) {
2614 dst_buf[i] = xd->plane[i].dst.buf;
2615 dst_stride[i] = xd->plane[i].dst.stride;
2616 }
2617 dec_predict_sb_complex(pbi, xd, tile, mi_row, mi_col, mi_row, mi_col, bsize,
2618 bsize, dst_buf, dst_stride);
2619
2620 if (!skip) {
2621 int eobtotal = 0;
2622 MB_MODE_INFO *mbmi;
2623 set_offsets_topblock(cm, xd, tile, bsize, mi_row, mi_col);
2624 mbmi = &xd->mi[0]->mbmi;
2625 mbmi->tx_type = txfm;
2626 assert(mbmi->segment_id_supertx != MAX_SEGMENTS);
2627 for (i = 0; i < MAX_MB_PLANE; ++i) {
2628 const struct macroblockd_plane *const pd = &xd->plane[i];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002629 int row, col;
Angie Chiang7fcfee42017-02-24 15:51:03 -08002630 const TX_SIZE tx_size = get_tx_size(i, xd);
Jingning Han5b7706a2016-12-21 09:55:10 -08002631 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
Jingning Han32b20282016-10-28 15:42:44 -07002632 const int stepr = tx_size_high_unit[tx_size];
2633 const int stepc = tx_size_wide_unit[tx_size];
Jingning Han5b7706a2016-12-21 09:55:10 -08002634 const int max_blocks_wide = max_block_wide(xd, plane_bsize, i);
2635 const int max_blocks_high = max_block_high(xd, plane_bsize, i);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002636
2637 for (row = 0; row < max_blocks_high; row += stepr)
2638 for (col = 0; col < max_blocks_wide; col += stepc)
Angie Chiangff6d8902016-10-21 11:02:09 -07002639 eobtotal += reconstruct_inter_block(
2640 cm, xd, r, mbmi->segment_id_supertx, i, row, col, tx_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002641 }
Jingning Han24f24a52016-12-27 10:13:28 -08002642 if ((unify_bsize || !(subsize < BLOCK_8X8)) && eobtotal == 0) skip = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002643 }
2644 set_param_topblock(cm, xd, bsize, mi_row, mi_col, txfm, skip);
2645 }
2646#endif // CONFIG_SUPERTX
2647
2648#if CONFIG_EXT_PARTITION_TYPES
Alex Converseffabff32017-03-27 09:52:19 -07002649 update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002650#else
2651 // update partition context
2652 if (bsize >= BLOCK_8X8 &&
2653 (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
Jingning Han1beb0102016-12-07 11:08:30 -08002654 update_partition_context(xd, mi_row, mi_col, subsize, bsize);
David Barkerf8935c92016-10-26 14:54:06 +01002655#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xud71be782016-10-14 08:47:03 -07002656
Jean-Marc Valin01435132017-02-18 14:12:53 -05002657#if CONFIG_CDEF
Jingning Handf068332017-05-09 09:03:17 -07002658 if (bsize == cm->sb_size) {
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01002659 if (!sb_all_skip(cm, mi_row, mi_col)) {
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002660 cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi.cdef_strength =
2661 aom_read_literal(r, cm->cdef_bits, ACCT_STR);
Steinar Midtskogen5d56f4d2016-09-25 09:23:16 +02002662 } else {
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002663 cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi.cdef_strength =
Jean-Marc Valina8764952017-04-11 04:01:15 -04002664 -1;
Yaowu Xud71be782016-10-14 08:47:03 -07002665 }
2666 }
Jean-Marc Valin01435132017-02-18 14:12:53 -05002667#endif // CONFIG_CDEF
Yaowu Xuc27fc142016-08-22 16:08:15 -07002668}
2669
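// Initializes an entropy decoder over [data, data + read_size), first
// checking that the range does not run past data_end.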
Yaowu Xuc27fc142016-08-22 16:08:15 -07002670static void setup_bool_decoder(const uint8_t *data, const uint8_t *data_end,
2671 const size_t read_size,
Yaowu Xuf883b422016-08-30 14:01:10 -07002672 struct aom_internal_error_info *error_info,
Alex Converseeb780e72016-12-13 12:46:41 -08002673 aom_reader *r,
2674#if CONFIG_ANS && ANS_MAX_SYMBOLS
2675 int window_size,
2676#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
2677 aom_decrypt_cb decrypt_cb, void *decrypt_state) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002678 // Validate the calculated partition length. If the buffer
2679 // described by the partition can't be fully read, signal a
2680 // corrupt-frame error; the read is not truncated.
2681 if (!read_is_valid(data, read_size, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07002682 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002683 "Truncated packet or corrupt tile length");
2684
Alex Converse2cdf0d82016-12-13 13:53:09 -08002685#if CONFIG_ANS && ANS_MAX_SYMBOLS
Alex Converseeb780e72016-12-13 12:46:41 -08002686 r->window_size = window_size;
Alex Converse2cdf0d82016-12-13 13:53:09 -08002687#endif
Alex Converse346440b2017-01-03 13:47:37 -08002688 if (aom_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
Yaowu Xuf883b422016-08-30 14:01:10 -07002689 aom_internal_error(error_info, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002690 "Failed to allocate bool decoder %d", 1);
2691}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002692
Nathan E. Eggead66b812017-05-09 11:57:31 -04002693#if !CONFIG_PVQ && !CONFIG_EC_ADAPT && !CONFIG_LV_MAP
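// Differential update of the model coefficient probabilities; this path is
// only compiled when PVQ, EC_ADAPT and LV_MAP are all disabled.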
Yaowu Xuf883b422016-08-30 14:01:10 -07002694static void read_coef_probs_common(av1_coeff_probs_model *coef_probs,
2695 aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002696 int i, j, k, l, m;
Alex Conversea9598cd2017-02-03 14:18:05 -08002697#if CONFIG_EC_ADAPT
2698 const int node_limit = UNCONSTRAINED_NODES - 1;
2699#else
Thomas9ac55082016-09-23 18:04:17 +01002700 const int node_limit = UNCONSTRAINED_NODES;
Alex Conversea9598cd2017-02-03 14:18:05 -08002701#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002702
Michael Bebenita6048d052016-08-25 14:40:54 -07002703 if (aom_read_bit(r, ACCT_STR))
Yaowu Xuc27fc142016-08-22 16:08:15 -07002704 for (i = 0; i < PLANE_TYPES; ++i)
2705 for (j = 0; j < REF_TYPES; ++j)
2706 for (k = 0; k < COEF_BANDS; ++k)
2707 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l)
Thomas9ac55082016-09-23 18:04:17 +01002708 for (m = 0; m < node_limit; ++m)
Michael Bebenita6048d052016-08-25 14:40:54 -07002709 av1_diff_update_prob(r, &coef_probs[i][j][k][l][m], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002710}
2711
Yaowu Xuf883b422016-08-30 14:01:10 -07002712static void read_coef_probs(FRAME_CONTEXT *fc, TX_MODE tx_mode, aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002713 const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
2714 TX_SIZE tx_size;
Jingning Han83630632016-12-16 11:27:25 -08002715 for (tx_size = 0; tx_size <= max_tx_size; ++tx_size)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002716 read_coef_probs_common(fc->coef_probs[tx_size], r);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002717}
Yushin Cho77bba8d2016-11-04 16:36:56 -07002718#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002719
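// Reads the segmentation syntax: the enable flag, the map and temporal
// update flags and, when present, the per-segment feature data.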
Yaowu Xuf883b422016-08-30 14:01:10 -07002720static void setup_segmentation(AV1_COMMON *const cm,
2721 struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002722 struct segmentation *const seg = &cm->seg;
2723 int i, j;
2724
2725 seg->update_map = 0;
2726 seg->update_data = 0;
2727
Yaowu Xuf883b422016-08-30 14:01:10 -07002728 seg->enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002729 if (!seg->enabled) return;
2730
2731 // Segmentation map update
2732 if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
2733 seg->update_map = 1;
2734 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002735 seg->update_map = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002736 }
2737 if (seg->update_map) {
2738 if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
2739 seg->temporal_update = 0;
2740 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002741 seg->temporal_update = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002742 }
2743 }
2744
2745 // Segmentation data update
Yaowu Xuf883b422016-08-30 14:01:10 -07002746 seg->update_data = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002747 if (seg->update_data) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002748 seg->abs_delta = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002749
Yaowu Xuf883b422016-08-30 14:01:10 -07002750 av1_clearall_segfeatures(seg);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002751
2752 for (i = 0; i < MAX_SEGMENTS; i++) {
2753 for (j = 0; j < SEG_LVL_MAX; j++) {
2754 int data = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002755 const int feature_enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002756 if (feature_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002757 av1_enable_segfeature(seg, i, j);
2758 data = decode_unsigned_max(rb, av1_seg_feature_data_max(j));
2759 if (av1_is_segfeature_signed(j))
2760 data = aom_rb_read_bit(rb) ? -data : data;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002761 }
Yaowu Xuf883b422016-08-30 14:01:10 -07002762 av1_set_segdata(seg, i, j, data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002763 }
2764 }
2765 }
2766}
2767
2768#if CONFIG_LOOP_RESTORATION
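// Reads the frame-level restoration type for each plane and the restoration
// tile size (shared across planes) from the uncompressed header.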
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002769static void decode_restoration_mode(AV1_COMMON *cm,
2770 struct aom_read_bit_buffer *rb) {
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002771 int p;
2772 RestorationInfo *rsi = &cm->rst_info[0];
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002773 if (aom_rb_read_bit(rb)) {
Debargha Mukherjeeb3c43bc2017-02-01 13:09:03 -08002774 rsi->frame_restoration_type =
2775 aom_rb_read_bit(rb) ? RESTORE_SGRPROJ : RESTORE_WIENER;
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002776 } else {
2777 rsi->frame_restoration_type =
2778 aom_rb_read_bit(rb) ? RESTORE_SWITCHABLE : RESTORE_NONE;
2779 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002780 for (p = 1; p < MAX_MB_PLANE; ++p) {
Debargha Mukherjeed23ceea2017-05-18 20:33:52 -07002781 rsi = &cm->rst_info[p];
2782 if (aom_rb_read_bit(rb)) {
2783 rsi->frame_restoration_type =
2784 aom_rb_read_bit(rb) ? RESTORE_SGRPROJ : RESTORE_WIENER;
2785 } else {
2786 rsi->frame_restoration_type = RESTORE_NONE;
2787 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002788 }
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08002789
2790 cm->rst_info[0].restoration_tilesize = RESTORATION_TILESIZE_MAX;
2791 cm->rst_info[1].restoration_tilesize = RESTORATION_TILESIZE_MAX;
2792 cm->rst_info[2].restoration_tilesize = RESTORATION_TILESIZE_MAX;
2793 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
2794 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
2795 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
2796 rsi = &cm->rst_info[0];
2797 rsi->restoration_tilesize >>= aom_rb_read_bit(rb);
2798 if (rsi->restoration_tilesize != RESTORATION_TILESIZE_MAX) {
2799 rsi->restoration_tilesize >>= aom_rb_read_bit(rb);
2800 }
2801 cm->rst_info[1].restoration_tilesize = cm->rst_info[0].restoration_tilesize;
2802 cm->rst_info[2].restoration_tilesize = cm->rst_info[0].restoration_tilesize;
2803 }
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002804}
2805
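// Reads one Wiener filter. Only the three distinct outer taps of the
// symmetric vertical and horizontal filters are coded, as subexp-fin offsets
// relative to the previously decoded filter; the centre tap is derived from
// them (with an implicit +WIENER_FILT_STEP). The decoded filter then becomes
// the prediction reference for the next one.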
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002806static void read_wiener_filter(WienerInfo *wiener_info,
2807 WienerInfo *ref_wiener_info, aom_reader *rb) {
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002808 wiener_info->vfilter[0] = wiener_info->vfilter[WIENER_WIN - 1] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002809 aom_read_primitive_refsubexpfin(
2810 rb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
2811 WIENER_FILT_TAP0_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002812 ref_wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002813 WIENER_FILT_TAP0_MINV;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002814 wiener_info->vfilter[1] = wiener_info->vfilter[WIENER_WIN - 2] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002815 aom_read_primitive_refsubexpfin(
2816 rb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
2817 WIENER_FILT_TAP1_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002818 ref_wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002819 WIENER_FILT_TAP1_MINV;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002820 wiener_info->vfilter[2] = wiener_info->vfilter[WIENER_WIN - 3] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002821 aom_read_primitive_refsubexpfin(
2822 rb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
2823 WIENER_FILT_TAP2_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002824 ref_wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002825 WIENER_FILT_TAP2_MINV;
David Barker1e8e6b92017-01-13 13:45:51 +00002826 // The central element has an implicit +WIENER_FILT_STEP
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002827 wiener_info->vfilter[WIENER_HALFWIN] =
David Barker1e8e6b92017-01-13 13:45:51 +00002828 -2 * (wiener_info->vfilter[0] + wiener_info->vfilter[1] +
2829 wiener_info->vfilter[2]);
2830
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002831 wiener_info->hfilter[0] = wiener_info->hfilter[WIENER_WIN - 1] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002832 aom_read_primitive_refsubexpfin(
2833 rb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
2834 WIENER_FILT_TAP0_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002835 ref_wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002836 WIENER_FILT_TAP0_MINV;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002837 wiener_info->hfilter[1] = wiener_info->hfilter[WIENER_WIN - 2] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002838 aom_read_primitive_refsubexpfin(
2839 rb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
2840 WIENER_FILT_TAP1_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002841 ref_wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002842 WIENER_FILT_TAP1_MINV;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002843 wiener_info->hfilter[2] = wiener_info->hfilter[WIENER_WIN - 3] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002844 aom_read_primitive_refsubexpfin(
2845 rb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
2846 WIENER_FILT_TAP2_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002847 ref_wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002848 WIENER_FILT_TAP2_MINV;
David Barker1e8e6b92017-01-13 13:45:51 +00002849 // The central element has an implicit +WIENER_FILT_STEP
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002850 wiener_info->hfilter[WIENER_HALFWIN] =
David Barker1e8e6b92017-01-13 13:45:51 +00002851 -2 * (wiener_info->hfilter[0] + wiener_info->hfilter[1] +
2852 wiener_info->hfilter[2]);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002853 memcpy(ref_wiener_info, wiener_info, sizeof(*wiener_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002854}
2855
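// Reads one self-guided projection filter: the parameter-set index and two
// projection coefficients coded relative to the previously decoded filter,
// which is then updated to the new values.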
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002856static void read_sgrproj_filter(SgrprojInfo *sgrproj_info,
2857 SgrprojInfo *ref_sgrproj_info, aom_reader *rb) {
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002858 sgrproj_info->ep = aom_read_literal(rb, SGRPROJ_PARAMS_BITS, ACCT_STR);
2859 sgrproj_info->xqd[0] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002860 aom_read_primitive_refsubexpfin(
2861 rb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1, SGRPROJ_PRJ_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002862 ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0, ACCT_STR) +
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002863 SGRPROJ_PRJ_MIN0;
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002864 sgrproj_info->xqd[1] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002865 aom_read_primitive_refsubexpfin(
2866 rb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1, SGRPROJ_PRJ_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002867 ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1, ACCT_STR) +
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002868 SGRPROJ_PRJ_MIN1;
2869 memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002870}
2871
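// Reads the per-tile restoration type and filter parameters for every plane,
// according to the frame restoration type signalled in the header. Tile
// counts are derived from the frame size (the superres-upscaled size when
// CONFIG_FRAME_SUPERRES is enabled) and the restoration tile size.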
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002872static void decode_restoration(AV1_COMMON *cm, aom_reader *rb) {
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002873 int i, p;
Debargha Mukherjee2dd982e2017-06-05 13:55:12 -07002874#if CONFIG_FRAME_SUPERRES
2875 const int width = cm->superres_upscaled_width;
2876 const int height = cm->superres_upscaled_height;
2877#else
2878 const int width = cm->width;
2879 const int height = cm->height;
2880#endif // CONFIG_FRAME_SUPERRES
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002881 SgrprojInfo ref_sgrproj_info;
2882 WienerInfo ref_wiener_info;
2883 set_default_wiener(&ref_wiener_info);
2884 set_default_sgrproj(&ref_sgrproj_info);
Debargha Mukherjee2dd982e2017-06-05 13:55:12 -07002885 const int ntiles =
2886 av1_get_rest_ntiles(width, height, cm->rst_info[0].restoration_tilesize,
2887 NULL, NULL, NULL, NULL);
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08002888 const int ntiles_uv = av1_get_rest_ntiles(
Debargha Mukherjee2dd982e2017-06-05 13:55:12 -07002889 ROUND_POWER_OF_TWO(width, cm->subsampling_x),
2890 ROUND_POWER_OF_TWO(height, cm->subsampling_y),
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08002891 cm->rst_info[1].restoration_tilesize, NULL, NULL, NULL, NULL);
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002892 RestorationInfo *rsi = &cm->rst_info[0];
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002893 if (rsi->frame_restoration_type != RESTORE_NONE) {
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002894 if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002895 for (i = 0; i < ntiles; ++i) {
Michael Bebenita6048d052016-08-25 14:40:54 -07002896 rsi->restoration_type[i] =
2897 aom_read_tree(rb, av1_switchable_restore_tree,
2898 cm->fc->switchable_restore_prob, ACCT_STR);
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002899 if (rsi->restoration_type[i] == RESTORE_WIENER) {
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002900 read_wiener_filter(&rsi->wiener_info[i], &ref_wiener_info, rb);
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002901 } else if (rsi->restoration_type[i] == RESTORE_SGRPROJ) {
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002902 read_sgrproj_filter(&rsi->sgrproj_info[i], &ref_sgrproj_info, rb);
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002903 }
2904 }
2905 } else if (rsi->frame_restoration_type == RESTORE_WIENER) {
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002906 for (i = 0; i < ntiles; ++i) {
Michael Bebenita6048d052016-08-25 14:40:54 -07002907 if (aom_read(rb, RESTORE_NONE_WIENER_PROB, ACCT_STR)) {
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002908 rsi->restoration_type[i] = RESTORE_WIENER;
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002909 read_wiener_filter(&rsi->wiener_info[i], &ref_wiener_info, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002910 } else {
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002911 rsi->restoration_type[i] = RESTORE_NONE;
2912 }
2913 }
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002914 } else if (rsi->frame_restoration_type == RESTORE_SGRPROJ) {
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002915 for (i = 0; i < ntiles; ++i) {
2916 if (aom_read(rb, RESTORE_NONE_SGRPROJ_PROB, ACCT_STR)) {
2917 rsi->restoration_type[i] = RESTORE_SGRPROJ;
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002918 read_sgrproj_filter(&rsi->sgrproj_info[i], &ref_sgrproj_info, rb);
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002919 } else {
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002920 rsi->restoration_type[i] = RESTORE_NONE;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002921 }
2922 }
2923 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002924 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002925 for (p = 1; p < MAX_MB_PLANE; ++p) {
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002926 set_default_wiener(&ref_wiener_info);
Debargha Mukherjeed23ceea2017-05-18 20:33:52 -07002927 set_default_sgrproj(&ref_sgrproj_info);
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002928 rsi = &cm->rst_info[p];
2929 if (rsi->frame_restoration_type == RESTORE_WIENER) {
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08002930 for (i = 0; i < ntiles_uv; ++i) {
2931 if (ntiles_uv > 1)
2932 rsi->restoration_type[i] =
2933 aom_read(rb, RESTORE_NONE_WIENER_PROB, ACCT_STR) ? RESTORE_WIENER
2934 : RESTORE_NONE;
2935 else
2936 rsi->restoration_type[i] = RESTORE_WIENER;
2937 if (rsi->restoration_type[i] == RESTORE_WIENER) {
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002938 read_wiener_filter(&rsi->wiener_info[i], &ref_wiener_info, rb);
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08002939 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002940 }
Debargha Mukherjeed23ceea2017-05-18 20:33:52 -07002941 } else if (rsi->frame_restoration_type == RESTORE_SGRPROJ) {
2942 for (i = 0; i < ntiles_uv; ++i) {
2943 if (ntiles_uv > 1)
2944 rsi->restoration_type[i] =
2945 aom_read(rb, RESTORE_NONE_SGRPROJ_PROB, ACCT_STR)
2946 ? RESTORE_SGRPROJ
2947 : RESTORE_NONE;
2948 else
2949 rsi->restoration_type[i] = RESTORE_SGRPROJ;
2950 if (rsi->restoration_type[i] == RESTORE_SGRPROJ) {
2951 read_sgrproj_filter(&rsi->sgrproj_info[i], &ref_sgrproj_info, rb);
2952 }
2953 }
2954 } else if (rsi->frame_restoration_type != RESTORE_NONE) {
2955 assert(0);
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002956 }
2957 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002958}
2959#endif // CONFIG_LOOP_RESTORATION
2960
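// Reads the deblocking filter level and sharpness, plus the optional per-ref
// and per-mode filter level deltas.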
Yaowu Xuf883b422016-08-30 14:01:10 -07002961static void setup_loopfilter(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002962 struct loopfilter *lf = &cm->lf;
Yaowu Xuf883b422016-08-30 14:01:10 -07002963 lf->filter_level = aom_rb_read_literal(rb, 6);
2964 lf->sharpness_level = aom_rb_read_literal(rb, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002965
2966 // Read in loop filter deltas applied at the MB level based on mode or ref
2967 // frame.
2968 lf->mode_ref_delta_update = 0;
2969
Yaowu Xuf883b422016-08-30 14:01:10 -07002970 lf->mode_ref_delta_enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002971 if (lf->mode_ref_delta_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002972 lf->mode_ref_delta_update = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002973 if (lf->mode_ref_delta_update) {
2974 int i;
2975
2976 for (i = 0; i < TOTAL_REFS_PER_FRAME; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07002977 if (aom_rb_read_bit(rb))
2978 lf->ref_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002979
2980 for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07002981 if (aom_rb_read_bit(rb))
2982 lf->mode_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002983 }
2984 }
2985}
2986
Jean-Marc Valin01435132017-02-18 14:12:53 -05002987#if CONFIG_CDEF
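// Reads the CDEF parameters: dering and CLPF damping, the number of bits used
// to signal a strength index per superblock, and the luma/chroma strength
// pair for each index.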
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01002988static void setup_cdef(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002989 int i;
Steinar Midtskogen0c966a52017-04-18 14:38:13 +02002990 cm->cdef_dering_damping = aom_rb_read_literal(rb, 1) + 5;
2991 cm->cdef_clpf_damping = aom_rb_read_literal(rb, 2) + 3;
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002992 cm->cdef_bits = aom_rb_read_literal(rb, 2);
2993 cm->nb_cdef_strengths = 1 << cm->cdef_bits;
2994 for (i = 0; i < cm->nb_cdef_strengths; i++) {
2995 cm->cdef_strengths[i] = aom_rb_read_literal(rb, CDEF_STRENGTH_BITS);
Jean-Marc Valine9f77422017-03-22 17:09:51 -04002996 cm->cdef_uv_strengths[i] = aom_rb_read_literal(rb, CDEF_STRENGTH_BITS);
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002997 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002998}
Jean-Marc Valin01435132017-02-18 14:12:53 -05002999#endif // CONFIG_CDEF
Yaowu Xuc27fc142016-08-22 16:08:15 -07003000
Yaowu Xuf883b422016-08-30 14:01:10 -07003001static INLINE int read_delta_q(struct aom_read_bit_buffer *rb) {
3002 return aom_rb_read_bit(rb) ? aom_rb_read_inv_signed_literal(rb, 6) : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003003}
3004
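// Reads the base quantizer index and the luma/chroma DC/AC delta-Q values
// (plus the quantization-matrix range when CONFIG_AOM_QM is enabled).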
Yaowu Xuf883b422016-08-30 14:01:10 -07003005static void setup_quantization(AV1_COMMON *const cm,
3006 struct aom_read_bit_buffer *rb) {
3007 cm->base_qindex = aom_rb_read_literal(rb, QINDEX_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003008 cm->y_dc_delta_q = read_delta_q(rb);
3009 cm->uv_dc_delta_q = read_delta_q(rb);
3010 cm->uv_ac_delta_q = read_delta_q(rb);
3011 cm->dequant_bit_depth = cm->bit_depth;
3012#if CONFIG_AOM_QM
Yaowu Xuf883b422016-08-30 14:01:10 -07003013 cm->using_qmatrix = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003014 if (cm->using_qmatrix) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003015 cm->min_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
3016 cm->max_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003017 } else {
3018 cm->min_qmlevel = 0;
3019 cm->max_qmlevel = 0;
3020 }
3021#endif
3022}
3023
Alex Converse05a3e7d2017-05-16 12:20:07 -07003024// Build y/uv dequant values based on segmentation.
Yaowu Xuf883b422016-08-30 14:01:10 -07003025static void setup_segmentation_dequant(AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003026#if CONFIG_AOM_QM
Alex Converse05a3e7d2017-05-16 12:20:07 -07003027 const int using_qm = cm->using_qmatrix;
3028 const int minqm = cm->min_qmlevel;
3029 const int maxqm = cm->max_qmlevel;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003030#endif
Alex Converse05a3e7d2017-05-16 12:20:07 -07003031 // When segmentation is disabled, only the first value is used. The
3032 // remaining are don't cares.
3033 const int max_segments = cm->seg.enabled ? MAX_SEGMENTS : 1;
3034 for (int i = 0; i < max_segments; ++i) {
3035 const int qindex = av1_get_qindex(&cm->seg, i, cm->base_qindex);
3036 cm->y_dequant[i][0] = av1_dc_quant(qindex, cm->y_dc_delta_q, cm->bit_depth);
3037 cm->y_dequant[i][1] = av1_ac_quant(qindex, 0, cm->bit_depth);
3038 cm->uv_dequant[i][0] =
Yaowu Xuf883b422016-08-30 14:01:10 -07003039 av1_dc_quant(qindex, cm->uv_dc_delta_q, cm->bit_depth);
Alex Converse05a3e7d2017-05-16 12:20:07 -07003040 cm->uv_dequant[i][1] =
Yaowu Xuf883b422016-08-30 14:01:10 -07003041 av1_ac_quant(qindex, cm->uv_ac_delta_q, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003042#if CONFIG_AOM_QM
Alex Converse05a3e7d2017-05-16 12:20:07 -07003043 const int lossless = qindex == 0 && cm->y_dc_delta_q == 0 &&
3044 cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
3045 // NB: the QM level depends only on the base index, so there is one set per frame
Yaowu Xuc27fc142016-08-22 16:08:15 -07003046 // No quant weighting when lossless or when the frame signals QM is unused
Alex Converse05a3e7d2017-05-16 12:20:07 -07003047 const int qmlevel = (lossless || using_qm == 0)
3048 ? NUM_QM_LEVELS - 1
3049 : aom_get_qmlevel(cm->base_qindex, minqm, maxqm);
Thomas Davies6675adf2017-05-04 17:39:21 +01003050 for (int j = 0; j < TX_SIZES_ALL; ++j) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003051 cm->y_iqmatrix[i][1][j] = aom_iqmatrix(cm, qmlevel, 0, j, 1);
3052 cm->y_iqmatrix[i][0][j] = aom_iqmatrix(cm, qmlevel, 0, j, 0);
3053 cm->uv_iqmatrix[i][1][j] = aom_iqmatrix(cm, qmlevel, 1, j, 1);
3054 cm->uv_iqmatrix[i][0][j] = aom_iqmatrix(cm, qmlevel, 1, j, 0);
3055 }
Alex Converse05a3e7d2017-05-16 12:20:07 -07003056#endif // CONFIG_AOM_QM
Yaowu Xuc27fc142016-08-22 16:08:15 -07003057#if CONFIG_NEW_QUANT
Alex Converse05a3e7d2017-05-16 12:20:07 -07003058 for (int dq = 0; dq < QUANT_PROFILES; dq++) {
3059 for (int b = 0; b < COEF_BANDS; ++b) {
3060 av1_get_dequant_val_nuq(cm->y_dequant[i][b != 0], b,
3061 cm->y_dequant_nuq[i][dq][b], NULL, dq);
3062 av1_get_dequant_val_nuq(cm->uv_dequant[i][b != 0], b,
3063 cm->uv_dequant_nuq[i][dq][b], NULL, dq);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003064 }
3065 }
3066#endif // CONFIG_NEW_QUANT
3067 }
3068}
3069
Angie Chiang5678ad92016-11-21 09:38:40 -08003070static InterpFilter read_frame_interp_filter(struct aom_read_bit_buffer *rb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003071 return aom_rb_read_bit(rb) ? SWITCHABLE
Angie Chiang6305abe2016-10-24 12:24:44 -07003072 : aom_rb_read_literal(rb, LOG_SWITCHABLE_FILTERS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003073}
3074
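// The render (display) size defaults to the coded size (the upscaled size
// when CONFIG_FRAME_SUPERRES is enabled) unless an explicit render size
// follows in the bitstream.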
Yaowu Xuf883b422016-08-30 14:01:10 -07003075static void setup_render_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003076#if CONFIG_FRAME_SUPERRES
3077 cm->render_width = cm->superres_upscaled_width;
3078 cm->render_height = cm->superres_upscaled_height;
3079#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003080 cm->render_width = cm->width;
3081 cm->render_height = cm->height;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003082#endif // CONFIG_FRAME_SUPERRES
Yaowu Xuf883b422016-08-30 14:01:10 -07003083 if (aom_rb_read_bit(rb))
3084 av1_read_frame_size(rb, &cm->render_width, &cm->render_height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003085}
3086
Fergus Simpsond91c8c92017-04-07 12:12:00 -07003087#if CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07003088// TODO(afergs): make "struct aom_read_bit_buffer *const rb"?
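// Reads the optional superres scale numerator. The incoming *width/*height
// are recorded as the upscaled output size and are then scaled to the coded
// size when a scale factor is present.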
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003089static void setup_superres(AV1_COMMON *const cm, struct aom_read_bit_buffer *rb,
3090 int *width, int *height) {
3091 cm->superres_upscaled_width = *width;
3092 cm->superres_upscaled_height = *height;
Fergus Simpsone7508412017-03-14 18:14:09 -07003093 if (aom_rb_read_bit(rb)) {
3094 cm->superres_scale_numerator =
3095 (uint8_t)aom_rb_read_literal(rb, SUPERRES_SCALE_BITS);
3096 cm->superres_scale_numerator += SUPERRES_SCALE_NUMERATOR_MIN;
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003097 // Don't edit cm->width or cm->height directly, or the buffers won't get
3098 // resized correctly
Fergus Simpsonbfbf6a52017-06-14 23:13:12 -07003099 av1_calculate_scaled_size(width, height, cm->superres_scale_numerator);
Fergus Simpsone7508412017-03-14 18:14:09 -07003100 } else {
3101 // 1:1 scaling - ie. no scaling, scale not provided
Fergus Simpsonbfbf6a52017-06-14 23:13:12 -07003102 cm->superres_scale_numerator = SCALE_DENOMINATOR;
Fergus Simpsone7508412017-03-14 18:14:09 -07003103 }
3104}
Fergus Simpsond91c8c92017-04-07 12:12:00 -07003105#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07003106
Yaowu Xuf883b422016-08-30 14:01:10 -07003107static void resize_mv_buffer(AV1_COMMON *cm) {
3108 aom_free(cm->cur_frame->mvs);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003109 cm->cur_frame->mi_rows = cm->mi_rows;
3110 cm->cur_frame->mi_cols = cm->mi_cols;
3111 CHECK_MEM_ERROR(cm, cm->cur_frame->mvs,
Yaowu Xuf883b422016-08-30 14:01:10 -07003112 (MV_REF *)aom_calloc(cm->mi_rows * cm->mi_cols,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003113 sizeof(*cm->cur_frame->mvs)));
3114}
3115
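// Resizes the mode-info context buffers (and the MV buffer) when the coded
// frame size changes; enforces DECODE_WIDTH/HEIGHT_LIMIT when
// CONFIG_SIZE_LIMIT is set.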
Yaowu Xuf883b422016-08-30 14:01:10 -07003116static void resize_context_buffers(AV1_COMMON *cm, int width, int height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003117#if CONFIG_SIZE_LIMIT
3118 if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
Yaowu Xuf883b422016-08-30 14:01:10 -07003119 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003120 "Dimensions of %dx%d beyond allowed size of %dx%d.",
3121 width, height, DECODE_WIDTH_LIMIT, DECODE_HEIGHT_LIMIT);
3122#endif
3123 if (cm->width != width || cm->height != height) {
3124 const int new_mi_rows =
3125 ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
3126 const int new_mi_cols =
3127 ALIGN_POWER_OF_TWO(width, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
3128
Yaowu Xuf883b422016-08-30 14:01:10 -07003129 // Allocations in av1_alloc_context_buffers() depend on individual
Yaowu Xuc27fc142016-08-22 16:08:15 -07003130 // dimensions as well as the overall size.
3131 if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003132 if (av1_alloc_context_buffers(cm, width, height))
3133 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003134 "Failed to allocate context buffers");
3135 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003136 av1_set_mb_mi(cm, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003137 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003138 av1_init_context_buffers(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003139 cm->width = width;
3140 cm->height = height;
3141 }
3142 if (cm->cur_frame->mvs == NULL || cm->mi_rows > cm->cur_frame->mi_rows ||
3143 cm->mi_cols > cm->cur_frame->mi_cols) {
3144 resize_mv_buffer(cm);
3145 }
3146}
3147
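// Reads the coded frame size (plus superres scale and render size), resizes
// the context buffers and (re)allocates the output frame buffer, then copies
// the colour and bit-depth metadata into the frame buffer pool entry.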
Yaowu Xuf883b422016-08-30 14:01:10 -07003148static void setup_frame_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003149 int width, height;
3150 BufferPool *const pool = cm->buffer_pool;
Yaowu Xuf883b422016-08-30 14:01:10 -07003151 av1_read_frame_size(rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003152#if CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003153 setup_superres(cm, rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003154#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003155 setup_render_size(cm, rb);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003156 resize_context_buffers(cm, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003157
3158 lock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07003159 if (aom_realloc_frame_buffer(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003160 get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
3161 cm->subsampling_y,
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02003162#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07003163 cm->use_highbitdepth,
3164#endif
Yaowu Xu671f2bd2016-09-30 15:07:57 -07003165 AOM_BORDER_IN_PIXELS, cm->byte_alignment,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003166 &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
3167 pool->cb_priv)) {
3168 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07003169 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003170 "Failed to allocate frame buffer");
3171 }
3172 unlock_buffer_pool(pool);
3173
3174 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
3175 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
3176 pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
3177 pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07003178#if CONFIG_COLORSPACE_HEADERS
3179 pool->frame_bufs[cm->new_fb_idx].buf.transfer_function =
3180 cm->transfer_function;
3181 pool->frame_bufs[cm->new_fb_idx].buf.chroma_sample_position =
3182 cm->chroma_sample_position;
3183#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003184 pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
3185 pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
3186 pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
3187}
3188
Yaowu Xuf883b422016-08-30 14:01:10 -07003189static INLINE int valid_ref_frame_img_fmt(aom_bit_depth_t ref_bit_depth,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003190 int ref_xss, int ref_yss,
Yaowu Xuf883b422016-08-30 14:01:10 -07003191 aom_bit_depth_t this_bit_depth,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003192 int this_xss, int this_yss) {
3193 return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
3194 ref_yss == this_yss;
3195}
3196
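// One flag per reference lets the frame inherit that reference's size;
// otherwise an explicit size is read. The chosen size is validated against
// the references' dimensions and colour format before the frame buffer is
// (re)allocated.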
Yaowu Xuf883b422016-08-30 14:01:10 -07003197static void setup_frame_size_with_refs(AV1_COMMON *cm,
3198 struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003199 int width, height;
3200 int found = 0, i;
3201 int has_valid_ref_frame = 0;
3202 BufferPool *const pool = cm->buffer_pool;
3203 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003204 if (aom_rb_read_bit(rb)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003205 YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf;
3206 width = buf->y_crop_width;
3207 height = buf->y_crop_height;
3208 cm->render_width = buf->render_width;
3209 cm->render_height = buf->render_height;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003210#if CONFIG_FRAME_SUPERRES
3211 setup_superres(cm, rb, &width, &height);
3212#endif // CONFIG_FRAME_SUPERRES
Yaowu Xuc27fc142016-08-22 16:08:15 -07003213 found = 1;
3214 break;
3215 }
3216 }
3217
3218 if (!found) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003219 av1_read_frame_size(rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003220#if CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003221 setup_superres(cm, rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003222#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003223 setup_render_size(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003224 }
3225
3226 if (width <= 0 || height <= 0)
Yaowu Xuf883b422016-08-30 14:01:10 -07003227 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003228 "Invalid frame size");
3229
3230 // Check to make sure that at least one of the frames that this frame
3231 // references has valid dimensions.
3232 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
3233 RefBuffer *const ref_frame = &cm->frame_refs[i];
3234 has_valid_ref_frame |=
3235 valid_ref_frame_size(ref_frame->buf->y_crop_width,
3236 ref_frame->buf->y_crop_height, width, height);
3237 }
3238 if (!has_valid_ref_frame)
Yaowu Xuf883b422016-08-30 14:01:10 -07003239 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003240 "Referenced frame has invalid size");
3241 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
3242 RefBuffer *const ref_frame = &cm->frame_refs[i];
3243 if (!valid_ref_frame_img_fmt(ref_frame->buf->bit_depth,
3244 ref_frame->buf->subsampling_x,
3245 ref_frame->buf->subsampling_y, cm->bit_depth,
3246 cm->subsampling_x, cm->subsampling_y))
Yaowu Xuf883b422016-08-30 14:01:10 -07003247 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003248 "Referenced frame has incompatible color format");
3249 }
3250
3251 resize_context_buffers(cm, width, height);
3252
3253 lock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07003254 if (aom_realloc_frame_buffer(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003255 get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
3256 cm->subsampling_y,
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02003257#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07003258 cm->use_highbitdepth,
3259#endif
Yaowu Xu671f2bd2016-09-30 15:07:57 -07003260 AOM_BORDER_IN_PIXELS, cm->byte_alignment,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003261 &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
3262 pool->cb_priv)) {
3263 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07003264 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003265 "Failed to allocate frame buffer");
3266 }
3267 unlock_buffer_pool(pool);
3268
3269 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
3270 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
3271 pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
3272 pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07003273#if CONFIG_COLORSPACE_HEADERS
3274 pool->frame_bufs[cm->new_fb_idx].buf.transfer_function =
3275 cm->transfer_function;
3276 pool->frame_bufs[cm->new_fb_idx].buf.chroma_sample_position =
3277 cm->chroma_sample_position;
3278#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003279 pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
3280 pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
3281 pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
3282}
3283
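// Reads the tile configuration: explicit tile width/height (in superblocks)
// under CONFIG_EXT_TILE, otherwise log2 tile columns/rows; also the
// loop-filter-across-tiles and dependent-horizontal-tiles flags (where
// configured) and the number of bytes used for tile size fields.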
Yaowu Xuf883b422016-08-30 14:01:10 -07003284static void read_tile_info(AV1Decoder *const pbi,
3285 struct aom_read_bit_buffer *const rb) {
3286 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003287#if CONFIG_EXT_TILE
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08003288 cm->tile_encoding_mode = aom_rb_read_literal(rb, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003289// Read the tile width/height
3290#if CONFIG_EXT_PARTITION
3291 if (cm->sb_size == BLOCK_128X128) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003292 cm->tile_width = aom_rb_read_literal(rb, 5) + 1;
3293 cm->tile_height = aom_rb_read_literal(rb, 5) + 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003294 } else
3295#endif // CONFIG_EXT_PARTITION
3296 {
Yaowu Xuf883b422016-08-30 14:01:10 -07003297 cm->tile_width = aom_rb_read_literal(rb, 6) + 1;
3298 cm->tile_height = aom_rb_read_literal(rb, 6) + 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003299 }
3300
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003301#if CONFIG_LOOPFILTERING_ACROSS_TILES
Ryan Lei7386eda2016-12-08 21:08:31 -08003302 cm->loop_filter_across_tiles_enabled = aom_rb_read_bit(rb);
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003303#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Ryan Lei7386eda2016-12-08 21:08:31 -08003304
Yaowu Xuc27fc142016-08-22 16:08:15 -07003305 cm->tile_width <<= cm->mib_size_log2;
3306 cm->tile_height <<= cm->mib_size_log2;
3307
Yaowu Xuf883b422016-08-30 14:01:10 -07003308 cm->tile_width = AOMMIN(cm->tile_width, cm->mi_cols);
3309 cm->tile_height = AOMMIN(cm->tile_height, cm->mi_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003310
3311 // Get the number of tiles
3312 cm->tile_cols = 1;
3313 while (cm->tile_cols * cm->tile_width < cm->mi_cols) ++cm->tile_cols;
3314
3315 cm->tile_rows = 1;
3316 while (cm->tile_rows * cm->tile_height < cm->mi_rows) ++cm->tile_rows;
3317
3318 if (cm->tile_cols * cm->tile_rows > 1) {
3319 // Read the number of bytes used to store tile size
Yaowu Xuf883b422016-08-30 14:01:10 -07003320 pbi->tile_col_size_bytes = aom_rb_read_literal(rb, 2) + 1;
3321 pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003322 }
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003323
3324#if CONFIG_DEPENDENT_HORZTILES
3325 if (cm->tile_rows <= 1)
3326 cm->dependent_horz_tiles = aom_rb_read_bit(rb);
3327 else
3328 cm->dependent_horz_tiles = 0;
3329#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003330#else
3331 int min_log2_tile_cols, max_log2_tile_cols, max_ones;
Yaowu Xuf883b422016-08-30 14:01:10 -07003332 av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003333
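  // The tile column count is coded as an offset from min_log2_tile_cols: each
  // 1 bit read below increments log2_tile_cols, and a 0 bit (or reaching
  // max_log2_tile_cols) stops. For example, two 1 bits select
  // min_log2_tile_cols + 2, i.e. four times as many tile columns.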
3334 // columns
3335 max_ones = max_log2_tile_cols - min_log2_tile_cols;
3336 cm->log2_tile_cols = min_log2_tile_cols;
Yaowu Xuf883b422016-08-30 14:01:10 -07003337 while (max_ones-- && aom_rb_read_bit(rb)) cm->log2_tile_cols++;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003338
3339 if (cm->log2_tile_cols > 6)
Yaowu Xuf883b422016-08-30 14:01:10 -07003340 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003341 "Invalid number of tile columns");
3342
3343 // rows
Yaowu Xuf883b422016-08-30 14:01:10 -07003344 cm->log2_tile_rows = aom_rb_read_bit(rb);
3345 if (cm->log2_tile_rows) cm->log2_tile_rows += aom_rb_read_bit(rb);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003346#if CONFIG_DEPENDENT_HORZTILES
3347 if (cm->log2_tile_rows != 0)
3348 cm->dependent_horz_tiles = aom_rb_read_bit(rb);
3349 else
3350 cm->dependent_horz_tiles = 0;
3351#endif
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003352#if CONFIG_LOOPFILTERING_ACROSS_TILES
Ryan Lei7386eda2016-12-08 21:08:31 -08003353 cm->loop_filter_across_tiles_enabled = aom_rb_read_bit(rb);
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003354#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Ryan Lei7386eda2016-12-08 21:08:31 -08003355
Yaowu Xuc27fc142016-08-22 16:08:15 -07003356 cm->tile_cols = 1 << cm->log2_tile_cols;
3357 cm->tile_rows = 1 << cm->log2_tile_rows;
3358
3359 cm->tile_width = ALIGN_POWER_OF_TWO(cm->mi_cols, MAX_MIB_SIZE_LOG2);
3360 cm->tile_width >>= cm->log2_tile_cols;
3361 cm->tile_height = ALIGN_POWER_OF_TWO(cm->mi_rows, MAX_MIB_SIZE_LOG2);
3362 cm->tile_height >>= cm->log2_tile_rows;
3363
3364 // round to integer multiples of superblock size
3365 cm->tile_width = ALIGN_POWER_OF_TWO(cm->tile_width, MAX_MIB_SIZE_LOG2);
3366 cm->tile_height = ALIGN_POWER_OF_TWO(cm->tile_height, MAX_MIB_SIZE_LOG2);
3367
Thomas Davies4974e522016-11-07 17:44:05 +00003368// tile size magnitude
3369#if !CONFIG_TILE_GROUPS
3370 if (cm->tile_rows > 1 || cm->tile_cols > 1)
3371#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07003372 pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003373#endif // CONFIG_EXT_TILE
Thomas Davies4974e522016-11-07 17:44:05 +00003374
Thomas Davies80188d12016-10-26 16:08:35 -07003375#if CONFIG_TILE_GROUPS
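  // The tile group fields (tg_start, tg_size) are repeated in the copy of the
  // frame header that precedes every tile group after the first;
  // get_tile_buffers() re-parses them from that copy at tg_size_bit_offset.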
3376 // Store an index to the location of the tile group information
3377 pbi->tg_size_bit_offset = rb->bit_offset;
3378 pbi->tg_size = 1 << (cm->log2_tile_rows + cm->log2_tile_cols);
3379 if (cm->log2_tile_rows + cm->log2_tile_cols > 0) {
3380 pbi->tg_start =
3381 aom_rb_read_literal(rb, cm->log2_tile_rows + cm->log2_tile_cols);
3382 pbi->tg_size =
3383 1 + aom_rb_read_literal(rb, cm->log2_tile_rows + cm->log2_tile_cols);
3384 }
3385#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003386}
3387
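// Reads a little-endian value of 'sz' bytes (1-4) from 'src'. For example,
// with sz == 3 and bytes { 0x34, 0x12, 0x00 }, the result is 0x001234.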
Yaowu Xu4ff59b52017-04-24 12:41:56 -07003388static int mem_get_varsize(const uint8_t *src, int sz) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003389 switch (sz) {
3390 case 1: return src[0];
3391 case 2: return mem_get_le16(src);
3392 case 3: return mem_get_le24(src);
3393 case 4: return mem_get_le32(src);
3394 default: assert("Invalid size" && 0); return -1;
3395 }
3396}
3397
3398#if CONFIG_EXT_TILE
3399// Reads the size (or copy-mode header) of the next tile, records its buffer
3400// location in 'tile_buffers', and advances '*data' past the tile data.
3401static void get_tile_buffer(const uint8_t *const data_end,
Yaowu Xuf883b422016-08-30 14:01:10 -07003402 struct aom_internal_error_info *error_info,
3403 const uint8_t **data, aom_decrypt_cb decrypt_cb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003404 void *decrypt_state,
3405 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS],
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08003406 int tile_size_bytes, int col, int row,
3407 unsigned int tile_encoding_mode) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003408 size_t size;
3409
3410 size_t copy_size = 0;
3411 const uint8_t *copy_data = NULL;
3412
3413 if (!read_is_valid(*data, tile_size_bytes, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07003414 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003415 "Truncated packet or corrupt tile length");
3416 if (decrypt_cb) {
3417 uint8_t be_data[4];
3418 decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
3419
3420 // Only read number of bytes in cm->tile_size_bytes.
3421 size = mem_get_varsize(be_data, tile_size_bytes);
3422 } else {
3423 size = mem_get_varsize(*data, tile_size_bytes);
3424 }
3425
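  // Copy-mode example: with tile_size_bytes == 4, a most-significant header
  // byte of 0x83 has the top bit set (copy mode) and a row offset of 3, so
  // this tile reuses the data of the tile three rows above in the same column.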
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08003426 // If cm->tile_encoding_mode = 1 (i.e. TILE_VR), then the top bit of the tile
3427 // header indicates copy mode.
3428 if (tile_encoding_mode && (size >> (tile_size_bytes * 8 - 1)) == 1) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003429 // The remaining bits in the top byte signal the row offset
3430 int offset = (size >> (tile_size_bytes - 1) * 8) & 0x7f;
3431
3432 // Currently, only use tiles in the same column as reference tiles.
3433 copy_data = tile_buffers[row - offset][col].data;
3434 copy_size = tile_buffers[row - offset][col].size;
3435 size = 0;
3436 }
3437
3438 *data += tile_size_bytes;
3439
3440 if (size > (size_t)(data_end - *data))
Yaowu Xuf883b422016-08-30 14:01:10 -07003441 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003442 "Truncated packet or corrupt tile size");
3443
3444 if (size > 0) {
3445 tile_buffers[row][col].data = *data;
3446 tile_buffers[row][col].size = size;
3447 } else {
3448 tile_buffers[row][col].data = copy_data;
3449 tile_buffers[row][col].size = copy_size;
3450 }
3451
3452 *data += size;
3453
3454 tile_buffers[row][col].raw_data_end = *data;
3455}
3456
3457static void get_tile_buffers(
Yaowu Xuf883b422016-08-30 14:01:10 -07003458 AV1Decoder *pbi, const uint8_t *data, const uint8_t *data_end,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003459 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS]) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003460 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003461 const int tile_cols = cm->tile_cols;
3462 const int tile_rows = cm->tile_rows;
3463 const int have_tiles = tile_cols * tile_rows > 1;
3464
3465 if (!have_tiles) {
Jingning Han99ffce62017-04-25 15:48:41 -07003466 const size_t tile_size = data_end - data;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003467 tile_buffers[0][0].data = data;
3468 tile_buffers[0][0].size = tile_size;
3469 tile_buffers[0][0].raw_data_end = NULL;
3470 } else {
3471 // We locate only the tile buffers that are required, which are the ones
3472 // specified by pbi->dec_tile_col and pbi->dec_tile_row. Also, we always
3473 // need the last (bottom right) tile buffer, as we need to know where the
3474 // end of the compressed frame buffer is for proper superframe decoding.
3475
3476 const uint8_t *tile_col_data_end[MAX_TILE_COLS];
3477 const uint8_t *const data_start = data;
3478
Yaowu Xuf883b422016-08-30 14:01:10 -07003479 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003480 const int single_row = pbi->dec_tile_row >= 0;
3481 const int tile_rows_start = single_row ? dec_tile_row : 0;
3482 const int tile_rows_end = single_row ? tile_rows_start + 1 : tile_rows;
Yaowu Xuf883b422016-08-30 14:01:10 -07003483 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003484 const int single_col = pbi->dec_tile_col >= 0;
3485 const int tile_cols_start = single_col ? dec_tile_col : 0;
3486 const int tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
3487
3488 const int tile_col_size_bytes = pbi->tile_col_size_bytes;
3489 const int tile_size_bytes = pbi->tile_size_bytes;
3490
3491 size_t tile_col_size;
3492 int r, c;
3493
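    // Data layout: tile data is stored column by column. Every column except
    // the last is prefixed with its total size (tile_col_size_bytes), and
    // each tile within a column is prefixed with its own size
    // (tile_size_bytes).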
3494 // Read tile column sizes for all columns (we need the last tile buffer)
3495 for (c = 0; c < tile_cols; ++c) {
3496 const int is_last = c == tile_cols - 1;
3497 if (!is_last) {
3498 tile_col_size = mem_get_varsize(data, tile_col_size_bytes);
3499 data += tile_col_size_bytes;
3500 tile_col_data_end[c] = data + tile_col_size;
3501 } else {
3502 tile_col_size = data_end - data;
3503 tile_col_data_end[c] = data_end;
3504 }
3505 data += tile_col_size;
3506 }
3507
3508 data = data_start;
3509
3510 // Read the required tile sizes.
3511 for (c = tile_cols_start; c < tile_cols_end; ++c) {
3512 const int is_last = c == tile_cols - 1;
3513
3514 if (c > 0) data = tile_col_data_end[c - 1];
3515
3516 if (!is_last) data += tile_col_size_bytes;
3517
3518 // Get the whole of the last column, otherwise stop at the required tile.
3519 for (r = 0; r < (is_last ? tile_rows : tile_rows_end); ++r) {
3520 tile_buffers[r][c].col = c;
3521
3522 get_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
3523 pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08003524 tile_size_bytes, c, r, cm->tile_encoding_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003525 }
3526 }
3527
3528 // If we have not read the last column, then read it to get the last tile.
3529 if (tile_cols_end != tile_cols) {
3530 c = tile_cols - 1;
3531
3532 data = tile_col_data_end[c - 1];
3533
3534 for (r = 0; r < tile_rows; ++r) {
3535 tile_buffers[r][c].col = c;
3536
3537 get_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
3538 pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08003539 tile_size_bytes, c, r, cm->tile_encoding_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003540 }
3541 }
3542 }
3543}
3544#else
3545// Reads the next tile returning its size and adjusting '*data' accordingly
3546// based on 'is_last'.
3547static void get_tile_buffer(const uint8_t *const data_end,
3548 const int tile_size_bytes, int is_last,
Yaowu Xuf883b422016-08-30 14:01:10 -07003549 struct aom_internal_error_info *error_info,
3550 const uint8_t **data, aom_decrypt_cb decrypt_cb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003551 void *decrypt_state, TileBufferDec *const buf) {
3552 size_t size;
3553
3554 if (!is_last) {
Yaowu Xu0a79a1b2017-02-17 13:04:54 -08003555 if (!read_is_valid(*data, tile_size_bytes, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07003556 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003557 "Truncated packet or corrupt tile length");
3558
3559 if (decrypt_cb) {
3560 uint8_t be_data[4];
3561 decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
3562 size = mem_get_varsize(be_data, tile_size_bytes);
3563 } else {
3564 size = mem_get_varsize(*data, tile_size_bytes);
3565 }
3566 *data += tile_size_bytes;
3567
3568 if (size > (size_t)(data_end - *data))
Yaowu Xuf883b422016-08-30 14:01:10 -07003569 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003570 "Truncated packet or corrupt tile size");
3571 } else {
3572 size = data_end - *data;
3573 }
3574
3575 buf->data = *data;
3576 buf->size = size;
3577
3578 *data += size;
3579}
3580
3581static void get_tile_buffers(
Yaowu Xuf883b422016-08-30 14:01:10 -07003582 AV1Decoder *pbi, const uint8_t *data, const uint8_t *data_end,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003583 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS]) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003584 AV1_COMMON *const cm = &pbi->common;
Thomas Davies80188d12016-10-26 16:08:35 -07003585#if CONFIG_TILE_GROUPS
3586 int r, c;
3587 const int tile_cols = cm->tile_cols;
3588 const int tile_rows = cm->tile_rows;
3589 int tc = 0;
3590 int first_tile_in_tg = 0;
Thomas Davies80188d12016-10-26 16:08:35 -07003591 struct aom_read_bit_buffer rb_tg_hdr;
3592 uint8_t clear_data[MAX_AV1_HEADER_SIZE];
3593 const int num_tiles = tile_rows * tile_cols;
3594 const int num_bits = OD_ILOG(num_tiles) - 1;
James Zern6efba482017-04-20 20:53:49 -07003595 const size_t hdr_size = pbi->uncomp_hdr_size + pbi->first_partition_size;
Thomas Davies80188d12016-10-26 16:08:35 -07003596 const int tg_size_bit_offset = pbi->tg_size_bit_offset;
Fangwen Fu73126c02017-02-08 22:37:47 -08003597#if CONFIG_DEPENDENT_HORZTILES
3598 int tile_group_start_col = 0;
3599 int tile_group_start_row = 0;
3600#endif
Thomas Davies80188d12016-10-26 16:08:35 -07003601
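  // Every tile group after the first is preceded by a repeated copy of the
  // frame header (hdr_size bytes). At the first tile of such a group, re-read
  // tg_start/tg_size from that copy, then skip past it.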
3602 for (r = 0; r < tile_rows; ++r) {
3603 for (c = 0; c < tile_cols; ++c, ++tc) {
Thomas Davies80188d12016-10-26 16:08:35 -07003604 TileBufferDec *const buf = &tile_buffers[r][c];
Thomas Daviesa0de6d52017-01-20 14:45:25 +00003605 const int is_last = (r == tile_rows - 1) && (c == tile_cols - 1);
James Zern6efba482017-04-20 20:53:49 -07003606 const size_t hdr_offset = (tc && tc == first_tile_in_tg) ? hdr_size : 0;
Thomas Davies80188d12016-10-26 16:08:35 -07003607
3608 buf->col = c;
3609 if (hdr_offset) {
3610 init_read_bit_buffer(pbi, &rb_tg_hdr, data, data_end, clear_data);
3611 rb_tg_hdr.bit_offset = tg_size_bit_offset;
3612 if (num_tiles) {
3613 pbi->tg_start = aom_rb_read_literal(&rb_tg_hdr, num_bits);
3614 pbi->tg_size = 1 + aom_rb_read_literal(&rb_tg_hdr, num_bits);
Fangwen Fu73126c02017-02-08 22:37:47 -08003615#if CONFIG_DEPENDENT_HORZTILES
3616 tile_group_start_row = r;
3617 tile_group_start_col = c;
3618#endif
Thomas Davies80188d12016-10-26 16:08:35 -07003619 }
3620 }
3621 first_tile_in_tg += tc == first_tile_in_tg ? pbi->tg_size : 0;
3622 data += hdr_offset;
Thomas Daviesa0de6d52017-01-20 14:45:25 +00003623 get_tile_buffer(data_end, pbi->tile_size_bytes, is_last,
3624 &pbi->common.error, &data, pbi->decrypt_cb,
3625 pbi->decrypt_state, buf);
Fangwen Fu73126c02017-02-08 22:37:47 -08003626#if CONFIG_DEPENDENT_HORZTILES
3627 cm->tile_group_start_row[r][c] = tile_group_start_row;
3628 cm->tile_group_start_col[r][c] = tile_group_start_col;
3629#endif
Thomas Davies80188d12016-10-26 16:08:35 -07003630 }
3631 }
3632#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003633 int r, c;
3634 const int tile_cols = cm->tile_cols;
3635 const int tile_rows = cm->tile_rows;
3636
3637 for (r = 0; r < tile_rows; ++r) {
3638 for (c = 0; c < tile_cols; ++c) {
3639 const int is_last = (r == tile_rows - 1) && (c == tile_cols - 1);
3640 TileBufferDec *const buf = &tile_buffers[r][c];
3641 buf->col = c;
3642 get_tile_buffer(data_end, pbi->tile_size_bytes, is_last, &cm->error,
3643 &data, pbi->decrypt_cb, pbi->decrypt_state, buf);
3644 }
3645 }
Thomas Davies80188d12016-10-26 16:08:35 -07003646#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003647}
3648#endif // CONFIG_EXT_TILE
3649
Yushin Cho77bba8d2016-11-04 16:36:56 -07003650#if CONFIG_PVQ
Yushin Cho70669122016-12-08 09:53:14 -10003651static void daala_dec_init(AV1_COMMON *const cm, daala_dec_ctx *daala_dec,
Nathan E. Eggeab083972016-12-28 15:31:46 -05003652 aom_reader *r) {
3653 daala_dec->r = r;
Yushin Cho77bba8d2016-11-04 16:36:56 -07003654
Yushin Cho70669122016-12-08 09:53:14 -10003655 // TODO(yushin): activity masking info needs to be signaled in the bitstream
3656 daala_dec->use_activity_masking = AV1_PVQ_ENABLE_ACTIVITY_MASKING;
3657
Yushin Cho7a428ba2017-01-12 16:28:49 -08003658#if !CONFIG_DAALA_DIST
3659 daala_dec->use_activity_masking = 0;
3660#endif
3661
Yushin Cho70669122016-12-08 09:53:14 -10003662 if (daala_dec->use_activity_masking)
3663 daala_dec->qm = OD_HVS_QM;
3664 else
3665 daala_dec->qm = OD_FLAT_QM;
Yushin Cho77bba8d2016-11-04 16:36:56 -07003666
3667 od_init_qm(daala_dec->state.qm, daala_dec->state.qm_inv,
3668 daala_dec->qm == OD_HVS_QM ? OD_QM8_Q4_HVS : OD_QM8_Q4_FLAT);
Yushin Cho70669122016-12-08 09:53:14 -10003669
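  // With activity masking, derive each plane's PVQ quantization matrix from
  // the OD_DEFAULT_QMS tables, interpolating between the entries that bracket
  // the frame's base qindex.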
3670 if (daala_dec->use_activity_masking) {
3671 int pli;
3672 int use_masking = daala_dec->use_activity_masking;
3673 int segment_id = 0;
3674 int qindex = av1_get_qindex(&cm->seg, segment_id, cm->base_qindex);
3675
3676 for (pli = 0; pli < MAX_MB_PLANE; pli++) {
3677 int i;
3678 int q;
3679
3680 q = qindex;
3681 if (q <= OD_DEFAULT_QMS[use_masking][0][pli].interp_q << OD_COEFF_SHIFT) {
3682 od_interp_qm(&daala_dec->state.pvq_qm_q4[pli][0], q,
3683 &OD_DEFAULT_QMS[use_masking][0][pli], NULL);
3684 } else {
3685 i = 0;
3686 while (OD_DEFAULT_QMS[use_masking][i + 1][pli].qm_q4 != NULL &&
3687 q > OD_DEFAULT_QMS[use_masking][i + 1][pli].interp_q
3688 << OD_COEFF_SHIFT) {
3689 i++;
3690 }
3691 od_interp_qm(&daala_dec->state.pvq_qm_q4[pli][0], q,
3692 &OD_DEFAULT_QMS[use_masking][i][pli],
3693 &OD_DEFAULT_QMS[use_masking][i + 1][pli]);
3694 }
3695 }
3696 }
Yushin Cho77bba8d2016-11-04 16:36:56 -07003697}
Yushin Cho70669122016-12-08 09:53:14 -10003698#endif // CONFIG_PVQ
Yushin Cho77bba8d2016-11-04 16:36:56 -07003699
Yaowu Xuf883b422016-08-30 14:01:10 -07003700static const uint8_t *decode_tiles(AV1Decoder *pbi, const uint8_t *data,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003701 const uint8_t *data_end) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003702 AV1_COMMON *const cm = &pbi->common;
3703 const AVxWorkerInterface *const winterface = aom_get_worker_interface();
Yaowu Xuc27fc142016-08-22 16:08:15 -07003704 const int tile_cols = cm->tile_cols;
3705 const int tile_rows = cm->tile_rows;
3706 const int n_tiles = tile_cols * tile_rows;
clang-format67948d32016-09-07 22:40:40 -07003707 TileBufferDec(*const tile_buffers)[MAX_TILE_COLS] = pbi->tile_buffers;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003708#if CONFIG_EXT_TILE
Yaowu Xuf883b422016-08-30 14:01:10 -07003709 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003710 const int single_row = pbi->dec_tile_row >= 0;
3711 const int tile_rows_start = single_row ? dec_tile_row : 0;
3712 const int tile_rows_end = single_row ? dec_tile_row + 1 : tile_rows;
Yaowu Xuf883b422016-08-30 14:01:10 -07003713 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003714 const int single_col = pbi->dec_tile_col >= 0;
3715 const int tile_cols_start = single_col ? dec_tile_col : 0;
3716 const int tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
3717 const int inv_col_order = pbi->inv_tile_order && !single_col;
3718 const int inv_row_order = pbi->inv_tile_order && !single_row;
3719#else
3720 const int tile_rows_start = 0;
3721 const int tile_rows_end = tile_rows;
3722 const int tile_cols_start = 0;
3723 const int tile_cols_end = tile_cols;
3724 const int inv_col_order = pbi->inv_tile_order;
3725 const int inv_row_order = pbi->inv_tile_order;
3726#endif // CONFIG_EXT_TILE
3727 int tile_row, tile_col;
3728
Yaowu Xuc27fc142016-08-22 16:08:15 -07003729 if (cm->lf.filter_level && !cm->skip_loop_filter &&
3730 pbi->lf_worker.data1 == NULL) {
3731 CHECK_MEM_ERROR(cm, pbi->lf_worker.data1,
Yaowu Xuf883b422016-08-30 14:01:10 -07003732 aom_memalign(32, sizeof(LFWorkerData)));
3733 pbi->lf_worker.hook = (AVxWorkerHook)av1_loop_filter_worker;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003734 if (pbi->max_threads > 1 && !winterface->reset(&pbi->lf_worker)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003735 aom_internal_error(&cm->error, AOM_CODEC_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003736 "Loop filter thread creation failed");
3737 }
3738 }
3739
3740 if (cm->lf.filter_level && !cm->skip_loop_filter) {
3741 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
3742 // Be sure to sync as we might be resuming after a failed frame decode.
3743 winterface->sync(&pbi->lf_worker);
Yaowu Xuf883b422016-08-30 14:01:10 -07003744 av1_loop_filter_data_reset(lf_data, get_frame_new_buffer(cm), cm,
3745 pbi->mb.plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003746 }
3747
3748 assert(tile_rows <= MAX_TILE_ROWS);
3749 assert(tile_cols <= MAX_TILE_COLS);
3750
3751 get_tile_buffers(pbi, data, data_end, tile_buffers);
3752
3753 if (pbi->tile_data == NULL || n_tiles != pbi->allocated_tiles) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003754 aom_free(pbi->tile_data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003755 CHECK_MEM_ERROR(cm, pbi->tile_data,
Yaowu Xuf883b422016-08-30 14:01:10 -07003756 aom_memalign(32, n_tiles * (sizeof(*pbi->tile_data))));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003757 pbi->allocated_tiles = n_tiles;
3758 }
Michael Bebenita6048d052016-08-25 14:40:54 -07003759#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003760 if (pbi->acct_enabled) {
3761 aom_accounting_reset(&pbi->accounting);
3762 }
Michael Bebenita6048d052016-08-25 14:40:54 -07003763#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003764 // Load all tile information into tile_data.
3765 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
3766 for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
3767 const TileBufferDec *const buf = &tile_buffers[tile_row][tile_col];
3768 TileData *const td = pbi->tile_data + tile_cols * tile_row + tile_col;
3769
3770 td->cm = cm;
3771 td->xd = pbi->mb;
3772 td->xd.corrupted = 0;
3773 td->xd.counts =
3774 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD
3775 ? &cm->counts
3776 : NULL;
Yaowu Xuf883b422016-08-30 14:01:10 -07003777 av1_zero(td->dqcoeff);
Yushin Cho77bba8d2016-11-04 16:36:56 -07003778#if CONFIG_PVQ
Yaowu Xud6ea71c2016-11-07 10:24:14 -08003779 av1_zero(td->pvq_ref_coeff);
Yushin Cho77bba8d2016-11-04 16:36:56 -07003780#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07003781 av1_tile_init(&td->xd.tile, td->cm, tile_row, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003782 setup_bool_decoder(buf->data, data_end, buf->size, &cm->error,
Alex Converseeb780e72016-12-13 12:46:41 -08003783 &td->bit_reader,
3784#if CONFIG_ANS && ANS_MAX_SYMBOLS
3785 1 << cm->ans_window_size_log2,
3786#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
3787 pbi->decrypt_cb, pbi->decrypt_state);
Michael Bebenita6048d052016-08-25 14:40:54 -07003788#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003789 if (pbi->acct_enabled) {
David Barkerd971f402016-10-25 13:52:07 +01003790 td->bit_reader.accounting = &pbi->accounting;
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003791 } else {
David Barkerd971f402016-10-25 13:52:07 +01003792 td->bit_reader.accounting = NULL;
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003793 }
Michael Bebenita6048d052016-08-25 14:40:54 -07003794#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07003795 av1_init_macroblockd(cm, &td->xd,
3796#if CONFIG_PVQ
3797 td->pvq_ref_coeff,
3798#endif
Luc Trudeauf8164152017-04-11 16:20:51 -04003799#if CONFIG_CFL
3800 &td->cfl,
3801#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07003802 td->dqcoeff);
Yushin Choc49ef3a2017-03-13 17:27:25 -07003803
Thomas Daviesf77d4ad2017-01-10 18:55:42 +00003804#if CONFIG_EC_ADAPT
3805 // Initialise the tile context from the frame context
3806 td->tctx = *cm->fc;
3807 td->xd.tile_ctx = &td->tctx;
3808#endif
Yushin Choc49ef3a2017-03-13 17:27:25 -07003809
3810#if CONFIG_PVQ
3811 daala_dec_init(cm, &td->xd.daala_dec, &td->bit_reader);
3812 td->xd.daala_dec.state.adapt = &td->tctx.pvq_context;
3813#endif
3814
Urvang Joshib100db72016-10-12 16:28:56 -07003815#if CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003816 td->xd.plane[0].color_index_map = td->color_index_map[0];
3817 td->xd.plane[1].color_index_map = td->color_index_map[1];
Urvang Joshib100db72016-10-12 16:28:56 -07003818#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003819 }
3820 }
3821
3822 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
3823 const int row = inv_row_order ? tile_rows - 1 - tile_row : tile_row;
3824 int mi_row = 0;
3825 TileInfo tile_info;
3826
Yaowu Xuf883b422016-08-30 14:01:10 -07003827 av1_tile_set_row(&tile_info, cm, row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003828
3829 for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
3830 const int col = inv_col_order ? tile_cols - 1 - tile_col : tile_col;
3831 TileData *const td = pbi->tile_data + tile_cols * row + col;
Michael Bebenita6048d052016-08-25 14:40:54 -07003832#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003833 if (pbi->acct_enabled) {
David Barkerd971f402016-10-25 13:52:07 +01003834 td->bit_reader.accounting->last_tell_frac =
3835 aom_reader_tell_frac(&td->bit_reader);
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003836 }
Michael Bebenita6048d052016-08-25 14:40:54 -07003837#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003838
Yaowu Xuf883b422016-08-30 14:01:10 -07003839 av1_tile_set_col(&tile_info, cm, col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003840
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003841#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu73126c02017-02-08 22:37:47 -08003842#if CONFIG_TILE_GROUPS
3843 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
3844 if (!cm->dependent_horz_tiles || tile_row == 0 ||
3845 tile_info.tg_horz_boundary) {
3846#else
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003847 if (!cm->dependent_horz_tiles || tile_row == 0) {
Fangwen Fu73126c02017-02-08 22:37:47 -08003848#endif
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003849 av1_zero_above_context(cm, tile_info.mi_col_start,
3850 tile_info.mi_col_end);
3851 }
3852#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003853 av1_zero_above_context(cm, tile_info.mi_col_start, tile_info.mi_col_end);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003854#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003855
3856 for (mi_row = tile_info.mi_row_start; mi_row < tile_info.mi_row_end;
3857 mi_row += cm->mib_size) {
3858 int mi_col;
3859
Yaowu Xuf883b422016-08-30 14:01:10 -07003860 av1_zero_left_context(&td->xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003861
3862 for (mi_col = tile_info.mi_col_start; mi_col < tile_info.mi_col_end;
3863 mi_col += cm->mib_size) {
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003864 av1_update_boundary_info(cm, &tile_info, mi_row, mi_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003865 decode_partition(pbi, &td->xd,
3866#if CONFIG_SUPERTX
3867 0,
3868#endif // CONFIG_SUPERTX
3869 mi_row, mi_col, &td->bit_reader, cm->sb_size,
3870 b_width_log2_lookup[cm->sb_size]);
Yue Chen9ab6d712017-01-12 15:50:46 -08003871#if CONFIG_NCOBMC && CONFIG_MOTION_VAR
3872 detoken_and_recon_sb(pbi, &td->xd, mi_row, mi_col, &td->bit_reader,
3873 cm->sb_size);
3874#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003875 }
Angie Chiangd0916d92017-03-10 17:54:18 -08003876 aom_merge_corrupted_flag(&pbi->mb.corrupted, td->xd.corrupted);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003877 if (pbi->mb.corrupted)
Yaowu Xuf883b422016-08-30 14:01:10 -07003878 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003879 "Failed to decode tile data");
Yaowu Xuc27fc142016-08-22 16:08:15 -07003880 }
3881 }
3882
3883 assert(mi_row > 0);
3884
Ryan Lei6f8c1a72016-10-26 10:52:12 -07003885// When parallel deblocking is enabled, deblocking should not be
3886// interleaved with decoding. Instead, deblocking should be done after the
3887// entire frame is decoded.
Jingning Han52ece882017-04-07 14:58:25 -07003888#if !CONFIG_VAR_TX && !CONFIG_PARALLEL_DEBLOCKING && !CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07003889 // Loopfilter one tile row.
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08003890 // Note: If out-of-order tile decoding is used (for example, inv_row_order
3891 // = 1), the loopfiltering has to be done after all tile rows are decoded.
3892 if (!inv_row_order && cm->lf.filter_level && !cm->skip_loop_filter) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003893 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
Yaowu Xuf883b422016-08-30 14:01:10 -07003894 const int lf_start = AOMMAX(0, tile_info.mi_row_start - cm->mib_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003895 const int lf_end = tile_info.mi_row_end - cm->mib_size;
3896
3897 // Delay the loopfilter if the first tile row is only
3898 // a single superblock high.
3899 if (lf_end <= 0) continue;
3900
3901 // Last tile row: leave its loop filtering to the final pass after all tiles are decoded.
3902 if (tile_info.mi_row_end >= cm->mi_rows) continue;
3903
3904 winterface->sync(&pbi->lf_worker);
3905 lf_data->start = lf_start;
3906 lf_data->stop = lf_end;
3907 if (pbi->max_threads > 1) {
3908 winterface->launch(&pbi->lf_worker);
3909 } else {
3910 winterface->execute(&pbi->lf_worker);
3911 }
3912 }
Ryan Lei6f8c1a72016-10-26 10:52:12 -07003913#endif // !CONFIG_VAR_TX && !CONFIG_PARALLEL_DEBLOCKING && !CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07003914
3915 // After loopfiltering, the last 7 rows of pixels in each superblock row may
3916 // still be changed by the longest loop filter of the next superblock row.
3917 if (cm->frame_parallel_decode)
Yaowu Xuf883b422016-08-30 14:01:10 -07003918 av1_frameworker_broadcast(pbi->cur_buf, mi_row << cm->mib_size_log2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003919 }
3920
Jingning Han52ece882017-04-07 14:58:25 -07003921#if CONFIG_VAR_TX || CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07003922 // Loopfilter the whole frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07003923 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
3924 cm->lf.filter_level, 0, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003925#else
Ryan Lei6f8c1a72016-10-26 10:52:12 -07003926#if CONFIG_PARALLEL_DEBLOCKING
3927 // Loopfilter all rows in the frame.
3928 if (cm->lf.filter_level && !cm->skip_loop_filter) {
3929 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
3930 winterface->sync(&pbi->lf_worker);
3931 lf_data->start = 0;
3932 lf_data->stop = cm->mi_rows;
3933 winterface->execute(&pbi->lf_worker);
3934 }
3935#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003936 // Loopfilter remaining rows in the frame.
3937 if (cm->lf.filter_level && !cm->skip_loop_filter) {
3938 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
3939 winterface->sync(&pbi->lf_worker);
3940 lf_data->start = lf_data->stop;
3941 lf_data->stop = cm->mi_rows;
3942 winterface->execute(&pbi->lf_worker);
3943 }
Ryan Lei6f8c1a72016-10-26 10:52:12 -07003944#endif // CONFIG_PARALLEL_DEBLOCKING
Yaowu Xuc27fc142016-08-22 16:08:15 -07003945#endif // CONFIG_VAR_TX || CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07003946 if (cm->frame_parallel_decode)
Yaowu Xuf883b422016-08-30 14:01:10 -07003947 av1_frameworker_broadcast(pbi->cur_buf, INT_MAX);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003948
3949#if CONFIG_EXT_TILE
3950 if (n_tiles == 1) {
3951#if CONFIG_ANS
3952 return data_end;
3953#else
3954 // Find the end of the single tile buffer
Yaowu Xuf883b422016-08-30 14:01:10 -07003955 return aom_reader_find_end(&pbi->tile_data->bit_reader);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003956#endif // CONFIG_ANS
3957 } else {
3958 // Return the end of the last tile buffer
3959 return tile_buffers[tile_rows - 1][tile_cols - 1].raw_data_end;
3960 }
3961#else
3962#if CONFIG_ANS
3963 return data_end;
3964#else
3965 {
3966 // Get last tile data.
3967 TileData *const td = pbi->tile_data + tile_cols * tile_rows - 1;
Yaowu Xuf883b422016-08-30 14:01:10 -07003968 return aom_reader_find_end(&td->bit_reader);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003969 }
3970#endif // CONFIG_ANS
3971#endif // CONFIG_EXT_TILE
3972}
3973
3974static int tile_worker_hook(TileWorkerData *const tile_data,
3975 const TileInfo *const tile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003976 AV1Decoder *const pbi = tile_data->pbi;
3977 const AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003978 int mi_row, mi_col;
3979
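  // If a fatal bitstream error occurs while decoding this tile,
  // aom_internal_error() longjmps back to this setjmp(); the tile is then
  // marked corrupted and the hook reports failure.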
3980 if (setjmp(tile_data->error_info.jmp)) {
3981 tile_data->error_info.setjmp = 0;
Angie Chiangd0916d92017-03-10 17:54:18 -08003982 aom_merge_corrupted_flag(&tile_data->xd.corrupted, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003983 return 0;
3984 }
3985
3986 tile_data->error_info.setjmp = 1;
3987 tile_data->xd.error_info = &tile_data->error_info;
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003988#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu73126c02017-02-08 22:37:47 -08003989#if CONFIG_TILE_GROUPS
3990 if (!cm->dependent_horz_tiles || tile->tg_horz_boundary) {
3991#else
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003992 if (!cm->dependent_horz_tiles) {
Fangwen Fu73126c02017-02-08 22:37:47 -08003993#endif
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003994 av1_zero_above_context(&pbi->common, tile->mi_col_start, tile->mi_col_end);
3995 }
3996#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003997 av1_zero_above_context(&pbi->common, tile->mi_col_start, tile->mi_col_end);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003998#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003999
4000 for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
4001 mi_row += cm->mib_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004002 av1_zero_left_context(&tile_data->xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004003
4004 for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
4005 mi_col += cm->mib_size) {
4006 decode_partition(pbi, &tile_data->xd,
4007#if CONFIG_SUPERTX
4008 0,
4009#endif
4010 mi_row, mi_col, &tile_data->bit_reader, cm->sb_size,
4011 b_width_log2_lookup[cm->sb_size]);
Yue Chen9ab6d712017-01-12 15:50:46 -08004012#if CONFIG_NCOBMC && CONFIG_MOTION_VAR
4013 detoken_and_recon_sb(pbi, &tile_data->xd, mi_row, mi_col,
4014 &tile_data->bit_reader, cm->sb_size);
4015#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004016 }
4017 }
4018 return !tile_data->xd.corrupted;
4019}
4020
4021// Sorts tile buffers in descending order of size.
4022static int compare_tile_buffers(const void *a, const void *b) {
4023 const TileBufferDec *const buf1 = (const TileBufferDec *)a;
4024 const TileBufferDec *const buf2 = (const TileBufferDec *)b;
4025 return (int)(buf2->size - buf1->size);
4026}
4027
Yaowu Xuf883b422016-08-30 14:01:10 -07004028static const uint8_t *decode_tiles_mt(AV1Decoder *pbi, const uint8_t *data,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004029 const uint8_t *data_end) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004030 AV1_COMMON *const cm = &pbi->common;
4031 const AVxWorkerInterface *const winterface = aom_get_worker_interface();
Yaowu Xuc27fc142016-08-22 16:08:15 -07004032 const int tile_cols = cm->tile_cols;
4033 const int tile_rows = cm->tile_rows;
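  // Use an even number of workers (max_threads rounded down), but never more
  // than the number of tile columns.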
Yaowu Xuf883b422016-08-30 14:01:10 -07004034 const int num_workers = AOMMIN(pbi->max_threads & ~1, tile_cols);
clang-format67948d32016-09-07 22:40:40 -07004035 TileBufferDec(*const tile_buffers)[MAX_TILE_COLS] = pbi->tile_buffers;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004036#if CONFIG_EXT_TILE
Yaowu Xuf883b422016-08-30 14:01:10 -07004037 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004038 const int single_row = pbi->dec_tile_row >= 0;
4039 const int tile_rows_start = single_row ? dec_tile_row : 0;
4040 const int tile_rows_end = single_row ? dec_tile_row + 1 : tile_rows;
Yaowu Xuf883b422016-08-30 14:01:10 -07004041 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004042 const int single_col = pbi->dec_tile_col >= 0;
4043 const int tile_cols_start = single_col ? dec_tile_col : 0;
4044 const int tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
4045#else
4046 const int tile_rows_start = 0;
4047 const int tile_rows_end = tile_rows;
4048 const int tile_cols_start = 0;
4049 const int tile_cols_end = tile_cols;
4050#endif // CONFIG_EXT_TILE
4051 int tile_row, tile_col;
4052 int i;
4053
4054#if !(CONFIG_ANS || CONFIG_EXT_TILE)
4055 int final_worker = -1;
4056#endif // !(CONFIG_ANS || CONFIG_EXT_TILE)
4057
4058 assert(tile_rows <= MAX_TILE_ROWS);
4059 assert(tile_cols <= MAX_TILE_COLS);
4060
4061 assert(tile_cols * tile_rows > 1);
4062
Yaowu Xuc27fc142016-08-22 16:08:15 -07004063 // TODO(jzern): See if we can remove the restriction of passing in max
4064 // threads to the decoder.
4065 if (pbi->num_tile_workers == 0) {
4066 const int num_threads = pbi->max_threads & ~1;
4067 CHECK_MEM_ERROR(cm, pbi->tile_workers,
Yaowu Xuf883b422016-08-30 14:01:10 -07004068 aom_malloc(num_threads * sizeof(*pbi->tile_workers)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07004069 // Ensure tile data offsets will be properly aligned. This may fail on
4070 // platforms without DECLARE_ALIGNED().
4071 assert((sizeof(*pbi->tile_worker_data) % 16) == 0);
4072 CHECK_MEM_ERROR(
4073 cm, pbi->tile_worker_data,
Yaowu Xuf883b422016-08-30 14:01:10 -07004074 aom_memalign(32, num_threads * sizeof(*pbi->tile_worker_data)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07004075 CHECK_MEM_ERROR(cm, pbi->tile_worker_info,
Yaowu Xuf883b422016-08-30 14:01:10 -07004076 aom_malloc(num_threads * sizeof(*pbi->tile_worker_info)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07004077 for (i = 0; i < num_threads; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004078 AVxWorker *const worker = &pbi->tile_workers[i];
Yaowu Xuc27fc142016-08-22 16:08:15 -07004079 ++pbi->num_tile_workers;
4080
4081 winterface->init(worker);
4082 if (i < num_threads - 1 && !winterface->reset(worker)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004083 aom_internal_error(&cm->error, AOM_CODEC_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004084 "Tile decoder thread creation failed");
4085 }
4086 }
4087 }
4088
4089 // Reset tile decoding hook
4090 for (i = 0; i < num_workers; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004091 AVxWorker *const worker = &pbi->tile_workers[i];
Yaowu Xuc27fc142016-08-22 16:08:15 -07004092 winterface->sync(worker);
Yaowu Xuf883b422016-08-30 14:01:10 -07004093 worker->hook = (AVxWorkerHook)tile_worker_hook;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004094 worker->data1 = &pbi->tile_worker_data[i];
4095 worker->data2 = &pbi->tile_worker_info[i];
4096 }
4097
4098 // Initialize thread frame counts.
4099 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
4100 for (i = 0; i < num_workers; ++i) {
4101 TileWorkerData *const twd = (TileWorkerData *)pbi->tile_workers[i].data1;
Yaowu Xuf883b422016-08-30 14:01:10 -07004102 av1_zero(twd->counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004103 }
4104 }
4105
4106 // Load tile data into tile_buffers
4107 get_tile_buffers(pbi, data, data_end, tile_buffers);
4108
4109 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
4110 // Sort the buffers in this tile row based on size in descending order.
4111 qsort(&tile_buffers[tile_row][tile_cols_start],
4112 tile_cols_end - tile_cols_start, sizeof(tile_buffers[0][0]),
4113 compare_tile_buffers);
4114
4115 // Rearrange the tile buffers in this tile row so that, within each group
4116 // of num_workers tiles, the largest (and presumably most difficult) tile
4117 // is decoded in the main thread. This should help minimize the number of
4118 // instances where the main thread is waiting for a worker to complete.
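    // For example, with num_workers == 4 and sorted sizes { 9, 7, 5, 3 }, the
    // group becomes { 7, 5, 3, 9 }: the largest tile ends up in the last slot
    // of the group, which is the one executed synchronously on the main
    // thread.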
4119 {
4120 int group_start;
4121 for (group_start = tile_cols_start; group_start < tile_cols_end;
4122 group_start += num_workers) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004123 const int group_end = AOMMIN(group_start + num_workers, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004124 const TileBufferDec largest = tile_buffers[tile_row][group_start];
4125 memmove(&tile_buffers[tile_row][group_start],
4126 &tile_buffers[tile_row][group_start + 1],
4127 (group_end - group_start - 1) * sizeof(tile_buffers[0][0]));
4128 tile_buffers[tile_row][group_end - 1] = largest;
4129 }
4130 }
4131
4132 for (tile_col = tile_cols_start; tile_col < tile_cols_end;) {
4133 // Launch workers for individual columns
4134 for (i = 0; i < num_workers && tile_col < tile_cols_end;
4135 ++i, ++tile_col) {
4136 TileBufferDec *const buf = &tile_buffers[tile_row][tile_col];
Yaowu Xuf883b422016-08-30 14:01:10 -07004137 AVxWorker *const worker = &pbi->tile_workers[i];
Yaowu Xuc27fc142016-08-22 16:08:15 -07004138 TileWorkerData *const twd = (TileWorkerData *)worker->data1;
4139 TileInfo *const tile_info = (TileInfo *)worker->data2;
4140
4141 twd->pbi = pbi;
4142 twd->xd = pbi->mb;
4143 twd->xd.corrupted = 0;
4144 twd->xd.counts =
4145 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD
4146 ? &twd->counts
4147 : NULL;
Yaowu Xuf883b422016-08-30 14:01:10 -07004148 av1_zero(twd->dqcoeff);
4149 av1_tile_init(tile_info, cm, tile_row, buf->col);
4150 av1_tile_init(&twd->xd.tile, cm, tile_row, buf->col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004151 setup_bool_decoder(buf->data, data_end, buf->size, &cm->error,
Alex Converseeb780e72016-12-13 12:46:41 -08004152 &twd->bit_reader,
4153#if CONFIG_ANS && ANS_MAX_SYMBOLS
4154 1 << cm->ans_window_size_log2,
4155#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
4156 pbi->decrypt_cb, pbi->decrypt_state);
Yushin Cho77bba8d2016-11-04 16:36:56 -07004157 av1_init_macroblockd(cm, &twd->xd,
4158#if CONFIG_PVQ
4159 twd->pvq_ref_coeff,
4160#endif
Luc Trudeauf8164152017-04-11 16:20:51 -04004161#if CONFIG_CFL
4162 &twd->cfl,
4163#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07004164 twd->dqcoeff);
4165#if CONFIG_PVQ
Nathan E. Eggeab083972016-12-28 15:31:46 -05004166 daala_dec_init(cm, &twd->xd.daala_dec, &twd->bit_reader);
Yushin Choc49ef3a2017-03-13 17:27:25 -07004167 twd->xd.daala_dec.state.adapt = &twd->tctx.pvq_context;
Yushin Cho77bba8d2016-11-04 16:36:56 -07004168#endif
Yushin Chod767beb2017-03-24 10:15:47 -07004169#if CONFIG_EC_ADAPT
4170 // Initialise the tile context from the frame context
4171 twd->tctx = *cm->fc;
4172 twd->xd.tile_ctx = &twd->tctx;
4173#endif
Urvang Joshib100db72016-10-12 16:28:56 -07004174#if CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004175 twd->xd.plane[0].color_index_map = twd->color_index_map[0];
4176 twd->xd.plane[1].color_index_map = twd->color_index_map[1];
Urvang Joshib100db72016-10-12 16:28:56 -07004177#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004178
4179 worker->had_error = 0;
4180 if (i == num_workers - 1 || tile_col == tile_cols_end - 1) {
4181 winterface->execute(worker);
4182 } else {
4183 winterface->launch(worker);
4184 }
4185
4186#if !(CONFIG_ANS || CONFIG_EXT_TILE)
4187 if (tile_row == tile_rows - 1 && buf->col == tile_cols - 1) {
4188 final_worker = i;
4189 }
4190#endif // !(CONFIG_ANS || CONFIG_EXT_TILE)
4191 }
4192
4193 // Sync all workers
4194 for (; i > 0; --i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004195 AVxWorker *const worker = &pbi->tile_workers[i - 1];
Yaowu Xuc27fc142016-08-22 16:08:15 -07004196 // TODO(jzern): The tile may have specific error data associated with
Yaowu Xuf883b422016-08-30 14:01:10 -07004197 // its aom_internal_error_info which could be propagated to the main
Yaowu Xuc27fc142016-08-22 16:08:15 -07004198 // info in cm. Additionally once the threads have been synced and an
4199 // error is detected, there's no point in continuing to decode tiles.
4200 pbi->mb.corrupted |= !winterface->sync(worker);
4201 }
4202 }
4203 }
4204
4205 // Accumulate thread frame counts.
4206 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
4207 for (i = 0; i < num_workers; ++i) {
4208 TileWorkerData *const twd = (TileWorkerData *)pbi->tile_workers[i].data1;
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08004209 av1_accumulate_frame_counts(&cm->counts, &twd->counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004210 }
4211 }
4212
4213#if CONFIG_EXT_TILE
4214 // Return the end of the last tile buffer
4215 return tile_buffers[tile_rows - 1][tile_cols - 1].raw_data_end;
4216#else
4217#if CONFIG_ANS
4218 return data_end;
4219#else
4220 assert(final_worker != -1);
4221 {
4222 TileWorkerData *const twd =
4223 (TileWorkerData *)pbi->tile_workers[final_worker].data1;
Yaowu Xuf883b422016-08-30 14:01:10 -07004224 return aom_reader_find_end(&twd->bit_reader);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004225 }
4226#endif // CONFIG_ANS
4227#endif // CONFIG_EXT_TILE
4228}
4229
4230static void error_handler(void *data) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004231 AV1_COMMON *const cm = (AV1_COMMON *)data;
4232 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME, "Truncated packet");
Yaowu Xuc27fc142016-08-22 16:08:15 -07004233}
4234
Yaowu Xuf883b422016-08-30 14:01:10 -07004235static void read_bitdepth_colorspace_sampling(AV1_COMMON *cm,
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02004236 struct aom_read_bit_buffer *rb,
4237 int allow_lowbitdepth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004238 if (cm->profile >= PROFILE_2) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004239 cm->bit_depth = aom_rb_read_bit(rb) ? AOM_BITS_12 : AOM_BITS_10;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004240 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07004241 cm->bit_depth = AOM_BITS_8;
Sebastien Alaiwan98378132017-01-04 11:23:09 +01004242 }
4243
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02004244#if CONFIG_HIGHBITDEPTH
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02004245 cm->use_highbitdepth = cm->bit_depth > AOM_BITS_8 || !allow_lowbitdepth;
James Zern91adea52017-06-15 23:27:26 -07004246#else
4247 (void)allow_lowbitdepth;
Sebastien Alaiwan98378132017-01-04 11:23:09 +01004248#endif
anorkin76fb1262017-03-22 15:12:12 -07004249#if CONFIG_COLORSPACE_HEADERS
4250 cm->color_space = aom_rb_read_literal(rb, 5);
4251 cm->transfer_function = aom_rb_read_literal(rb, 5);
4252#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004253 cm->color_space = aom_rb_read_literal(rb, 3);
anorkin76fb1262017-03-22 15:12:12 -07004254#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07004255 if (cm->color_space != AOM_CS_SRGB) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004256 // [16,235] (including xvycc) vs [0,255] range
Yaowu Xuf883b422016-08-30 14:01:10 -07004257 cm->color_range = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004258 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004259 cm->subsampling_x = aom_rb_read_bit(rb);
4260 cm->subsampling_y = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004261 if (cm->subsampling_x == 1 && cm->subsampling_y == 1)
Yaowu Xuf883b422016-08-30 14:01:10 -07004262 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004263 "4:2:0 color not supported in profile 1 or 3");
Yaowu Xuf883b422016-08-30 14:01:10 -07004264 if (aom_rb_read_bit(rb))
4265 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004266 "Reserved bit set");
4267 } else {
4268 cm->subsampling_y = cm->subsampling_x = 1;
4269 }
anorkin76fb1262017-03-22 15:12:12 -07004270#if CONFIG_COLORSPACE_HEADERS
4271 if (cm->subsampling_x == 1 && cm->subsampling_y == 1) {
4272 cm->chroma_sample_position = aom_rb_read_literal(rb, 2);
4273 }
4274#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004275 } else {
4276 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
4277 // Note if colorspace is SRGB then 4:4:4 chroma sampling is assumed.
4278 // 4:2:2 or 4:4:0 chroma sampling is not allowed.
4279 cm->subsampling_y = cm->subsampling_x = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07004280 if (aom_rb_read_bit(rb))
4281 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004282 "Reserved bit set");
4283 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07004284 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004285 "4:4:4 color not supported in profile 0 or 2");
4286 }
4287 }
4288}
4289
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004290#if CONFIG_REFERENCE_BUFFER
4291void read_sequence_header(SequenceHeader *seq_params) {
4292 /* Placeholder for actually reading from the bitstream */
4293 seq_params->frame_id_numbers_present_flag = FRAME_ID_NUMBERS_PRESENT_FLAG;
4294 seq_params->frame_id_length_minus7 = FRAME_ID_LENGTH_MINUS7;
4295 seq_params->delta_frame_id_length_minus2 = DELTA_FRAME_ID_LENGTH_MINUS2;
4296}
4297#endif
4298
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004299#if CONFIG_EXT_INTER
4300static void read_compound_tools(AV1_COMMON *cm,
4301 struct aom_read_bit_buffer *rb) {
4302 (void)cm;
4303 (void)rb;
4304#if CONFIG_INTERINTRA
4305 if (!frame_is_intra_only(cm) && cm->reference_mode != COMPOUND_REFERENCE) {
4306 cm->allow_interintra_compound = aom_rb_read_bit(rb);
4307 } else {
4308 cm->allow_interintra_compound = 0;
4309 }
4310#endif // CONFIG_INTERINTRA
4311#if CONFIG_WEDGE || CONFIG_COMPOUND_SEGMENT
Zoe Liu85b66462017-04-20 14:28:19 -07004312#if CONFIG_COMPOUND_SINGLEREF
4313 if (!frame_is_intra_only(cm)) {
4314#else // !CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004315 if (!frame_is_intra_only(cm) && cm->reference_mode != SINGLE_REFERENCE) {
Zoe Liu85b66462017-04-20 14:28:19 -07004316#endif // CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004317 cm->allow_masked_compound = aom_rb_read_bit(rb);
4318 } else {
4319 cm->allow_masked_compound = 0;
4320 }
4321#endif // CONFIG_WEDGE || CONFIG_COMPOUND_SEGMENT
4322}
4323#endif // CONFIG_EXT_INTER
4324
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07004325#if CONFIG_VAR_REFS
4326static void check_valid_ref_frames(AV1_COMMON *cm) {
4327 MV_REFERENCE_FRAME ref_frame;
4328 // TODO(zoeliu): To handle ALTREF_FRAME the same way as do with other
4329 // reference frames: Current encoder invalid ALTREF when ALTREF
4330 // is the same as LAST, but invalid all the other references
4331 // when they are the same as ALTREF.
4332 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
4333 RefBuffer *const ref_buf = &cm->frame_refs[ref_frame - LAST_FRAME];
4334
4335 if (ref_buf->idx != INVALID_IDX) {
4336 ref_buf->is_valid = 1;
4337
4338 MV_REFERENCE_FRAME ref;
4339 for (ref = LAST_FRAME; ref < ref_frame; ++ref) {
4340 RefBuffer *const buf = &cm->frame_refs[ref - LAST_FRAME];
4341 if (buf->is_valid && buf->idx == ref_buf->idx) {
4342 if (ref_frame != ALTREF_FRAME || ref == LAST_FRAME) {
4343 ref_buf->is_valid = 0;
4344 break;
4345 } else {
4346 buf->is_valid = 0;
4347 }
4348 }
4349 }
4350 } else {
4351 ref_buf->is_valid = 0;
4352 }
4353 }
4354}
4355#endif // CONFIG_VAR_REFS
4356
Yaowu Xuf883b422016-08-30 14:01:10 -07004357static size_t read_uncompressed_header(AV1Decoder *pbi,
4358 struct aom_read_bit_buffer *rb) {
4359 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004360 MACROBLOCKD *const xd = &pbi->mb;
4361 BufferPool *const pool = cm->buffer_pool;
4362 RefCntBuffer *const frame_bufs = pool->frame_bufs;
4363 int i, mask, ref_index = 0;
4364 size_t sz;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004365
4366#if CONFIG_REFERENCE_BUFFER
4367 /* TODO: Move outside frame loop or inside key-frame branch */
4368 read_sequence_header(&pbi->seq_params);
4369#endif
4370
Yaowu Xuc27fc142016-08-22 16:08:15 -07004371 cm->last_frame_type = cm->frame_type;
4372 cm->last_intra_only = cm->intra_only;
4373
4374#if CONFIG_EXT_REFS
4375 // NOTE: By default all coded frames to be used as a reference
4376 cm->is_reference_frame = 1;
4377#endif // CONFIG_EXT_REFS
4378
Yaowu Xuf883b422016-08-30 14:01:10 -07004379 if (aom_rb_read_literal(rb, 2) != AOM_FRAME_MARKER)
4380 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004381 "Invalid frame marker");
4382
Yaowu Xuf883b422016-08-30 14:01:10 -07004383 cm->profile = av1_read_profile(rb);
Sebastien Alaiwanb9c652a2017-05-03 15:44:28 +02004384
4385 const BITSTREAM_PROFILE MAX_SUPPORTED_PROFILE =
4386 CONFIG_HIGHBITDEPTH ? MAX_PROFILES : PROFILE_2;
4387
4388 if (cm->profile >= MAX_SUPPORTED_PROFILE)
Yaowu Xuf883b422016-08-30 14:01:10 -07004389 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004390 "Unsupported bitstream profile");
Yaowu Xuc27fc142016-08-22 16:08:15 -07004391
Yaowu Xuf883b422016-08-30 14:01:10 -07004392 cm->show_existing_frame = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004393
4394 if (cm->show_existing_frame) {
Yaowu Xu415ba932016-12-27 11:17:32 -08004395 // Show an existing frame directly.
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004396 const int existing_frame_idx = aom_rb_read_literal(rb, 3);
4397 const int frame_to_show = cm->ref_frame_map[existing_frame_idx];
Yaowu Xu415ba932016-12-27 11:17:32 -08004398#if CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004399 if (pbi->seq_params.frame_id_numbers_present_flag) {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004400 int frame_id_length = pbi->seq_params.frame_id_length_minus7 + 7;
4401 int display_frame_id = aom_rb_read_literal(rb, frame_id_length);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004402 /* Compare display_frame_id with ref_frame_id and check valid for
4403 * referencing */
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004404 if (display_frame_id != cm->ref_frame_id[existing_frame_idx] ||
4405 cm->valid_for_referencing[existing_frame_idx] == 0)
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004406 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
4407 "Reference buffer frame ID mismatch");
4408 }
4409#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004410 lock_buffer_pool(pool);
4411 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
4412 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07004413 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004414 "Buffer %d does not contain a decoded frame",
4415 frame_to_show);
4416 }
4417 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
4418 unlock_buffer_pool(pool);
4419
4420 cm->lf.filter_level = 0;
4421 cm->show_frame = 1;
4422 pbi->refresh_frame_flags = 0;
4423
4424 if (cm->frame_parallel_decode) {
4425 for (i = 0; i < REF_FRAMES; ++i)
4426 cm->next_ref_frame_map[i] = cm->ref_frame_map[i];
4427 }
4428
4429 return 0;
4430 }
4431
Yaowu Xuf883b422016-08-30 14:01:10 -07004432 cm->frame_type = (FRAME_TYPE)aom_rb_read_bit(rb);
4433 cm->show_frame = aom_rb_read_bit(rb);
4434 cm->error_resilient_mode = aom_rb_read_bit(rb);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004435#if CONFIG_REFERENCE_BUFFER
4436 if (pbi->seq_params.frame_id_numbers_present_flag) {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004437 int frame_id_length = pbi->seq_params.frame_id_length_minus7 + 7;
4438 int diff_len = pbi->seq_params.delta_frame_id_length_minus2 + 2;
4439 int prev_frame_id = 0;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004440 if (cm->frame_type != KEY_FRAME) {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004441 prev_frame_id = cm->current_frame_id;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004442 }
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004443 cm->current_frame_id = aom_rb_read_literal(rb, frame_id_length);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004444
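    // For non-key frames, the frame ID must advance, modulo
    // 2^frame_id_length, by a nonzero amount smaller than half the ID space;
    // anything else is a conformance failure.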
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004445 if (cm->frame_type != KEY_FRAME) {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004446 int diff_frame_id;
4447 if (cm->current_frame_id > prev_frame_id) {
4448 diff_frame_id = cm->current_frame_id - prev_frame_id;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004449 } else {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004450 diff_frame_id =
4451 (1 << frame_id_length) + cm->current_frame_id - prev_frame_id;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004452 }
4453 /* Check current_frame_id for conformance */
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004454 if (prev_frame_id == cm->current_frame_id ||
4455 diff_frame_id >= (1 << (frame_id_length - 1))) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004456 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
4457 "Invalid value of current_frame_id");
4458 }
4459 }
4460 /* Check if some frames need to be marked as not valid for referencing */
4461 for (i = 0; i < REF_FRAMES; i++) {
4462 if (cm->frame_type == KEY_FRAME) {
4463 cm->valid_for_referencing[i] = 0;
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004464 } else if (cm->current_frame_id - (1 << diff_len) > 0) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004465 if (cm->ref_frame_id[i] > cm->current_frame_id ||
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004466 cm->ref_frame_id[i] < cm->current_frame_id - (1 << diff_len))
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004467 cm->valid_for_referencing[i] = 0;
4468 } else {
4469 if (cm->ref_frame_id[i] > cm->current_frame_id &&
4470 cm->ref_frame_id[i] <
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004471 (1 << frame_id_length) + cm->current_frame_id - (1 << diff_len))
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004472 cm->valid_for_referencing[i] = 0;
4473 }
4474 }
4475 }
4476#endif
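  /* Key frames fully reset the reference state: all REF_FRAMES slots are
   * scheduled for refresh and the existing frame_refs entries are invalidated
   * before the frame size is parsed. */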
Yaowu Xuc27fc142016-08-22 16:08:15 -07004477 if (cm->frame_type == KEY_FRAME) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004478 if (!av1_read_sync_code(rb))
4479 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004480 "Invalid frame sync code");
4481
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02004482 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004483 pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1;
4484
4485 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
4486 cm->frame_refs[i].idx = INVALID_IDX;
4487 cm->frame_refs[i].buf = NULL;
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07004488#if CONFIG_VAR_REFS
4489 cm->frame_refs[i].is_valid = 0;
4490#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004491 }
4492
4493 setup_frame_size(cm, rb);
4494 if (pbi->need_resync) {
4495 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
4496 pbi->need_resync = 0;
4497 }
Alex Converseeb780e72016-12-13 12:46:41 -08004498#if CONFIG_ANS && ANS_MAX_SYMBOLS
4499 cm->ans_window_size_log2 = aom_rb_read_literal(rb, 4) + 8;
4500#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
Alex Conversee46382a2017-05-15 11:40:20 -07004501#if CONFIG_PALETTE || CONFIG_INTRABC
hui su24f7b072016-10-12 11:36:24 -07004502 cm->allow_screen_content_tools = aom_rb_read_bit(rb);
Alex Conversee46382a2017-05-15 11:40:20 -07004503#endif // CONFIG_PALETTE || CONFIG_INTRABC
Fangwen Fu930c51c2017-05-07 20:39:17 -07004504#if CONFIG_TEMPMV_SIGNALING
4505 cm->use_prev_frame_mvs = 0;
4506#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004507 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07004508 cm->intra_only = cm->show_frame ? 0 : aom_rb_read_bit(rb);
Alex Conversee46382a2017-05-15 11:40:20 -07004509#if CONFIG_PALETTE || CONFIG_INTRABC
hui su24f7b072016-10-12 11:36:24 -07004510 if (cm->intra_only) cm->allow_screen_content_tools = aom_rb_read_bit(rb);
Alex Conversee46382a2017-05-15 11:40:20 -07004511#endif // CONFIG_PALETTE || CONFIG_INTRABC
Fangwen Fu930c51c2017-05-07 20:39:17 -07004512#if CONFIG_TEMPMV_SIGNALING
4513 if (cm->intra_only || cm->error_resilient_mode) cm->use_prev_frame_mvs = 0;
4514#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004515 if (cm->error_resilient_mode) {
4516 cm->reset_frame_context = RESET_FRAME_CONTEXT_ALL;
4517 } else {
4518 if (cm->intra_only) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004519 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07004520 ? RESET_FRAME_CONTEXT_ALL
4521 : RESET_FRAME_CONTEXT_CURRENT;
4522 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07004523 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07004524 ? RESET_FRAME_CONTEXT_CURRENT
4525 : RESET_FRAME_CONTEXT_NONE;
4526 if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT)
Yaowu Xuf883b422016-08-30 14:01:10 -07004527 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07004528 ? RESET_FRAME_CONTEXT_ALL
4529 : RESET_FRAME_CONTEXT_CURRENT;
4530 }
4531 }
4532
4533 if (cm->intra_only) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004534 if (!av1_read_sync_code(rb))
4535 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004536 "Invalid frame sync code");
4537
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02004538 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004539
Yaowu Xuf883b422016-08-30 14:01:10 -07004540 pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004541 setup_frame_size(cm, rb);
4542 if (pbi->need_resync) {
4543 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
4544 pbi->need_resync = 0;
4545 }
Alex Converseeb780e72016-12-13 12:46:41 -08004546#if CONFIG_ANS && ANS_MAX_SYMBOLS
4547 cm->ans_window_size_log2 = aom_rb_read_literal(rb, 4) + 8;
4548#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004549 } else if (pbi->need_resync != 1) { /* Skip if need resync */
Yaowu Xuf883b422016-08-30 14:01:10 -07004550 pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004551
4552#if CONFIG_EXT_REFS
4553 if (!pbi->refresh_frame_flags) {
4554 // NOTE: "pbi->refresh_frame_flags == 0" indicates that the coded frame
4555 // will not be used as a reference
4556 cm->is_reference_frame = 0;
4557 }
4558#endif // CONFIG_EXT_REFS
4559
4560 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004561 const int ref = aom_rb_read_literal(rb, REF_FRAMES_LOG2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004562 const int idx = cm->ref_frame_map[ref];
4563 RefBuffer *const ref_frame = &cm->frame_refs[i];
4564 ref_frame->idx = idx;
4565 ref_frame->buf = &frame_bufs[idx].buf;
Yaowu Xuf883b422016-08-30 14:01:10 -07004566 cm->ref_frame_sign_bias[LAST_FRAME + i] = aom_rb_read_bit(rb);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004567#if CONFIG_REFERENCE_BUFFER
4568 if (pbi->seq_params.frame_id_numbers_present_flag) {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004569 int frame_id_length = pbi->seq_params.frame_id_length_minus7 + 7;
4570 int diff_len = pbi->seq_params.delta_frame_id_length_minus2 + 2;
4571 int delta_frame_id_minus1 = aom_rb_read_literal(rb, diff_len);
4572 int ref_frame_id =
4573 ((cm->current_frame_id - (delta_frame_id_minus1 + 1) +
4574 (1 << frame_id_length)) %
4575 (1 << frame_id_length));
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004576 /* Compare values derived from delta_frame_id_minus1 and
4577           * refresh_frame_flags; also check that the buffer is valid for referencing */
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004578 if (ref_frame_id != cm->ref_frame_id[ref] ||
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004579 cm->valid_for_referencing[ref] == 0)
4580 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
4581 "Reference buffer frame ID mismatch");
4582 }
4583#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004584 }
4585
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07004586#if CONFIG_VAR_REFS
4587 check_valid_ref_frames(cm);
4588#endif // CONFIG_VAR_REFS
4589
Arild Fuldseth842e9b02016-09-02 13:00:05 +02004590#if CONFIG_FRAME_SIZE
4591 if (cm->error_resilient_mode == 0) {
4592 setup_frame_size_with_refs(cm, rb);
4593 } else {
4594 setup_frame_size(cm, rb);
4595 }
4596#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07004597 setup_frame_size_with_refs(cm, rb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02004598#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004599
Yaowu Xuf883b422016-08-30 14:01:10 -07004600 cm->allow_high_precision_mv = aom_rb_read_bit(rb);
Angie Chiang5678ad92016-11-21 09:38:40 -08004601 cm->interp_filter = read_frame_interp_filter(rb);
Fangwen Fu8d164de2016-12-14 13:40:54 -08004602#if CONFIG_TEMPMV_SIGNALING
4603 if (!cm->error_resilient_mode) {
4604 cm->use_prev_frame_mvs = aom_rb_read_bit(rb);
4605 }
4606#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004607 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
4608 RefBuffer *const ref_buf = &cm->frame_refs[i];
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02004609#if CONFIG_HIGHBITDEPTH
Yaowu Xuf883b422016-08-30 14:01:10 -07004610 av1_setup_scale_factors_for_frame(
Yaowu Xuc27fc142016-08-22 16:08:15 -07004611 &ref_buf->sf, ref_buf->buf->y_crop_width,
4612 ref_buf->buf->y_crop_height, cm->width, cm->height,
4613 cm->use_highbitdepth);
4614#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004615 av1_setup_scale_factors_for_frame(
Yaowu Xuc27fc142016-08-22 16:08:15 -07004616 &ref_buf->sf, ref_buf->buf->y_crop_width,
4617 ref_buf->buf->y_crop_height, cm->width, cm->height);
4618#endif
4619 }
4620 }
4621 }
Fangwen Fu8d164de2016-12-14 13:40:54 -08004622#if CONFIG_TEMPMV_SIGNALING
4623 cm->cur_frame->intra_only = cm->frame_type == KEY_FRAME || cm->intra_only;
4624#endif
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004625
4626#if CONFIG_REFERENCE_BUFFER
4627 if (pbi->seq_params.frame_id_numbers_present_flag) {
4628 /* If bitmask is set, update reference frame id values and
4629 mark frames as valid for reference */
4630 int refresh_frame_flags =
4631 cm->frame_type == KEY_FRAME ? 0xFF : pbi->refresh_frame_flags;
4632 for (i = 0; i < REF_FRAMES; i++) {
4633 if ((refresh_frame_flags >> i) & 1) {
4634 cm->ref_frame_id[i] = cm->current_frame_id;
4635 cm->valid_for_referencing[i] = 1;
4636 }
4637 }
4638 }
4639#endif
4640
Yaowu Xuc27fc142016-08-22 16:08:15 -07004641 get_frame_new_buffer(cm)->bit_depth = cm->bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004642 get_frame_new_buffer(cm)->color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07004643#if CONFIG_COLORSPACE_HEADERS
4644 get_frame_new_buffer(cm)->transfer_function = cm->transfer_function;
4645 get_frame_new_buffer(cm)->chroma_sample_position = cm->chroma_sample_position;
4646#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004647 get_frame_new_buffer(cm)->color_range = cm->color_range;
4648 get_frame_new_buffer(cm)->render_width = cm->render_width;
4649 get_frame_new_buffer(cm)->render_height = cm->render_height;
4650
4651 if (pbi->need_resync) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004652 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004653 "Keyframe / intra-only frame required to reset decoder"
4654 " state");
4655 }
4656
4657 if (!cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004658 cm->refresh_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07004659 ? REFRESH_FRAME_CONTEXT_FORWARD
4660 : REFRESH_FRAME_CONTEXT_BACKWARD;
4661 } else {
4662 cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_FORWARD;
4663 }
4664
Yaowu Xuf883b422016-08-30 14:01:10 -07004665 // This flag will be overridden by the call to av1_setup_past_independence
Yaowu Xuc27fc142016-08-22 16:08:15 -07004666 // below, forcing the use of context 0 for those frame types.
Yaowu Xuf883b422016-08-30 14:01:10 -07004667 cm->frame_context_idx = aom_rb_read_literal(rb, FRAME_CONTEXTS_LOG2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004668
4669 // Generate next_ref_frame_map.
4670 lock_buffer_pool(pool);
4671 for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
4672 if (mask & 1) {
4673 cm->next_ref_frame_map[ref_index] = cm->new_fb_idx;
4674 ++frame_bufs[cm->new_fb_idx].ref_count;
4675 } else {
4676 cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
4677 }
4678 // Current thread holds the reference frame.
4679 if (cm->ref_frame_map[ref_index] >= 0)
4680 ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
4681 ++ref_index;
4682 }
4683
4684 for (; ref_index < REF_FRAMES; ++ref_index) {
4685 cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
4686
4687 // Current thread holds the reference frame.
4688 if (cm->ref_frame_map[ref_index] >= 0)
4689 ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
4690 }
4691 unlock_buffer_pool(pool);
4692 pbi->hold_ref_buf = 1;
4693
4694 if (frame_is_intra_only(cm) || cm->error_resilient_mode)
Yaowu Xuf883b422016-08-30 14:01:10 -07004695 av1_setup_past_independence(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004696
4697#if CONFIG_EXT_PARTITION
Yaowu Xuf883b422016-08-30 14:01:10 -07004698 set_sb_size(cm, aom_rb_read_bit(rb) ? BLOCK_128X128 : BLOCK_64X64);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004699#else
4700 set_sb_size(cm, BLOCK_64X64);
4701#endif // CONFIG_EXT_PARTITION
4702
4703 setup_loopfilter(cm, rb);
Jean-Marc Valin01435132017-02-18 14:12:53 -05004704#if CONFIG_CDEF
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01004705 setup_cdef(cm, rb);
Steinar Midtskogen5d56f4d2016-09-25 09:23:16 +02004706#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004707#if CONFIG_LOOP_RESTORATION
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07004708 decode_restoration_mode(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004709#endif // CONFIG_LOOP_RESTORATION
4710 setup_quantization(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004711 xd->bd = (int)cm->bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004712
hui su0d103572017-03-01 17:58:01 -08004713#if CONFIG_Q_ADAPT_PROBS
Yaowu Xuf883b422016-08-30 14:01:10 -07004714 av1_default_coef_probs(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004715 if (cm->frame_type == KEY_FRAME || cm->error_resilient_mode ||
4716 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL) {
4717 for (i = 0; i < FRAME_CONTEXTS; ++i) cm->frame_contexts[i] = *cm->fc;
4718 } else if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT) {
4719 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
4720 }
hui su0d103572017-03-01 17:58:01 -08004721#endif // CONFIG_Q_ADAPT_PROBS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004722
4723 setup_segmentation(cm, rb);
4724
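  /* Delta-Q: when no segment-level quantizer is active and the base qindex is
   * nonzero, a per-superblock quantizer offset may be signalled; its step size
   * (delta_q_res) is 1, 2, 4 or 8 qindex units. With CONFIG_EXT_DELTA_Q the
   * same scheme is also applied to loop-filter level deltas. */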
Arild Fuldseth07441162016-08-15 15:07:52 +02004725#if CONFIG_DELTA_Q
4726 {
4727 struct segmentation *const seg = &cm->seg;
4728 int segment_quantizer_active = 0;
4729 for (i = 0; i < MAX_SEGMENTS; i++) {
4730 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
4731 segment_quantizer_active = 1;
4732 }
4733 }
4734
Thomas Daviesf6936102016-09-05 16:51:31 +01004735 cm->delta_q_res = 1;
Fangwen Fu231fe422017-04-24 17:52:29 -07004736#if CONFIG_EXT_DELTA_Q
4737 cm->delta_lf_res = 1;
4738#endif
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01004739 if (segment_quantizer_active == 0 && cm->base_qindex > 0) {
Arild Fuldseth07441162016-08-15 15:07:52 +02004740 cm->delta_q_present_flag = aom_rb_read_bit(rb);
4741 } else {
4742 cm->delta_q_present_flag = 0;
4743 }
4744 if (cm->delta_q_present_flag) {
4745 xd->prev_qindex = cm->base_qindex;
Thomas Daviesf6936102016-09-05 16:51:31 +01004746 cm->delta_q_res = 1 << aom_rb_read_literal(rb, 2);
Fangwen Fu231fe422017-04-24 17:52:29 -07004747#if CONFIG_EXT_DELTA_Q
4748 if (segment_quantizer_active) {
4749 assert(seg->abs_delta == SEGMENT_DELTADATA);
4750 }
4751 cm->delta_lf_present_flag = aom_rb_read_bit(rb);
4752 if (cm->delta_lf_present_flag) {
4753 xd->prev_delta_lf_from_base = 0;
4754 cm->delta_lf_res = 1 << aom_rb_read_literal(rb, 2);
4755 } else {
4756 cm->delta_lf_present_flag = 0;
4757 }
4758#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02004759 }
4760 }
4761#endif
4762
Urvang Joshi454280d2016-10-14 16:51:44 -07004763 for (i = 0; i < MAX_SEGMENTS; ++i) {
4764 const int qindex = cm->seg.enabled
4765 ? av1_get_qindex(&cm->seg, i, cm->base_qindex)
4766 : cm->base_qindex;
4767 xd->lossless[i] = qindex == 0 && cm->y_dc_delta_q == 0 &&
4768 cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
4769 xd->qindex[i] = qindex;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004770 }
4771
4772 setup_segmentation_dequant(cm);
Yue Cheneeacc4c2017-01-17 17:29:17 -08004773 cm->tx_mode = read_tx_mode(cm, xd, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004774 cm->reference_mode = read_frame_reference_mode(cm, rb);
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004775#if CONFIG_EXT_INTER
4776 read_compound_tools(cm, rb);
4777#endif // CONFIG_EXT_INTER
Yaowu Xuc27fc142016-08-22 16:08:15 -07004778
Sarah Parkere68a3e42017-02-16 14:03:24 -08004779#if CONFIG_EXT_TX
4780 cm->reduced_tx_set_used = aom_rb_read_bit(rb);
4781#endif // CONFIG_EXT_TX
4782
Yaowu Xuc27fc142016-08-22 16:08:15 -07004783 read_tile_info(pbi, rb);
Yaowu Xuf883b422016-08-30 14:01:10 -07004784 sz = aom_rb_read_literal(rb, 16);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004785
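  // sz is the size in bytes of the compressed (arithmetic-coded) header that
  // follows; read_uncompressed_header() returns 0 only on the
  // show_existing_frame path above.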
4786 if (sz == 0)
Yaowu Xuf883b422016-08-30 14:01:10 -07004787 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004788 "Invalid header size");
Yaowu Xuc27fc142016-08-22 16:08:15 -07004789 return sz;
4790}
4791
4792#if CONFIG_EXT_TX
Sarah Parkerb926f322017-04-24 16:19:48 -07004793#if !CONFIG_EC_ADAPT
Yaowu Xuf883b422016-08-30 14:01:10 -07004794static void read_ext_tx_probs(FRAME_CONTEXT *fc, aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004795 int i, j, k;
4796 int s;
4797 for (s = 1; s < EXT_TX_SETS_INTER; ++s) {
Michael Bebenita6048d052016-08-25 14:40:54 -07004798 if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004799 for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
4800 if (!use_inter_ext_tx_for_txsize[s][i]) continue;
Debargha Mukherjee08542b92017-02-21 01:08:14 -08004801 for (j = 0; j < num_ext_tx_set[ext_tx_set_type_inter[s]] - 1; ++j)
Michael Bebenita6048d052016-08-25 14:40:54 -07004802 av1_diff_update_prob(r, &fc->inter_ext_tx_prob[s][i][j], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004803 }
4804 }
4805 }
4806
4807 for (s = 1; s < EXT_TX_SETS_INTRA; ++s) {
Michael Bebenita6048d052016-08-25 14:40:54 -07004808 if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004809 for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
4810 if (!use_intra_ext_tx_for_txsize[s][i]) continue;
4811 for (j = 0; j < INTRA_MODES; ++j)
Debargha Mukherjee08542b92017-02-21 01:08:14 -08004812 for (k = 0; k < num_ext_tx_set[ext_tx_set_type_intra[s]] - 1; ++k)
Michael Bebenita6048d052016-08-25 14:40:54 -07004813 av1_diff_update_prob(r, &fc->intra_ext_tx_prob[s][i][j][k],
4814 ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004815 }
4816 }
4817 }
4818}
Sarah Parkerb926f322017-04-24 16:19:48 -07004819#endif // !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07004820#else
4821
Yaowu Xuc27fc142016-08-22 16:08:15 -07004822#endif // CONFIG_EXT_TX
Yaowu Xuc27fc142016-08-22 16:08:15 -07004823#if CONFIG_SUPERTX
Yaowu Xuf883b422016-08-30 14:01:10 -07004824static void read_supertx_probs(FRAME_CONTEXT *fc, aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004825 int i, j;
Michael Bebenita6048d052016-08-25 14:40:54 -07004826 if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004827 for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
Jingning Hanfeb517c2016-12-21 16:02:07 -08004828 for (j = TX_8X8; j < TX_SIZES; ++j) {
Michael Bebenita6048d052016-08-25 14:40:54 -07004829 av1_diff_update_prob(r, &fc->supertx_prob[i][j], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004830 }
4831 }
4832 }
4833}
4834#endif // CONFIG_SUPERTX
4835
4836#if CONFIG_GLOBAL_MOTION
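// Global motion parameters are coded as differences from the corresponding
// parameters of the previous frame (ref_params) with a signed sub-exponential
// code, then scaled back to warped-model precision by the *_DECODE_FACTOR
// constants; the diagonal terms are re-centred on (1 << WARPEDMODEL_PREC_BITS).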
David Barkercf3d0b02016-11-10 10:14:49 +00004837static void read_global_motion_params(WarpedMotionParams *params,
Sarah Parkerf1783292017-04-05 11:55:27 -07004838 WarpedMotionParams *ref_params,
Thomas Daviesb732c1e2017-06-09 14:13:43 +01004839 aom_reader *r, int allow_hp) {
4840 TransformationType type = aom_read_bit(r, ACCT_STR);
4841 if (type != IDENTITY) type += aom_read_literal(r, GLOBAL_TYPE_BITS, ACCT_STR);
Sarah Parker13d06622017-03-10 17:03:28 -08004842 int trans_bits;
4843 int trans_dec_factor;
Sarah Parkerf1783292017-04-05 11:55:27 -07004844 int trans_prec_diff;
Sarah Parker4c10a3c2017-04-10 19:37:59 -07004845 set_default_warp_params(params);
David Barkercf3d0b02016-11-10 10:14:49 +00004846 params->wmtype = type;
4847 switch (type) {
Debargha Mukherjee3fb33f02016-11-12 10:43:50 -08004848 case HOMOGRAPHY:
Debargha Mukherjee5dfa9302017-02-10 05:00:08 -08004849 case HORTRAPEZOID:
4850 case VERTRAPEZOID:
4851 if (type != HORTRAPEZOID)
4852 params->wmmat[6] =
Sarah Parkerf1783292017-04-05 11:55:27 -07004853 aom_read_signed_primitive_refsubexpfin(
4854 r, GM_ROW3HOMO_MAX + 1, SUBEXPFIN_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07004855 (ref_params->wmmat[6] >> GM_ROW3HOMO_PREC_DIFF), ACCT_STR) *
Debargha Mukherjee5dfa9302017-02-10 05:00:08 -08004856 GM_ROW3HOMO_DECODE_FACTOR;
4857 if (type != VERTRAPEZOID)
4858 params->wmmat[7] =
Sarah Parkerf1783292017-04-05 11:55:27 -07004859 aom_read_signed_primitive_refsubexpfin(
4860 r, GM_ROW3HOMO_MAX + 1, SUBEXPFIN_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07004861 (ref_params->wmmat[7] >> GM_ROW3HOMO_PREC_DIFF), ACCT_STR) *
Debargha Mukherjee5dfa9302017-02-10 05:00:08 -08004862 GM_ROW3HOMO_DECODE_FACTOR;
David Barkercf3d0b02016-11-10 10:14:49 +00004863 case AFFINE:
4864 case ROTZOOM:
Sarah Parkerf1783292017-04-05 11:55:27 -07004865 params->wmmat[2] = aom_read_signed_primitive_refsubexpfin(
4866 r, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4867 (ref_params->wmmat[2] >> GM_ALPHA_PREC_DIFF) -
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07004868 (1 << GM_ALPHA_PREC_BITS),
4869 ACCT_STR) *
Debargha Mukherjee949097c2016-11-15 17:27:38 -08004870 GM_ALPHA_DECODE_FACTOR +
David Barkercf3d0b02016-11-10 10:14:49 +00004871 (1 << WARPEDMODEL_PREC_BITS);
Debargha Mukherjee5dfa9302017-02-10 05:00:08 -08004872 if (type != VERTRAPEZOID)
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07004873 params->wmmat[3] =
4874 aom_read_signed_primitive_refsubexpfin(
4875 r, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4876 (ref_params->wmmat[3] >> GM_ALPHA_PREC_DIFF), ACCT_STR) *
4877 GM_ALPHA_DECODE_FACTOR;
Debargha Mukherjee5dfa9302017-02-10 05:00:08 -08004878 if (type >= AFFINE) {
4879 if (type != HORTRAPEZOID)
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07004880 params->wmmat[4] =
4881 aom_read_signed_primitive_refsubexpfin(
4882 r, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4883 (ref_params->wmmat[4] >> GM_ALPHA_PREC_DIFF), ACCT_STR) *
4884 GM_ALPHA_DECODE_FACTOR;
Sarah Parkerf1783292017-04-05 11:55:27 -07004885 params->wmmat[5] = aom_read_signed_primitive_refsubexpfin(
4886 r, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4887 (ref_params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07004888 (1 << GM_ALPHA_PREC_BITS),
4889 ACCT_STR) *
David Barkercf3d0b02016-11-10 10:14:49 +00004890 GM_ALPHA_DECODE_FACTOR +
4891 (1 << WARPEDMODEL_PREC_BITS);
Debargha Mukherjee8db4c772016-11-07 12:54:21 -08004892 } else {
David Barkercf3d0b02016-11-10 10:14:49 +00004893 params->wmmat[4] = -params->wmmat[3];
4894 params->wmmat[5] = params->wmmat[2];
Debargha Mukherjee8db4c772016-11-07 12:54:21 -08004895 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004896 // fallthrough intended
David Barkercf3d0b02016-11-10 10:14:49 +00004897 case TRANSLATION:
Sarah Parker13d06622017-03-10 17:03:28 -08004898 trans_bits = (type == TRANSLATION) ? GM_ABS_TRANS_ONLY_BITS - !allow_hp
4899 : GM_ABS_TRANS_BITS;
4900 trans_dec_factor = (type == TRANSLATION)
4901 ? GM_TRANS_ONLY_DECODE_FACTOR * (1 << !allow_hp)
4902 : GM_TRANS_DECODE_FACTOR;
Sarah Parkerf1783292017-04-05 11:55:27 -07004903 trans_prec_diff = (type == TRANSLATION)
4904 ? GM_TRANS_ONLY_PREC_DIFF + !allow_hp
4905 : GM_TRANS_PREC_DIFF;
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07004906 params->wmmat[0] =
4907 aom_read_signed_primitive_refsubexpfin(
4908 r, (1 << trans_bits) + 1, SUBEXPFIN_K,
4909 (ref_params->wmmat[0] >> trans_prec_diff), ACCT_STR) *
4910 trans_dec_factor;
4911 params->wmmat[1] =
4912 aom_read_signed_primitive_refsubexpfin(
4913 r, (1 << trans_bits) + 1, SUBEXPFIN_K,
4914 (ref_params->wmmat[1] >> trans_prec_diff), ACCT_STR) *
4915 trans_dec_factor;
Debargha Mukherjee3fb33f02016-11-12 10:43:50 -08004916 case IDENTITY: break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004917 default: assert(0);
4918 }
Debargha Mukherjee3b6c5442017-03-30 08:22:00 -07004919 if (params->wmtype <= AFFINE)
4920 if (!get_shear_params(params)) assert(0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004921}
4922
Yaowu Xuf883b422016-08-30 14:01:10 -07004923static void read_global_motion(AV1_COMMON *cm, aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004924 int frame;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004925 for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
Thomas Daviesb732c1e2017-06-09 14:13:43 +01004926 read_global_motion_params(&cm->global_motion[frame],
4927 &cm->prev_frame->global_motion[frame], r,
4928 cm->allow_high_precision_mv);
Sarah Parkere5299862016-08-16 14:57:37 -07004929 /*
Debargha Mukherjee8db4c772016-11-07 12:54:21 -08004930 printf("Dec Ref %d [%d/%d]: %d %d %d %d\n",
4931 frame, cm->current_video_frame, cm->show_frame,
David Barkercf3d0b02016-11-10 10:14:49 +00004932 cm->global_motion[frame].wmmat[0],
4933 cm->global_motion[frame].wmmat[1],
4934 cm->global_motion[frame].wmmat[2],
4935 cm->global_motion[frame].wmmat[3]);
Debargha Mukherjee8db4c772016-11-07 12:54:21 -08004936 */
Yaowu Xuc27fc142016-08-22 16:08:15 -07004937 }
Sarah Parkerf1783292017-04-05 11:55:27 -07004938 memcpy(cm->cur_frame->global_motion, cm->global_motion,
4939 TOTAL_REFS_PER_FRAME * sizeof(WarpedMotionParams));
Yaowu Xuc27fc142016-08-22 16:08:15 -07004940}
4941#endif // CONFIG_GLOBAL_MOTION
4942
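// Parses the second, arithmetic-coded partition of the frame header:
// loop-restoration data, forward probability/CDF updates and, when enabled,
// the global motion parameters.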
Yaowu Xuf883b422016-08-30 14:01:10 -07004943static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004944 size_t partition_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004945 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004946#if CONFIG_SUPERTX
4947 MACROBLOCKD *const xd = &pbi->mb;
4948#endif
4949 FRAME_CONTEXT *const fc = cm->fc;
Yaowu Xuf883b422016-08-30 14:01:10 -07004950 aom_reader r;
Thomas Davies61e3e372017-04-04 16:10:23 +01004951 int i;
Thomas Davies493623e2017-03-31 16:12:25 +01004952#if !CONFIG_EC_ADAPT || \
4953 (CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION || CONFIG_EXT_INTER)
Yaowu Xu8af861b2016-11-01 12:12:11 -07004954 int j;
4955#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004956
Alex Converse2cdf0d82016-12-13 13:53:09 -08004957#if CONFIG_ANS && ANS_MAX_SYMBOLS
Alex Converseeb780e72016-12-13 12:46:41 -08004958 r.window_size = 1 << cm->ans_window_size_log2;
Alex Converse2cdf0d82016-12-13 13:53:09 -08004959#endif
Alex Converse346440b2017-01-03 13:47:37 -08004960 if (aom_reader_init(&r, data, partition_size, pbi->decrypt_cb,
4961 pbi->decrypt_state))
Yaowu Xuf883b422016-08-30 14:01:10 -07004962 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004963 "Failed to allocate bool decoder 0");
Yaowu Xuc27fc142016-08-22 16:08:15 -07004964
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07004965#if CONFIG_LOOP_RESTORATION
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08004966 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
4967 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
4968 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
4969 av1_alloc_restoration_buffers(cm);
4970 decode_restoration(cm, &r);
4971 }
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07004972#endif
4973
Nathan E. Eggeb353a8e2017-02-17 10:27:37 -05004974#if !CONFIG_EC_ADAPT
Yaowu Xuefc75352016-10-31 09:46:42 -07004975 if (cm->tx_mode == TX_MODE_SELECT) read_tx_size_probs(fc, &r);
Nathan E. Eggeb353a8e2017-02-17 10:27:37 -05004976#endif
Yue Chen56e226e2017-05-02 16:21:40 -07004977#if CONFIG_EXT_TX && CONFIG_RECT_TX && CONFIG_RECT_TX_EXT
4978 if (cm->tx_mode == TX_MODE_SELECT)
4979 av1_diff_update_prob(&r, &fc->quarter_tx_size_prob, ACCT_STR);
4980#endif // CONFIG_EXT_TX && CONFIG_RECT_TX && CONFIG_RECT_TX_EXT
Yaowu Xuc27fc142016-08-22 16:08:15 -07004981
Angie Chiang800df032017-03-22 11:14:12 -07004982#if CONFIG_LV_MAP
4983 av1_read_txb_probs(fc, cm->tx_mode, &r);
4984#else // CONFIG_LV_MAP
Yushin Cho77bba8d2016-11-04 16:36:56 -07004985#if !CONFIG_PVQ
Nathan E. Eggead66b812017-05-09 11:57:31 -04004986#if !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07004987 read_coef_probs(fc, cm->tx_mode, &r);
Nathan E. Eggead66b812017-05-09 11:57:31 -04004988#endif // !CONFIG_EC_ADAPT
Angie Chiang7d7ead92017-03-22 10:35:51 -07004989#endif // !CONFIG_PVQ
Angie Chiang800df032017-03-22 11:14:12 -07004990#endif // CONFIG_LV_MAP
4991
Yaowu Xuc27fc142016-08-22 16:08:15 -07004992#if CONFIG_VAR_TX
Thomas Davies61e3e372017-04-04 16:10:23 +01004993 for (i = 0; i < TXFM_PARTITION_CONTEXTS; ++i)
4994 av1_diff_update_prob(&r, &fc->txfm_partition_prob[i], ACCT_STR);
Yushin Cho77bba8d2016-11-04 16:36:56 -07004995#endif // CONFIG_VAR_TX
Thomas Davies61e3e372017-04-04 16:10:23 +01004996#if !CONFIG_NEW_MULTISYMBOL
4997 for (i = 0; i < SKIP_CONTEXTS; ++i)
4998 av1_diff_update_prob(&r, &fc->skip_probs[i], ACCT_STR);
4999#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005000
Thomas Daviesd6ee8a82017-03-02 14:42:50 +00005001#if CONFIG_DELTA_Q && !CONFIG_EC_ADAPT
Fangwen Fu06173632017-04-26 13:42:08 -07005002#if CONFIG_EXT_DELTA_Q
Fangwen Fu6160df22017-04-24 09:45:51 -07005003 if (cm->delta_q_present_flag) {
5004    for (int k = 0; k < DELTA_Q_PROBS; ++k)
5005 av1_diff_update_prob(&r, &fc->delta_q_prob[k], ACCT_STR);
5006 }
Fangwen Fu231fe422017-04-24 17:52:29 -07005007 if (cm->delta_lf_present_flag) {
5008    for (int k = 0; k < DELTA_LF_PROBS; ++k)
5009 av1_diff_update_prob(&r, &fc->delta_lf_prob[k], ACCT_STR);
5010 }
Fangwen Fu06173632017-04-26 13:42:08 -07005011#else
5012  for (int k = 0; k < DELTA_Q_PROBS; ++k)
5013 av1_diff_update_prob(&r, &fc->delta_q_prob[k], ACCT_STR);
Fangwen Fu231fe422017-04-24 17:52:29 -07005014#endif
Thomas Daviesf6936102016-09-05 16:51:31 +01005015#endif
5016
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04005017#if !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07005018 if (cm->seg.enabled && cm->seg.update_map) {
5019 if (cm->seg.temporal_update) {
Thomas Davies61e3e372017-04-04 16:10:23 +01005020 for (i = 0; i < PREDICTION_PROBS; i++)
5021 av1_diff_update_prob(&r, &cm->fc->seg.pred_probs[i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005022 }
Thomas Davies61e3e372017-04-04 16:10:23 +01005023 for (i = 0; i < MAX_SEGMENTS - 1; i++)
5024 av1_diff_update_prob(&r, &cm->fc->seg.tree_probs[i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005025 }
5026
Nathan E. Egge380cb1a2016-09-08 10:13:42 -04005027 for (j = 0; j < INTRA_MODES; j++) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005028 for (i = 0; i < INTRA_MODES - 1; ++i)
Michael Bebenita6048d052016-08-25 14:40:54 -07005029 av1_diff_update_prob(&r, &fc->uv_mode_prob[j][i], ACCT_STR);
Nathan E. Egge380cb1a2016-09-08 10:13:42 -04005030 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005031
5032#if CONFIG_EXT_PARTITION_TYPES
Alex Converse4e18d402017-03-14 15:36:38 -07005033 for (j = 0; j < PARTITION_PLOFFSET; ++j)
5034 for (i = 0; i < PARTITION_TYPES - 1; ++i)
5035 av1_diff_update_prob(&r, &fc->partition_prob[j][i], ACCT_STR);
5036 for (; j < PARTITION_CONTEXTS_PRIMARY; ++j)
Yaowu Xuc27fc142016-08-22 16:08:15 -07005037 for (i = 0; i < EXT_PARTITION_TYPES - 1; ++i)
Michael Bebenita6048d052016-08-25 14:40:54 -07005038 av1_diff_update_prob(&r, &fc->partition_prob[j][i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005039#else
Alex Converse55c6bde2017-01-12 15:55:31 -08005040 for (j = 0; j < PARTITION_CONTEXTS_PRIMARY; ++j)
Yaowu Xuc27fc142016-08-22 16:08:15 -07005041 for (i = 0; i < PARTITION_TYPES - 1; ++i)
Michael Bebenita6048d052016-08-25 14:40:54 -07005042 av1_diff_update_prob(&r, &fc->partition_prob[j][i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005043#endif // CONFIG_EXT_PARTITION_TYPES
hui su9aa97492017-01-26 16:46:01 -08005044
Alex Converse55c6bde2017-01-12 15:55:31 -08005045#if CONFIG_UNPOISON_PARTITION_CTX
5046 for (; j < PARTITION_CONTEXTS_PRIMARY + PARTITION_BLOCK_SIZES; ++j)
5047 av1_diff_update_prob(&r, &fc->partition_prob[j][PARTITION_VERT], ACCT_STR);
5048 for (; j < PARTITION_CONTEXTS_PRIMARY + 2 * PARTITION_BLOCK_SIZES; ++j)
5049 av1_diff_update_prob(&r, &fc->partition_prob[j][PARTITION_HORZ], ACCT_STR);
5050#endif // CONFIG_UNPOISON_PARTITION_CTX
hui su9aa97492017-01-26 16:46:01 -08005051
5052#if CONFIG_EXT_INTRA && CONFIG_INTRA_INTERP
Yaowu Xuc27fc142016-08-22 16:08:15 -07005053 for (i = 0; i < INTRA_FILTERS + 1; ++i)
5054 for (j = 0; j < INTRA_FILTERS - 1; ++j)
Michael Bebenita6048d052016-08-25 14:40:54 -07005055 av1_diff_update_prob(&r, &fc->intra_filter_probs[i][j], ACCT_STR);
hui su9aa97492017-01-26 16:46:01 -08005056#endif // CONFIG_EXT_INTRA && CONFIG_INTRA_INTERP
hui sub4e25d22017-03-09 15:32:30 -08005057#endif // !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07005058
5059 if (frame_is_intra_only(cm)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005060 av1_copy(cm->kf_y_prob, av1_kf_y_mode_prob);
Thomas Davies1bfb5ed2017-01-11 15:28:11 +00005061 av1_copy(cm->fc->kf_y_cdf, av1_kf_y_mode_cdf);
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04005062#if !CONFIG_EC_ADAPT
Thomas Davies61e3e372017-04-04 16:10:23 +01005063 int k;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005064 for (k = 0; k < INTRA_MODES; k++)
Thomas Davies6519beb2016-10-19 14:46:07 +01005065 for (j = 0; j < INTRA_MODES; j++)
Yaowu Xuc27fc142016-08-22 16:08:15 -07005066 for (i = 0; i < INTRA_MODES - 1; ++i)
Michael Bebenita6048d052016-08-25 14:40:54 -07005067 av1_diff_update_prob(&r, &cm->kf_y_prob[k][j][i], ACCT_STR);
Nathan E. Egge3ef926e2016-09-07 18:20:41 -04005068#endif
Alex Converse7c412ea2017-06-01 15:16:22 -07005069#if CONFIG_INTRABC
5070 if (cm->allow_screen_content_tools) {
5071 av1_diff_update_prob(&r, &fc->intrabc_prob, ACCT_STR);
5072 }
5073#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005074 } else {
Thomas Davies149eda52017-06-12 18:11:55 +01005075#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07005076 read_inter_mode_probs(fc, &r);
Thomas Davies149eda52017-06-12 18:11:55 +01005077#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005078
5079#if CONFIG_EXT_INTER
5080 read_inter_compound_mode_probs(fc, &r);
Zoe Liu85b66462017-04-20 14:28:19 -07005081#if CONFIG_COMPOUND_SINGLEREF
5082 read_inter_singleref_comp_mode_probs(fc, &r);
5083#endif // CONFIG_COMPOUND_SINGLEREF
5084
Yue Chen4d26acb2017-05-01 12:28:34 -07005085#if CONFIG_INTERINTRA
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07005086 if (cm->reference_mode != COMPOUND_REFERENCE &&
5087 cm->allow_interintra_compound) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005088 for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
5089 if (is_interintra_allowed_bsize_group(i)) {
Michael Bebenita6048d052016-08-25 14:40:54 -07005090 av1_diff_update_prob(&r, &fc->interintra_prob[i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005091 }
5092 }
5093 for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
5094 for (j = 0; j < INTERINTRA_MODES - 1; j++)
Michael Bebenita6048d052016-08-25 14:40:54 -07005095 av1_diff_update_prob(&r, &fc->interintra_mode_prob[i][j], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005096 }
Debargha Mukherjeeed057992017-05-07 05:15:06 -07005097#if CONFIG_WEDGE
Yaowu Xuc27fc142016-08-22 16:08:15 -07005098 for (i = 0; i < BLOCK_SIZES; i++) {
5099 if (is_interintra_allowed_bsize(i) && is_interintra_wedge_used(i)) {
Michael Bebenita6048d052016-08-25 14:40:54 -07005100 av1_diff_update_prob(&r, &fc->wedge_interintra_prob[i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005101 }
5102 }
Debargha Mukherjeeed057992017-05-07 05:15:06 -07005103#endif // CONFIG_WEDGE
Yaowu Xuc27fc142016-08-22 16:08:15 -07005104 }
Yue Chen4d26acb2017-05-01 12:28:34 -07005105#endif // CONFIG_INTERINTRA
Debargha Mukherjeec5f735f2017-04-26 03:25:28 +00005106#if CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
Zoe Liu85b66462017-04-20 14:28:19 -07005107#if CONFIG_COMPOUND_SINGLEREF
5108 if (cm->allow_masked_compound) {
5109#else // !CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07005110 if (cm->reference_mode != SINGLE_REFERENCE && cm->allow_masked_compound) {
Zoe Liu85b66462017-04-20 14:28:19 -07005111#endif // CONFIG_COMPOUND_SINGLEREF
Yaowu Xuc27fc142016-08-22 16:08:15 -07005112 for (i = 0; i < BLOCK_SIZES; i++) {
Sarah Parker6fdc8532016-11-16 17:47:13 -08005113 for (j = 0; j < COMPOUND_TYPES - 1; j++) {
5114 av1_diff_update_prob(&r, &fc->compound_type_prob[i][j], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005115 }
5116 }
5117 }
Debargha Mukherjeec5f735f2017-04-26 03:25:28 +00005118#endif // CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
Yaowu Xuc27fc142016-08-22 16:08:15 -07005119#endif // CONFIG_EXT_INTER
5120
Yue Chencb60b182016-10-13 15:18:22 -07005121#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07005122 for (i = BLOCK_8X8; i < BLOCK_SIZES; ++i) {
Yue Chencb60b182016-10-13 15:18:22 -07005123 for (j = 0; j < MOTION_MODES - 1; ++j)
Michael Bebenita6048d052016-08-25 14:40:54 -07005124 av1_diff_update_prob(&r, &fc->motion_mode_prob[i][j], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005125 }
Yue Chencb60b182016-10-13 15:18:22 -07005126#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07005127
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04005128#if !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07005129 if (cm->interp_filter == SWITCHABLE) read_switchable_interp_probs(fc, &r);
Thomas9ac55082016-09-23 18:04:17 +01005130#endif
Thomas Daviesf6ad9352017-04-19 11:38:06 +01005131#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07005132 for (i = 0; i < INTRA_INTER_CONTEXTS; i++)
Michael Bebenita6048d052016-08-25 14:40:54 -07005133 av1_diff_update_prob(&r, &fc->intra_inter_prob[i], ACCT_STR);
Thomas Daviesf6ad9352017-04-19 11:38:06 +01005134#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005135
5136 if (cm->reference_mode != SINGLE_REFERENCE)
5137 setup_compound_reference_mode(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005138 read_frame_reference_mode_probs(cm, &r);
5139
Zoe Liu85b66462017-04-20 14:28:19 -07005140#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
5141 for (i = 0; i < COMP_INTER_MODE_CONTEXTS; i++)
5142 av1_diff_update_prob(&r, &fc->comp_inter_mode_prob[i], ACCT_STR);
5143#endif // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
5144
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04005145#if !CONFIG_EC_ADAPT
Nathan E. Egge5710c722016-09-08 10:01:16 -04005146 for (j = 0; j < BLOCK_SIZE_GROUPS; j++) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005147 for (i = 0; i < INTRA_MODES - 1; ++i)
Michael Bebenita6048d052016-08-25 14:40:54 -07005148 av1_diff_update_prob(&r, &fc->y_mode_prob[j][i], ACCT_STR);
Nathan E. Egge5710c722016-09-08 10:01:16 -04005149 }
Thomas9ac55082016-09-23 18:04:17 +01005150#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005151
Yaowu Xuc27fc142016-08-22 16:08:15 -07005152 for (i = 0; i < NMV_CONTEXTS; ++i)
5153 read_mv_probs(&fc->nmvc[i], cm->allow_high_precision_mv, &r);
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04005154#if !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07005155 read_ext_tx_probs(fc, &r);
Sarah Parkerb926f322017-04-24 16:19:48 -07005156#endif  // !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07005157#if CONFIG_SUPERTX
5158 if (!xd->lossless[0]) read_supertx_probs(fc, &r);
5159#endif
5160#if CONFIG_GLOBAL_MOTION
5161 read_global_motion(cm, &r);
Nathan E. Egge476c63c2017-05-18 18:35:16 -04005162#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005163 }
Nathan E. Egge476c63c2017-05-18 18:35:16 -04005164#if !CONFIG_EC_ADAPT
Thomas Davies87aeeb82017-02-17 00:19:40 +00005165 av1_coef_head_cdfs(fc);
Thomas Davies87aeeb82017-02-17 00:19:40 +00005166 /* Make tail distribution from head */
Thomas Davies6519beb2016-10-19 14:46:07 +01005167 av1_coef_pareto_cdfs(fc);
David Barker599dfd02016-11-10 13:20:12 +00005168 for (i = 0; i < NMV_CONTEXTS; ++i) av1_set_mv_cdfs(&fc->nmvc[i]);
Thomas Davies6519beb2016-10-19 14:46:07 +01005169 av1_set_mode_cdfs(cm);
Nathan E. Egge476c63c2017-05-18 18:35:16 -04005170#endif // !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07005171
Yaowu Xuf883b422016-08-30 14:01:10 -07005172 return aom_reader_has_error(&r);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005173}
Yaowu Xuc27fc142016-08-22 16:08:15 -07005174#ifdef NDEBUG
5175#define debug_check_frame_counts(cm) (void)0
5176#else // !NDEBUG
5177// Counts should only be incremented when frame_parallel_decoding_mode and
5178// error_resilient_mode are disabled.
Yaowu Xuf883b422016-08-30 14:01:10 -07005179static void debug_check_frame_counts(const AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005180 FRAME_COUNTS zero_counts;
Yaowu Xuf883b422016-08-30 14:01:10 -07005181 av1_zero(zero_counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005182 assert(cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD ||
5183 cm->error_resilient_mode);
5184 assert(!memcmp(cm->counts.y_mode, zero_counts.y_mode,
5185 sizeof(cm->counts.y_mode)));
5186 assert(!memcmp(cm->counts.uv_mode, zero_counts.uv_mode,
5187 sizeof(cm->counts.uv_mode)));
5188 assert(!memcmp(cm->counts.partition, zero_counts.partition,
5189 sizeof(cm->counts.partition)));
5190 assert(!memcmp(cm->counts.coef, zero_counts.coef, sizeof(cm->counts.coef)));
5191 assert(!memcmp(cm->counts.eob_branch, zero_counts.eob_branch,
5192 sizeof(cm->counts.eob_branch)));
Thomas Daviesab780672017-02-01 12:07:29 +00005193 assert(!memcmp(cm->counts.blockz_count, zero_counts.blockz_count,
5194 sizeof(cm->counts.blockz_count)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005195 assert(!memcmp(cm->counts.switchable_interp, zero_counts.switchable_interp,
5196 sizeof(cm->counts.switchable_interp)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005197#if CONFIG_EXT_INTER
5198 assert(!memcmp(cm->counts.inter_compound_mode,
5199 zero_counts.inter_compound_mode,
5200 sizeof(cm->counts.inter_compound_mode)));
Debargha Mukherjeebcfb0e12017-05-11 20:09:16 -07005201#if CONFIG_INTERINTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07005202 assert(!memcmp(cm->counts.interintra, zero_counts.interintra,
5203 sizeof(cm->counts.interintra)));
Debargha Mukherjeebcfb0e12017-05-11 20:09:16 -07005204#if CONFIG_WEDGE
Yaowu Xuc27fc142016-08-22 16:08:15 -07005205 assert(!memcmp(cm->counts.wedge_interintra, zero_counts.wedge_interintra,
5206 sizeof(cm->counts.wedge_interintra)));
Debargha Mukherjeebcfb0e12017-05-11 20:09:16 -07005207#endif // CONFIG_WEDGE
5208#endif // CONFIG_INTERINTRA
Sarah Parker6fddd182016-11-10 20:57:20 -08005209 assert(!memcmp(cm->counts.compound_interinter,
5210 zero_counts.compound_interinter,
5211 sizeof(cm->counts.compound_interinter)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005212#endif // CONFIG_EXT_INTER
Yue Chencb60b182016-10-13 15:18:22 -07005213#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
5214 assert(!memcmp(cm->counts.motion_mode, zero_counts.motion_mode,
5215 sizeof(cm->counts.motion_mode)));
5216#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07005217 assert(!memcmp(cm->counts.intra_inter, zero_counts.intra_inter,
5218 sizeof(cm->counts.intra_inter)));
Zoe Liu85b66462017-04-20 14:28:19 -07005219#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
5220 assert(!memcmp(cm->counts.comp_inter_mode, zero_counts.comp_inter_mode,
5221 sizeof(cm->counts.comp_inter_mode)));
5222#endif // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
Yaowu Xuc27fc142016-08-22 16:08:15 -07005223 assert(!memcmp(cm->counts.comp_inter, zero_counts.comp_inter,
5224 sizeof(cm->counts.comp_inter)));
Zoe Liuc082bbc2017-05-17 13:31:37 -07005225#if CONFIG_EXT_COMP_REFS
5226 assert(!memcmp(cm->counts.comp_ref_type, zero_counts.comp_ref_type,
5227 sizeof(cm->counts.comp_ref_type)));
5228 assert(!memcmp(cm->counts.uni_comp_ref, zero_counts.uni_comp_ref,
5229 sizeof(cm->counts.uni_comp_ref)));
5230#endif // CONFIG_EXT_COMP_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07005231 assert(!memcmp(cm->counts.single_ref, zero_counts.single_ref,
5232 sizeof(cm->counts.single_ref)));
5233 assert(!memcmp(cm->counts.comp_ref, zero_counts.comp_ref,
5234 sizeof(cm->counts.comp_ref)));
5235#if CONFIG_EXT_REFS
5236 assert(!memcmp(cm->counts.comp_bwdref, zero_counts.comp_bwdref,
5237 sizeof(cm->counts.comp_bwdref)));
5238#endif // CONFIG_EXT_REFS
5239 assert(!memcmp(&cm->counts.tx_size, &zero_counts.tx_size,
5240 sizeof(cm->counts.tx_size)));
5241 assert(!memcmp(cm->counts.skip, zero_counts.skip, sizeof(cm->counts.skip)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005242 assert(
5243 !memcmp(&cm->counts.mv[0], &zero_counts.mv[0], sizeof(cm->counts.mv[0])));
5244 assert(
5245 !memcmp(&cm->counts.mv[1], &zero_counts.mv[1], sizeof(cm->counts.mv[0])));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005246 assert(!memcmp(cm->counts.inter_ext_tx, zero_counts.inter_ext_tx,
5247 sizeof(cm->counts.inter_ext_tx)));
5248 assert(!memcmp(cm->counts.intra_ext_tx, zero_counts.intra_ext_tx,
5249 sizeof(cm->counts.intra_ext_tx)));
5250}
5251#endif // NDEBUG
5252
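// Prepares the bit reader for the uncompressed header. When a decrypt
// callback is installed, at most MAX_AV1_HEADER_SIZE bytes are first decrypted
// into the caller-supplied clear_data scratch buffer and read from there.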
Yaowu Xuf883b422016-08-30 14:01:10 -07005253static struct aom_read_bit_buffer *init_read_bit_buffer(
5254 AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
5255 const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005256 rb->bit_offset = 0;
5257 rb->error_handler = error_handler;
5258 rb->error_handler_data = &pbi->common;
5259 if (pbi->decrypt_cb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005260 const int n = (int)AOMMIN(MAX_AV1_HEADER_SIZE, data_end - data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005261 pbi->decrypt_cb(pbi->decrypt_state, data, clear_data, n);
5262 rb->bit_buffer = clear_data;
5263 rb->bit_buffer_end = clear_data + n;
5264 } else {
5265 rb->bit_buffer = data;
5266 rb->bit_buffer_end = data_end;
5267 }
5268 return rb;
5269}
5270
5271//------------------------------------------------------------------------------
5272
Yaowu Xuf883b422016-08-30 14:01:10 -07005273int av1_read_sync_code(struct aom_read_bit_buffer *const rb) {
5274 return aom_rb_read_literal(rb, 8) == AV1_SYNC_CODE_0 &&
5275 aom_rb_read_literal(rb, 8) == AV1_SYNC_CODE_1 &&
5276 aom_rb_read_literal(rb, 8) == AV1_SYNC_CODE_2;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005277}
5278
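// Frame dimensions are coded minus one in 16 bits each, so e.g. a 1920x1080
// frame is stored as 1919 and 1079, and sizes of 1..65536 are representable.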
Yaowu Xuf883b422016-08-30 14:01:10 -07005279void av1_read_frame_size(struct aom_read_bit_buffer *rb, int *width,
5280 int *height) {
5281 *width = aom_rb_read_literal(rb, 16) + 1;
5282 *height = aom_rb_read_literal(rb, 16) + 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005283}
5284
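/* The profile is coded in two bits, least-significant bit first; e.g. reading
 * bits 0 then 1 yields profile 2. The reserved two-bit value 3 is extended by
 * one further bit, giving profile 3 or 4. */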
Yaowu Xuf883b422016-08-30 14:01:10 -07005285BITSTREAM_PROFILE av1_read_profile(struct aom_read_bit_buffer *rb) {
5286 int profile = aom_rb_read_bit(rb);
5287 profile |= aom_rb_read_bit(rb) << 1;
5288 if (profile > 2) profile += aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005289 return (BITSTREAM_PROFILE)profile;
5290}
5291
Thomas Davies028b57f2017-02-22 16:42:11 +00005292#if CONFIG_EC_ADAPT
Yaowu Xu4ff59b52017-04-24 12:41:56 -07005293static void make_update_tile_list_dec(AV1Decoder *pbi, int tile_rows,
5294 int tile_cols, FRAME_CONTEXT *ec_ctxs[]) {
Thomas Davies028b57f2017-02-22 16:42:11 +00005295 int i;
5296 for (i = 0; i < tile_rows * tile_cols; ++i)
5297 ec_ctxs[i] = &pbi->tile_data[i].tctx;
5298}
5299#endif
5300
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07005301#if CONFIG_FRAME_SUPERRES
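// With frame superres the frame was decoded at a reduced resolution; upscale
// it back to the full display size. The buffer pool is locked while the shared
// frame buffer is upscaled.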
5302void superres_post_decode(AV1Decoder *pbi) {
5303 AV1_COMMON *const cm = &pbi->common;
5304 BufferPool *const pool = cm->buffer_pool;
5305
5306 if (av1_superres_unscaled(cm)) return;
5307
5308 lock_buffer_pool(pool);
5309 av1_superres_upscale(cm, pool);
5310 unlock_buffer_pool(pool);
5311}
5312#endif // CONFIG_FRAME_SUPERRES
5313
Yaowu Xuf883b422016-08-30 14:01:10 -07005314void av1_decode_frame(AV1Decoder *pbi, const uint8_t *data,
5315 const uint8_t *data_end, const uint8_t **p_data_end) {
5316 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005317 MACROBLOCKD *const xd = &pbi->mb;
Yaowu Xuf883b422016-08-30 14:01:10 -07005318 struct aom_read_bit_buffer rb;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005319 int context_updated = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07005320 uint8_t clear_data[MAX_AV1_HEADER_SIZE];
Angie Chiangcb9a9eb2016-09-01 16:10:50 -07005321 size_t first_partition_size;
5322 YV12_BUFFER_CONFIG *new_fb;
David Barker40a42d42017-05-09 15:07:32 +01005323#if CONFIG_EXT_REFS || CONFIG_TEMPMV_SIGNALING
5324 RefBuffer *last_fb_ref_buf = &cm->frame_refs[LAST_FRAME - LAST_FRAME];
5325#endif // CONFIG_EXT_REFS || CONFIG_TEMPMV_SIGNALING
Angie Chiangcb9a9eb2016-09-01 16:10:50 -07005326
Yi Luof8e87b42017-04-14 17:20:27 -07005327#if CONFIG_ADAPT_SCAN
5328 av1_deliver_eob_threshold(cm, xd);
5329#endif
Angie Chiangcb9a9eb2016-09-01 16:10:50 -07005330#if CONFIG_BITSTREAM_DEBUG
5331 bitstream_queue_set_frame_read(cm->current_video_frame * 2 + cm->show_frame);
5332#endif
5333
5334 first_partition_size = read_uncompressed_header(
Yaowu Xuc27fc142016-08-22 16:08:15 -07005335 pbi, init_read_bit_buffer(pbi, &rb, data, data_end, clear_data));
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08005336
5337#if CONFIG_EXT_TILE
5338 // If cm->tile_encoding_mode == TILE_NORMAL, the independent decoding of a
5339 // single tile or a section of a frame is not allowed.
5340 if (!cm->tile_encoding_mode &&
5341 (pbi->dec_tile_row >= 0 || pbi->dec_tile_col >= 0)) {
5342 pbi->dec_tile_row = -1;
5343 pbi->dec_tile_col = -1;
5344 }
5345#endif // CONFIG_EXT_TILE
5346
Thomas Davies72712e62016-11-09 12:17:51 +00005347#if CONFIG_TILE_GROUPS
5348 pbi->first_partition_size = first_partition_size;
5349 pbi->uncomp_hdr_size = aom_rb_bytes_read(&rb);
5350#endif
Angie Chiangcb9a9eb2016-09-01 16:10:50 -07005351 new_fb = get_frame_new_buffer(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005352 xd->cur_buf = new_fb;
Alex Conversee816b312017-05-01 09:51:24 -07005353#if CONFIG_INTRABC
5354#if CONFIG_HIGHBITDEPTH
5355 av1_setup_scale_factors_for_frame(
5356 &xd->sf_identity, xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
5357 xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
5358 cm->use_highbitdepth);
5359#else
5360 av1_setup_scale_factors_for_frame(
5361 &xd->sf_identity, xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
5362 xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height);
5363#endif // CONFIG_HIGHBITDEPTH
5364#endif // CONFIG_INTRABC
Yaowu Xuc27fc142016-08-22 16:08:15 -07005365#if CONFIG_GLOBAL_MOTION
Sarah Parkerf1783292017-04-05 11:55:27 -07005366 int i;
5367 for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
Sarah Parker4c10a3c2017-04-10 19:37:59 -07005368 set_default_warp_params(&cm->global_motion[i]);
5369 set_default_warp_params(&cm->cur_frame->global_motion[i]);
Sarah Parkerf1783292017-04-05 11:55:27 -07005370 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005371 xd->global_motion = cm->global_motion;
5372#endif // CONFIG_GLOBAL_MOTION
5373
5374 if (!first_partition_size) {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01005375 // showing a frame directly
5376 *p_data_end = data + aom_rb_bytes_read(&rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005377 return;
5378 }
5379
Yaowu Xuf883b422016-08-30 14:01:10 -07005380 data += aom_rb_bytes_read(&rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005381 if (!read_is_valid(data, first_partition_size, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07005382 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005383 "Truncated packet or corrupt header length");
5384
Dengca8d24d2016-10-17 14:06:35 +08005385 cm->setup_mi(cm);
Dengca8d24d2016-10-17 14:06:35 +08005386
David Barker40a42d42017-05-09 15:07:32 +01005387#if CONFIG_EXT_REFS || CONFIG_TEMPMV_SIGNALING
Yaowu Xuc27fc142016-08-22 16:08:15 -07005388  // NOTE(zoeliu): Since cm->prev_frame can be neither a frame shown via
5389  //               show_existing_frame=1 nor a frame that is not used as a
5390  //               reference, it is possible that, by the time it is referred
5391  //               to here, the frame buffer it originally pointed to has
5392  //               already expired and been reassigned to the current newly
5393  //               coded frame. Hence, we need to check whether this is the
5394  //               case, and if so, we have 2 choices:
5395  //               (1) Simply disable the use of previous frame mvs; or
5396  //               (2) Have cm->prev_frame point to one reference frame buffer,
5397  //               e.g. LAST_FRAME.
David Barker40a42d42017-05-09 15:07:32 +01005398 if (!dec_is_ref_frame_buf(pbi, cm->prev_frame)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005399 // Reassign the LAST_FRAME buffer to cm->prev_frame.
David Barker40a42d42017-05-09 15:07:32 +01005400 cm->prev_frame = last_fb_ref_buf->idx != INVALID_IDX
5401 ? &cm->buffer_pool->frame_bufs[last_fb_ref_buf->idx]
5402 : NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005403 }
David Barker40a42d42017-05-09 15:07:32 +01005404#endif // CONFIG_EXT_REFS || CONFIG_TEMPMV_SIGNALING
5405
5406#if CONFIG_TEMPMV_SIGNALING
5407 if (cm->use_prev_frame_mvs) {
Ryan Lei1d1df182017-06-15 11:38:59 -07005408 assert(!cm->error_resilient_mode && cm->prev_frame);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07005409#if CONFIG_FRAME_SUPERRES
Ryan Lei1d1df182017-06-15 11:38:59 -07005410 assert(cm->width == cm->last_width && cm->height == cm->last_height);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07005411#else
Ryan Lei1d1df182017-06-15 11:38:59 -07005412 assert(cm->width == last_fb_ref_buf->buf->y_crop_width &&
5413 cm->height == last_fb_ref_buf->buf->y_crop_height);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07005414#endif // CONFIG_FRAME_SUPERRES
Ryan Lei1d1df182017-06-15 11:38:59 -07005415 assert(!cm->prev_frame->intra_only);
David Barker40a42d42017-05-09 15:07:32 +01005416 }
5417#else
5418 cm->use_prev_frame_mvs = !cm->error_resilient_mode && cm->prev_frame &&
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07005419#if CONFIG_FRAME_SUPERRES
5420 cm->width == cm->last_width &&
5421 cm->height == cm->last_height &&
5422#else
David Barker40a42d42017-05-09 15:07:32 +01005423 cm->width == cm->prev_frame->buf.y_crop_width &&
5424 cm->height == cm->prev_frame->buf.y_crop_height &&
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07005425#endif // CONFIG_FRAME_SUPERRES
David Barker40a42d42017-05-09 15:07:32 +01005426 !cm->last_intra_only && cm->last_show_frame &&
5427 (cm->last_frame_type != KEY_FRAME);
5428#endif // CONFIG_TEMPMV_SIGNALING
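  // In either configuration, previous-frame MVs are only used when the
  // previous frame is available, matches the current frame dimensions and is
  // not intra-only; the unsignalled path additionally requires that it was
  // shown and was not a key frame.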
Yaowu Xuc27fc142016-08-22 16:08:15 -07005429
Yaowu Xuf883b422016-08-30 14:01:10 -07005430 av1_setup_block_planes(xd, cm->subsampling_x, cm->subsampling_y);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005431
5432 *cm->fc = cm->frame_contexts[cm->frame_context_idx];
Thomas Daede10e1da92017-04-26 13:22:21 -07005433 cm->pre_fc = &cm->frame_contexts[cm->frame_context_idx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07005434 if (!cm->fc->initialized)
Yaowu Xuf883b422016-08-30 14:01:10 -07005435 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005436 "Uninitialized entropy context.");
5437
Yaowu Xuf883b422016-08-30 14:01:10 -07005438 av1_zero(cm->counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005439
5440 xd->corrupted = 0;
5441 new_fb->corrupted = read_compressed_header(pbi, data, first_partition_size);
5442 if (new_fb->corrupted)
Yaowu Xuf883b422016-08-30 14:01:10 -07005443 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005444 "Decode failed. Frame data header is corrupted.");
5445
5446 if (cm->lf.filter_level && !cm->skip_loop_filter) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005447 av1_loop_filter_frame_init(cm, cm->lf.filter_level);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005448 }
5449
5450 // If encoded in frame parallel mode, frame context is ready after decoding
5451 // the frame header.
5452 if (cm->frame_parallel_decode &&
5453 cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005454 AVxWorker *const worker = pbi->frame_worker_owner;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005455 FrameWorkerData *const frame_worker_data = worker->data1;
5456 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD) {
5457 context_updated = 1;
5458 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
5459 }
Yaowu Xuf883b422016-08-30 14:01:10 -07005460 av1_frameworker_lock_stats(worker);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005461 pbi->cur_buf->row = -1;
5462 pbi->cur_buf->col = -1;
5463 frame_worker_data->frame_context_ready = 1;
5464 // Signal the main thread that context is ready.
Yaowu Xuf883b422016-08-30 14:01:10 -07005465 av1_frameworker_signal_stats(worker);
5466 av1_frameworker_unlock_stats(worker);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005467 }
5468
Jingning Han52ece882017-04-07 14:58:25 -07005469 if (pbi->max_threads > 1 && !CONFIG_CB4X4 &&
Yaowu Xuc27fc142016-08-22 16:08:15 -07005470#if CONFIG_EXT_TILE
Jingning Han52ece882017-04-07 14:58:25 -07005471 pbi->dec_tile_col < 0 && // Decoding all columns
Yaowu Xuc27fc142016-08-22 16:08:15 -07005472#endif // CONFIG_EXT_TILE
Jingning Han52ece882017-04-07 14:58:25 -07005473 cm->tile_cols > 1) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005474 // Multi-threaded tile decoder
5475 *p_data_end = decode_tiles_mt(pbi, data + first_partition_size, data_end);
5476 if (!xd->corrupted) {
5477 if (!cm->skip_loop_filter) {
5478 // If multiple threads are used to decode tiles, then we use those
5479 // threads to do parallel loopfiltering.
Yaowu Xuf883b422016-08-30 14:01:10 -07005480 av1_loop_filter_frame_mt(new_fb, cm, pbi->mb.plane, cm->lf.filter_level,
5481 0, 0, pbi->tile_workers, pbi->num_tile_workers,
5482 &pbi->lf_row_sync);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005483 }
5484 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07005485 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005486 "Decode failed. Frame data is corrupted.");
5487 }
5488 } else {
5489 *p_data_end = decode_tiles(pbi, data + first_partition_size, data_end);
5490 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005491
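  // The remaining post-decode stages run in order: CDEF, superres upscaling
  // (when enabled), then loop restoration; deblocking has already been applied
  // during or immediately after tile decoding.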
#if CONFIG_CDEF
  if (!cm->skip_loop_filter) {
    av1_cdef_frame(&pbi->cur_buf->buf, cm, &pbi->mb);
  }
#endif  // CONFIG_CDEF

#if CONFIG_FRAME_SUPERRES
  superres_post_decode(pbi);
#endif  // CONFIG_FRAME_SUPERRES

#if CONFIG_LOOP_RESTORATION
  if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
    av1_loop_restoration_frame(new_fb, cm, cm->rst_info, 7, 0, NULL);
  }
#endif  // CONFIG_LOOP_RESTORATION

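  // Backward adaptation: when the frame signals REFRESH_FRAME_CONTEXT_BACKWARD
  // and no corruption was detected, adapt the frame's probability model from
  // the symbol counts gathered during decoding. With CONFIG_EC_ADAPT the
  // per-tile CDFs are additionally averaged back into the frame context.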
  if (!xd->corrupted) {
    if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
#if CONFIG_EC_ADAPT
      FRAME_CONTEXT **tile_ctxs = aom_malloc(cm->tile_rows * cm->tile_cols *
                                             sizeof(&pbi->tile_data[0].tctx));
      aom_cdf_prob **cdf_ptrs =
          aom_malloc(cm->tile_rows * cm->tile_cols *
                     sizeof(&pbi->tile_data[0].tctx.partition_cdf[0][0]));
      make_update_tile_list_dec(pbi, cm->tile_rows, cm->tile_cols, tile_ctxs);
#endif
      av1_adapt_coef_probs(cm);
      av1_adapt_intra_frame_probs(cm);
#if CONFIG_EC_ADAPT
      av1_average_tile_coef_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
                                 cm->tile_rows * cm->tile_cols);
      av1_average_tile_intra_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
                                  cm->tile_rows * cm->tile_cols);
#if CONFIG_PVQ
      av1_average_tile_pvq_cdfs(pbi->common.fc, tile_ctxs,
                                cm->tile_rows * cm->tile_cols);
#endif  // CONFIG_PVQ
#endif  // CONFIG_EC_ADAPT
#if CONFIG_ADAPT_SCAN
      av1_adapt_scan_order(cm);
#endif  // CONFIG_ADAPT_SCAN

      if (!frame_is_intra_only(cm)) {
        av1_adapt_inter_frame_probs(cm);
        av1_adapt_mv_probs(cm, cm->allow_high_precision_mv);
#if CONFIG_EC_ADAPT
        av1_average_tile_inter_cdfs(&pbi->common, pbi->common.fc, tile_ctxs,
                                    cdf_ptrs, cm->tile_rows * cm->tile_cols);
        av1_average_tile_mv_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
                                 cm->tile_rows * cm->tile_cols);
#endif
      }
#if CONFIG_EC_ADAPT
      aom_free(tile_ctxs);
      aom_free(cdf_ptrs);
#endif
    } else {
      debug_check_frame_counts(cm);
    }
  } else {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Decode failed. Frame data is corrupted.");
  }

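  // With CONFIG_INSPECTION, hand the fully decoded frame to the registered
  // inspection callback (used, for example, by external analysis tools).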
#if CONFIG_INSPECTION
  if (pbi->inspect_cb != NULL) {
    (*pbi->inspect_cb)(pbi, pbi->inspect_ctx);
  }
#endif

  // If the frame context was not already saved in the frame-parallel path
  // above, save it here (unless error-resilient mode disables context
  // updates).
  if (!cm->error_resilient_mode && !context_updated)
    cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
}