/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <stdlib.h>  // qsort()

#include "./aom_config.h"
#include "./aom_dsp_rtcd.h"
#include "./aom_scale_rtcd.h"
#include "./av1_rtcd.h"

#include "aom/aom_codec.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_dsp/bitreader.h"
#include "aom_dsp/bitreader_buffer.h"
#include "aom_dsp/binary_codes_reader.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem.h"
#include "aom_ports/mem_ops.h"
#include "aom_scale/aom_scale.h"
#include "aom_util/aom_thread.h"

#if CONFIG_BITSTREAM_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_BITSTREAM_DEBUG

#include "av1/common/alloccommon.h"
#if CONFIG_CDEF
#include "av1/common/cdef.h"
#endif
#if CONFIG_INSPECTION
#include "av1/decoder/inspection.h"
#endif
#include "av1/common/common.h"
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/idct.h"
#include "av1/common/mvref_common.h"
#include "av1/common/pred_common.h"
#include "av1/common/quant_common.h"
#include "av1/common/reconinter.h"
#include "av1/common/reconintra.h"
#if CONFIG_FRAME_SUPERRES
#include "av1/common/resize.h"
#endif  // CONFIG_FRAME_SUPERRES
#include "av1/common/seg_common.h"
#include "av1/common/thread_common.h"
#include "av1/common/tile_common.h"

#include "av1/decoder/decodeframe.h"
#include "av1/decoder/decodemv.h"
#include "av1/decoder/decoder.h"
#if CONFIG_LV_MAP
#include "av1/decoder/decodetxb.h"
#endif
#include "av1/decoder/detokenize.h"
#include "av1/decoder/dsubexp.h"

#if CONFIG_WARPED_MOTION || CONFIG_GLOBAL_MOTION
#include "av1/common/warped_motion.h"
#endif  // CONFIG_WARPED_MOTION || CONFIG_GLOBAL_MOTION

#define MAX_AV1_HEADER_SIZE 80
#define ACCT_STR __func__

#if CONFIG_PVQ
#include "av1/common/partition.h"
#include "av1/common/pvq.h"
#include "av1/common/scan.h"
#include "av1/decoder/decint.h"
#include "av1/decoder/pvq_decoder.h"
#include "av1/encoder/encodemb.h"
#include "av1/encoder/hybrid_fwd_txfm.h"
#endif

#if CONFIG_CFL
#include "av1/common/cfl.h"
#endif

#if CONFIG_LOOP_RESTORATION
static void loop_restoration_read_sb_coeffs(const AV1_COMMON *const cm,
                                            MACROBLOCKD *xd,
                                            aom_reader *const r, int plane,
                                            int rtile_idx);
#endif

static struct aom_read_bit_buffer *init_read_bit_buffer(
    AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
    const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]);
static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
                                  size_t partition_size);
static size_t read_uncompressed_header(AV1Decoder *pbi,
                                       struct aom_read_bit_buffer *rb);

static int is_compound_reference_allowed(const AV1_COMMON *cm) {
#if CONFIG_ONE_SIDED_COMPOUND  // Normative in decoder
  return !frame_is_intra_only(cm);
#else
  int i;
  if (frame_is_intra_only(cm)) return 0;
  for (i = 1; i < INTER_REFS_PER_FRAME; ++i)
    if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1]) return 1;

  return 0;
#endif  // CONFIG_ONE_SIDED_COMPOUND
}

static void setup_compound_reference_mode(AV1_COMMON *cm) {
#if CONFIG_EXT_REFS
  cm->comp_fwd_ref[0] = LAST_FRAME;
  cm->comp_fwd_ref[1] = LAST2_FRAME;
  cm->comp_fwd_ref[2] = LAST3_FRAME;
  cm->comp_fwd_ref[3] = GOLDEN_FRAME;

  cm->comp_bwd_ref[0] = BWDREF_FRAME;
  cm->comp_bwd_ref[1] = ALTREF2_FRAME;
  cm->comp_bwd_ref[2] = ALTREF_FRAME;
#else   // !CONFIG_EXT_REFS
  if (cm->ref_frame_sign_bias[LAST_FRAME] ==
      cm->ref_frame_sign_bias[GOLDEN_FRAME]) {
    cm->comp_fixed_ref = ALTREF_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = GOLDEN_FRAME;
  } else if (cm->ref_frame_sign_bias[LAST_FRAME] ==
             cm->ref_frame_sign_bias[ALTREF_FRAME]) {
    cm->comp_fixed_ref = GOLDEN_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  } else {
    cm->comp_fixed_ref = LAST_FRAME;
    cm->comp_var_ref[0] = GOLDEN_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  }
#endif  // CONFIG_EXT_REFS
}

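// Returns 1 if the range [start, start + len) lies entirely within the buffer
// that ends at 'end'.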
static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return len != 0 && len <= (size_t)(end - start);
}

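// Reads a literal with just enough bits to represent 'max' and clamps the
// decoded value to 'max'.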
static int decode_unsigned_max(struct aom_read_bit_buffer *rb, int max) {
  const int data = aom_rb_read_literal(rb, get_unsigned_bits(max));
  return data > max ? max : data;
}

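// Reads the frame-level transform mode; fully lossless frames always use
// ONLY_4X4.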
static TX_MODE read_tx_mode(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
#if CONFIG_TX64X64
  TX_MODE tx_mode;
#endif
  if (cm->all_lossless) return ONLY_4X4;
#if CONFIG_VAR_TX_NO_TX_MODE
  (void)rb;
  return TX_MODE_SELECT;
#else
#if CONFIG_TX64X64
  tx_mode = aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
  if (tx_mode == ALLOW_32X32) tx_mode += aom_rb_read_bit(rb);
  return tx_mode;
#else
  return aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
#endif  // CONFIG_TX64X64
#endif  // CONFIG_VAR_TX_NO_TX_MODE
}

#if !CONFIG_NEW_MULTISYMBOL
static void read_inter_mode_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i;
  for (i = 0; i < NEWMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->newmv_prob[i], ACCT_STR);
  for (i = 0; i < ZEROMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->zeromv_prob[i], ACCT_STR);
  for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->refmv_prob[i], ACCT_STR);
  for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->drl_prob[i], ACCT_STR);
}
#endif

static REFERENCE_MODE read_frame_reference_mode(
    const AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  if (is_compound_reference_allowed(cm)) {
#if CONFIG_REF_ADAPT
    return aom_rb_read_bit(rb) ? REFERENCE_MODE_SELECT : SINGLE_REFERENCE;
#else
    return aom_rb_read_bit(rb)
               ? REFERENCE_MODE_SELECT
               : (aom_rb_read_bit(rb) ? COMPOUND_REFERENCE : SINGLE_REFERENCE);
#endif  // CONFIG_REF_ADAPT
  } else {
    return SINGLE_REFERENCE;
  }
}

static void read_frame_reference_mode_probs(AV1_COMMON *cm, aom_reader *r) {
#if CONFIG_NEW_MULTISYMBOL && !CONFIG_EXT_COMP_REFS
  (void)r;
#else
  FRAME_CONTEXT *const fc = cm->fc;
  int i;
#endif

#if !CONFIG_NEW_MULTISYMBOL
  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    for (i = 0; i < COMP_INTER_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_inter_prob[i], ACCT_STR);

  if (cm->reference_mode != COMPOUND_REFERENCE) {
    for (i = 0; i < REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (SINGLE_REFS - 1); ++j) {
        av1_diff_update_prob(r, &fc->single_ref_prob[i][j], ACCT_STR);
      }
    }
  }
#endif

  if (cm->reference_mode != SINGLE_REFERENCE) {
#if CONFIG_EXT_COMP_REFS
    for (i = 0; i < COMP_REF_TYPE_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_ref_type_prob[i], ACCT_STR);

    for (i = 0; i < UNI_COMP_REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (UNIDIR_COMP_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->uni_comp_ref_prob[i][j], ACCT_STR);
    }
#endif  // CONFIG_EXT_COMP_REFS

#if !CONFIG_NEW_MULTISYMBOL
    for (i = 0; i < REF_CONTEXTS; ++i) {
      int j;
#if CONFIG_EXT_REFS
      for (j = 0; j < (FWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_ref_prob[i][j], ACCT_STR);
      for (j = 0; j < (BWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_bwdref_prob[i][j], ACCT_STR);
#else
      for (j = 0; j < (COMP_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_ref_prob[i][j], ACCT_STR);
#endif  // CONFIG_EXT_REFS
    }
#endif  // CONFIG_NEW_MULTISYMBOL
  }
}

#if !CONFIG_NEW_MULTISYMBOL
static void update_mv_probs(aom_prob *p, int n, aom_reader *r) {
  int i;
  for (i = 0; i < n; ++i) av1_diff_update_prob(r, &p[i], ACCT_STR);
}

static void read_mv_probs(nmv_context *ctx, int allow_hp, aom_reader *r) {
  int i;
  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      nmv_component *const comp_ctx = &ctx->comps[i];
      update_mv_probs(&comp_ctx->class0_hp, 1, r);
      update_mv_probs(&comp_ctx->hp, 1, r);
    }
  }
}
#endif

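// Applies the inverse transform of the given type/size to the dequantized
// coefficients in pd->dqcoeff, adds the result into 'dst', and then clears
// the coefficients that were consumed.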
static void inverse_transform_block(MACROBLOCKD *xd, int plane,
#if CONFIG_LGT
                                    PREDICTION_MODE mode,
#endif
                                    const TX_TYPE tx_type,
                                    const TX_SIZE tx_size, uint8_t *dst,
                                    int stride, int16_t scan_line, int eob) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  tran_low_t *const dqcoeff = pd->dqcoeff;
  av1_inverse_transform_block(xd, dqcoeff,
#if CONFIG_LGT
                              mode,
#endif
#if CONFIG_MRC_TX && SIGNAL_ANY_MRC_MASK
                              xd->mrc_mask,
#endif  // CONFIG_MRC_TX && SIGNAL_ANY_MRC_MASK
                              tx_type, tx_size, dst, stride, eob);
  memset(dqcoeff, 0, (scan_line + 1) * sizeof(dqcoeff[0]));
}

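// Returns the raster index of the transform block at (row, col) within its
// plane.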
static int get_block_idx(const MACROBLOCKD *xd, int plane, int row, int col) {
  const int bsize = xd->mi[0]->mbmi.sb_type;
  const struct macroblockd_plane *pd = &xd->plane[plane];
#if CONFIG_CHROMA_SUB8X8
  const BLOCK_SIZE plane_bsize =
      AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
#elif CONFIG_CB4X4
  const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
#else
  const BLOCK_SIZE plane_bsize =
      get_plane_block_size(AOMMAX(BLOCK_8X8, bsize), pd);
#endif
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
  const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
  const uint8_t txh_unit = tx_size_high_unit[tx_size];
  return row * max_blocks_wide + col * txh_unit;
}

#if CONFIG_PVQ
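// Decodes one PVQ-coded transform block: reads the PVQ symbols, combines them
// with the transformed prediction in 'ref_coeff', and writes the resulting
// dequantized coefficients to 'dqcoeff'. Returns the eob (the full block size
// for PVQ).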
static int av1_pvq_decode_helper(MACROBLOCKD *xd, tran_low_t *ref_coeff,
                                 tran_low_t *dqcoeff, int16_t *quant, int pli,
                                 int bs, TX_TYPE tx_type, int xdec,
                                 PVQ_SKIP_TYPE ac_dc_coded) {
  unsigned int flags;  // used for daala's stream analyzer.
  int off;
  const int is_keyframe = 0;
  const int has_dc_skip = 1;
  int coeff_shift = 3 - av1_get_tx_scale(bs);
  int hbd_downshift = 0;
  int rounding_mask;
  // DC quantizer for PVQ
  int pvq_dc_quant;
  int lossless = (quant[0] == 0);
  const int blk_size = tx_size_wide[bs];
  int eob = 0;
  int i;
  od_dec_ctx *dec = &xd->daala_dec;
  int use_activity_masking = dec->use_activity_masking;
  DECLARE_ALIGNED(16, tran_low_t, dqcoeff_pvq[OD_TXSIZE_MAX * OD_TXSIZE_MAX]);
  DECLARE_ALIGNED(16, tran_low_t, ref_coeff_pvq[OD_TXSIZE_MAX * OD_TXSIZE_MAX]);

  od_coeff ref_int32[OD_TXSIZE_MAX * OD_TXSIZE_MAX];
  od_coeff out_int32[OD_TXSIZE_MAX * OD_TXSIZE_MAX];

  hbd_downshift = xd->bd - 8;

  od_raster_to_coding_order(ref_coeff_pvq, blk_size, tx_type, ref_coeff,
                            blk_size);

  assert(OD_COEFF_SHIFT >= 4);
  if (lossless)
    pvq_dc_quant = 1;
  else {
    if (use_activity_masking)
      pvq_dc_quant =
          OD_MAXI(1,
                  (quant[0] << (OD_COEFF_SHIFT - 3) >> hbd_downshift) *
                          dec->state.pvq_qm_q4[pli][od_qm_get_index(bs, 0)] >>
                      4);
    else
      pvq_dc_quant =
          OD_MAXI(1, quant[0] << (OD_COEFF_SHIFT - 3) >> hbd_downshift);
  }

  off = od_qm_offset(bs, xdec);

  // copy int16 inputs to int32
  for (i = 0; i < blk_size * blk_size; i++) {
    ref_int32[i] =
        AOM_SIGNED_SHL(ref_coeff_pvq[i], OD_COEFF_SHIFT - coeff_shift) >>
        hbd_downshift;
  }

  od_pvq_decode(dec, ref_int32, out_int32,
                OD_MAXI(1, quant[1] << (OD_COEFF_SHIFT - 3) >> hbd_downshift),
                pli, bs, OD_PVQ_BETA[use_activity_masking][pli][bs],
                is_keyframe, &flags, ac_dc_coded, dec->state.qm + off,
                dec->state.qm_inv + off);

  if (!has_dc_skip || out_int32[0]) {
    out_int32[0] =
        has_dc_skip + generic_decode(dec->r, &dec->state.adapt->model_dc[pli],
                                     &dec->state.adapt->ex_dc[pli][bs][0], 2,
                                     "dc:mag");
    if (out_int32[0]) out_int32[0] *= aom_read_bit(dec->r, "dc:sign") ? -1 : 1;
  }
  out_int32[0] = out_int32[0] * pvq_dc_quant + ref_int32[0];

  // copy int32 result back to int16
  assert(OD_COEFF_SHIFT > coeff_shift);
  rounding_mask = (1 << (OD_COEFF_SHIFT - coeff_shift - 1)) - 1;
  for (i = 0; i < blk_size * blk_size; i++) {
    out_int32[i] = AOM_SIGNED_SHL(out_int32[i], hbd_downshift);
    dqcoeff_pvq[i] = (out_int32[i] + (out_int32[i] < 0) + rounding_mask) >>
                     (OD_COEFF_SHIFT - coeff_shift);
  }

  od_coding_order_to_raster(dqcoeff, blk_size, tx_type, dqcoeff_pvq, blk_size);

  eob = blk_size * blk_size;

  return eob;
}

static PVQ_SKIP_TYPE read_pvq_skip(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                   int plane, TX_SIZE tx_size) {
  // decode ac/dc coded flag. bit0: DC coded, bit1 : AC coded
  // NOTE : we don't use 5 symbols for luma here in aom codebase,
  // since block partition is taken care of by aom.
  // So, only AC/DC skip info is coded
  const int ac_dc_coded = aom_read_symbol(
      xd->daala_dec.r,
      xd->daala_dec.state.adapt->skip_cdf[2 * tx_size + (plane != 0)], 4,
      "skip");
  if (ac_dc_coded < 0 || ac_dc_coded > 3) {
    aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
                       "Invalid PVQ Skip Type");
  }
  return ac_dc_coded;
}

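// Reconstructs one PVQ-coded block: reads the AC/DC skip flag, forward
// transforms the prediction to obtain the PVQ reference, decodes the
// coefficients, and applies the inverse transform into the destination
// buffer.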
static int av1_pvq_decode_helper2(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                  MB_MODE_INFO *const mbmi, int plane, int row,
                                  int col, TX_SIZE tx_size, TX_TYPE tx_type) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  // transform block size in pixels
  int tx_blk_size = tx_size_wide[tx_size];
  int i, j;
  tran_low_t *pvq_ref_coeff = pd->pvq_ref_coeff;
  const int diff_stride = tx_blk_size;
  int16_t *pred = pd->pred;
  tran_low_t *const dqcoeff = pd->dqcoeff;
  uint8_t *dst;
  int eob;
  const PVQ_SKIP_TYPE ac_dc_coded = read_pvq_skip(cm, xd, plane, tx_size);

  eob = 0;
  dst = &pd->dst.buf[4 * row * pd->dst.stride + 4 * col];

  if (ac_dc_coded) {
    int xdec = pd->subsampling_x;
    int seg_id = mbmi->segment_id;
    int16_t *quant;
    TxfmParam txfm_param;
    // ToDo(yaowu): correct this with optimal number from decoding process.
    const int max_scan_line = tx_size_2d[tx_size];
#if CONFIG_HIGHBITDEPTH
    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
      for (j = 0; j < tx_blk_size; j++)
        for (i = 0; i < tx_blk_size; i++)
          pred[diff_stride * j + i] =
              CONVERT_TO_SHORTPTR(dst)[pd->dst.stride * j + i];
    } else {
#endif
      for (j = 0; j < tx_blk_size; j++)
        for (i = 0; i < tx_blk_size; i++)
          pred[diff_stride * j + i] = dst[pd->dst.stride * j + i];
#if CONFIG_HIGHBITDEPTH
    }
#endif

    txfm_param.tx_type = tx_type;
    txfm_param.tx_size = tx_size;
    txfm_param.lossless = xd->lossless[seg_id];

#if CONFIG_HIGHBITDEPTH
    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
      txfm_param.bd = xd->bd;
      av1_highbd_fwd_txfm(pred, pvq_ref_coeff, diff_stride, &txfm_param);
    } else {
#endif  // CONFIG_HIGHBITDEPTH
      av1_fwd_txfm(pred, pvq_ref_coeff, diff_stride, &txfm_param);
#if CONFIG_HIGHBITDEPTH
    }
#endif  // CONFIG_HIGHBITDEPTH

    quant = &pd->seg_dequant[seg_id][0];  // aom's quantizer

    eob = av1_pvq_decode_helper(xd, pvq_ref_coeff, dqcoeff, quant, plane,
                                tx_size, tx_type, xdec, ac_dc_coded);

    inverse_transform_block(xd, plane, tx_type, tx_size, dst, pd->dst.stride,
                            max_scan_line, eob);
  }

  return eob;
}
#endif

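// Decodes and reconstructs one intra-coded transform block: intra prediction,
// coefficient decoding and inverse transform (and, with CFL, storage of the
// reconstructed luma pixels).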
static void predict_and_reconstruct_intra_block(
    AV1_COMMON *cm, MACROBLOCKD *const xd, aom_reader *const r,
    MB_MODE_INFO *const mbmi, int plane, int row, int col, TX_SIZE tx_size) {
  PLANE_TYPE plane_type = get_plane_type(plane);
  const int block_idx = get_block_idx(xd, plane, row, col);
#if CONFIG_PVQ
  (void)r;
#endif
  av1_predict_intra_block_facade(xd, plane, block_idx, col, row, tx_size);

  if (!mbmi->skip) {
#if !CONFIG_PVQ
    struct macroblockd_plane *const pd = &xd->plane[plane];
#if CONFIG_LV_MAP
    int16_t max_scan_line = 0;
    int eob;
    av1_read_coeffs_txb_facade(cm, xd, r, row, col, block_idx, plane,
                               pd->dqcoeff, tx_size, &max_scan_line, &eob);
    // tx_type will be read out in av1_read_coeffs_txb_facade
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
#else   // CONFIG_LV_MAP
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
    const SCAN_ORDER *scan_order = get_scan(cm, tx_size, tx_type, mbmi);
    int16_t max_scan_line = 0;
    const int eob =
        av1_decode_block_tokens(cm, xd, plane, scan_order, col, row, tx_size,
                                tx_type, &max_scan_line, r, mbmi->segment_id);
#endif  // CONFIG_LV_MAP
    if (eob) {
      uint8_t *dst =
          &pd->dst.buf[(row * pd->dst.stride + col) << tx_size_wide_log2[0]];
      inverse_transform_block(xd, plane,
#if CONFIG_LGT
                              mbmi->mode,
#endif
                              tx_type, tx_size, dst, pd->dst.stride,
                              max_scan_line, eob);
    }
#else   // !CONFIG_PVQ
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
    av1_pvq_decode_helper2(cm, xd, mbmi, plane, row, col, tx_size, tx_type);
#endif  // !CONFIG_PVQ
  }
#if CONFIG_CFL
  if (plane == AOM_PLANE_Y && xd->cfl->store_y) {
    struct macroblockd_plane *const pd = &xd->plane[plane];
#if CONFIG_CHROMA_SUB8X8
    const BLOCK_SIZE plane_bsize =
        AOMMAX(BLOCK_4X4, get_plane_block_size(mbmi->sb_type, pd));
#else
    const BLOCK_SIZE plane_bsize = get_plane_block_size(mbmi->sb_type, pd);
#endif  // CONFIG_CHROMA_SUB8X8
    uint8_t *dst =
        &pd->dst.buf[(row * pd->dst.stride + col) << tx_size_wide_log2[0]];
    // TODO (ltrudeau) Store sub-8x8 inter blocks when bottom right block is
    // intra predicted.
    cfl_store(xd->cfl, dst, pd->dst.stride, row, col, tx_size, plane_bsize);
  }
#endif  // CONFIG_CFL
}

#if CONFIG_VAR_TX && !CONFIG_COEF_INTERLEAVE
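// Recursively walks the transform partition tree of an inter block, decoding
// and inverse-transforming each leaf transform block it reaches.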
static void decode_reconstruct_tx(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                  aom_reader *r, MB_MODE_INFO *const mbmi,
                                  int plane, BLOCK_SIZE plane_bsize,
                                  int blk_row, int blk_col, int block,
                                  TX_SIZE tx_size, int *eob_total) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
  const int tx_row = blk_row >> (1 - pd->subsampling_y);
  const int tx_col = blk_col >> (1 - pd->subsampling_x);
  const TX_SIZE plane_tx_size =
      plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
            : mbmi->inter_tx_size[tx_row][tx_col];
  // Scale to match transform block unit.
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (tx_size == plane_tx_size) {
    PLANE_TYPE plane_type = get_plane_type(plane);
#if CONFIG_LV_MAP
    int16_t max_scan_line = 0;
    int eob;
    av1_read_coeffs_txb_facade(cm, xd, r, blk_row, blk_col, block, plane,
                               pd->dqcoeff, tx_size, &max_scan_line, &eob);
    // tx_type will be read out in av1_read_coeffs_txb_facade
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, blk_row, blk_col, block, plane_tx_size);
#else   // CONFIG_LV_MAP
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, blk_row, blk_col, block, plane_tx_size);
    const SCAN_ORDER *sc = get_scan(cm, plane_tx_size, tx_type, mbmi);
    int16_t max_scan_line = 0;
    const int eob = av1_decode_block_tokens(
        cm, xd, plane, sc, blk_col, blk_row, plane_tx_size, tx_type,
        &max_scan_line, r, mbmi->segment_id);
#endif  // CONFIG_LV_MAP
    inverse_transform_block(xd, plane,
#if CONFIG_LGT
                            mbmi->mode,
#endif
                            tx_type, plane_tx_size,
                            &pd->dst.buf[(blk_row * pd->dst.stride + blk_col)
                                         << tx_size_wide_log2[0]],
                            pd->dst.stride, max_scan_line, eob);
    *eob_total += eob;
  } else {
#if CONFIG_RECT_TX_EXT
    int is_qttx = plane_tx_size == quarter_txsize_lookup[plane_bsize];
    const TX_SIZE sub_txs = is_qttx ? plane_tx_size : sub_tx_size_map[tx_size];
    if (is_qttx) assert(blk_row == 0 && blk_col == 0 && block == 0);
#else
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    assert(sub_txs < tx_size);
#endif
    const int bsl = tx_size_wide_unit[sub_txs];
    int sub_step = tx_size_wide_unit[sub_txs] * tx_size_high_unit[sub_txs];
    int i;

    assert(bsl > 0);

    for (i = 0; i < 4; ++i) {
#if CONFIG_RECT_TX_EXT
      int is_wide_tx = tx_size_wide_unit[sub_txs] > tx_size_high_unit[sub_txs];
      const int offsetr =
          is_qttx ? (is_wide_tx ? i * tx_size_high_unit[sub_txs] : 0)
                  : blk_row + ((i >> 1) * bsl);
      const int offsetc =
          is_qttx ? (is_wide_tx ? 0 : i * tx_size_wide_unit[sub_txs])
                  : blk_col + (i & 0x01) * bsl;
#else
      const int offsetr = blk_row + (i >> 1) * bsl;
      const int offsetc = blk_col + (i & 0x01) * bsl;
#endif

      if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

      decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize, offsetr,
                            offsetc, block, sub_txs, eob_total);
      block += sub_step;
    }
  }
}
#endif  // CONFIG_VAR_TX

#if !CONFIG_VAR_TX || CONFIG_SUPERTX || CONFIG_COEF_INTERLEAVE || \
    (!CONFIG_VAR_TX && CONFIG_EXT_TX && CONFIG_RECT_TX)
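// Decodes the coefficients of one inter transform block, adds the inverse
// transform on top of the prediction in the destination buffer, and returns
// the eob.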
static int reconstruct_inter_block(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                   aom_reader *const r, int segment_id,
                                   int plane, int row, int col,
                                   TX_SIZE tx_size) {
  PLANE_TYPE plane_type = get_plane_type(plane);
  int block_idx = get_block_idx(xd, plane, row, col);
#if CONFIG_PVQ
  int eob;
  (void)r;
  (void)segment_id;
#else
  struct macroblockd_plane *const pd = &xd->plane[plane];
#endif

#if !CONFIG_PVQ
#if CONFIG_LV_MAP
  (void)segment_id;
  int16_t max_scan_line = 0;
  int eob;
  av1_read_coeffs_txb_facade(cm, xd, r, row, col, block_idx, plane, pd->dqcoeff,
                             tx_size, &max_scan_line, &eob);
  // tx_type will be read out in av1_read_coeffs_txb_facade
  const TX_TYPE tx_type =
      av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
#else   // CONFIG_LV_MAP
  int16_t max_scan_line = 0;
  const TX_TYPE tx_type =
      av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
  const SCAN_ORDER *scan_order =
      get_scan(cm, tx_size, tx_type, &xd->mi[0]->mbmi);
  const int eob =
      av1_decode_block_tokens(cm, xd, plane, scan_order, col, row, tx_size,
                              tx_type, &max_scan_line, r, segment_id);
#endif  // CONFIG_LV_MAP
  uint8_t *dst =
      &pd->dst.buf[(row * pd->dst.stride + col) << tx_size_wide_log2[0]];
  if (eob)
    inverse_transform_block(xd, plane,
#if CONFIG_LGT
                            xd->mi[0]->mbmi.mode,
#endif
                            tx_type, tx_size, dst, pd->dst.stride,
                            max_scan_line, eob);
#else
  const TX_TYPE tx_type =
      av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
  eob = av1_pvq_decode_helper2(cm, xd, &xd->mi[0]->mbmi, plane, row, col,
                               tx_size, tx_type);
#endif
  return eob;
}
#endif  // !CONFIG_VAR_TX || CONFIG_SUPER_TX

static void set_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                        BLOCK_SIZE bsize, int mi_row, int mi_col, int bw,
                        int bh, int x_mis, int y_mis) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;
  const TileInfo *const tile = &xd->tile;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
  // TODO(slavarnway): Generate sb_type based on bwl and bhl, instead of
  // passing bsize from decode_partition().
  xd->mi[0]->mbmi.sb_type = bsize;
#if CONFIG_RD_DEBUG
  xd->mi[0]->mbmi.mi_row = mi_row;
  xd->mi[0]->mbmi.mi_col = mi_col;
#endif
#if CONFIG_CFL
  xd->cfl->mi_row = mi_row;
  xd->cfl->mi_col = mi_col;
#endif
  for (y = 0; y < y_mis; ++y)
    for (x = !y; x < x_mis; ++x) xd->mi[y * cm->mi_stride + x] = xd->mi[0];

  set_plane_n4(xd, bw, bh);
  set_skip_context(xd, mi_row, mi_col);

#if CONFIG_VAR_TX
  xd->max_tx_size = max_txsize_lookup[bsize];
#endif

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);

  av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
                       mi_col);
}

#if CONFIG_SUPERTX
static MB_MODE_INFO *set_offsets_extend(AV1_COMMON *const cm,
                                        MACROBLOCKD *const xd,
                                        const TileInfo *const tile,
                                        BLOCK_SIZE bsize_pred, int mi_row_pred,
                                        int mi_col_pred, int mi_row_ori,
                                        int mi_col_ori) {
  // Used in supertx
  // (mi_row_ori, mi_col_ori): location for mv
  // (mi_row_pred, mi_col_pred, bsize_pred): region to predict
  const int bw = mi_size_wide[bsize_pred];
  const int bh = mi_size_high[bsize_pred];
  const int offset = mi_row_ori * cm->mi_stride + mi_col_ori;
  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;
  set_mi_row_col(xd, tile, mi_row_pred, bh, mi_col_pred, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);

  xd->up_available = (mi_row_ori > tile->mi_row_start);
  xd->left_available = (mi_col_ori > tile->mi_col_start);

  set_plane_n4(xd, bw, bh);

  return &xd->mi[0]->mbmi;
}

#if CONFIG_SUPERTX
static MB_MODE_INFO *set_mb_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                    BLOCK_SIZE bsize, int mi_row, int mi_col,
                                    int bw, int bh, int x_mis, int y_mis) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  const TileInfo *const tile = &xd->tile;
  int x, y;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;
  xd->mi[0]->mbmi.sb_type = bsize;
  for (y = 0; y < y_mis; ++y)
    for (x = !y; x < x_mis; ++x) xd->mi[y * cm->mi_stride + x] = xd->mi[0];

  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);
  return &xd->mi[0]->mbmi;
}
#endif

static void set_offsets_topblock(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                 const TileInfo *const tile, BLOCK_SIZE bsize,
                                 int mi_row, int mi_col) {
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int offset = mi_row * cm->mi_stride + mi_col;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;

  set_plane_n4(xd, bw, bh);

  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);

  av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
                       mi_col);
}

static void set_param_topblock(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                               BLOCK_SIZE bsize, int mi_row, int mi_col,
                               int txfm, int skip) {
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;

  for (y = 0; y < y_mis; ++y)
    for (x = 0; x < x_mis; ++x) {
      xd->mi[y * cm->mi_stride + x]->mbmi.skip = skip;
      xd->mi[y * cm->mi_stride + x]->mbmi.tx_type = txfm;
    }
#if CONFIG_VAR_TX
  xd->above_txfm_context = cm->above_txfm_context + mi_col;
  xd->left_txfm_context =
      xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
  set_txfm_ctxs(xd->mi[0]->mbmi.tx_size, bw, bh, skip, xd);
#endif
}

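// Attaches reference frame 'idx' of the current block to xd, checking its
// scale factors and setting up the corresponding prediction planes.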
static void set_ref(AV1_COMMON *const cm, MACROBLOCKD *const xd, int idx,
                    int mi_row, int mi_col) {
  MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
  RefBuffer *ref_buffer =
      has_second_ref(mbmi) ? &cm->frame_refs[mbmi->ref_frame[idx] - LAST_FRAME]
                           : &cm->frame_refs[mbmi->ref_frame[0] - LAST_FRAME];
#else
  RefBuffer *ref_buffer = &cm->frame_refs[mbmi->ref_frame[idx] - LAST_FRAME];
#endif  // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
  xd->block_refs[idx] = ref_buffer;
  if (!av1_is_valid_scale(&ref_buffer->sf))
    aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                       "Invalid scale factors");
  av1_setup_pre_planes(xd, idx, ref_buffer->buf, mi_row, mi_col,
                       &ref_buffer->sf);
  aom_merge_corrupted_flag(&xd->corrupted, ref_buffer->buf->corrupted);
}

static void dec_predict_b_extend(
    AV1Decoder *const pbi, MACROBLOCKD *const xd, const TileInfo *const tile,
    int block, int mi_row_ori, int mi_col_ori, int mi_row_pred, int mi_col_pred,
    int mi_row_top, int mi_col_top, int plane, uint8_t *dst_buf, int dst_stride,
    BLOCK_SIZE bsize_top, BLOCK_SIZE bsize_pred, int b_sub8x8, int bextend) {
  // Used in supertx
  // (mi_row_ori, mi_col_ori): location for mv
  // (mi_row_pred, mi_col_pred, bsize_pred): region to predict
  // (mi_row_top, mi_col_top, bsize_top): region of the top partition size
  // block: sub location of sub8x8 blocks
  // b_sub8x8: 1: ori is sub8x8; 0: ori is not sub8x8
  // bextend: 1: region to predict is an extension of ori; 0: not
  int r = (mi_row_pred - mi_row_top) * MI_SIZE;
  int c = (mi_col_pred - mi_col_top) * MI_SIZE;
  const int mi_width_top = mi_size_wide[bsize_top];
  const int mi_height_top = mi_size_high[bsize_top];
  MB_MODE_INFO *mbmi;
  AV1_COMMON *const cm = &pbi->common;

  if (mi_row_pred < mi_row_top || mi_col_pred < mi_col_top ||
      mi_row_pred >= mi_row_top + mi_height_top ||
      mi_col_pred >= mi_col_top + mi_width_top || mi_row_pred >= cm->mi_rows ||
      mi_col_pred >= cm->mi_cols)
    return;

  mbmi = set_offsets_extend(cm, xd, tile, bsize_pred, mi_row_pred, mi_col_pred,
                            mi_row_ori, mi_col_ori);
  set_ref(cm, xd, 0, mi_row_pred, mi_col_pred);
  if (has_second_ref(&xd->mi[0]->mbmi)
#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
      || is_inter_singleref_comp_mode(xd->mi[0]->mbmi.mode)
#endif  // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
      )
    set_ref(cm, xd, 1, mi_row_pred, mi_col_pred);
  if (!bextend) mbmi->tx_size = max_txsize_lookup[bsize_top];

  xd->plane[plane].dst.stride = dst_stride;
  xd->plane[plane].dst.buf =
      dst_buf + (r >> xd->plane[plane].subsampling_y) * dst_stride +
      (c >> xd->plane[plane].subsampling_x);

  if (!b_sub8x8)
    av1_build_inter_predictor_sb_extend(&pbi->common, xd,
#if CONFIG_EXT_INTER
                                        mi_row_ori, mi_col_ori,
#endif  // CONFIG_EXT_INTER
                                        mi_row_pred, mi_col_pred, plane,
                                        bsize_pred);
  else
    av1_build_inter_predictor_sb_sub8x8_extend(&pbi->common, xd,
#if CONFIG_EXT_INTER
                                               mi_row_ori, mi_col_ori,
#endif  // CONFIG_EXT_INTER
                                               mi_row_pred, mi_col_pred, plane,
                                               bsize_pred, block);
}

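// Supertx: extends the inter prediction of the current block into the
// neighbouring region indicated by 'dir', writing into the given destination
// buffer.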
static void dec_extend_dir(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                           const TileInfo *const tile, int block,
                           BLOCK_SIZE bsize, BLOCK_SIZE top_bsize,
                           int mi_row_ori, int mi_col_ori, int mi_row,
                           int mi_col, int mi_row_top, int mi_col_top,
                           int plane, uint8_t *dst_buf, int dst_stride,
                           int dir) {
  // dir: 0-lower, 1-upper, 2-left, 3-right
  // 4-lowerleft, 5-upperleft, 6-lowerright, 7-upperright
  const int mi_width = mi_size_wide[bsize];
  const int mi_height = mi_size_high[bsize];
  int xss = xd->plane[1].subsampling_x;
  int yss = xd->plane[1].subsampling_y;
#if CONFIG_CB4X4
  const int unify_bsize = 1;
#else
  const int unify_bsize = 0;
#endif
  int b_sub8x8 = (bsize < BLOCK_8X8) && !unify_bsize ? 1 : 0;
  BLOCK_SIZE extend_bsize;
  int mi_row_pred, mi_col_pred;

  int wide_unit, high_unit;
  int i, j;
  int ext_offset = 0;

  if (dir == 0 || dir == 1) {
    extend_bsize =
        (mi_width == mi_size_wide[BLOCK_8X8] || bsize < BLOCK_8X8 || xss < yss)
            ? BLOCK_8X8
            : BLOCK_16X8;
#if CONFIG_CB4X4
    if (bsize < BLOCK_8X8) {
      extend_bsize = BLOCK_4X4;
      ext_offset = mi_size_wide[BLOCK_8X8];
    }
#endif

    wide_unit = mi_size_wide[extend_bsize];
    high_unit = mi_size_high[extend_bsize];

    mi_row_pred = mi_row + ((dir == 0) ? mi_height : -(mi_height + ext_offset));
    mi_col_pred = mi_col;

    for (j = 0; j < mi_height + ext_offset; j += high_unit)
      for (i = 0; i < mi_width + ext_offset; i += wide_unit)
        dec_predict_b_extend(pbi, xd, tile, block, mi_row_ori, mi_col_ori,
                             mi_row_pred + j, mi_col_pred + i, mi_row_top,
                             mi_col_top, plane, dst_buf, dst_stride, top_bsize,
                             extend_bsize, b_sub8x8, 1);
  } else if (dir == 2 || dir == 3) {
    extend_bsize =
        (mi_height == mi_size_high[BLOCK_8X8] || bsize < BLOCK_8X8 || yss < xss)
            ? BLOCK_8X8
            : BLOCK_8X16;
#if CONFIG_CB4X4
    if (bsize < BLOCK_8X8) {
      extend_bsize = BLOCK_4X4;
      ext_offset = mi_size_wide[BLOCK_8X8];
    }
#endif

    wide_unit = mi_size_wide[extend_bsize];
    high_unit = mi_size_high[extend_bsize];

    mi_row_pred = mi_row;
    mi_col_pred = mi_col + ((dir == 3) ? mi_width : -(mi_width + ext_offset));

    for (j = 0; j < mi_height + ext_offset; j += high_unit)
      for (i = 0; i < mi_width + ext_offset; i += wide_unit)
        dec_predict_b_extend(pbi, xd, tile, block, mi_row_ori, mi_col_ori,
                             mi_row_pred + j, mi_col_pred + i, mi_row_top,
                             mi_col_top, plane, dst_buf, dst_stride, top_bsize,
                             extend_bsize, b_sub8x8, 1);
  } else {
    extend_bsize = BLOCK_8X8;
#if CONFIG_CB4X4
    if (bsize < BLOCK_8X8) {
      extend_bsize = BLOCK_4X4;
      ext_offset = mi_size_wide[BLOCK_8X8];
    }
#endif
    wide_unit = mi_size_wide[extend_bsize];
    high_unit = mi_size_high[extend_bsize];

    mi_row_pred = mi_row + ((dir == 4 || dir == 6) ? mi_height
                                                   : -(mi_height + ext_offset));
    mi_col_pred =
        mi_col + ((dir == 6 || dir == 7) ? mi_width : -(mi_width + ext_offset));

    for (j = 0; j < mi_height + ext_offset; j += high_unit)
      for (i = 0; i < mi_width + ext_offset; i += wide_unit)
        dec_predict_b_extend(pbi, xd, tile, block, mi_row_ori, mi_col_ori,
                             mi_row_pred + j, mi_col_pred + i, mi_row_top,
                             mi_col_top, plane, dst_buf, dst_stride, top_bsize,
                             extend_bsize, b_sub8x8, 1);
  }
}

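// Extends the prediction of the current block in all eight directions around
// it.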
static void dec_extend_all(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                           const TileInfo *const tile, int block,
                           BLOCK_SIZE bsize, BLOCK_SIZE top_bsize,
                           int mi_row_ori, int mi_col_ori, int mi_row,
                           int mi_col, int mi_row_top, int mi_col_top,
                           int plane, uint8_t *dst_buf, int dst_stride) {
  for (int i = 0; i < 8; ++i) {
    dec_extend_dir(pbi, xd, tile, block, bsize, top_bsize, mi_row_ori,
                   mi_col_ori, mi_row, mi_col, mi_row_top, mi_col_top, plane,
                   dst_buf, dst_stride, i);
  }
}

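// Supertx: builds the prediction for a top-level block by recursing over its
// partition tree and extending each sub-block's prediction over the top-level
// region.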
static void dec_predict_sb_complex(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                                   const TileInfo *const tile, int mi_row,
                                   int mi_col, int mi_row_top, int mi_col_top,
                                   BLOCK_SIZE bsize, BLOCK_SIZE top_bsize,
                                   uint8_t *dst_buf[3], int dst_stride[3]) {
  const AV1_COMMON *const cm = &pbi->common;
  const int hbs = mi_size_wide[bsize] / 2;
  const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
  const BLOCK_SIZE subsize = get_subsize(bsize, partition);
#if CONFIG_EXT_PARTITION_TYPES
  const BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
#endif
  int i;
  const int mi_offset = mi_row * cm->mi_stride + mi_col;
  uint8_t *dst_buf1[3], *dst_buf2[3], *dst_buf3[3];
#if CONFIG_CB4X4
  const int unify_bsize = 1;
#else
  const int unify_bsize = 0;
#endif

  DECLARE_ALIGNED(16, uint8_t, tmp_buf1[MAX_MB_PLANE * MAX_TX_SQUARE * 2]);
  DECLARE_ALIGNED(16, uint8_t, tmp_buf2[MAX_MB_PLANE * MAX_TX_SQUARE * 2]);
  DECLARE_ALIGNED(16, uint8_t, tmp_buf3[MAX_MB_PLANE * MAX_TX_SQUARE * 2]);
  int dst_stride1[3] = { MAX_TX_SIZE, MAX_TX_SIZE, MAX_TX_SIZE };
  int dst_stride2[3] = { MAX_TX_SIZE, MAX_TX_SIZE, MAX_TX_SIZE };
  int dst_stride3[3] = { MAX_TX_SIZE, MAX_TX_SIZE, MAX_TX_SIZE };

#if CONFIG_HIGHBITDEPTH
  if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
    int len = sizeof(uint16_t);
    dst_buf1[0] = CONVERT_TO_BYTEPTR(tmp_buf1);
    dst_buf1[1] = CONVERT_TO_BYTEPTR(tmp_buf1 + MAX_TX_SQUARE * len);
    dst_buf1[2] = CONVERT_TO_BYTEPTR(tmp_buf1 + 2 * MAX_TX_SQUARE * len);
    dst_buf2[0] = CONVERT_TO_BYTEPTR(tmp_buf2);
    dst_buf2[1] = CONVERT_TO_BYTEPTR(tmp_buf2 + MAX_TX_SQUARE * len);
    dst_buf2[2] = CONVERT_TO_BYTEPTR(tmp_buf2 + 2 * MAX_TX_SQUARE * len);
    dst_buf3[0] = CONVERT_TO_BYTEPTR(tmp_buf3);
    dst_buf3[1] = CONVERT_TO_BYTEPTR(tmp_buf3 + MAX_TX_SQUARE * len);
    dst_buf3[2] = CONVERT_TO_BYTEPTR(tmp_buf3 + 2 * MAX_TX_SQUARE * len);
  } else {
#endif
    dst_buf1[0] = tmp_buf1;
    dst_buf1[1] = tmp_buf1 + MAX_TX_SQUARE;
    dst_buf1[2] = tmp_buf1 + 2 * MAX_TX_SQUARE;
    dst_buf2[0] = tmp_buf2;
    dst_buf2[1] = tmp_buf2 + MAX_TX_SQUARE;
    dst_buf2[2] = tmp_buf2 + 2 * MAX_TX_SQUARE;
    dst_buf3[0] = tmp_buf3;
    dst_buf3[1] = tmp_buf3 + MAX_TX_SQUARE;
    dst_buf3[2] = tmp_buf3 + 2 * MAX_TX_SQUARE;
#if CONFIG_HIGHBITDEPTH
  }
#endif

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  xd->mi = cm->mi_grid_visible + mi_offset;
  xd->mi[0] = cm->mi + mi_offset;

  for (i = 0; i < MAX_MB_PLANE; i++) {
    xd->plane[i].dst.buf = dst_buf[i];
    xd->plane[i].dst.stride = dst_stride[i];
  }

  switch (partition) {
    case PARTITION_NONE:
      assert(bsize < top_bsize);
      for (i = 0; i < MAX_MB_PLANE; i++) {
        dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
                             mi_row_top, mi_col_top, i, dst_buf[i],
                             dst_stride[i], top_bsize, bsize, 0, 0);
        dec_extend_all(pbi, xd, tile, 0, bsize, top_bsize, mi_row, mi_col,
                       mi_row, mi_col, mi_row_top, mi_col_top, i, dst_buf[i],
1091 dst_stride[i]);
1092 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001093 break;
1094 case PARTITION_HORZ:
Jingning Hanfeb517c2016-12-21 16:02:07 -08001095 if (bsize == BLOCK_8X8 && !unify_bsize) {
Yue Chen8e689e42017-06-02 10:56:10 -07001096 for (i = 0; i < MAX_MB_PLANE; i++) {
 1097 // For sub8x8 blocks, predict in 8x8 units
1098 // First half
1099 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1100 mi_row_top, mi_col_top, i, dst_buf[i],
1101 dst_stride[i], top_bsize, BLOCK_8X8, 1, 0);
1102 if (bsize < top_bsize)
1103 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1104 mi_row, mi_col, mi_row_top, mi_col_top, i,
1105 dst_buf[i], dst_stride[i]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001106
Yue Chen8e689e42017-06-02 10:56:10 -07001107 // Second half
1108 dec_predict_b_extend(pbi, xd, tile, 2, mi_row, mi_col, mi_row, mi_col,
1109 mi_row_top, mi_col_top, i, dst_buf1[i],
1110 dst_stride1[i], top_bsize, BLOCK_8X8, 1, 1);
1111 if (bsize < top_bsize)
1112 dec_extend_all(pbi, xd, tile, 2, subsize, top_bsize, mi_row, mi_col,
1113 mi_row, mi_col, mi_row_top, mi_col_top, i,
1114 dst_buf1[i], dst_stride1[i]);
1115 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001116
1117 // weighted average to smooth the boundary
1118 xd->plane[0].dst.buf = dst_buf[0];
1119 xd->plane[0].dst.stride = dst_stride[0];
Yaowu Xuf883b422016-08-30 14:01:10 -07001120 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001121 xd, dst_buf[0], dst_stride[0], dst_buf1[0], dst_stride1[0], mi_row,
1122 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
1123 0);
1124 } else {
Yue Chen8e689e42017-06-02 10:56:10 -07001125 for (i = 0; i < MAX_MB_PLANE; i++) {
1126#if CONFIG_CB4X4
1127 const struct macroblockd_plane *pd = &xd->plane[i];
1128 int handle_chroma_sub8x8 = need_handle_chroma_sub8x8(
1129 subsize, pd->subsampling_x, pd->subsampling_y);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001130
Yue Chen8e689e42017-06-02 10:56:10 -07001131 if (handle_chroma_sub8x8) {
1132 int mode_offset_row = CONFIG_CHROMA_SUB8X8 ? hbs : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001133
Yue Chen8e689e42017-06-02 10:56:10 -07001134 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + mode_offset_row,
1135 mi_col, mi_row, mi_col, mi_row_top, mi_col_top,
1136 i, dst_buf[i], dst_stride[i], top_bsize, bsize,
1137 0, 0);
1138 if (bsize < top_bsize)
1139 dec_extend_all(pbi, xd, tile, 0, bsize, top_bsize,
1140 mi_row + mode_offset_row, mi_col, mi_row, mi_col,
1141 mi_row_top, mi_col_top, i, dst_buf[i],
1142 dst_stride[i]);
1143 } else {
1144#endif
1145 // First half
1146 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row,
1147 mi_col, mi_row_top, mi_col_top, i, dst_buf[i],
1148 dst_stride[i], top_bsize, subsize, 0, 0);
1149 if (bsize < top_bsize)
1150 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1151 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1152 dst_buf[i], dst_stride[i]);
1153 else
1154 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1155 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1156 dst_buf[i], dst_stride[i], 0);
1157
1158 if (mi_row + hbs < cm->mi_rows) {
1159 // Second half
1160 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col,
1161 mi_row + hbs, mi_col, mi_row_top, mi_col_top,
1162 i, dst_buf1[i], dst_stride1[i], top_bsize,
1163 subsize, 0, 0);
1164 if (bsize < top_bsize)
1165 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize,
1166 mi_row + hbs, mi_col, mi_row + hbs, mi_col,
1167 mi_row_top, mi_col_top, i, dst_buf1[i],
1168 dst_stride1[i]);
1169 else
1170 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize,
1171 mi_row + hbs, mi_col, mi_row + hbs, mi_col,
1172 mi_row_top, mi_col_top, i, dst_buf1[i],
1173 dst_stride1[i], 1);
1174
1175 // weighted average to smooth the boundary
1176 xd->plane[i].dst.buf = dst_buf[i];
1177 xd->plane[i].dst.stride = dst_stride[i];
1178 av1_build_masked_inter_predictor_complex(
1179 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i],
1180 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1181 PARTITION_HORZ, i);
1182 }
1183#if CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07001184 }
Yue Chen8e689e42017-06-02 10:56:10 -07001185#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001186 }
1187 }
1188 break;
1189 case PARTITION_VERT:
Jingning Hanfeb517c2016-12-21 16:02:07 -08001190 if (bsize == BLOCK_8X8 && !unify_bsize) {
Yue Chen8e689e42017-06-02 10:56:10 -07001191 for (i = 0; i < MAX_MB_PLANE; i++) {
1192 // First half
1193 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1194 mi_row_top, mi_col_top, i, dst_buf[i],
1195 dst_stride[i], top_bsize, BLOCK_8X8, 1, 0);
1196 if (bsize < top_bsize)
1197 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1198 mi_row, mi_col, mi_row_top, mi_col_top, i,
1199 dst_buf[i], dst_stride[i]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001200
Yue Chen8e689e42017-06-02 10:56:10 -07001201 // Second half
1202 dec_predict_b_extend(pbi, xd, tile, 1, mi_row, mi_col, mi_row, mi_col,
1203 mi_row_top, mi_col_top, i, dst_buf1[i],
1204 dst_stride1[i], top_bsize, BLOCK_8X8, 1, 1);
1205 if (bsize < top_bsize)
1206 dec_extend_all(pbi, xd, tile, 1, subsize, top_bsize, mi_row, mi_col,
1207 mi_row, mi_col, mi_row_top, mi_col_top, i,
1208 dst_buf1[i], dst_stride1[i]);
1209 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001210
1211 // Smooth
1212 xd->plane[0].dst.buf = dst_buf[0];
1213 xd->plane[0].dst.stride = dst_stride[0];
Yaowu Xuf883b422016-08-30 14:01:10 -07001214 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001215 xd, dst_buf[0], dst_stride[0], dst_buf1[0], dst_stride1[0], mi_row,
1216 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
1217 0);
1218 } else {
Yue Chen8e689e42017-06-02 10:56:10 -07001219 for (i = 0; i < MAX_MB_PLANE; i++) {
1220#if CONFIG_CB4X4
1221 const struct macroblockd_plane *pd = &xd->plane[i];
1222 int handle_chroma_sub8x8 = need_handle_chroma_sub8x8(
1223 subsize, pd->subsampling_x, pd->subsampling_y);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001224
Yue Chen8e689e42017-06-02 10:56:10 -07001225 if (handle_chroma_sub8x8) {
1226 int mode_offset_col = CONFIG_CHROMA_SUB8X8 ? hbs : 0;
1227 assert(i > 0 && bsize == BLOCK_8X8);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001228
Yue Chen8e689e42017-06-02 10:56:10 -07001229 dec_predict_b_extend(pbi, xd, tile, 0, mi_row,
1230 mi_col + mode_offset_col, mi_row, mi_col,
1231 mi_row_top, mi_col_top, i, dst_buf[i],
1232 dst_stride[i], top_bsize, bsize, 0, 0);
1233 if (bsize < top_bsize)
1234 dec_extend_all(pbi, xd, tile, 0, bsize, top_bsize, mi_row,
1235 mi_col + mode_offset_col, mi_row, mi_col,
1236 mi_row_top, mi_col_top, i, dst_buf[i],
1237 dst_stride[i]);
1238 } else {
1239#endif
1240 // First half
1241 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row,
1242 mi_col, mi_row_top, mi_col_top, i, dst_buf[i],
1243 dst_stride[i], top_bsize, subsize, 0, 0);
1244 if (bsize < top_bsize)
1245 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1246 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1247 dst_buf[i], dst_stride[i]);
1248 else
1249 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1250 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1251 dst_buf[i], dst_stride[i], 3);
1252
1253 // Second half
1254 if (mi_col + hbs < cm->mi_cols) {
1255 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs,
1256 mi_row, mi_col + hbs, mi_row_top, mi_col_top,
1257 i, dst_buf1[i], dst_stride1[i], top_bsize,
1258 subsize, 0, 0);
1259 if (bsize < top_bsize)
1260 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1261 mi_col + hbs, mi_row, mi_col + hbs, mi_row_top,
1262 mi_col_top, i, dst_buf1[i], dst_stride1[i]);
1263 else
1264 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1265 mi_col + hbs, mi_row, mi_col + hbs, mi_row_top,
1266 mi_col_top, i, dst_buf1[i], dst_stride1[i], 2);
1267
1268 // Smooth
1269 xd->plane[i].dst.buf = dst_buf[i];
1270 xd->plane[i].dst.stride = dst_stride[i];
1271 av1_build_masked_inter_predictor_complex(
1272 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i],
1273 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1274 PARTITION_VERT, i);
1275 }
1276#if CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07001277 }
Yue Chen8e689e42017-06-02 10:56:10 -07001278#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001279 }
1280 }
1281 break;
1282 case PARTITION_SPLIT:
Jingning Hanfeb517c2016-12-21 16:02:07 -08001283 if (bsize == BLOCK_8X8 && !unify_bsize) {
Yue Chen8e689e42017-06-02 10:56:10 -07001284 for (i = 0; i < MAX_MB_PLANE; i++) {
1285 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1286 mi_row_top, mi_col_top, i, dst_buf[i],
1287 dst_stride[i], top_bsize, BLOCK_8X8, 1, 0);
1288 dec_predict_b_extend(pbi, xd, tile, 1, mi_row, mi_col, mi_row, mi_col,
1289 mi_row_top, mi_col_top, i, dst_buf1[i],
1290 dst_stride1[i], top_bsize, BLOCK_8X8, 1, 1);
1291 dec_predict_b_extend(pbi, xd, tile, 2, mi_row, mi_col, mi_row, mi_col,
1292 mi_row_top, mi_col_top, i, dst_buf2[i],
1293 dst_stride2[i], top_bsize, BLOCK_8X8, 1, 1);
1294 dec_predict_b_extend(pbi, xd, tile, 3, mi_row, mi_col, mi_row, mi_col,
1295 mi_row_top, mi_col_top, i, dst_buf3[i],
1296 dst_stride3[i], top_bsize, BLOCK_8X8, 1, 1);
1297 if (bsize < top_bsize) {
1298 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1299 mi_row, mi_col, mi_row_top, mi_col_top, i,
1300 dst_buf[i], dst_stride[i]);
1301 dec_extend_all(pbi, xd, tile, 1, subsize, top_bsize, mi_row, mi_col,
1302 mi_row, mi_col, mi_row_top, mi_col_top, i,
1303 dst_buf1[i], dst_stride1[i]);
1304 dec_extend_all(pbi, xd, tile, 2, subsize, top_bsize, mi_row, mi_col,
1305 mi_row, mi_col, mi_row_top, mi_col_top, i,
1306 dst_buf2[i], dst_stride2[i]);
1307 dec_extend_all(pbi, xd, tile, 3, subsize, top_bsize, mi_row, mi_col,
1308 mi_row, mi_col, mi_row_top, mi_col_top, i,
1309 dst_buf3[i], dst_stride3[i]);
1310 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001311 }
Yue Chen8e689e42017-06-02 10:56:10 -07001312#if CONFIG_CB4X4
1313 } else if (bsize == BLOCK_8X8) {
1314 for (i = 0; i < MAX_MB_PLANE; i++) {
1315 const struct macroblockd_plane *pd = &xd->plane[i];
1316 int handle_chroma_sub8x8 = need_handle_chroma_sub8x8(
1317 subsize, pd->subsampling_x, pd->subsampling_y);
1318
1319 if (handle_chroma_sub8x8) {
1320 int mode_offset_row =
1321 CONFIG_CHROMA_SUB8X8 && mi_row + hbs < cm->mi_rows ? hbs : 0;
1322 int mode_offset_col =
1323 CONFIG_CHROMA_SUB8X8 && mi_col + hbs < cm->mi_cols ? hbs : 0;
1324
1325 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + mode_offset_row,
1326 mi_col + mode_offset_col, mi_row, mi_col,
1327 mi_row_top, mi_col_top, i, dst_buf[i],
1328 dst_stride[i], top_bsize, BLOCK_8X8, 0, 0);
1329 if (bsize < top_bsize)
1330 dec_extend_all(pbi, xd, tile, 0, BLOCK_8X8, top_bsize,
1331 mi_row + mode_offset_row, mi_col + mode_offset_col,
1332 mi_row, mi_col, mi_row_top, mi_col_top, i,
1333 dst_buf[i], dst_stride[i]);
1334 } else {
1335 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row,
1336 mi_col, mi_row_top, mi_col_top, i, dst_buf[i],
1337 dst_stride[i], top_bsize, subsize, 0, 0);
1338 if (mi_row < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1339 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs,
1340 mi_row, mi_col + hbs, mi_row_top, mi_col_top,
1341 i, dst_buf1[i], dst_stride1[i], top_bsize,
1342 subsize, 0, 0);
1343 if (mi_row + hbs < cm->mi_rows && mi_col < cm->mi_cols)
1344 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col,
1345 mi_row + hbs, mi_col, mi_row_top, mi_col_top,
1346 i, dst_buf2[i], dst_stride2[i], top_bsize,
1347 subsize, 0, 0);
1348 if (mi_row + hbs < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1349 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col + hbs,
1350 mi_row + hbs, mi_col + hbs, mi_row_top,
1351 mi_col_top, i, dst_buf3[i], dst_stride3[i],
1352 top_bsize, subsize, 0, 0);
1353
1354 if (bsize < top_bsize) {
1355 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1356 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1357 dst_buf[i], dst_stride[i]);
1358 if (mi_row < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1359 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1360 mi_col + hbs, mi_row, mi_col + hbs, mi_row_top,
1361 mi_col_top, i, dst_buf1[i], dst_stride1[i]);
1362 if (mi_row + hbs < cm->mi_rows && mi_col < cm->mi_cols)
1363 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize,
1364 mi_row + hbs, mi_col, mi_row + hbs, mi_col,
1365 mi_row_top, mi_col_top, i, dst_buf2[i],
1366 dst_stride2[i]);
1367 if (mi_row + hbs < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1368 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize,
1369 mi_row + hbs, mi_col + hbs, mi_row + hbs,
1370 mi_col + hbs, mi_row_top, mi_col_top, i,
1371 dst_buf3[i], dst_stride3[i]);
1372 }
1373 }
1374 }
1375#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001376 } else {
1377 dec_predict_sb_complex(pbi, xd, tile, mi_row, mi_col, mi_row_top,
1378 mi_col_top, subsize, top_bsize, dst_buf,
1379 dst_stride);
1380 if (mi_row < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1381 dec_predict_sb_complex(pbi, xd, tile, mi_row, mi_col + hbs,
1382 mi_row_top, mi_col_top, subsize, top_bsize,
1383 dst_buf1, dst_stride1);
1384 if (mi_row + hbs < cm->mi_rows && mi_col < cm->mi_cols)
1385 dec_predict_sb_complex(pbi, xd, tile, mi_row + hbs, mi_col,
1386 mi_row_top, mi_col_top, subsize, top_bsize,
1387 dst_buf2, dst_stride2);
1388 if (mi_row + hbs < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1389 dec_predict_sb_complex(pbi, xd, tile, mi_row + hbs, mi_col + hbs,
1390 mi_row_top, mi_col_top, subsize, top_bsize,
1391 dst_buf3, dst_stride3);
1392 }
1393 for (i = 0; i < MAX_MB_PLANE; i++) {
Yue Chen8e689e42017-06-02 10:56:10 -07001394#if CONFIG_CB4X4
1395 const struct macroblockd_plane *pd = &xd->plane[i];
1396 int handle_chroma_sub8x8 = need_handle_chroma_sub8x8(
1397 subsize, pd->subsampling_x, pd->subsampling_y);
1398 if (handle_chroma_sub8x8) continue; // Skip <4x4 chroma smoothing
1399#else
Jingning Han24f24a52016-12-27 10:13:28 -08001400 if (bsize == BLOCK_8X8 && i != 0)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001401 continue; // Skip <4x4 chroma smoothing
Jingning Han9e0976a2016-12-27 17:52:42 -08001402#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001403 if (mi_row < cm->mi_rows && mi_col + hbs < cm->mi_cols) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001404 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001405 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i],
1406 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1407 PARTITION_VERT, i);
1408 if (mi_row + hbs < cm->mi_rows) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001409 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001410 xd, dst_buf2[i], dst_stride2[i], dst_buf3[i], dst_stride3[i],
1411 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1412 PARTITION_VERT, i);
Yaowu Xuf883b422016-08-30 14:01:10 -07001413 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001414 xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i],
1415 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1416 PARTITION_HORZ, i);
1417 }
1418 } else if (mi_row + hbs < cm->mi_rows && mi_col < cm->mi_cols) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001419 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001420 xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i],
1421 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1422 PARTITION_HORZ, i);
1423 }
1424 }
1425 break;
1426#if CONFIG_EXT_PARTITION_TYPES
1427 case PARTITION_HORZ_A:
1428 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1429 mi_row_top, mi_col_top, dst_buf, dst_stride,
1430 top_bsize, bsize2, 0, 0);
1431 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col,
1432 mi_row_top, mi_col_top, dst_buf, dst_stride);
1433
1434 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs, mi_row,
1435 mi_col + hbs, mi_row_top, mi_col_top, dst_buf1,
1436 dst_stride1, top_bsize, bsize2, 0, 0);
1437 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col + hbs,
1438 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1439
1440 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col, mi_row + hbs,
1441 mi_col, mi_row_top, mi_col_top, dst_buf2,
1442 dst_stride2, top_bsize, subsize, 0, 0);
1443 if (bsize < top_bsize)
1444 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row + hbs,
1445 mi_col, mi_row_top, mi_col_top, dst_buf2, dst_stride2);
1446 else
1447 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row + hbs,
1448 mi_col, mi_row_top, mi_col_top, dst_buf2, dst_stride2,
1449 1);
1450
1451 for (i = 0; i < MAX_MB_PLANE; i++) {
1452 xd->plane[i].dst.buf = dst_buf[i];
1453 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001454 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001455 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1456 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
1457 i);
1458 }
1459 for (i = 0; i < MAX_MB_PLANE; i++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001460 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001461 xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i], mi_row,
1462 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
1463 i);
1464 }
1465 break;
1466 case PARTITION_VERT_A:
1467
1468 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1469 mi_row_top, mi_col_top, dst_buf, dst_stride,
1470 top_bsize, bsize2, 0, 0);
1471 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col,
1472 mi_row_top, mi_col_top, dst_buf, dst_stride);
1473
1474 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col, mi_row + hbs,
1475 mi_col, mi_row_top, mi_col_top, dst_buf1,
1476 dst_stride1, top_bsize, bsize2, 0, 0);
1477 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs, mi_col,
1478 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1479
1480 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs, mi_row,
1481 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1482 dst_stride2, top_bsize, subsize, 0, 0);
1483 if (bsize < top_bsize)
1484 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1485 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1486 dst_stride2);
1487 else
1488 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1489 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1490 dst_stride2, 2);
1491
1492 for (i = 0; i < MAX_MB_PLANE; i++) {
1493 xd->plane[i].dst.buf = dst_buf[i];
1494 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001495 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001496 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1497 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
1498 i);
1499 }
1500 for (i = 0; i < MAX_MB_PLANE; i++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001501 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001502 xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i], mi_row,
1503 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
1504 i);
1505 }
1506 break;
1507 case PARTITION_HORZ_B:
1508 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1509 mi_row_top, mi_col_top, dst_buf, dst_stride,
1510 top_bsize, subsize, 0, 0);
1511 if (bsize < top_bsize)
1512 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1513 mi_row_top, mi_col_top, dst_buf, dst_stride);
1514 else
1515 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1516 mi_row_top, mi_col_top, dst_buf, dst_stride, 0);
1517
1518 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col, mi_row + hbs,
1519 mi_col, mi_row_top, mi_col_top, dst_buf1,
1520 dst_stride1, top_bsize, bsize2, 0, 0);
1521 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs, mi_col,
1522 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1523
1524 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col + hbs,
1525 mi_row + hbs, mi_col + hbs, mi_row_top, mi_col_top,
1526 dst_buf2, dst_stride2, top_bsize, bsize2, 0, 0);
1527 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs,
1528 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1529 dst_stride2);
1530
1531 for (i = 0; i < MAX_MB_PLANE; i++) {
1532 xd->plane[i].dst.buf = dst_buf1[i];
1533 xd->plane[i].dst.stride = dst_stride1[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001534 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001535 xd, dst_buf1[i], dst_stride1[i], dst_buf2[i], dst_stride2[i],
1536 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1537 PARTITION_VERT, i);
1538 }
1539 for (i = 0; i < MAX_MB_PLANE; i++) {
1540 xd->plane[i].dst.buf = dst_buf[i];
1541 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001542 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001543 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1544 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
1545 i);
1546 }
1547 break;
1548 case PARTITION_VERT_B:
1549 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1550 mi_row_top, mi_col_top, dst_buf, dst_stride,
1551 top_bsize, subsize, 0, 0);
1552 if (bsize < top_bsize)
1553 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1554 mi_row_top, mi_col_top, dst_buf, dst_stride);
1555 else
1556 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1557 mi_row_top, mi_col_top, dst_buf, dst_stride, 3);
1558
1559 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs, mi_row,
1560 mi_col + hbs, mi_row_top, mi_col_top, dst_buf1,
1561 dst_stride1, top_bsize, bsize2, 0, 0);
1562 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col + hbs,
1563 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1564
1565 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col + hbs,
1566 mi_row + hbs, mi_col + hbs, mi_row_top, mi_col_top,
1567 dst_buf2, dst_stride2, top_bsize, bsize2, 0, 0);
1568 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs,
1569 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1570 dst_stride2);
1571
1572 for (i = 0; i < MAX_MB_PLANE; i++) {
1573 xd->plane[i].dst.buf = dst_buf1[i];
1574 xd->plane[i].dst.stride = dst_stride1[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001575 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001576 xd, dst_buf1[i], dst_stride1[i], dst_buf2[i], dst_stride2[i],
1577 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1578 PARTITION_HORZ, i);
1579 }
1580 for (i = 0; i < MAX_MB_PLANE; i++) {
1581 xd->plane[i].dst.buf = dst_buf[i];
1582 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001583 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001584 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1585 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
1586 i);
1587 }
1588 break;
1589#endif // CONFIG_EXT_PARTITION_TYPES
1590 default: assert(0);
1591 }
1592}
1593
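// For a SUPERTX-coded region, take the minimum segment_id over all covered
// mi units and store it in each unit's segment_id_supertx.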
Yaowu Xu4ff59b52017-04-24 12:41:56 -07001594static void set_segment_id_supertx(const AV1_COMMON *const cm, int mi_row,
1595 int mi_col, BLOCK_SIZE bsize) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001596 const struct segmentation *seg = &cm->seg;
Jingning Han5b7706a2016-12-21 09:55:10 -08001597 const int miw = AOMMIN(mi_size_wide[bsize], cm->mi_cols - mi_col);
1598 const int mih = AOMMIN(mi_size_high[bsize], cm->mi_rows - mi_row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001599 const int mi_offset = mi_row * cm->mi_stride + mi_col;
1600 MODE_INFO **const mip = cm->mi_grid_visible + mi_offset;
1601 int r, c;
1602 int seg_id_supertx = MAX_SEGMENTS;
1603
1604 if (!seg->enabled) {
1605 seg_id_supertx = 0;
1606 } else {
1607 // Find the minimum segment_id
1608 for (r = 0; r < mih; r++)
1609 for (c = 0; c < miw; c++)
1610 seg_id_supertx =
Yaowu Xuf883b422016-08-30 14:01:10 -07001611 AOMMIN(mip[r * cm->mi_stride + c]->mbmi.segment_id, seg_id_supertx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001612 assert(0 <= seg_id_supertx && seg_id_supertx < MAX_SEGMENTS);
1613 }
1614
 1615 // Propagate the derived segment_id to every block's segment_id_supertx
1616 for (r = 0; r < mih; r++)
1617 for (c = 0; c < miw; c++)
1618 mip[r * cm->mi_stride + c]->mbmi.segment_id_supertx = seg_id_supertx;
1619}
1620#endif // CONFIG_SUPERTX
1621
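// Read the mode info for one block from the bitstream; token decoding and
// reconstruction are done separately in decode_token_and_recon_block().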
Yue Chen64550b62017-01-12 12:18:22 -08001622static void decode_mbmi_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001623#if CONFIG_SUPERTX
Yue Chen64550b62017-01-12 12:18:22 -08001624 int supertx_enabled,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001625#endif // CONFIG_SUPERTX
Yue Chen64550b62017-01-12 12:18:22 -08001626 int mi_row, int mi_col, aom_reader *r,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001627#if CONFIG_EXT_PARTITION_TYPES
Yue Chen64550b62017-01-12 12:18:22 -08001628 PARTITION_TYPE partition,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001629#endif // CONFIG_EXT_PARTITION_TYPES
Yue Chen64550b62017-01-12 12:18:22 -08001630 BLOCK_SIZE bsize) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001631 AV1_COMMON *const cm = &pbi->common;
Jingning Han85dc03f2016-12-06 16:03:10 -08001632 const int bw = mi_size_wide[bsize];
1633 const int bh = mi_size_high[bsize];
Yaowu Xuf883b422016-08-30 14:01:10 -07001634 const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
1635 const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);
Nathan E. Eggeebbd4792016-10-05 19:30:15 -04001636
Michael Bebenita6048d052016-08-25 14:40:54 -07001637#if CONFIG_ACCOUNTING
1638 aom_accounting_set_context(&pbi->accounting, mi_col, mi_row);
1639#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001640#if CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07001641 if (supertx_enabled) {
Yue Chen64550b62017-01-12 12:18:22 -08001642 set_mb_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001643 } else {
Yue Chen64550b62017-01-12 12:18:22 -08001644 set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001645 }
1646#if CONFIG_EXT_PARTITION_TYPES
1647 xd->mi[0]->mbmi.partition = partition;
1648#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001649 av1_read_mode_info(pbi, xd, supertx_enabled, mi_row, mi_col, r, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001650#else
Yue Chen64550b62017-01-12 12:18:22 -08001651 set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001652#if CONFIG_EXT_PARTITION_TYPES
1653 xd->mi[0]->mbmi.partition = partition;
1654#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001655 av1_read_mode_info(pbi, xd, mi_row, mi_col, r, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001656#endif // CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07001657 if (bsize >= BLOCK_8X8 && (cm->subsampling_x || cm->subsampling_y)) {
1658 const BLOCK_SIZE uv_subsize =
1659 ss_size_lookup[bsize][cm->subsampling_x][cm->subsampling_y];
1660 if (uv_subsize == BLOCK_INVALID)
Yaowu Xuf883b422016-08-30 14:01:10 -07001661 aom_internal_error(xd->error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001662 "Invalid block size.");
1663 }
1664
1665#if CONFIG_SUPERTX
Yue Chen64550b62017-01-12 12:18:22 -08001666 xd->mi[0]->mbmi.segment_id_supertx = MAX_SEGMENTS;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001667#endif // CONFIG_SUPERTX
1668
Angie Chiangd0916d92017-03-10 17:54:18 -08001669 int reader_corrupted_flag = aom_reader_has_error(r);
1670 aom_merge_corrupted_flag(&xd->corrupted, reader_corrupted_flag);
Yue Chen64550b62017-01-12 12:18:22 -08001671}
1672
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07001673#if CONFIG_NCOBMC_ADAPT_WEIGHT
1674static void set_mode_info_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
1675 int mi_row, int mi_col) {
1676 const int offset = mi_row * cm->mi_stride + mi_col;
1677 xd->mi = cm->mi_grid_visible + offset;
1678 xd->mi[0] = &cm->mi[offset];
1679}
1680
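// Build the NCOBMC interpolated prediction for a block: collect the
// predictions extended from neighbouring blocks and the block's own
// prediction into temporary buffers, then blend them per plane according to
// the signalled interpolation mode.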
1681static void get_ncobmc_recon(AV1_COMMON *const cm, MACROBLOCKD *xd, int mi_row,
1682 int mi_col, int bsize, int mode) {
1683#if CONFIG_HIGHBITDEPTH
1684 DECLARE_ALIGNED(16, uint8_t, tmp_buf_0[2 * MAX_MB_PLANE * MAX_SB_SQUARE]);
1685 DECLARE_ALIGNED(16, uint8_t, tmp_buf_1[2 * MAX_MB_PLANE * MAX_SB_SQUARE]);
1686 DECLARE_ALIGNED(16, uint8_t, tmp_buf_2[2 * MAX_MB_PLANE * MAX_SB_SQUARE]);
1687 DECLARE_ALIGNED(16, uint8_t, tmp_buf_3[2 * MAX_MB_PLANE * MAX_SB_SQUARE]);
1688#else
1689 DECLARE_ALIGNED(16, uint8_t, tmp_buf_0[MAX_MB_PLANE * MAX_SB_SQUARE]);
1690 DECLARE_ALIGNED(16, uint8_t, tmp_buf_1[MAX_MB_PLANE * MAX_SB_SQUARE]);
1691 DECLARE_ALIGNED(16, uint8_t, tmp_buf_2[MAX_MB_PLANE * MAX_SB_SQUARE]);
1692 DECLARE_ALIGNED(16, uint8_t, tmp_buf_3[MAX_MB_PLANE * MAX_SB_SQUARE]);
1693#endif
1694 uint8_t *pred_buf[4][MAX_MB_PLANE];
1695 int pred_stride[MAX_MB_PLANE] = { MAX_SB_SIZE, MAX_SB_SIZE, MAX_SB_SIZE };
 1696 // Top-left of the target block, in pixel units
1697 int pxl_row = mi_row << MI_SIZE_LOG2;
1698 int pxl_col = mi_col << MI_SIZE_LOG2;
1699
1700 int plane;
1701#if CONFIG_HIGHBITDEPTH
1702 if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
1703 int len = sizeof(uint16_t);
1704 ASSIGN_ALIGNED_PTRS_HBD(pred_buf[0], tmp_buf_0, MAX_SB_SQUARE, len);
 1705 ASSIGN_ALIGNED_PTRS_HBD(pred_buf[1], tmp_buf_1, MAX_SB_SQUARE, len);
 1706 ASSIGN_ALIGNED_PTRS_HBD(pred_buf[2], tmp_buf_2, MAX_SB_SQUARE, len);
 1707 ASSIGN_ALIGNED_PTRS_HBD(pred_buf[3], tmp_buf_3, MAX_SB_SQUARE, len);
1708 } else {
1709#endif // CONFIG_HIGHBITDEPTH
1710 ASSIGN_ALIGNED_PTRS(pred_buf[0], tmp_buf_0, MAX_SB_SQUARE);
1711 ASSIGN_ALIGNED_PTRS(pred_buf[1], tmp_buf_1, MAX_SB_SQUARE);
1712 ASSIGN_ALIGNED_PTRS(pred_buf[2], tmp_buf_2, MAX_SB_SQUARE);
1713 ASSIGN_ALIGNED_PTRS(pred_buf[3], tmp_buf_3, MAX_SB_SQUARE);
1714#if CONFIG_HIGHBITDEPTH
1715 }
1716#endif
1717 av1_get_ext_blk_preds(cm, xd, bsize, mi_row, mi_col, pred_buf, pred_stride);
1718 av1_get_ori_blk_pred(cm, xd, bsize, mi_row, mi_col, pred_buf[3], pred_stride);
1719 for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
1720 build_ncobmc_intrpl_pred(cm, xd, plane, pxl_row, pxl_col, bsize, pred_buf,
1721 pred_stride, mode);
1722 }
1723}
1724
1725static void av1_get_ncobmc_recon(AV1_COMMON *const cm, MACROBLOCKD *const xd,
1726 int bsize, const int mi_row, const int mi_col,
1727 const NCOBMC_MODE modes) {
1728 const int mi_width = mi_size_wide[bsize];
1729 const int mi_height = mi_size_high[bsize];
1730
1731 assert(bsize >= BLOCK_8X8);
1732
1733 reset_xd_boundary(xd, mi_row, mi_height, mi_col, mi_width, cm->mi_rows,
1734 cm->mi_cols);
1735 get_ncobmc_recon(cm, xd, mi_row, mi_col, bsize, modes);
1736}
1737
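// Rebuild the prediction with NCOBMC adaptive weights; a rectangular block is
// handled as two square halves, each with its own ncobmc_mode, and the
// destination planes and mode-info pointers are restored afterwards.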
1738static void recon_ncobmc_intrpl_pred(AV1_COMMON *const cm,
1739 MACROBLOCKD *const xd, int mi_row,
1740 int mi_col, BLOCK_SIZE bsize) {
1741 MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
1742 const int mi_width = mi_size_wide[bsize];
1743 const int mi_height = mi_size_high[bsize];
1744 const int hbs = AOMMAX(mi_size_wide[bsize] / 2, mi_size_high[bsize] / 2);
1745 const BLOCK_SIZE sqr_blk = bsize_2_sqr_bsize[bsize];
1746 if (mi_width > mi_height) {
1747 // horizontal partition
1748 av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
1749 xd->mi += hbs;
1750 av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col + hbs,
1751 mbmi->ncobmc_mode[1]);
1752 } else if (mi_height > mi_width) {
1753 // vertical partition
1754 av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
1755 xd->mi += hbs * xd->mi_stride;
1756 av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row + hbs, mi_col,
1757 mbmi->ncobmc_mode[1]);
1758 } else {
1759 av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
1760 }
1761 set_mode_info_offsets(cm, xd, mi_row, mi_col);
1762 // restore dst buffer and mode info
1763 av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
1764 mi_col);
1765}
1766#endif // CONFIG_NCOBMC_ADAPT_WEIGHT
1767
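// Decode the coefficient tokens for one block and reconstruct it: intra
// blocks are predicted and reconstructed one transform block at a time, inter
// blocks get their full prediction first and the residual is then added.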
Yue Chen64550b62017-01-12 12:18:22 -08001768static void decode_token_and_recon_block(AV1Decoder *const pbi,
1769 MACROBLOCKD *const xd, int mi_row,
1770 int mi_col, aom_reader *r,
1771 BLOCK_SIZE bsize) {
1772 AV1_COMMON *const cm = &pbi->common;
1773 const int bw = mi_size_wide[bsize];
1774 const int bh = mi_size_high[bsize];
1775 const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
1776 const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);
Yue Chen64550b62017-01-12 12:18:22 -08001777
Angie Chiang44701f22017-02-27 10:36:44 -08001778 set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
1779 MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
Yue Chen19e7aa82016-11-30 14:05:39 -08001780
Arild Fuldseth07441162016-08-15 15:07:52 +02001781#if CONFIG_DELTA_Q
1782 if (cm->delta_q_present_flag) {
1783 int i;
1784 for (i = 0; i < MAX_SEGMENTS; i++) {
Fangwen Fu6160df22017-04-24 09:45:51 -07001785#if CONFIG_EXT_DELTA_Q
Cheng Chen49d30e62017-08-28 20:59:27 -07001786 const int current_qindex =
1787 av1_get_qindex(&cm->seg, i, xd->current_qindex);
Fangwen Fu6160df22017-04-24 09:45:51 -07001788#else
Cheng Chen49d30e62017-08-28 20:59:27 -07001789 const int current_qindex = xd->current_qindex;
1790#endif // CONFIG_EXT_DELTA_Q
1791 int j;
1792 for (j = 0; j < MAX_MB_PLANE; ++j) {
1793 const int dc_delta_q = j == 0 ? cm->y_dc_delta_q : cm->uv_dc_delta_q;
1794 const int ac_delta_q = j == 0 ? 0 : cm->uv_ac_delta_q;
1795
1796 xd->plane[j].seg_dequant[i][0] =
1797 av1_dc_quant(current_qindex, dc_delta_q, cm->bit_depth);
1798 xd->plane[j].seg_dequant[i][1] =
1799 av1_ac_quant(current_qindex, ac_delta_q, cm->bit_depth);
1800 }
Arild Fuldseth07441162016-08-15 15:07:52 +02001801 }
1802 }
1803#endif
1804
Jingning Han41bb3392016-12-14 10:46:48 -08001805#if CONFIG_CB4X4
Timothy B. Terriberrya2d5cde2017-05-10 18:33:50 -07001806 if (mbmi->skip) av1_reset_skip_context(xd, mi_row, mi_col, bsize);
Jingning Han41bb3392016-12-14 10:46:48 -08001807#else
Timothy B. Terriberrya2d5cde2017-05-10 18:33:50 -07001808 if (mbmi->skip) {
1809 av1_reset_skip_context(xd, mi_row, mi_col, AOMMAX(BLOCK_8X8, bsize));
1810 }
Jingning Han41bb3392016-12-14 10:46:48 -08001811#endif
Jingning Hand39cc722016-12-02 14:03:26 -08001812
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001813#if CONFIG_COEF_INTERLEAVE
1814 {
1815 const struct macroblockd_plane *const pd_y = &xd->plane[0];
1816 const struct macroblockd_plane *const pd_c = &xd->plane[1];
1817 const TX_SIZE tx_log2_y = mbmi->tx_size;
hui su0c6244b2017-07-12 17:11:43 -07001818 const TX_SIZE tx_log2_c = av1_get_uv_tx_size(mbmi, pd_c);
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001819 const int tx_sz_y = (1 << tx_log2_y);
1820 const int tx_sz_c = (1 << tx_log2_c);
1821 const int num_4x4_w_y = pd_y->n4_w;
1822 const int num_4x4_h_y = pd_y->n4_h;
1823 const int num_4x4_w_c = pd_c->n4_w;
1824 const int num_4x4_h_c = pd_c->n4_h;
1825 const int max_4x4_w_y = get_max_4x4_size(num_4x4_w_y, xd->mb_to_right_edge,
1826 pd_y->subsampling_x);
1827 const int max_4x4_h_y = get_max_4x4_size(num_4x4_h_y, xd->mb_to_bottom_edge,
1828 pd_y->subsampling_y);
1829 const int max_4x4_w_c = get_max_4x4_size(num_4x4_w_c, xd->mb_to_right_edge,
1830 pd_c->subsampling_x);
1831 const int max_4x4_h_c = get_max_4x4_size(num_4x4_h_c, xd->mb_to_bottom_edge,
1832 pd_c->subsampling_y);
1833
1834 // The max_4x4_w/h may be smaller than tx_sz under some corner cases,
 1835 // e.g. when the SB is split by tile boundaries.
1836 const int tu_num_w_y = (max_4x4_w_y + tx_sz_y - 1) / tx_sz_y;
1837 const int tu_num_h_y = (max_4x4_h_y + tx_sz_y - 1) / tx_sz_y;
1838 const int tu_num_w_c = (max_4x4_w_c + tx_sz_c - 1) / tx_sz_c;
1839 const int tu_num_h_c = (max_4x4_h_c + tx_sz_c - 1) / tx_sz_c;
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001840 const int tu_num_c = tu_num_w_c * tu_num_h_c;
1841
1842 if (!is_inter_block(mbmi)) {
1843 int tu_idx_c = 0;
1844 int row_y, col_y, row_c, col_c;
1845 int plane;
1846
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001847 for (plane = 0; plane <= 1; ++plane) {
1848 if (mbmi->palette_mode_info.palette_size[plane])
1849 av1_decode_palette_tokens(xd, plane, r);
1850 }
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001851
1852 for (row_y = 0; row_y < tu_num_h_y; row_y++) {
1853 for (col_y = 0; col_y < tu_num_w_y; col_y++) {
1854 // luma
1855 predict_and_reconstruct_intra_block(
1856 cm, xd, r, mbmi, 0, row_y * tx_sz_y, col_y * tx_sz_y, tx_log2_y);
1857 // chroma
1858 if (tu_idx_c < tu_num_c) {
1859 row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
1860 col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
1861 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 1, row_c,
1862 col_c, tx_log2_c);
1863 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 2, row_c,
1864 col_c, tx_log2_c);
1865 tu_idx_c++;
1866 }
1867 }
1868 }
1869
 1870 // In the 422 case, it's possible that Chroma has more TUs than Luma
1871 while (tu_idx_c < tu_num_c) {
1872 row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
1873 col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
1874 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 1, row_c, col_c,
1875 tx_log2_c);
1876 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 2, row_c, col_c,
1877 tx_log2_c);
1878 tu_idx_c++;
1879 }
1880 } else {
1881 // Prediction
Jingning Hanc44009c2017-05-06 11:36:49 -07001882 av1_build_inter_predictors_sb(cm, xd, mi_row, mi_col, NULL,
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001883 AOMMAX(bsize, BLOCK_8X8));
1884
1885 // Reconstruction
1886 if (!mbmi->skip) {
1887 int eobtotal = 0;
1888 int tu_idx_c = 0;
1889 int row_y, col_y, row_c, col_c;
1890
1891 for (row_y = 0; row_y < tu_num_h_y; row_y++) {
1892 for (col_y = 0; col_y < tu_num_w_y; col_y++) {
1893 // luma
1894 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id, 0,
1895 row_y * tx_sz_y,
1896 col_y * tx_sz_y, tx_log2_y);
1897 // chroma
1898 if (tu_idx_c < tu_num_c) {
1899 row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
1900 col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
1901 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id,
1902 1, row_c, col_c, tx_log2_c);
1903 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id,
1904 2, row_c, col_c, tx_log2_c);
1905 tu_idx_c++;
1906 }
1907 }
1908 }
1909
 1910 // In the 422 case, it's possible that Chroma has more TUs than Luma
1911 while (tu_idx_c < tu_num_c) {
1912 row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
1913 col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
1914 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id, 1,
1915 row_c, col_c, tx_log2_c);
1916 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id, 2,
1917 row_c, col_c, tx_log2_c);
1918 tu_idx_c++;
1919 }
1920
Alex Converse64d7ef62017-03-22 18:09:16 -07001921 // TODO(CONFIG_COEF_INTERLEAVE owners): bring eob == 0 corner case
 1922 // into line with the default configuration
1923 if (bsize >= BLOCK_8X8 && eobtotal == 0) mbmi->skip = 1;
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001924 }
1925 }
1926 }
Angie Chiang133733c2017-03-17 12:50:20 -07001927#else // CONFIG_COEF_INTERLEAVE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001928 if (!is_inter_block(mbmi)) {
1929 int plane;
1930 for (plane = 0; plane <= 1; ++plane) {
1931 if (mbmi->palette_mode_info.palette_size[plane])
Yaowu Xuf883b422016-08-30 14:01:10 -07001932 av1_decode_palette_tokens(xd, plane, r);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001933 }
1934 for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
1935 const struct macroblockd_plane *const pd = &xd->plane[plane];
hui su0c6244b2017-07-12 17:11:43 -07001936 const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
Jingning Han2d64f122016-10-21 12:44:29 -07001937 const int stepr = tx_size_high_unit[tx_size];
1938 const int stepc = tx_size_wide_unit[tx_size];
Timothy B. Terriberry81ec2612017-04-26 16:53:47 -07001939#if CONFIG_CHROMA_SUB8X8
Jingning Hanc20dc8e2017-02-17 15:37:28 -08001940 const BLOCK_SIZE plane_bsize =
1941 AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
Timothy B. Terriberry81ec2612017-04-26 16:53:47 -07001942#elif CONFIG_CB4X4
1943 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
Jingning Han41bb3392016-12-14 10:46:48 -08001944#else
Jingning Hanbafee8d2016-12-02 10:25:03 -08001945 const BLOCK_SIZE plane_bsize =
1946 get_plane_block_size(AOMMAX(BLOCK_8X8, bsize), pd);
Jingning Han41bb3392016-12-14 10:46:48 -08001947#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001948 int row, col;
Jingning Hanbafee8d2016-12-02 10:25:03 -08001949 const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
1950 const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
Jingning Hanc20dc8e2017-02-17 15:37:28 -08001951#if CONFIG_CB4X4
Jingning Hand3a64432017-04-06 17:04:17 -07001952 if (!is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
1953 pd->subsampling_y))
Jingning Hanc20dc8e2017-02-17 15:37:28 -08001954 continue;
1955#endif
Jingning Han5b701742017-07-19 14:39:07 -07001956 int blk_row, blk_col;
1957 const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
1958 int mu_blocks_wide =
1959 block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
1960 int mu_blocks_high =
1961 block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
1962 mu_blocks_wide = AOMMIN(max_blocks_wide, mu_blocks_wide);
1963 mu_blocks_high = AOMMIN(max_blocks_high, mu_blocks_high);
Jingning Hanc20dc8e2017-02-17 15:37:28 -08001964
Jingning Han5b701742017-07-19 14:39:07 -07001965 for (row = 0; row < max_blocks_high; row += mu_blocks_high) {
Luc Trudeauda9397a2017-07-21 12:00:22 -04001966 const int unit_height = AOMMIN(mu_blocks_high + row, max_blocks_high);
Jingning Han5b701742017-07-19 14:39:07 -07001967 for (col = 0; col < max_blocks_wide; col += mu_blocks_wide) {
Jingning Han5b701742017-07-19 14:39:07 -07001968 const int unit_width = AOMMIN(mu_blocks_wide + col, max_blocks_wide);
1969
1970 for (blk_row = row; blk_row < unit_height; blk_row += stepr)
1971 for (blk_col = col; blk_col < unit_width; blk_col += stepc)
1972 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, plane,
1973 blk_row, blk_col, tx_size);
1974 }
1975 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001976 }
Luc Trudeauc84c21c2017-07-25 19:40:34 -04001977#if CONFIG_CFL && CONFIG_CB4X4 && CONFIG_DEBUG
1978 if (xd->cfl->is_chroma_reference) {
1979 cfl_clear_sub8x8_val(xd->cfl);
1980 }
1981#endif // CONFIG_CFL && CONFIG_CB4X4 && CONFIG_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07001982 } else {
Yue Chen9ab6d712017-01-12 15:50:46 -08001983 int ref;
1984
Zoe Liu85b66462017-04-20 14:28:19 -07001985#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
Yushin Cho127c5832017-07-28 16:39:04 -07001986 for (ref = 0; ref < 1 + is_inter_anyref_comp_mode(mbmi->mode); ++ref)
1987#else
1988 for (ref = 0; ref < 1 + has_second_ref(mbmi); ++ref)
1989#endif // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
1990 {
Zoe Liu85b66462017-04-20 14:28:19 -07001991 const MV_REFERENCE_FRAME frame =
Yushin Cho127c5832017-07-28 16:39:04 -07001992#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
Zoe Liu85b66462017-04-20 14:28:19 -07001993 has_second_ref(mbmi) ? mbmi->ref_frame[ref] : mbmi->ref_frame[0];
1994#else
Yushin Cho127c5832017-07-28 16:39:04 -07001995 mbmi->ref_frame[ref];
Zoe Liu85b66462017-04-20 14:28:19 -07001996#endif // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
Alex Converse28744302017-04-13 14:46:22 -07001997 if (frame < LAST_FRAME) {
1998#if CONFIG_INTRABC
1999 assert(is_intrabc_block(mbmi));
2000 assert(frame == INTRA_FRAME);
2001 assert(ref == 0);
2002#else
2003 assert(0);
2004#endif // CONFIG_INTRABC
2005 } else {
2006 RefBuffer *ref_buf = &cm->frame_refs[frame - LAST_FRAME];
Yue Chen9ab6d712017-01-12 15:50:46 -08002007
Alex Converse28744302017-04-13 14:46:22 -07002008 xd->block_refs[ref] = ref_buf;
2009 if ((!av1_is_valid_scale(&ref_buf->sf)))
2010 aom_internal_error(xd->error_info, AOM_CODEC_UNSUP_BITSTREAM,
2011 "Reference frame has invalid dimensions");
2012 av1_setup_pre_planes(xd, ref, ref_buf->buf, mi_row, mi_col,
2013 &ref_buf->sf);
2014 }
Yue Chen9ab6d712017-01-12 15:50:46 -08002015 }
Yue Chen69f18e12016-09-08 14:48:15 -07002016
Jingning Han41bb3392016-12-14 10:46:48 -08002017#if CONFIG_CB4X4
Jingning Hanc44009c2017-05-06 11:36:49 -07002018 av1_build_inter_predictors_sb(cm, xd, mi_row, mi_col, NULL, bsize);
Jingning Han41bb3392016-12-14 10:46:48 -08002019#else
Jingning Hanc44009c2017-05-06 11:36:49 -07002020 av1_build_inter_predictors_sb(cm, xd, mi_row, mi_col, NULL,
Jingning Han41bb3392016-12-14 10:46:48 -08002021 AOMMAX(bsize, BLOCK_8X8));
2022#endif
Sarah Parker4c10a3c2017-04-10 19:37:59 -07002023
Yue Chencb60b182016-10-13 15:18:22 -07002024#if CONFIG_MOTION_VAR
2025 if (mbmi->motion_mode == OBMC_CAUSAL) {
Yue Chenf27b1602017-01-13 11:11:43 -08002026#if CONFIG_NCOBMC
2027 av1_build_ncobmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
2028#else
Yue Chen894fcce2016-10-21 16:50:52 -07002029 av1_build_obmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
Yue Chenf27b1602017-01-13 11:11:43 -08002030#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002031 }
Yue Chencb60b182016-10-13 15:18:22 -07002032#endif // CONFIG_MOTION_VAR
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07002033#if CONFIG_NCOBMC_ADAPT_WEIGHT
2034 if (mbmi->motion_mode == NCOBMC_ADAPT_WEIGHT) {
2035 int plane;
2036 recon_ncobmc_intrpl_pred(cm, xd, mi_row, mi_col, bsize);
2037 for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
2038 get_pred_from_intrpl_buf(xd, mi_row, mi_col, bsize, plane);
2039 }
2040 }
2041#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002042 // Reconstruction
2043 if (!mbmi->skip) {
2044 int eobtotal = 0;
2045 int plane;
2046
2047 for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
2048 const struct macroblockd_plane *const pd = &xd->plane[plane];
Timothy B. Terriberry81ec2612017-04-26 16:53:47 -07002049#if CONFIG_CHROMA_SUB8X8
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002050 const BLOCK_SIZE plane_bsize =
2051 AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
Timothy B. Terriberry81ec2612017-04-26 16:53:47 -07002052#elif CONFIG_CB4X4
2053 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
Jingning Han41bb3392016-12-14 10:46:48 -08002054#else
Yushin Cho127c5832017-07-28 16:39:04 -07002055 const BLOCK_SIZE plane_bsize =
2056 get_plane_block_size(AOMMAX(BLOCK_8X8, bsize), pd);
Jingning Han41bb3392016-12-14 10:46:48 -08002057#endif
Jingning Hanbafee8d2016-12-02 10:25:03 -08002058 const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
2059 const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002060 int row, col;
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002061
2062#if CONFIG_CB4X4
Jingning Hand3a64432017-04-06 17:04:17 -07002063 if (!is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
2064 pd->subsampling_y))
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002065 continue;
2066#endif
2067
Yaowu Xuc27fc142016-08-22 16:08:15 -07002068#if CONFIG_VAR_TX
Jingning Hanc2b797f2017-07-19 09:37:11 -07002069 const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
2070 int mu_blocks_wide =
2071 block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
2072 int mu_blocks_high =
2073 block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
2074
2075 mu_blocks_wide = AOMMIN(max_blocks_wide, mu_blocks_wide);
2076 mu_blocks_high = AOMMIN(max_blocks_high, mu_blocks_high);
2077
Sarah Parker106b3cb2017-04-21 12:13:37 -07002078 const TX_SIZE max_tx_size = get_vartx_max_txsize(mbmi, plane_bsize);
Jingning Hanf64062f2016-11-02 16:22:18 -07002079 const int bh_var_tx = tx_size_high_unit[max_tx_size];
2080 const int bw_var_tx = tx_size_wide_unit[max_tx_size];
Jingning Hana65f3052017-06-23 10:52:05 -07002081 int block = 0;
2082 int step =
2083 tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];
Jingning Hanc2b797f2017-07-19 09:37:11 -07002084
2085 for (row = 0; row < max_blocks_high; row += mu_blocks_high) {
2086 for (col = 0; col < max_blocks_wide; col += mu_blocks_wide) {
2087 int blk_row, blk_col;
2088 const int unit_height =
2089 AOMMIN(mu_blocks_high + row, max_blocks_high);
2090 const int unit_width =
2091 AOMMIN(mu_blocks_wide + col, max_blocks_wide);
2092 for (blk_row = row; blk_row < unit_height; blk_row += bh_var_tx) {
2093 for (blk_col = col; blk_col < unit_width; blk_col += bw_var_tx) {
2094 decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize,
2095 blk_row, blk_col, block, max_tx_size,
2096 &eobtotal);
2097 block += step;
2098 }
2099 }
Jingning Hana65f3052017-06-23 10:52:05 -07002100 }
2101 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002102#else
hui su0c6244b2017-07-12 17:11:43 -07002103 const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
Jingning Han2d64f122016-10-21 12:44:29 -07002104 const int stepr = tx_size_high_unit[tx_size];
2105 const int stepc = tx_size_wide_unit[tx_size];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002106 for (row = 0; row < max_blocks_high; row += stepr)
2107 for (col = 0; col < max_blocks_wide; col += stepc)
Angie Chiangff6d8902016-10-21 11:02:09 -07002108 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id,
2109 plane, row, col, tx_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002110#endif
2111 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002112 }
2113 }
Angie Chiang133733c2017-03-17 12:50:20 -07002114#endif // CONFIG_COEF_INTERLEAVE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002115
Angie Chiangd0916d92017-03-10 17:54:18 -08002116 int reader_corrupted_flag = aom_reader_has_error(r);
2117 aom_merge_corrupted_flag(&xd->corrupted, reader_corrupted_flag);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002118}
2119
Wei-Ting Lin3122b7d2017-08-30 17:26:58 -07002120#if NC_MODE_INFO && CONFIG_MOTION_VAR
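// With NC_MODE_INFO, reconstruction is deferred until the mode info of the
// whole superblock has been read; this second pass walks the partition tree
// again and reconstructs every block.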
Yue Chen9ab6d712017-01-12 15:50:46 -08002121static void detoken_and_recon_sb(AV1Decoder *const pbi, MACROBLOCKD *const xd,
2122 int mi_row, int mi_col, aom_reader *r,
2123 BLOCK_SIZE bsize) {
2124 AV1_COMMON *const cm = &pbi->common;
2125 const int hbs = mi_size_wide[bsize] >> 1;
2126#if CONFIG_CB4X4
2127 const int unify_bsize = 1;
2128#else
2129 const int unify_bsize = 0;
2130#endif
2131#if CONFIG_EXT_PARTITION_TYPES
2132 BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
2133#endif
2134 PARTITION_TYPE partition;
2135 BLOCK_SIZE subsize;
2136 const int has_rows = (mi_row + hbs) < cm->mi_rows;
2137 const int has_cols = (mi_col + hbs) < cm->mi_cols;
2138
2139 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
2140
2141 partition = get_partition(cm, mi_row, mi_col, bsize);
2142 subsize = subsize_lookup[partition][bsize];
2143
2144 if (!hbs && !unify_bsize) {
2145 xd->bmode_blocks_wl = 1 >> !!(partition & PARTITION_VERT);
2146 xd->bmode_blocks_hl = 1 >> !!(partition & PARTITION_HORZ);
2147 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2148 } else {
2149 switch (partition) {
2150 case PARTITION_NONE:
2151 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize);
2152 break;
2153 case PARTITION_HORZ:
2154 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2155 if (has_rows)
2156 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r,
2157 subsize);
2158 break;
2159 case PARTITION_VERT:
2160 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2161 if (has_cols)
2162 decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r,
2163 subsize);
2164 break;
2165 case PARTITION_SPLIT:
2166 detoken_and_recon_sb(pbi, xd, mi_row, mi_col, r, subsize);
2167 detoken_and_recon_sb(pbi, xd, mi_row, mi_col + hbs, r, subsize);
2168 detoken_and_recon_sb(pbi, xd, mi_row + hbs, mi_col, r, subsize);
2169 detoken_and_recon_sb(pbi, xd, mi_row + hbs, mi_col + hbs, r, subsize);
2170 break;
2171#if CONFIG_EXT_PARTITION_TYPES
2172 case PARTITION_HORZ_A:
2173 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize2);
2174 decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, bsize2);
2175 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, subsize);
2176 break;
2177 case PARTITION_HORZ_B:
2178 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2179 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, bsize2);
2180 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col + hbs, r,
2181 bsize2);
2182 break;
2183 case PARTITION_VERT_A:
2184 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize2);
2185 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, bsize2);
2186 decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, subsize);
2187 break;
2188 case PARTITION_VERT_B:
2189 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2190 decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, bsize2);
2191 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col + hbs, r,
2192 bsize2);
2193 break;
2194#endif
2195 default: assert(0 && "Invalid partition type");
2196 }
2197 }
2198}
2199#endif
2200
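// Decode one coded block: parse its mode info, then decode tokens and
// reconstruct unless reconstruction is deferred to the NC_MODE_INFO pass over
// the whole superblock.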
Yue Chen64550b62017-01-12 12:18:22 -08002201static void decode_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
2202#if CONFIG_SUPERTX
2203 int supertx_enabled,
2204#endif // CONFIG_SUPERTX
2205 int mi_row, int mi_col, aom_reader *r,
2206#if CONFIG_EXT_PARTITION_TYPES
2207 PARTITION_TYPE partition,
2208#endif // CONFIG_EXT_PARTITION_TYPES
2209 BLOCK_SIZE bsize) {
2210 decode_mbmi_block(pbi, xd,
2211#if CONFIG_SUPERTX
2212 supertx_enabled,
2213#endif
2214 mi_row, mi_col, r,
2215#if CONFIG_EXT_PARTITION_TYPES
2216 partition,
2217#endif
2218 bsize);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07002219
Wei-Ting Lin3122b7d2017-08-30 17:26:58 -07002220#if !(CONFIG_MOTION_VAR && NC_MODE_INFO)
Yue Chen64550b62017-01-12 12:18:22 -08002221#if CONFIG_SUPERTX
2222 if (!supertx_enabled)
2223#endif // CONFIG_SUPERTX
2224 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize);
Yue Chen9ab6d712017-01-12 15:50:46 -08002225#endif
Yue Chen64550b62017-01-12 12:18:22 -08002226}
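// Note on the two-pass layout above: decode_block() always reads the mode
// info for a block (decode_mbmi_block()).  In the default configuration it
// then decodes the coefficient tokens and reconstructs the block right away
// (and, under CONFIG_SUPERTX, only when supertx is not enabled for the
// enclosing partition).  When CONFIG_MOTION_VAR && NC_MODE_INFO, that second
// step is skipped here and is instead done by detoken_and_recon_sb() in a
// separate recursive pass over the superblock, presumably so that the mode
// info of all neighbouring blocks is available before overlapped prediction.
// The expected calling pattern (a sketch; the actual call sites are in the
// tile decoding code later in this file) is roughly:
//   decode_partition(pbi, xd, ..., mi_row, mi_col, r, cm->sb_size);  // modes
//   detoken_and_recon_sb(pbi, xd, mi_row, mi_col, r, cm->sb_size);   // recon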
2227
Yaowu Xuf883b422016-08-30 14:01:10 -07002228static PARTITION_TYPE read_partition(AV1_COMMON *cm, MACROBLOCKD *xd,
2229 int mi_row, int mi_col, aom_reader *r,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002230 int has_rows, int has_cols,
Jingning Han1beb0102016-12-07 11:08:30 -08002231 BLOCK_SIZE bsize) {
Alex Converse55c6bde2017-01-12 15:55:31 -08002232#if CONFIG_UNPOISON_PARTITION_CTX
2233 const int ctx =
2234 partition_plane_context(xd, mi_row, mi_col, has_rows, has_cols, bsize);
Alex Converse55c6bde2017-01-12 15:55:31 -08002235#else
Jingning Han1beb0102016-12-07 11:08:30 -08002236 const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
Alex Converse55c6bde2017-01-12 15:55:31 -08002237#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002238 PARTITION_TYPE p;
Thomas Daviesc2ec0e42017-01-11 16:27:27 +00002239 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
2240 (void)cm;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002241
Jingning Han5fe79db2017-03-27 15:10:30 -07002242 aom_cdf_prob *partition_cdf = (ctx >= 0) ? ec_ctx->partition_cdf[ctx] : NULL;
Jingning Han5fe79db2017-03-27 15:10:30 -07002243
Rupert Swarbrickb95cf122017-07-31 16:51:12 +01002244 if (has_rows && has_cols) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002245#if CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrickb95cf122017-07-31 16:51:12 +01002246 const int bsl =
2247 mi_width_log2_lookup[bsize] - mi_width_log2_lookup[BLOCK_8X8];
2248 p = (PARTITION_TYPE)aom_read_symbol(r, partition_cdf,
2249 av1_num_partition_types[bsl], ACCT_STR);
Alex Converse57795a42017-03-14 12:18:25 -07002250#else
Jingning Han5fe79db2017-03-27 15:10:30 -07002251 p = (PARTITION_TYPE)aom_read_symbol(r, partition_cdf, PARTITION_TYPES,
2252 ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002253#endif // CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrickb95cf122017-07-31 16:51:12 +01002254 } else if (!has_rows && has_cols) {
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07002255 assert(bsize > BLOCK_8X8);
2256 aom_cdf_prob cdf[2];
2257 partition_gather_vert_alike(cdf, partition_cdf);
2258 assert(cdf[1] == AOM_ICDF(CDF_PROB_TOP));
2259 p = aom_read_cdf(r, cdf, 2, ACCT_STR) ? PARTITION_SPLIT : PARTITION_HORZ;
2260       // The bottom half is off-frame, so only SPLIT vs HORZ is coded.
Rupert Swarbrickb95cf122017-07-31 16:51:12 +01002261 } else if (has_rows && !has_cols) {
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07002262 assert(bsize > BLOCK_8X8);
2263 aom_cdf_prob cdf[2];
2264 partition_gather_horz_alike(cdf, partition_cdf);
2265 assert(cdf[1] == AOM_ICDF(CDF_PROB_TOP));
2266 p = aom_read_cdf(r, cdf, 2, ACCT_STR) ? PARTITION_SPLIT : PARTITION_VERT;
Rupert Swarbrickb95cf122017-07-31 16:51:12 +01002267 } else {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002268 p = PARTITION_SPLIT;
Rupert Swarbrickb95cf122017-07-31 16:51:12 +01002269 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002270
Yaowu Xuc27fc142016-08-22 16:08:15 -07002271 return p;
2272}
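// Boundary handling in read_partition() above: when the block extends past
// the bottom edge of the frame (!has_rows), only PARTITION_HORZ and
// PARTITION_SPLIT are legal, so the full partition CDF is collapsed into a
// binary one (partition_gather_vert_alike) and a single symbol picks between
// them.  The right edge (!has_cols) is handled symmetrically with
// partition_gather_horz_alike, choosing PARTITION_VERT vs PARTITION_SPLIT.
// When the block overhangs both edges no symbol is read and PARTITION_SPLIT
// is forced.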
2273
2274#if CONFIG_SUPERTX
Yaowu Xuf883b422016-08-30 14:01:10 -07002275static int read_skip(AV1_COMMON *cm, const MACROBLOCKD *xd, int segment_id,
2276 aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002277 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
2278 return 1;
2279 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002280 const int ctx = av1_get_skip_context(xd);
Thomas Davies61e3e372017-04-04 16:10:23 +01002281#if CONFIG_NEW_MULTISYMBOL
Thomas Davies61e3e372017-04-04 16:10:23 +01002282 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Thomas Davies61e3e372017-04-04 16:10:23 +01002283 const int skip = aom_read_symbol(r, ec_ctx->skip_cdfs[ctx], 2, ACCT_STR);
2284#else
Michael Bebenita6048d052016-08-25 14:40:54 -07002285 const int skip = aom_read(r, cm->fc->skip_probs[ctx], ACCT_STR);
Thomas Davies61e3e372017-04-04 16:10:23 +01002286#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002287 FRAME_COUNTS *counts = xd->counts;
2288 if (counts) ++counts->skip[ctx][skip];
2289 return skip;
2290 }
2291}
2292#endif // CONFIG_SUPERTX
2293
2294// TODO(slavarnway): eliminate bsize and subsize in future commits
Yaowu Xuf883b422016-08-30 14:01:10 -07002295static void decode_partition(AV1Decoder *const pbi, MACROBLOCKD *const xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002296#if CONFIG_SUPERTX
2297 int supertx_enabled,
2298#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07002299 int mi_row, int mi_col, aom_reader *r,
Jingning Hanea10ad42017-07-20 11:19:08 -07002300 BLOCK_SIZE bsize) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002301 AV1_COMMON *const cm = &pbi->common;
Jingning Hanff17e162016-12-07 17:58:18 -08002302 const int num_8x8_wh = mi_size_wide[bsize];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002303 const int hbs = num_8x8_wh >> 1;
Jingning Han41bb3392016-12-14 10:46:48 -08002304#if CONFIG_CB4X4
2305 const int unify_bsize = 1;
2306#else
2307 const int unify_bsize = 0;
2308#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002309 PARTITION_TYPE partition;
2310 BLOCK_SIZE subsize;
2311#if CONFIG_EXT_PARTITION_TYPES
2312 BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002313 const int quarter_step = num_8x8_wh / 4;
2314 int i;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002315#endif
2316 const int has_rows = (mi_row + hbs) < cm->mi_rows;
2317 const int has_cols = (mi_col + hbs) < cm->mi_cols;
2318#if CONFIG_SUPERTX
2319 const int read_token = !supertx_enabled;
2320 int skip = 0;
Jingning Han2511c662016-12-22 11:57:34 -08002321 TX_SIZE supertx_size = max_txsize_lookup[bsize];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002322 const TileInfo *const tile = &xd->tile;
2323 int txfm = DCT_DCT;
2324#endif // CONFIG_SUPERTX
2325
2326 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
2327
Jingning Hancd959762017-03-27 14:49:59 -07002328 partition = (bsize < BLOCK_8X8) ? PARTITION_NONE
2329 : read_partition(cm, xd, mi_row, mi_col, r,
2330 has_rows, has_cols, bsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002331 subsize = subsize_lookup[partition][bsize]; // get_subsize(bsize, partition);
Yushin Cho77bba8d2016-11-04 16:36:56 -07002332
2333#if CONFIG_PVQ
2334 assert(partition < PARTITION_TYPES);
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002335 assert(subsize < BLOCK_SIZES_ALL);
Yushin Cho77bba8d2016-11-04 16:36:56 -07002336#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002337#if CONFIG_SUPERTX
2338 if (!frame_is_intra_only(cm) && partition != PARTITION_NONE &&
2339 bsize <= MAX_SUPERTX_BLOCK_SIZE && !supertx_enabled && !xd->lossless[0]) {
2340 const int supertx_context = partition_supertx_context_lookup[partition];
Michael Bebenita6048d052016-08-25 14:40:54 -07002341 supertx_enabled = aom_read(
2342 r, cm->fc->supertx_prob[supertx_context][supertx_size], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002343 if (xd->counts)
2344 xd->counts->supertx[supertx_context][supertx_size][supertx_enabled]++;
2345#if CONFIG_VAR_TX
2346 if (supertx_enabled) xd->supertx_size = supertx_size;
2347#endif
2348 }
2349#endif // CONFIG_SUPERTX
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002350
2351#if CONFIG_SUPERTX
2352#define DEC_BLOCK_STX_ARG supertx_enabled,
2353#else
2354#define DEC_BLOCK_STX_ARG
2355#endif
2356#if CONFIG_EXT_PARTITION_TYPES
2357#define DEC_BLOCK_EPT_ARG partition,
2358#else
2359#define DEC_BLOCK_EPT_ARG
2360#endif
2361#define DEC_BLOCK(db_r, db_c, db_subsize) \
2362 decode_block(pbi, xd, DEC_BLOCK_STX_ARG(db_r), (db_c), r, \
2363 DEC_BLOCK_EPT_ARG(db_subsize))
2364#define DEC_PARTITION(db_r, db_c, db_subsize) \
2365 decode_partition(pbi, xd, DEC_BLOCK_STX_ARG(db_r), (db_c), r, (db_subsize))
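  // Example expansion of the helpers above (a sketch; the exact text depends
  // on which experiments are enabled).  With both CONFIG_SUPERTX and
  // CONFIG_EXT_PARTITION_TYPES defined,
  //   DEC_BLOCK(mi_row, mi_col, subsize)
  // expands to
  //   decode_block(pbi, xd, supertx_enabled, (mi_row), (mi_col), r,
  //                partition, (subsize))
  // and with both disabled it expands to
  //   decode_block(pbi, xd, (mi_row), (mi_col), r, (subsize))
  // matching the decode_block() signature selected by the same flags.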
2366
Jingning Han41bb3392016-12-14 10:46:48 -08002367 if (!hbs && !unify_bsize) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002368 // calculate bmode block dimensions (log 2)
2369 xd->bmode_blocks_wl = 1 >> !!(partition & PARTITION_VERT);
2370 xd->bmode_blocks_hl = 1 >> !!(partition & PARTITION_HORZ);
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002371 DEC_BLOCK(mi_row, mi_col, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002372 } else {
2373 switch (partition) {
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002374 case PARTITION_NONE: DEC_BLOCK(mi_row, mi_col, subsize); break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002375 case PARTITION_HORZ:
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002376 DEC_BLOCK(mi_row, mi_col, subsize);
2377 if (has_rows) DEC_BLOCK(mi_row + hbs, mi_col, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002378 break;
2379 case PARTITION_VERT:
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002380 DEC_BLOCK(mi_row, mi_col, subsize);
2381 if (has_cols) DEC_BLOCK(mi_row, mi_col + hbs, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002382 break;
2383 case PARTITION_SPLIT:
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002384 DEC_PARTITION(mi_row, mi_col, subsize);
2385 DEC_PARTITION(mi_row, mi_col + hbs, subsize);
2386 DEC_PARTITION(mi_row + hbs, mi_col, subsize);
2387 DEC_PARTITION(mi_row + hbs, mi_col + hbs, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002388 break;
2389#if CONFIG_EXT_PARTITION_TYPES
2390 case PARTITION_HORZ_A:
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002391 DEC_BLOCK(mi_row, mi_col, bsize2);
2392 DEC_BLOCK(mi_row, mi_col + hbs, bsize2);
2393 DEC_BLOCK(mi_row + hbs, mi_col, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002394 break;
2395 case PARTITION_HORZ_B:
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002396 DEC_BLOCK(mi_row, mi_col, subsize);
2397 DEC_BLOCK(mi_row + hbs, mi_col, bsize2);
2398 DEC_BLOCK(mi_row + hbs, mi_col + hbs, bsize2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002399 break;
2400 case PARTITION_VERT_A:
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002401 DEC_BLOCK(mi_row, mi_col, bsize2);
2402 DEC_BLOCK(mi_row + hbs, mi_col, bsize2);
2403 DEC_BLOCK(mi_row, mi_col + hbs, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002404 break;
2405 case PARTITION_VERT_B:
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002406 DEC_BLOCK(mi_row, mi_col, subsize);
2407 DEC_BLOCK(mi_row, mi_col + hbs, bsize2);
2408 DEC_BLOCK(mi_row + hbs, mi_col + hbs, bsize2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002409 break;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002410 case PARTITION_HORZ_4:
2411 for (i = 0; i < 4; ++i) {
2412 int this_mi_row = mi_row + i * quarter_step;
2413 if (i > 0 && this_mi_row >= cm->mi_rows) break;
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002414 DEC_BLOCK(this_mi_row, mi_col, subsize);
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002415 }
2416 break;
2417 case PARTITION_VERT_4:
2418 for (i = 0; i < 4; ++i) {
2419 int this_mi_col = mi_col + i * quarter_step;
2420 if (i > 0 && this_mi_col >= cm->mi_cols) break;
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002421 DEC_BLOCK(mi_row, this_mi_col, subsize);
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002422 }
2423 break;
2424#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xuc27fc142016-08-22 16:08:15 -07002425 default: assert(0 && "Invalid partition type");
2426 }
2427 }
2428
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002429#undef DEC_PARTITION
2430#undef DEC_BLOCK
2431#undef DEC_BLOCK_EPT_ARG
2432#undef DEC_BLOCK_STX_ARG
2433
Yaowu Xuc27fc142016-08-22 16:08:15 -07002434#if CONFIG_SUPERTX
2435 if (supertx_enabled && read_token) {
2436 uint8_t *dst_buf[3];
2437 int dst_stride[3], i;
2438 int offset = mi_row * cm->mi_stride + mi_col;
2439
2440 set_segment_id_supertx(cm, mi_row, mi_col, bsize);
2441
David Barker3aec8d62017-01-31 14:55:32 +00002442#if CONFIG_DELTA_Q
2443 if (cm->delta_q_present_flag) {
2444 for (i = 0; i < MAX_SEGMENTS; i++) {
Cheng Chen49d30e62017-08-28 20:59:27 -07002445 int j;
2446 for (j = 0; j < MAX_MB_PLANE; ++j) {
2447 const int dc_delta_q = j == 0 ? cm->y_dc_delta_q : cm->uv_dc_delta_q;
2448 const int ac_delta_q = j == 0 ? 0 : cm->uv_ac_delta_q;
2449
2450 xd->plane[j].seg_dequant[i][0] =
2451 av1_dc_quant(xd->current_qindex, dc_delta_q, cm->bit_depth);
2452 xd->plane[j].seg_dequant[i][1] =
2453 av1_ac_quant(xd->current_qindex, ac_delta_q, cm->bit_depth);
2454 }
David Barker3aec8d62017-01-31 14:55:32 +00002455 }
2456 }
2457#endif
2458
Yaowu Xuc27fc142016-08-22 16:08:15 -07002459 xd->mi = cm->mi_grid_visible + offset;
2460 xd->mi[0] = cm->mi + offset;
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002461 set_mi_row_col(xd, tile, mi_row, mi_size_high[bsize], mi_col,
2462 mi_size_wide[bsize],
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002463#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002464 cm->dependent_horz_tiles,
2465#endif // CONFIG_DEPENDENT_HORZTILES
2466 cm->mi_rows, cm->mi_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002467 set_skip_context(xd, mi_row, mi_col);
2468 skip = read_skip(cm, xd, xd->mi[0]->mbmi.segment_id_supertx, r);
2469 if (skip) {
Timothy B. Terriberrya2d5cde2017-05-10 18:33:50 -07002470 av1_reset_skip_context(xd, mi_row, mi_col, bsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002471 } else {
Rupert Swarbrick580943a2017-06-23 14:51:38 +01002472 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002473#if CONFIG_EXT_TX
Sarah Parkere68a3e42017-02-16 14:03:24 -08002474 if (get_ext_tx_types(supertx_size, bsize, 1, cm->reduced_tx_set_used) >
2475 1) {
2476 const int eset =
2477 get_ext_tx_set(supertx_size, bsize, 1, cm->reduced_tx_set_used);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002478 if (eset > 0) {
Rupert Swarbrick580943a2017-06-23 14:51:38 +01002479 const int packed_sym =
2480 aom_read_symbol(r, ec_ctx->inter_ext_tx_cdf[eset][supertx_size],
2481 ext_tx_cnt_inter[eset], ACCT_STR);
2482 txfm = av1_ext_tx_inter_inv[eset][packed_sym];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002483 if (xd->counts) ++xd->counts->inter_ext_tx[eset][supertx_size][txfm];
2484 }
2485 }
2486#else
2487 if (supertx_size < TX_32X32) {
Rupert Swarbrick580943a2017-06-23 14:51:38 +01002488 txfm = aom_read_symbol(r, ec_ctx->inter_ext_tx_cdf[supertx_size],
2489 TX_TYPES, ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002490 if (xd->counts) ++xd->counts->inter_ext_tx[supertx_size][txfm];
2491 }
2492#endif // CONFIG_EXT_TX
2493 }
2494
Jingning Han91d9a792017-04-18 12:01:52 -07002495 av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
2496 mi_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002497 for (i = 0; i < MAX_MB_PLANE; i++) {
2498 dst_buf[i] = xd->plane[i].dst.buf;
2499 dst_stride[i] = xd->plane[i].dst.stride;
2500 }
2501 dec_predict_sb_complex(pbi, xd, tile, mi_row, mi_col, mi_row, mi_col, bsize,
2502 bsize, dst_buf, dst_stride);
2503
2504 if (!skip) {
2505 int eobtotal = 0;
2506 MB_MODE_INFO *mbmi;
2507 set_offsets_topblock(cm, xd, tile, bsize, mi_row, mi_col);
2508 mbmi = &xd->mi[0]->mbmi;
2509 mbmi->tx_type = txfm;
2510 assert(mbmi->segment_id_supertx != MAX_SEGMENTS);
2511 for (i = 0; i < MAX_MB_PLANE; ++i) {
2512 const struct macroblockd_plane *const pd = &xd->plane[i];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002513 int row, col;
hui su0c6244b2017-07-12 17:11:43 -07002514 const TX_SIZE tx_size = av1_get_tx_size(i, xd);
Jingning Han5b7706a2016-12-21 09:55:10 -08002515 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
Jingning Han32b20282016-10-28 15:42:44 -07002516 const int stepr = tx_size_high_unit[tx_size];
2517 const int stepc = tx_size_wide_unit[tx_size];
Jingning Han5b7706a2016-12-21 09:55:10 -08002518 const int max_blocks_wide = max_block_wide(xd, plane_bsize, i);
2519 const int max_blocks_high = max_block_high(xd, plane_bsize, i);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002520
2521 for (row = 0; row < max_blocks_high; row += stepr)
2522 for (col = 0; col < max_blocks_wide; col += stepc)
Angie Chiangff6d8902016-10-21 11:02:09 -07002523 eobtotal += reconstruct_inter_block(
2524 cm, xd, r, mbmi->segment_id_supertx, i, row, col, tx_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002525 }
Jingning Han24f24a52016-12-27 10:13:28 -08002526 if ((unify_bsize || !(subsize < BLOCK_8X8)) && eobtotal == 0) skip = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002527 }
2528 set_param_topblock(cm, xd, bsize, mi_row, mi_col, txfm, skip);
2529 }
2530#endif // CONFIG_SUPERTX
2531
2532#if CONFIG_EXT_PARTITION_TYPES
Alex Converseffabff32017-03-27 09:52:19 -07002533 update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002534#else
2535 // update partition context
2536 if (bsize >= BLOCK_8X8 &&
2537 (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
Jingning Han1beb0102016-12-07 11:08:30 -08002538 update_partition_context(xd, mi_row, mi_col, subsize, bsize);
David Barkerf8935c92016-10-26 14:54:06 +01002539#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xud71be782016-10-14 08:47:03 -07002540
Jean-Marc Valin01435132017-02-18 14:12:53 -05002541#if CONFIG_CDEF
Jingning Handf068332017-05-09 09:03:17 -07002542 if (bsize == cm->sb_size) {
Cheng Chenf5bdeac2017-07-24 14:31:30 -07002543 int width_step = mi_size_wide[BLOCK_64X64];
2544    int height_step = mi_size_high[BLOCK_64X64];
2545 int w, h;
2546 for (h = 0; (h < mi_size_high[cm->sb_size]) && (mi_row + h < cm->mi_rows);
2547 h += height_step) {
2548 for (w = 0; (w < mi_size_wide[cm->sb_size]) && (mi_col + w < cm->mi_cols);
2549 w += width_step) {
2550 if (!cm->all_lossless && !sb_all_skip(cm, mi_row + h, mi_col + w))
2551 cm->mi_grid_visible[(mi_row + h) * cm->mi_stride + (mi_col + w)]
2552 ->mbmi.cdef_strength =
2553 aom_read_literal(r, cm->cdef_bits, ACCT_STR);
2554 else
2555 cm->mi_grid_visible[(mi_row + h) * cm->mi_stride + (mi_col + w)]
2556 ->mbmi.cdef_strength = -1;
2557 }
Yaowu Xud71be782016-10-14 08:47:03 -07002558 }
2559 }
Jean-Marc Valin01435132017-02-18 14:12:53 -05002560#endif // CONFIG_CDEF
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002561#if CONFIG_LOOP_RESTORATION
2562 for (int plane = 0; plane < MAX_MB_PLANE; ++plane) {
2563 int rcol0, rcol1, rrow0, rrow1, nhtiles;
2564 if (av1_loop_restoration_corners_in_sb(cm, plane, mi_row, mi_col, bsize,
2565 &rcol0, &rcol1, &rrow0, &rrow1,
2566 &nhtiles)) {
2567 for (int rrow = rrow0; rrow < rrow1; ++rrow) {
2568 for (int rcol = rcol0; rcol < rcol1; ++rcol) {
2569 int rtile_idx = rcol + rrow * nhtiles;
2570 loop_restoration_read_sb_coeffs(cm, xd, r, plane, rtile_idx);
2571 }
2572 }
2573 }
2574 }
2575#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002576}
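// At the top level (bsize == cm->sb_size), decode_partition() above also
// reads the per-superblock filtering side information after the partition
// tree:
//  - with CONFIG_CDEF, a cm->cdef_bits-bit strength index for every 64x64
//    unit of the superblock that lies inside the frame, unless the frame is
//    all-lossless or the unit is entirely skipped (in which case -1 is
//    stored);
//  - with CONFIG_LOOP_RESTORATION, the filter coefficients for each
//    restoration tile signalled in this superblock, as selected by
//    av1_loop_restoration_corners_in_sb() (see
//    loop_restoration_read_sb_coeffs() below).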
2577
Yaowu Xuc27fc142016-08-22 16:08:15 -07002578static void setup_bool_decoder(const uint8_t *data, const uint8_t *data_end,
2579 const size_t read_size,
Yaowu Xuf883b422016-08-30 14:01:10 -07002580 struct aom_internal_error_info *error_info,
Alex Converseeb780e72016-12-13 12:46:41 -08002581 aom_reader *r,
2582#if CONFIG_ANS && ANS_MAX_SYMBOLS
2583 int window_size,
2584#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
2585 aom_decrypt_cb decrypt_cb, void *decrypt_state) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002586  // Validate the calculated partition length. If the buffer
2587  // described by the partition can't be fully read, then throw
2588  // an error.
2589 if (!read_is_valid(data, read_size, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07002590 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002591 "Truncated packet or corrupt tile length");
2592
Alex Converse2cdf0d82016-12-13 13:53:09 -08002593#if CONFIG_ANS && ANS_MAX_SYMBOLS
Alex Converseeb780e72016-12-13 12:46:41 -08002594 r->window_size = window_size;
Alex Converse2cdf0d82016-12-13 13:53:09 -08002595#endif
Alex Converse346440b2017-01-03 13:47:37 -08002596 if (aom_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
Yaowu Xuf883b422016-08-30 14:01:10 -07002597 aom_internal_error(error_info, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002598 "Failed to allocate bool decoder %d", 1);
2599}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002600
Yaowu Xuf883b422016-08-30 14:01:10 -07002601static void setup_segmentation(AV1_COMMON *const cm,
2602 struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002603 struct segmentation *const seg = &cm->seg;
2604 int i, j;
2605
2606 seg->update_map = 0;
2607 seg->update_data = 0;
2608
Yaowu Xuf883b422016-08-30 14:01:10 -07002609 seg->enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002610 if (!seg->enabled) return;
2611
2612 // Segmentation map update
2613 if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
2614 seg->update_map = 1;
2615 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002616 seg->update_map = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002617 }
2618 if (seg->update_map) {
2619 if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
2620 seg->temporal_update = 0;
2621 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002622 seg->temporal_update = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002623 }
2624 }
2625
2626 // Segmentation data update
Yaowu Xuf883b422016-08-30 14:01:10 -07002627 seg->update_data = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002628 if (seg->update_data) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002629 seg->abs_delta = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002630
Yaowu Xuf883b422016-08-30 14:01:10 -07002631 av1_clearall_segfeatures(seg);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002632
2633 for (i = 0; i < MAX_SEGMENTS; i++) {
2634 for (j = 0; j < SEG_LVL_MAX; j++) {
2635 int data = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002636 const int feature_enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002637 if (feature_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002638 av1_enable_segfeature(seg, i, j);
2639 data = decode_unsigned_max(rb, av1_seg_feature_data_max(j));
2640 if (av1_is_segfeature_signed(j))
2641 data = aom_rb_read_bit(rb) ? -data : data;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002642 }
Yaowu Xuf883b422016-08-30 14:01:10 -07002643 av1_set_segdata(seg, i, j, data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002644 }
2645 }
2646 }
2647}
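// Layout of the segmentation header read above (one bit per field unless
// noted):
//   enabled
//   if (enabled):
//     update_map       (not coded for intra / error-resilient frames, where
//                       it is forced to 1)
//     if (update_map): temporal_update  (likewise forced to 0 when not coded)
//     update_data
//     if (update_data): abs_delta, then for every segment and feature a
//       feature_enabled bit and, if set, a value bounded by
//       av1_seg_feature_data_max(j) plus a sign bit for signed features.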
2648
2649#if CONFIG_LOOP_RESTORATION
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002650static void decode_restoration_mode(AV1_COMMON *cm,
2651 struct aom_read_bit_buffer *rb) {
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002652 int p;
2653 RestorationInfo *rsi = &cm->rst_info[0];
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002654 if (aom_rb_read_bit(rb)) {
Debargha Mukherjeeb3c43bc2017-02-01 13:09:03 -08002655 rsi->frame_restoration_type =
2656 aom_rb_read_bit(rb) ? RESTORE_SGRPROJ : RESTORE_WIENER;
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002657 } else {
2658 rsi->frame_restoration_type =
2659 aom_rb_read_bit(rb) ? RESTORE_SWITCHABLE : RESTORE_NONE;
2660 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002661 for (p = 1; p < MAX_MB_PLANE; ++p) {
Debargha Mukherjeed23ceea2017-05-18 20:33:52 -07002662 rsi = &cm->rst_info[p];
2663 if (aom_rb_read_bit(rb)) {
2664 rsi->frame_restoration_type =
2665 aom_rb_read_bit(rb) ? RESTORE_SGRPROJ : RESTORE_WIENER;
2666 } else {
2667 rsi->frame_restoration_type = RESTORE_NONE;
2668 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002669 }
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08002670
2671 cm->rst_info[0].restoration_tilesize = RESTORATION_TILESIZE_MAX;
2672 cm->rst_info[1].restoration_tilesize = RESTORATION_TILESIZE_MAX;
2673 cm->rst_info[2].restoration_tilesize = RESTORATION_TILESIZE_MAX;
2674 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
2675 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
2676 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
2677 rsi = &cm->rst_info[0];
2678 rsi->restoration_tilesize >>= aom_rb_read_bit(rb);
2679 if (rsi->restoration_tilesize != RESTORATION_TILESIZE_MAX) {
2680 rsi->restoration_tilesize >>= aom_rb_read_bit(rb);
2681 }
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08002682 }
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07002683 int s = AOMMIN(cm->subsampling_x, cm->subsampling_y);
2684 if (s && (cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
2685 cm->rst_info[2].frame_restoration_type != RESTORE_NONE)) {
2686 cm->rst_info[1].restoration_tilesize =
2687 cm->rst_info[0].restoration_tilesize >> (aom_rb_read_bit(rb) * s);
2688 } else {
2689 cm->rst_info[1].restoration_tilesize = cm->rst_info[0].restoration_tilesize;
2690 }
2691 cm->rst_info[2].restoration_tilesize = cm->rst_info[1].restoration_tilesize;
Debargha Mukherjee7a5587a2017-08-31 07:41:30 -07002692
2693 cm->rst_info[0].procunit_width = cm->rst_info[0].procunit_height =
2694 RESTORATION_PROC_UNIT_SIZE;
2695 cm->rst_info[1].procunit_width = cm->rst_info[2].procunit_width =
2696 RESTORATION_PROC_UNIT_SIZE >> cm->subsampling_x;
2697 cm->rst_info[1].procunit_height = cm->rst_info[2].procunit_height =
2698 RESTORATION_PROC_UNIT_SIZE >> cm->subsampling_y;
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002699}
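// The restoration tile size above is coded with at most two bits, each
// halving the size starting from RESTORATION_TILESIZE_MAX, and is only
// present when at least one plane uses restoration:
//   "0"  -> RESTORATION_TILESIZE_MAX
//   "10" -> RESTORATION_TILESIZE_MAX / 2
//   "11" -> RESTORATION_TILESIZE_MAX / 4
// (e.g. 256 / 128 / 64, assuming RESTORATION_TILESIZE_MAX is 256 as defined
// in restoration.h).  For subsampled chroma an extra bit can further shrink
// the size by the subsampling shift s = AOMMIN(subsampling_x, subsampling_y).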
2700
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07002701static void read_wiener_filter(int wiener_win, WienerInfo *wiener_info,
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002702 WienerInfo *ref_wiener_info, aom_reader *rb) {
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07002703 if (wiener_win == WIENER_WIN)
2704 wiener_info->vfilter[0] = wiener_info->vfilter[WIENER_WIN - 1] =
2705 aom_read_primitive_refsubexpfin(
2706 rb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
2707 WIENER_FILT_TAP0_SUBEXP_K,
2708 ref_wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV, ACCT_STR) +
2709 WIENER_FILT_TAP0_MINV;
2710 else
2711 wiener_info->vfilter[0] = wiener_info->vfilter[WIENER_WIN - 1] = 0;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002712 wiener_info->vfilter[1] = wiener_info->vfilter[WIENER_WIN - 2] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002713 aom_read_primitive_refsubexpfin(
2714 rb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
2715 WIENER_FILT_TAP1_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002716 ref_wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002717 WIENER_FILT_TAP1_MINV;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002718 wiener_info->vfilter[2] = wiener_info->vfilter[WIENER_WIN - 3] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002719 aom_read_primitive_refsubexpfin(
2720 rb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
2721 WIENER_FILT_TAP2_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002722 ref_wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002723 WIENER_FILT_TAP2_MINV;
David Barker1e8e6b92017-01-13 13:45:51 +00002724 // The central element has an implicit +WIENER_FILT_STEP
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002725 wiener_info->vfilter[WIENER_HALFWIN] =
David Barker1e8e6b92017-01-13 13:45:51 +00002726 -2 * (wiener_info->vfilter[0] + wiener_info->vfilter[1] +
2727 wiener_info->vfilter[2]);
2728
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07002729 if (wiener_win == WIENER_WIN)
2730 wiener_info->hfilter[0] = wiener_info->hfilter[WIENER_WIN - 1] =
2731 aom_read_primitive_refsubexpfin(
2732 rb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
2733 WIENER_FILT_TAP0_SUBEXP_K,
2734 ref_wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV, ACCT_STR) +
2735 WIENER_FILT_TAP0_MINV;
2736 else
2737 wiener_info->hfilter[0] = wiener_info->hfilter[WIENER_WIN - 1] = 0;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002738 wiener_info->hfilter[1] = wiener_info->hfilter[WIENER_WIN - 2] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002739 aom_read_primitive_refsubexpfin(
2740 rb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
2741 WIENER_FILT_TAP1_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002742 ref_wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002743 WIENER_FILT_TAP1_MINV;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002744 wiener_info->hfilter[2] = wiener_info->hfilter[WIENER_WIN - 3] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002745 aom_read_primitive_refsubexpfin(
2746 rb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
2747 WIENER_FILT_TAP2_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002748 ref_wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002749 WIENER_FILT_TAP2_MINV;
David Barker1e8e6b92017-01-13 13:45:51 +00002750 // The central element has an implicit +WIENER_FILT_STEP
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002751 wiener_info->hfilter[WIENER_HALFWIN] =
David Barker1e8e6b92017-01-13 13:45:51 +00002752 -2 * (wiener_info->hfilter[0] + wiener_info->hfilter[1] +
2753 wiener_info->hfilter[2]);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002754 memcpy(ref_wiener_info, wiener_info, sizeof(*wiener_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002755}
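// The Wiener filter read above is symmetric, so only three taps per direction
// are coded (and the outermost tap is dropped entirely for the smaller chroma
// window, wiener_win == WIENER_WIN_CHROMA).  Each tap is sent as a
// subexponential offset from the corresponding tap of the running reference
// filter, which is updated at the end, i.e. filters are predicted from the
// previously decoded restoration tile.  The centre tap is never coded: with
// its implicit +WIENER_FILT_STEP it equals
//   WIENER_FILT_STEP - 2 * (tap0 + tap1 + tap2)
// so all seven taps sum to WIENER_FILT_STEP, presumably keeping unit DC gain
// in the filter's fixed-point precision.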
2756
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002757static void read_sgrproj_filter(SgrprojInfo *sgrproj_info,
2758 SgrprojInfo *ref_sgrproj_info, aom_reader *rb) {
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002759 sgrproj_info->ep = aom_read_literal(rb, SGRPROJ_PARAMS_BITS, ACCT_STR);
2760 sgrproj_info->xqd[0] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002761 aom_read_primitive_refsubexpfin(
2762 rb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1, SGRPROJ_PRJ_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002763 ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0, ACCT_STR) +
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002764 SGRPROJ_PRJ_MIN0;
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002765 sgrproj_info->xqd[1] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002766 aom_read_primitive_refsubexpfin(
2767 rb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1, SGRPROJ_PRJ_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002768 ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1, ACCT_STR) +
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002769 SGRPROJ_PRJ_MIN1;
2770 memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002771}
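// Self-guided (sgrproj) parameters: 'ep' selects one of the predefined
// parameter sets (SGRPROJ_PARAMS_BITS bits) and the two projection
// coefficients xqd[0] / xqd[1] are coded, like the Wiener taps, as
// subexponential offsets from the previously decoded values; the running
// reference is then updated so later restoration tiles are predicted from
// this one.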
2772
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002773static void loop_restoration_read_sb_coeffs(const AV1_COMMON *const cm,
2774 MACROBLOCKD *xd,
2775 aom_reader *const r, int plane,
2776 int rtile_idx) {
2777 const RestorationInfo *rsi = cm->rst_info + plane;
2778 if (rsi->frame_restoration_type == RESTORE_NONE) return;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002779
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002780 const int wiener_win = (plane > 0) ? WIENER_WIN_CHROMA : WIENER_WIN;
2781 WienerInfo *wiener_info = xd->wiener_info + plane;
2782 SgrprojInfo *sgrproj_info = xd->sgrproj_info + plane;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002783
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002784 if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) {
2785 assert(plane == 0);
2786 rsi->restoration_type[rtile_idx] =
2787 aom_read_tree(r, av1_switchable_restore_tree,
2788 cm->fc->switchable_restore_prob, ACCT_STR);
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002789
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002790 if (rsi->restoration_type[rtile_idx] == RESTORE_WIENER) {
2791 read_wiener_filter(wiener_win, &rsi->wiener_info[rtile_idx], wiener_info,
2792 r);
2793 } else if (rsi->restoration_type[rtile_idx] == RESTORE_SGRPROJ) {
2794 read_sgrproj_filter(&rsi->sgrproj_info[rtile_idx], sgrproj_info, r);
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002795 }
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002796 } else if (rsi->frame_restoration_type == RESTORE_WIENER) {
2797 if (aom_read(r, RESTORE_NONE_WIENER_PROB, ACCT_STR)) {
2798 rsi->restoration_type[rtile_idx] = RESTORE_WIENER;
2799 read_wiener_filter(wiener_win, &rsi->wiener_info[rtile_idx], wiener_info,
2800 r);
2801 } else {
2802 rsi->restoration_type[rtile_idx] = RESTORE_NONE;
2803 }
2804 } else if (rsi->frame_restoration_type == RESTORE_SGRPROJ) {
2805 if (aom_read(r, RESTORE_NONE_SGRPROJ_PROB, ACCT_STR)) {
2806 rsi->restoration_type[rtile_idx] = RESTORE_SGRPROJ;
2807 read_sgrproj_filter(&rsi->sgrproj_info[rtile_idx], sgrproj_info, r);
2808 } else {
2809 rsi->restoration_type[rtile_idx] = RESTORE_NONE;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002810 }
2811 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002812}
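// Per-restoration-tile signalling summary for the function above:
//   RESTORE_SWITCHABLE (luma only): a tree-coded restoration type per tile,
//     followed by the matching filter parameters.
//   RESTORE_WIENER / RESTORE_SGRPROJ frame types: one on/off flag per tile
//     (RESTORE_NONE_WIENER_PROB / RESTORE_NONE_SGRPROJ_PROB), followed by the
//     filter parameters when set.
//   RESTORE_NONE: nothing is coded.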
2813#endif // CONFIG_LOOP_RESTORATION
2814
Yaowu Xuf883b422016-08-30 14:01:10 -07002815static void setup_loopfilter(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002816 struct loopfilter *lf = &cm->lf;
Cheng Chen13fc8192017-08-19 11:49:28 -07002817#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07002818 lf->filter_level[0] = aom_rb_read_literal(rb, 6);
2819 lf->filter_level[1] = aom_rb_read_literal(rb, 6);
2820 if (lf->filter_level[0] || lf->filter_level[1]) {
Cheng Chene94df5c2017-07-19 17:25:33 -07002821 lf->filter_level_u = aom_rb_read_literal(rb, 6);
2822 lf->filter_level_v = aom_rb_read_literal(rb, 6);
2823 }
Cheng Chen179479f2017-08-04 10:56:39 -07002824#else
2825 lf->filter_level = aom_rb_read_literal(rb, 6);
Cheng Chene94df5c2017-07-19 17:25:33 -07002826#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07002827 lf->sharpness_level = aom_rb_read_literal(rb, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002828
2829 // Read in loop filter deltas applied at the MB level based on mode or ref
2830 // frame.
2831 lf->mode_ref_delta_update = 0;
2832
Yaowu Xuf883b422016-08-30 14:01:10 -07002833 lf->mode_ref_delta_enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002834 if (lf->mode_ref_delta_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002835 lf->mode_ref_delta_update = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002836 if (lf->mode_ref_delta_update) {
2837 int i;
2838
2839 for (i = 0; i < TOTAL_REFS_PER_FRAME; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07002840 if (aom_rb_read_bit(rb))
2841 lf->ref_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002842
2843 for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07002844 if (aom_rb_read_bit(rb))
2845 lf->mode_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002846 }
2847 }
2848}
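// Loop filter header layout read above: the frame filter level(s), 6 bits
// each (with CONFIG_LOOPFILTER_LEVEL the chroma levels are only present when
// a luma level is non-zero), a 3-bit sharpness level, and then the optional
// per-reference-frame and per-mode deltas, each a 6-bit signed value gated by
// its own update bit.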
2849
Jean-Marc Valin01435132017-02-18 14:12:53 -05002850#if CONFIG_CDEF
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01002851static void setup_cdef(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002852 int i;
Steinar Midtskogen59782122017-07-20 08:49:43 +02002853#if CONFIG_CDEF_SINGLEPASS
2854 cm->cdef_pri_damping = cm->cdef_sec_damping = aom_rb_read_literal(rb, 2) + 3;
2855#else
Steinar Midtskogen94de0aa2017-08-02 10:30:12 +02002856 cm->cdef_pri_damping = aom_rb_read_literal(rb, 1) + 5;
2857 cm->cdef_sec_damping = aom_rb_read_literal(rb, 2) + 3;
Steinar Midtskogen59782122017-07-20 08:49:43 +02002858#endif
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002859 cm->cdef_bits = aom_rb_read_literal(rb, 2);
2860 cm->nb_cdef_strengths = 1 << cm->cdef_bits;
2861 for (i = 0; i < cm->nb_cdef_strengths; i++) {
2862 cm->cdef_strengths[i] = aom_rb_read_literal(rb, CDEF_STRENGTH_BITS);
Steinar Midtskogen1c1161f2017-09-08 15:03:51 +02002863 cm->cdef_uv_strengths[i] = cm->subsampling_x == cm->subsampling_y
2864 ? aom_rb_read_literal(rb, CDEF_STRENGTH_BITS)
2865 : 0;
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002866 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002867}
Jean-Marc Valin01435132017-02-18 14:12:53 -05002868#endif // CONFIG_CDEF
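// CDEF header example: cm->cdef_bits is 0..3, so nb_cdef_strengths is 1, 2, 4
// or 8.  Each entry carries a CDEF_STRENGTH_BITS-bit luma strength, plus a
// chroma strength when the two chroma subsampling factors are equal
// (otherwise the chroma strength is 0).  Every 64x64 unit then selects one of
// these entries with the cm->cdef_bits-bit index read in decode_partition()
// above.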
Yaowu Xuc27fc142016-08-22 16:08:15 -07002869
Yaowu Xuf883b422016-08-30 14:01:10 -07002870static INLINE int read_delta_q(struct aom_read_bit_buffer *rb) {
2871 return aom_rb_read_bit(rb) ? aom_rb_read_inv_signed_literal(rb, 6) : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002872}
2873
Yaowu Xuf883b422016-08-30 14:01:10 -07002874static void setup_quantization(AV1_COMMON *const cm,
2875 struct aom_read_bit_buffer *rb) {
2876 cm->base_qindex = aom_rb_read_literal(rb, QINDEX_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002877 cm->y_dc_delta_q = read_delta_q(rb);
2878 cm->uv_dc_delta_q = read_delta_q(rb);
2879 cm->uv_ac_delta_q = read_delta_q(rb);
2880 cm->dequant_bit_depth = cm->bit_depth;
2881#if CONFIG_AOM_QM
Yaowu Xuf883b422016-08-30 14:01:10 -07002882 cm->using_qmatrix = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002883 if (cm->using_qmatrix) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002884 cm->min_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
2885 cm->max_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002886 } else {
2887 cm->min_qmlevel = 0;
2888 cm->max_qmlevel = 0;
2889 }
2890#endif
2891}
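// Quantization header: base_qindex (QINDEX_BITS bits) followed by three delta
// values (y DC, uv DC, uv AC), each coded as a presence bit plus a 6-bit
// signed literal when present.  With CONFIG_AOM_QM an extra flag enables
// quantization matrices, in which case the min/max QM levels follow as
// QM_LEVEL_BITS-bit literals.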
2892
Alex Converse05a3e7d2017-05-16 12:20:07 -07002893// Build y/uv dequant values based on segmentation.
Yaowu Xuf883b422016-08-30 14:01:10 -07002894static void setup_segmentation_dequant(AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002895#if CONFIG_AOM_QM
Alex Converse05a3e7d2017-05-16 12:20:07 -07002896 const int using_qm = cm->using_qmatrix;
2897 const int minqm = cm->min_qmlevel;
2898 const int maxqm = cm->max_qmlevel;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002899#endif
Alex Converse05a3e7d2017-05-16 12:20:07 -07002900 // When segmentation is disabled, only the first value is used. The
2901 // remaining are don't cares.
2902 const int max_segments = cm->seg.enabled ? MAX_SEGMENTS : 1;
2903 for (int i = 0; i < max_segments; ++i) {
2904 const int qindex = av1_get_qindex(&cm->seg, i, cm->base_qindex);
2905 cm->y_dequant[i][0] = av1_dc_quant(qindex, cm->y_dc_delta_q, cm->bit_depth);
2906 cm->y_dequant[i][1] = av1_ac_quant(qindex, 0, cm->bit_depth);
2907 cm->uv_dequant[i][0] =
Yaowu Xuf883b422016-08-30 14:01:10 -07002908 av1_dc_quant(qindex, cm->uv_dc_delta_q, cm->bit_depth);
Alex Converse05a3e7d2017-05-16 12:20:07 -07002909 cm->uv_dequant[i][1] =
Yaowu Xuf883b422016-08-30 14:01:10 -07002910 av1_ac_quant(qindex, cm->uv_ac_delta_q, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002911#if CONFIG_AOM_QM
Alex Converse05a3e7d2017-05-16 12:20:07 -07002912 const int lossless = qindex == 0 && cm->y_dc_delta_q == 0 &&
2913 cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
2914 // NB: depends on base index so there is only 1 set per frame
Yaowu Xuc27fc142016-08-22 16:08:15 -07002915 // No quant weighting when lossless or signalled not using QM
Alex Converse05a3e7d2017-05-16 12:20:07 -07002916 const int qmlevel = (lossless || using_qm == 0)
2917 ? NUM_QM_LEVELS - 1
2918 : aom_get_qmlevel(cm->base_qindex, minqm, maxqm);
Thomas Davies6675adf2017-05-04 17:39:21 +01002919 for (int j = 0; j < TX_SIZES_ALL; ++j) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002920 cm->y_iqmatrix[i][1][j] = aom_iqmatrix(cm, qmlevel, 0, j, 1);
2921 cm->y_iqmatrix[i][0][j] = aom_iqmatrix(cm, qmlevel, 0, j, 0);
2922 cm->uv_iqmatrix[i][1][j] = aom_iqmatrix(cm, qmlevel, 1, j, 1);
2923 cm->uv_iqmatrix[i][0][j] = aom_iqmatrix(cm, qmlevel, 1, j, 0);
2924 }
Alex Converse05a3e7d2017-05-16 12:20:07 -07002925#endif // CONFIG_AOM_QM
Yaowu Xuc27fc142016-08-22 16:08:15 -07002926#if CONFIG_NEW_QUANT
Alex Converse05a3e7d2017-05-16 12:20:07 -07002927 for (int dq = 0; dq < QUANT_PROFILES; dq++) {
2928 for (int b = 0; b < COEF_BANDS; ++b) {
2929 av1_get_dequant_val_nuq(cm->y_dequant[i][b != 0], b,
2930 cm->y_dequant_nuq[i][dq][b], NULL, dq);
2931 av1_get_dequant_val_nuq(cm->uv_dequant[i][b != 0], b,
2932 cm->uv_dequant_nuq[i][dq][b], NULL, dq);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002933 }
2934 }
2935#endif // CONFIG_NEW_QUANT
2936 }
2937}
2938
Angie Chiang5678ad92016-11-21 09:38:40 -08002939static InterpFilter read_frame_interp_filter(struct aom_read_bit_buffer *rb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002940 return aom_rb_read_bit(rb) ? SWITCHABLE
Angie Chiang6305abe2016-10-24 12:24:44 -07002941 : aom_rb_read_literal(rb, LOG_SWITCHABLE_FILTERS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002942}
2943
Yaowu Xuf883b422016-08-30 14:01:10 -07002944static void setup_render_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07002945#if CONFIG_FRAME_SUPERRES
2946 cm->render_width = cm->superres_upscaled_width;
2947 cm->render_height = cm->superres_upscaled_height;
2948#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07002949 cm->render_width = cm->width;
2950 cm->render_height = cm->height;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07002951#endif // CONFIG_FRAME_SUPERRES
Yaowu Xuf883b422016-08-30 14:01:10 -07002952 if (aom_rb_read_bit(rb))
2953 av1_read_frame_size(rb, &cm->render_width, &cm->render_height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002954}
2955
Fergus Simpsond91c8c92017-04-07 12:12:00 -07002956#if CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07002957// TODO(afergs): make "struct aom_read_bit_buffer *const rb"?
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07002958static void setup_superres(AV1_COMMON *const cm, struct aom_read_bit_buffer *rb,
2959 int *width, int *height) {
2960 cm->superres_upscaled_width = *width;
2961 cm->superres_upscaled_height = *height;
Fergus Simpsone7508412017-03-14 18:14:09 -07002962 if (aom_rb_read_bit(rb)) {
2963 cm->superres_scale_numerator =
2964 (uint8_t)aom_rb_read_literal(rb, SUPERRES_SCALE_BITS);
2965 cm->superres_scale_numerator += SUPERRES_SCALE_NUMERATOR_MIN;
Fergus Simpson7b2d1442017-05-22 17:18:33 -07002966 // Don't edit cm->width or cm->height directly, or the buffers won't get
2967 // resized correctly
Fergus Simpsonbfbf6a52017-06-14 23:13:12 -07002968 av1_calculate_scaled_size(width, height, cm->superres_scale_numerator);
Fergus Simpsone7508412017-03-14 18:14:09 -07002969 } else {
2970    // 1:1 scaling - i.e. no scaling, scale not provided
Fergus Simpsonbfbf6a52017-06-14 23:13:12 -07002971 cm->superres_scale_numerator = SCALE_DENOMINATOR;
Fergus Simpsone7508412017-03-14 18:14:09 -07002972 }
2973}
Fergus Simpsond91c8c92017-04-07 12:12:00 -07002974#endif // CONFIG_FRAME_SUPERRES
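// Frame super-resolution: the incoming *width / *height are remembered as the
// upscaled (output) dimensions, then one bit says whether a scale is coded.
// If so, the numerator is a SUPERRES_SCALE_BITS-bit literal offset by
// SUPERRES_SCALE_NUMERATOR_MIN and av1_calculate_scaled_size() shrinks the
// coded frame size (presumably by numerator / SCALE_DENOMINATOR); otherwise
// the numerator is set to SCALE_DENOMINATOR, i.e. 1:1.  The reconstructed
// frame is presumably upscaled back to the recorded dimensions elsewhere in
// the decoder.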
Fergus Simpsone7508412017-03-14 18:14:09 -07002975
Yaowu Xuf883b422016-08-30 14:01:10 -07002976static void resize_context_buffers(AV1_COMMON *cm, int width, int height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002977#if CONFIG_SIZE_LIMIT
2978 if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
Yaowu Xuf883b422016-08-30 14:01:10 -07002979 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002980 "Dimensions of %dx%d beyond allowed size of %dx%d.",
2981 width, height, DECODE_WIDTH_LIMIT, DECODE_HEIGHT_LIMIT);
2982#endif
2983 if (cm->width != width || cm->height != height) {
2984 const int new_mi_rows =
2985 ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
2986 const int new_mi_cols =
2987 ALIGN_POWER_OF_TWO(width, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
2988
Yaowu Xuf883b422016-08-30 14:01:10 -07002989 // Allocations in av1_alloc_context_buffers() depend on individual
Yaowu Xuc27fc142016-08-22 16:08:15 -07002990 // dimensions as well as the overall size.
2991 if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002992 if (av1_alloc_context_buffers(cm, width, height))
2993 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002994 "Failed to allocate context buffers");
2995 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002996 av1_set_mb_mi(cm, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002997 }
Yaowu Xuf883b422016-08-30 14:01:10 -07002998 av1_init_context_buffers(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002999 cm->width = width;
3000 cm->height = height;
3001 }
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01003002
3003 ensure_mv_buffer(cm->cur_frame, cm);
3004 cm->cur_frame->width = cm->width;
3005 cm->cur_frame->height = cm->height;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003006}
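// The mode-info grid is sized by rounding the frame dimensions up to a whole
// number of MI units (1 << MI_SIZE_LOG2 pixels).  For example, assuming 4x4
// MI units (MI_SIZE_LOG2 == 2, which depends on the build configuration), a
// 1920x1080 frame gives mi_cols = 480 and mi_rows = 270.  Context buffers are
// only reallocated when the grid grows in either dimension; otherwise
// av1_set_mb_mi() just updates the dimensions in place.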
3007
Yaowu Xuf883b422016-08-30 14:01:10 -07003008static void setup_frame_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003009 int width, height;
3010 BufferPool *const pool = cm->buffer_pool;
Yaowu Xuf883b422016-08-30 14:01:10 -07003011 av1_read_frame_size(rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003012#if CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003013 setup_superres(cm, rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003014#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003015 setup_render_size(cm, rb);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003016 resize_context_buffers(cm, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003017
3018 lock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07003019 if (aom_realloc_frame_buffer(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003020 get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
3021 cm->subsampling_y,
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02003022#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07003023 cm->use_highbitdepth,
3024#endif
Yaowu Xu671f2bd2016-09-30 15:07:57 -07003025 AOM_BORDER_IN_PIXELS, cm->byte_alignment,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003026 &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
3027 pool->cb_priv)) {
3028 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07003029 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003030 "Failed to allocate frame buffer");
3031 }
3032 unlock_buffer_pool(pool);
3033
3034 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
3035 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
3036 pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
3037 pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07003038#if CONFIG_COLORSPACE_HEADERS
3039 pool->frame_bufs[cm->new_fb_idx].buf.transfer_function =
3040 cm->transfer_function;
3041 pool->frame_bufs[cm->new_fb_idx].buf.chroma_sample_position =
3042 cm->chroma_sample_position;
3043#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003044 pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
3045 pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
3046 pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
3047}
3048
Yaowu Xuf883b422016-08-30 14:01:10 -07003049static INLINE int valid_ref_frame_img_fmt(aom_bit_depth_t ref_bit_depth,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003050 int ref_xss, int ref_yss,
Yaowu Xuf883b422016-08-30 14:01:10 -07003051 aom_bit_depth_t this_bit_depth,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003052 int this_xss, int this_yss) {
3053 return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
3054 ref_yss == this_yss;
3055}
3056
Yaowu Xuf883b422016-08-30 14:01:10 -07003057static void setup_frame_size_with_refs(AV1_COMMON *cm,
3058 struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003059 int width, height;
3060 int found = 0, i;
3061 int has_valid_ref_frame = 0;
3062 BufferPool *const pool = cm->buffer_pool;
3063 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003064 if (aom_rb_read_bit(rb)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003065 YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf;
3066 width = buf->y_crop_width;
3067 height = buf->y_crop_height;
3068 cm->render_width = buf->render_width;
3069 cm->render_height = buf->render_height;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003070#if CONFIG_FRAME_SUPERRES
3071 setup_superres(cm, rb, &width, &height);
3072#endif // CONFIG_FRAME_SUPERRES
Yaowu Xuc27fc142016-08-22 16:08:15 -07003073 found = 1;
3074 break;
3075 }
3076 }
3077
3078 if (!found) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003079 av1_read_frame_size(rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003080#if CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003081 setup_superres(cm, rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003082#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003083 setup_render_size(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003084 }
3085
3086 if (width <= 0 || height <= 0)
Yaowu Xuf883b422016-08-30 14:01:10 -07003087 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003088 "Invalid frame size");
3089
3090 // Check to make sure at least one of frames that this frame references
3091 // has valid dimensions.
3092 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
3093 RefBuffer *const ref_frame = &cm->frame_refs[i];
3094 has_valid_ref_frame |=
3095 valid_ref_frame_size(ref_frame->buf->y_crop_width,
3096 ref_frame->buf->y_crop_height, width, height);
3097 }
3098 if (!has_valid_ref_frame)
Yaowu Xuf883b422016-08-30 14:01:10 -07003099 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003100 "Referenced frame has invalid size");
3101 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
3102 RefBuffer *const ref_frame = &cm->frame_refs[i];
3103 if (!valid_ref_frame_img_fmt(ref_frame->buf->bit_depth,
3104 ref_frame->buf->subsampling_x,
3105 ref_frame->buf->subsampling_y, cm->bit_depth,
3106 cm->subsampling_x, cm->subsampling_y))
Yaowu Xuf883b422016-08-30 14:01:10 -07003107 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003108 "Referenced frame has incompatible color format");
3109 }
3110
3111 resize_context_buffers(cm, width, height);
3112
3113 lock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07003114 if (aom_realloc_frame_buffer(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003115 get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
3116 cm->subsampling_y,
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02003117#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07003118 cm->use_highbitdepth,
3119#endif
Yaowu Xu671f2bd2016-09-30 15:07:57 -07003120 AOM_BORDER_IN_PIXELS, cm->byte_alignment,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003121 &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
3122 pool->cb_priv)) {
3123 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07003124 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003125 "Failed to allocate frame buffer");
3126 }
3127 unlock_buffer_pool(pool);
3128
3129 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
3130 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
3131 pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
3132 pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07003133#if CONFIG_COLORSPACE_HEADERS
3134 pool->frame_bufs[cm->new_fb_idx].buf.transfer_function =
3135 cm->transfer_function;
3136 pool->frame_bufs[cm->new_fb_idx].buf.chroma_sample_position =
3137 cm->chroma_sample_position;
3138#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003139 pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
3140 pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
3141 pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
3142}
3143
David Barker1a191122017-09-06 15:24:16 +01003144static void read_tile_group_range(AV1Decoder *pbi,
3145 struct aom_read_bit_buffer *const rb) {
3146 AV1_COMMON *const cm = &pbi->common;
3147 const int num_bits = cm->log2_tile_rows + cm->log2_tile_cols;
3148 const int num_tiles =
3149 cm->tile_rows * cm->tile_cols; // Note: May be < (1<<num_bits)
3150 pbi->tg_start = aom_rb_read_literal(rb, num_bits);
3151 pbi->tg_size = 1 + aom_rb_read_literal(rb, num_bits);
3152 if (pbi->tg_start + pbi->tg_size > num_tiles)
3153 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3154 "Tile group extends past last tile in frame");
3155}
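// Tile group example: the start index and the size are each coded with
// (log2_tile_rows + log2_tile_cols) bits.  With a 2x2 tile layout that is two
// bits each, so e.g. tg_start = 2 with a coded size field of 1 describes a
// group holding tiles 2 and 3; a group that would reach past the last tile is
// rejected as a corrupt frame.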
3156
Yaowu Xuf883b422016-08-30 14:01:10 -07003157static void read_tile_info(AV1Decoder *const pbi,
3158 struct aom_read_bit_buffer *const rb) {
3159 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003160#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003161 cm->single_tile_decoding = 0;
3162 if (cm->large_scale_tile) {
3163 struct loopfilter *lf = &cm->lf;
3164
3165 // Figure out single_tile_decoding by loopfilter_level.
3166 cm->single_tile_decoding = (!lf->filter_level) ? 1 : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003167// Read the tile width/height
3168#if CONFIG_EXT_PARTITION
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003169 if (cm->sb_size == BLOCK_128X128) {
3170 cm->tile_width = aom_rb_read_literal(rb, 5) + 1;
3171 cm->tile_height = aom_rb_read_literal(rb, 5) + 1;
3172 } else {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003173#endif // CONFIG_EXT_PARTITION
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003174 cm->tile_width = aom_rb_read_literal(rb, 6) + 1;
3175 cm->tile_height = aom_rb_read_literal(rb, 6) + 1;
3176#if CONFIG_EXT_PARTITION
3177 }
3178#endif // CONFIG_EXT_PARTITION
Yaowu Xuc27fc142016-08-22 16:08:15 -07003179
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003180#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003181 cm->loop_filter_across_tiles_enabled = aom_rb_read_bit(rb);
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003182#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Ryan Lei7386eda2016-12-08 21:08:31 -08003183
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003184 cm->tile_width <<= cm->mib_size_log2;
3185 cm->tile_height <<= cm->mib_size_log2;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003186
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003187 cm->tile_width = AOMMIN(cm->tile_width, cm->mi_cols);
3188 cm->tile_height = AOMMIN(cm->tile_height, cm->mi_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003189
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003190 // Get the number of tiles
3191 cm->tile_cols = 1;
3192 while (cm->tile_cols * cm->tile_width < cm->mi_cols) ++cm->tile_cols;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003193
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003194 cm->tile_rows = 1;
3195 while (cm->tile_rows * cm->tile_height < cm->mi_rows) ++cm->tile_rows;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003196
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003197 if (cm->tile_cols * cm->tile_rows > 1) {
3198 // Read the number of bytes used to store tile size
3199 pbi->tile_col_size_bytes = aom_rb_read_literal(rb, 2) + 1;
3200 pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
3201 }
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003202
3203#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003204 cm->dependent_horz_tiles = 0;
3205#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003206 } else {
3207#endif // CONFIG_EXT_TILE
3208 int min_log2_tile_cols, max_log2_tile_cols, max_ones;
3209 av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003210
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003211 // columns
3212 max_ones = max_log2_tile_cols - min_log2_tile_cols;
3213 cm->log2_tile_cols = min_log2_tile_cols;
3214 while (max_ones-- && aom_rb_read_bit(rb)) cm->log2_tile_cols++;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003215
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003216 if (cm->log2_tile_cols > 6)
3217 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3218 "Invalid number of tile columns");
Yaowu Xuc27fc142016-08-22 16:08:15 -07003219
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003220 // rows
3221 cm->log2_tile_rows = aom_rb_read_bit(rb);
3222 if (cm->log2_tile_rows) cm->log2_tile_rows += aom_rb_read_bit(rb);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003223#if CONFIG_DEPENDENT_HORZTILES
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003224 if (cm->log2_tile_rows != 0)
3225 cm->dependent_horz_tiles = aom_rb_read_bit(rb);
3226 else
3227 cm->dependent_horz_tiles = 0;
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003228#endif
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003229#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003230 cm->loop_filter_across_tiles_enabled = aom_rb_read_bit(rb);
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003231#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Ryan Lei7386eda2016-12-08 21:08:31 -08003232
Rupert Swarbrick9a3640d2017-09-01 13:54:41 +01003233 cm->tile_width = get_tile_size(cm->mi_cols, cm->log2_tile_cols);
3234 cm->tile_height = get_tile_size(cm->mi_rows, cm->log2_tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003235
Rupert Swarbrick940942b2017-09-01 12:05:55 +01003236 const int max_cols = (cm->mi_cols + cm->tile_width - 1) / cm->tile_width;
3237 const int max_rows = (cm->mi_rows + cm->tile_height - 1) / cm->tile_height;
3238
3239 cm->tile_cols = AOMMIN(1 << cm->log2_tile_cols, max_cols);
3240 cm->tile_rows = AOMMIN(1 << cm->log2_tile_rows, max_rows);
3241
Thomas Daviesb25ba502017-07-18 10:18:24 +01003242    // Number of bytes used to signal each tile size.
3243 pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003244#if CONFIG_EXT_TILE
3245 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003246#endif // CONFIG_EXT_TILE
Thomas Davies4974e522016-11-07 17:44:05 +00003247
Thomas Davies80188d12016-10-26 16:08:35 -07003248  // Record the bit offset of the tile group information for later re-parsing.
3249 pbi->tg_size_bit_offset = rb->bit_offset;
David Barker1a191122017-09-06 15:24:16 +01003250 read_tile_group_range(pbi, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003251}
3252
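// Reads a little-endian integer of 'sz' bytes (1 to 4) from 'src'.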
Yaowu Xu4ff59b52017-04-24 12:41:56 -07003253static int mem_get_varsize(const uint8_t *src, int sz) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003254 switch (sz) {
3255 case 1: return src[0];
3256 case 2: return mem_get_le16(src);
3257 case 3: return mem_get_le24(src);
3258 case 4: return mem_get_le32(src);
James Zern88896732017-06-23 15:55:09 -07003259 default: assert(0 && "Invalid size"); return -1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003260 }
3261}
3262
3263#if CONFIG_EXT_TILE
3264// Reads the next large-scale tile, returning its size and advancing '*data'.
3265// A tile may instead use tile-copy mode, reusing the data of an earlier tile
// in the same column.
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003266static void get_ls_tile_buffer(
3267 const uint8_t *const data_end, struct aom_internal_error_info *error_info,
3268 const uint8_t **data, aom_decrypt_cb decrypt_cb, void *decrypt_state,
3269 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS], int tile_size_bytes,
3270 int col, int row, int tile_copy_mode) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003271 size_t size;
3272
3273 size_t copy_size = 0;
3274 const uint8_t *copy_data = NULL;
3275
3276 if (!read_is_valid(*data, tile_size_bytes, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07003277 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003278 "Truncated packet or corrupt tile length");
3279 if (decrypt_cb) {
3280 uint8_t be_data[4];
3281 decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
3282
3283    // Only read as many bytes as indicated by cm->tile_size_bytes.
3284 size = mem_get_varsize(be_data, tile_size_bytes);
3285 } else {
3286 size = mem_get_varsize(*data, tile_size_bytes);
3287 }
3288
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003289 // If tile_copy_mode = 1, then the top bit of the tile header indicates copy
3290 // mode.
3291 if (tile_copy_mode && (size >> (tile_size_bytes * 8 - 1)) == 1) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003292 // The remaining bits in the top byte signal the row offset
3293 int offset = (size >> (tile_size_bytes - 1) * 8) & 0x7f;
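    // e.g. with tile_size_bytes == 2, a header value of 0x8300 has the copy bit
    // set and encodes a row offset of 3 (illustrative example only).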
3294
3295    // Currently, only tiles in the same column may be used as reference tiles.
3296 copy_data = tile_buffers[row - offset][col].data;
3297 copy_size = tile_buffers[row - offset][col].size;
3298 size = 0;
3299 }
3300
3301 *data += tile_size_bytes;
3302
3303 if (size > (size_t)(data_end - *data))
Yaowu Xuf883b422016-08-30 14:01:10 -07003304 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003305 "Truncated packet or corrupt tile size");
3306
3307 if (size > 0) {
3308 tile_buffers[row][col].data = *data;
3309 tile_buffers[row][col].size = size;
3310 } else {
3311 tile_buffers[row][col].data = copy_data;
3312 tile_buffers[row][col].size = copy_size;
3313 }
3314
3315 *data += size;
3316
3317 tile_buffers[row][col].raw_data_end = *data;
3318}
3319
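// Gathers the tile buffers for large-scale tile decoding: reads the per-column
// sizes, then the per-tile size headers, locating only the tiles selected by
// pbi->dec_tile_row / pbi->dec_tile_col plus the last (bottom-right) tile.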
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003320static void get_ls_tile_buffers(
Yaowu Xuf883b422016-08-30 14:01:10 -07003321 AV1Decoder *pbi, const uint8_t *data, const uint8_t *data_end,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003322 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS]) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003323 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003324 const int tile_cols = cm->tile_cols;
3325 const int tile_rows = cm->tile_rows;
3326 const int have_tiles = tile_cols * tile_rows > 1;
3327
3328 if (!have_tiles) {
Jingning Han99ffce62017-04-25 15:48:41 -07003329 const size_t tile_size = data_end - data;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003330 tile_buffers[0][0].data = data;
3331 tile_buffers[0][0].size = tile_size;
3332 tile_buffers[0][0].raw_data_end = NULL;
3333 } else {
3334 // We locate only the tile buffers that are required, which are the ones
3335 // specified by pbi->dec_tile_col and pbi->dec_tile_row. Also, we always
3336 // need the last (bottom right) tile buffer, as we need to know where the
3337 // end of the compressed frame buffer is for proper superframe decoding.
3338
3339 const uint8_t *tile_col_data_end[MAX_TILE_COLS];
3340 const uint8_t *const data_start = data;
3341
Yaowu Xuf883b422016-08-30 14:01:10 -07003342 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003343 const int single_row = pbi->dec_tile_row >= 0;
3344 const int tile_rows_start = single_row ? dec_tile_row : 0;
3345 const int tile_rows_end = single_row ? tile_rows_start + 1 : tile_rows;
Yaowu Xuf883b422016-08-30 14:01:10 -07003346 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003347 const int single_col = pbi->dec_tile_col >= 0;
3348 const int tile_cols_start = single_col ? dec_tile_col : 0;
3349 const int tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
3350
3351 const int tile_col_size_bytes = pbi->tile_col_size_bytes;
3352 const int tile_size_bytes = pbi->tile_size_bytes;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003353 const int tile_copy_mode =
3354 ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256) ? 1
3355 : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003356 size_t tile_col_size;
3357 int r, c;
3358
3359 // Read tile column sizes for all columns (we need the last tile buffer)
3360 for (c = 0; c < tile_cols; ++c) {
3361 const int is_last = c == tile_cols - 1;
3362 if (!is_last) {
3363 tile_col_size = mem_get_varsize(data, tile_col_size_bytes);
3364 data += tile_col_size_bytes;
3365 tile_col_data_end[c] = data + tile_col_size;
3366 } else {
3367 tile_col_size = data_end - data;
3368 tile_col_data_end[c] = data_end;
3369 }
3370 data += tile_col_size;
3371 }
3372
3373 data = data_start;
3374
3375 // Read the required tile sizes.
3376 for (c = tile_cols_start; c < tile_cols_end; ++c) {
3377 const int is_last = c == tile_cols - 1;
3378
3379 if (c > 0) data = tile_col_data_end[c - 1];
3380
3381 if (!is_last) data += tile_col_size_bytes;
3382
3383 // Get the whole of the last column, otherwise stop at the required tile.
3384 for (r = 0; r < (is_last ? tile_rows : tile_rows_end); ++r) {
3385 tile_buffers[r][c].col = c;
3386
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003387 get_ls_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
3388 pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
3389 tile_size_bytes, c, r, tile_copy_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003390 }
3391 }
3392
3393 // If we have not read the last column, then read it to get the last tile.
3394 if (tile_cols_end != tile_cols) {
3395 c = tile_cols - 1;
3396
3397 data = tile_col_data_end[c - 1];
3398
3399 for (r = 0; r < tile_rows; ++r) {
3400 tile_buffers[r][c].col = c;
3401
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003402 get_ls_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
3403 pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
3404 tile_size_bytes, c, r, tile_copy_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003405 }
3406 }
3407 }
3408}
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003409#endif // CONFIG_EXT_TILE
3410
Yaowu Xuc27fc142016-08-22 16:08:15 -07003411// Reads the next tile returning its size and adjusting '*data' accordingly
3412// based on 'is_last'.
3413static void get_tile_buffer(const uint8_t *const data_end,
3414 const int tile_size_bytes, int is_last,
Yaowu Xuf883b422016-08-30 14:01:10 -07003415 struct aom_internal_error_info *error_info,
3416 const uint8_t **data, aom_decrypt_cb decrypt_cb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003417 void *decrypt_state, TileBufferDec *const buf) {
3418 size_t size;
3419
3420 if (!is_last) {
Yaowu Xu0a79a1b2017-02-17 13:04:54 -08003421 if (!read_is_valid(*data, tile_size_bytes, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07003422 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003423 "Truncated packet or corrupt tile length");
3424
3425 if (decrypt_cb) {
3426 uint8_t be_data[4];
3427 decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
3428 size = mem_get_varsize(be_data, tile_size_bytes);
3429 } else {
3430 size = mem_get_varsize(*data, tile_size_bytes);
3431 }
3432 *data += tile_size_bytes;
3433
3434 if (size > (size_t)(data_end - *data))
Yaowu Xuf883b422016-08-30 14:01:10 -07003435 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003436 "Truncated packet or corrupt tile size");
3437 } else {
3438 size = data_end - *data;
3439 }
3440
3441 buf->data = *data;
3442 buf->size = size;
3443
3444 *data += size;
3445}
3446
3447static void get_tile_buffers(
Yaowu Xuf883b422016-08-30 14:01:10 -07003448 AV1Decoder *pbi, const uint8_t *data, const uint8_t *data_end,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003449 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS]) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003450 AV1_COMMON *const cm = &pbi->common;
Thomas Davies80188d12016-10-26 16:08:35 -07003451 int r, c;
3452 const int tile_cols = cm->tile_cols;
3453 const int tile_rows = cm->tile_rows;
3454 int tc = 0;
3455 int first_tile_in_tg = 0;
Thomas Davies80188d12016-10-26 16:08:35 -07003456 struct aom_read_bit_buffer rb_tg_hdr;
3457 uint8_t clear_data[MAX_AV1_HEADER_SIZE];
James Zern6efba482017-04-20 20:53:49 -07003458 const size_t hdr_size = pbi->uncomp_hdr_size + pbi->first_partition_size;
Thomas Davies80188d12016-10-26 16:08:35 -07003459 const int tg_size_bit_offset = pbi->tg_size_bit_offset;
Fangwen Fu73126c02017-02-08 22:37:47 -08003460#if CONFIG_DEPENDENT_HORZTILES
3461 int tile_group_start_col = 0;
3462 int tile_group_start_row = 0;
3463#endif
Thomas Davies80188d12016-10-26 16:08:35 -07003464
3465 for (r = 0; r < tile_rows; ++r) {
3466 for (c = 0; c < tile_cols; ++c, ++tc) {
Thomas Davies80188d12016-10-26 16:08:35 -07003467 TileBufferDec *const buf = &tile_buffers[r][c];
Thomas Daviesa0de6d52017-01-20 14:45:25 +00003468 const int is_last = (r == tile_rows - 1) && (c == tile_cols - 1);
James Zern6efba482017-04-20 20:53:49 -07003469 const size_t hdr_offset = (tc && tc == first_tile_in_tg) ? hdr_size : 0;
Thomas Davies80188d12016-10-26 16:08:35 -07003470
Rupert Swarbrickcd757392017-09-01 13:57:53 +01003471 if (data + hdr_offset >= data_end)
3472 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3473 "Data ended before all tiles were read.");
Thomas Davies80188d12016-10-26 16:08:35 -07003474 buf->col = c;
3475 if (hdr_offset) {
3476 init_read_bit_buffer(pbi, &rb_tg_hdr, data, data_end, clear_data);
3477 rb_tg_hdr.bit_offset = tg_size_bit_offset;
David Barker1a191122017-09-06 15:24:16 +01003478 read_tile_group_range(pbi, &rb_tg_hdr);
Fangwen Fu73126c02017-02-08 22:37:47 -08003479#if CONFIG_DEPENDENT_HORZTILES
David Barker1a191122017-09-06 15:24:16 +01003480 tile_group_start_row = r;
3481 tile_group_start_col = c;
Fangwen Fu73126c02017-02-08 22:37:47 -08003482#endif
Thomas Davies80188d12016-10-26 16:08:35 -07003483 }
3484 first_tile_in_tg += tc == first_tile_in_tg ? pbi->tg_size : 0;
3485 data += hdr_offset;
Thomas Daviesa0de6d52017-01-20 14:45:25 +00003486 get_tile_buffer(data_end, pbi->tile_size_bytes, is_last,
3487 &pbi->common.error, &data, pbi->decrypt_cb,
3488 pbi->decrypt_state, buf);
Fangwen Fu73126c02017-02-08 22:37:47 -08003489#if CONFIG_DEPENDENT_HORZTILES
3490 cm->tile_group_start_row[r][c] = tile_group_start_row;
3491 cm->tile_group_start_col[r][c] = tile_group_start_col;
3492#endif
Thomas Davies80188d12016-10-26 16:08:35 -07003493 }
3494 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003495}
Yaowu Xuc27fc142016-08-22 16:08:15 -07003496
Yushin Cho77bba8d2016-11-04 16:36:56 -07003497#if CONFIG_PVQ
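// Binds the PVQ/Daala decoder state to the given bit reader and initialises
// its quantization matrices (with optional activity masking).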
Yushin Cho70669122016-12-08 09:53:14 -10003498static void daala_dec_init(AV1_COMMON *const cm, daala_dec_ctx *daala_dec,
Nathan E. Eggeab083972016-12-28 15:31:46 -05003499 aom_reader *r) {
3500 daala_dec->r = r;
Yushin Cho77bba8d2016-11-04 16:36:56 -07003501
Yushin Cho70669122016-12-08 09:53:14 -10003502  // TODO(yushin): activity masking info needs to be signaled by the bitstream
3503 daala_dec->use_activity_masking = AV1_PVQ_ENABLE_ACTIVITY_MASKING;
3504
3505 if (daala_dec->use_activity_masking)
3506 daala_dec->qm = OD_HVS_QM;
3507 else
3508 daala_dec->qm = OD_FLAT_QM;
Yushin Cho77bba8d2016-11-04 16:36:56 -07003509
3510 od_init_qm(daala_dec->state.qm, daala_dec->state.qm_inv,
3511 daala_dec->qm == OD_HVS_QM ? OD_QM8_Q4_HVS : OD_QM8_Q4_FLAT);
Yushin Cho70669122016-12-08 09:53:14 -10003512
3513 if (daala_dec->use_activity_masking) {
3514 int pli;
3515 int use_masking = daala_dec->use_activity_masking;
3516 int segment_id = 0;
3517 int qindex = av1_get_qindex(&cm->seg, segment_id, cm->base_qindex);
3518
3519 for (pli = 0; pli < MAX_MB_PLANE; pli++) {
3520 int i;
3521 int q;
3522
3523 q = qindex;
3524 if (q <= OD_DEFAULT_QMS[use_masking][0][pli].interp_q << OD_COEFF_SHIFT) {
3525 od_interp_qm(&daala_dec->state.pvq_qm_q4[pli][0], q,
3526 &OD_DEFAULT_QMS[use_masking][0][pli], NULL);
3527 } else {
3528 i = 0;
3529 while (OD_DEFAULT_QMS[use_masking][i + 1][pli].qm_q4 != NULL &&
3530 q > OD_DEFAULT_QMS[use_masking][i + 1][pli].interp_q
3531 << OD_COEFF_SHIFT) {
3532 i++;
3533 }
3534 od_interp_qm(&daala_dec->state.pvq_qm_q4[pli][0], q,
3535 &OD_DEFAULT_QMS[use_masking][i][pli],
3536 &OD_DEFAULT_QMS[use_masking][i + 1][pli]);
3537 }
3538 }
3539 }
Yushin Cho77bba8d2016-11-04 16:36:56 -07003540}
Yushin Cho70669122016-12-08 09:53:14 -10003541#endif // CONFIG_PVQ
Yushin Cho77bba8d2016-11-04 16:36:56 -07003542
David Barker5c06a642017-08-18 13:18:16 +01003543#if CONFIG_LOOPFILTERING_ACROSS_TILES
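// Sets up the tile boundary information needed when loop filtering across tile
// boundaries is disabled; does nothing for empty tiles.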
Yi Luo10e23002017-07-31 11:54:43 -07003544static void dec_setup_across_tile_boundary_info(
3545 const AV1_COMMON *const cm, const TileInfo *const tile_info) {
Frederic Barbier94e38562017-08-16 14:38:48 +02003546 if (tile_info->mi_row_start >= tile_info->mi_row_end ||
3547 tile_info->mi_col_start >= tile_info->mi_col_end)
3548 return;
3549
David Barker5c06a642017-08-18 13:18:16 +01003550 if (!cm->loop_filter_across_tiles_enabled) {
Yi Luo10e23002017-07-31 11:54:43 -07003551 av1_setup_across_tile_boundary_info(cm, tile_info);
3552 }
3553}
David Barker5c06a642017-08-18 13:18:16 +01003554#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07003555
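// Decodes all tiles of the frame in the calling thread, running the loop
// filter either per tile row or on the whole frame depending on the build
// configuration. Returns a pointer just past the last tile's data.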
Yaowu Xuf883b422016-08-30 14:01:10 -07003556static const uint8_t *decode_tiles(AV1Decoder *pbi, const uint8_t *data,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003557 const uint8_t *data_end) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003558 AV1_COMMON *const cm = &pbi->common;
3559 const AVxWorkerInterface *const winterface = aom_get_worker_interface();
Yaowu Xuc27fc142016-08-22 16:08:15 -07003560 const int tile_cols = cm->tile_cols;
3561 const int tile_rows = cm->tile_rows;
3562 const int n_tiles = tile_cols * tile_rows;
clang-format67948d32016-09-07 22:40:40 -07003563 TileBufferDec(*const tile_buffers)[MAX_TILE_COLS] = pbi->tile_buffers;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003564#if CONFIG_EXT_TILE
Yaowu Xuf883b422016-08-30 14:01:10 -07003565 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003566 const int single_row = pbi->dec_tile_row >= 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07003567 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003568 const int single_col = pbi->dec_tile_col >= 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003569#endif // CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003570 int tile_rows_start;
3571 int tile_rows_end;
3572 int tile_cols_start;
3573 int tile_cols_end;
3574 int inv_col_order;
3575 int inv_row_order;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003576 int tile_row, tile_col;
3577
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003578#if CONFIG_EXT_TILE
3579 if (cm->large_scale_tile) {
3580 tile_rows_start = single_row ? dec_tile_row : 0;
3581 tile_rows_end = single_row ? dec_tile_row + 1 : tile_rows;
3582 tile_cols_start = single_col ? dec_tile_col : 0;
3583 tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
3584 inv_col_order = pbi->inv_tile_order && !single_col;
3585 inv_row_order = pbi->inv_tile_order && !single_row;
3586 } else {
3587#endif // CONFIG_EXT_TILE
3588 tile_rows_start = 0;
3589 tile_rows_end = tile_rows;
3590 tile_cols_start = 0;
3591 tile_cols_end = tile_cols;
3592 inv_col_order = pbi->inv_tile_order;
3593 inv_row_order = pbi->inv_tile_order;
3594#if CONFIG_EXT_TILE
3595 }
3596#endif // CONFIG_EXT_TILE
3597
Yaowu Xuc27fc142016-08-22 16:08:15 -07003598 if (cm->lf.filter_level && !cm->skip_loop_filter &&
3599 pbi->lf_worker.data1 == NULL) {
3600 CHECK_MEM_ERROR(cm, pbi->lf_worker.data1,
Yaowu Xuf883b422016-08-30 14:01:10 -07003601 aom_memalign(32, sizeof(LFWorkerData)));
3602 pbi->lf_worker.hook = (AVxWorkerHook)av1_loop_filter_worker;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003603 if (pbi->max_threads > 1 && !winterface->reset(&pbi->lf_worker)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003604 aom_internal_error(&cm->error, AOM_CODEC_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003605 "Loop filter thread creation failed");
3606 }
3607 }
3608
3609 if (cm->lf.filter_level && !cm->skip_loop_filter) {
3610 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
3611 // Be sure to sync as we might be resuming after a failed frame decode.
3612 winterface->sync(&pbi->lf_worker);
Yaowu Xuf883b422016-08-30 14:01:10 -07003613 av1_loop_filter_data_reset(lf_data, get_frame_new_buffer(cm), cm,
3614 pbi->mb.plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003615 }
3616
3617 assert(tile_rows <= MAX_TILE_ROWS);
3618 assert(tile_cols <= MAX_TILE_COLS);
3619
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003620#if CONFIG_EXT_TILE
3621 if (cm->large_scale_tile)
3622 get_ls_tile_buffers(pbi, data, data_end, tile_buffers);
3623 else
3624#endif // CONFIG_EXT_TILE
3625 get_tile_buffers(pbi, data, data_end, tile_buffers);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003626
3627 if (pbi->tile_data == NULL || n_tiles != pbi->allocated_tiles) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003628 aom_free(pbi->tile_data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003629 CHECK_MEM_ERROR(cm, pbi->tile_data,
Yaowu Xuf883b422016-08-30 14:01:10 -07003630 aom_memalign(32, n_tiles * (sizeof(*pbi->tile_data))));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003631 pbi->allocated_tiles = n_tiles;
3632 }
Michael Bebenita6048d052016-08-25 14:40:54 -07003633#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003634 if (pbi->acct_enabled) {
3635 aom_accounting_reset(&pbi->accounting);
3636 }
Michael Bebenita6048d052016-08-25 14:40:54 -07003637#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003638 // Load all tile information into tile_data.
3639 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
3640 for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
3641 const TileBufferDec *const buf = &tile_buffers[tile_row][tile_col];
3642 TileData *const td = pbi->tile_data + tile_cols * tile_row + tile_col;
3643
3644 td->cm = cm;
3645 td->xd = pbi->mb;
3646 td->xd.corrupted = 0;
3647 td->xd.counts =
3648 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD
3649 ? &cm->counts
3650 : NULL;
Yaowu Xuf883b422016-08-30 14:01:10 -07003651 av1_zero(td->dqcoeff);
Yushin Cho77bba8d2016-11-04 16:36:56 -07003652#if CONFIG_PVQ
Yaowu Xud6ea71c2016-11-07 10:24:14 -08003653 av1_zero(td->pvq_ref_coeff);
Yushin Cho77bba8d2016-11-04 16:36:56 -07003654#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07003655 av1_tile_init(&td->xd.tile, td->cm, tile_row, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003656 setup_bool_decoder(buf->data, data_end, buf->size, &cm->error,
Alex Converseeb780e72016-12-13 12:46:41 -08003657 &td->bit_reader,
3658#if CONFIG_ANS && ANS_MAX_SYMBOLS
3659 1 << cm->ans_window_size_log2,
3660#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
3661 pbi->decrypt_cb, pbi->decrypt_state);
Michael Bebenita6048d052016-08-25 14:40:54 -07003662#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003663 if (pbi->acct_enabled) {
David Barkerd971f402016-10-25 13:52:07 +01003664 td->bit_reader.accounting = &pbi->accounting;
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003665 } else {
David Barkerd971f402016-10-25 13:52:07 +01003666 td->bit_reader.accounting = NULL;
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003667 }
Michael Bebenita6048d052016-08-25 14:40:54 -07003668#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07003669 av1_init_macroblockd(cm, &td->xd,
3670#if CONFIG_PVQ
3671 td->pvq_ref_coeff,
3672#endif
Luc Trudeauf8164152017-04-11 16:20:51 -04003673#if CONFIG_CFL
3674 &td->cfl,
3675#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07003676 td->dqcoeff);
Yushin Choc49ef3a2017-03-13 17:27:25 -07003677
Thomas Daviesf77d4ad2017-01-10 18:55:42 +00003678 // Initialise the tile context from the frame context
3679 td->tctx = *cm->fc;
3680 td->xd.tile_ctx = &td->tctx;
Yushin Choc49ef3a2017-03-13 17:27:25 -07003681
3682#if CONFIG_PVQ
3683 daala_dec_init(cm, &td->xd.daala_dec, &td->bit_reader);
3684 td->xd.daala_dec.state.adapt = &td->tctx.pvq_context;
3685#endif
3686
Yaowu Xuc27fc142016-08-22 16:08:15 -07003687 td->xd.plane[0].color_index_map = td->color_index_map[0];
3688 td->xd.plane[1].color_index_map = td->color_index_map[1];
Sarah Parker5c6744b2017-08-25 17:27:45 -07003689#if CONFIG_MRC_TX
3690 td->xd.mrc_mask = td->mrc_mask;
3691#endif // CONFIG_MRC_TX
Yaowu Xuc27fc142016-08-22 16:08:15 -07003692 }
3693 }
3694
3695 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
3696 const int row = inv_row_order ? tile_rows - 1 - tile_row : tile_row;
3697 int mi_row = 0;
3698 TileInfo tile_info;
3699
Yaowu Xuf883b422016-08-30 14:01:10 -07003700 av1_tile_set_row(&tile_info, cm, row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003701
3702 for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
3703 const int col = inv_col_order ? tile_cols - 1 - tile_col : tile_col;
3704 TileData *const td = pbi->tile_data + tile_cols * row + col;
Michael Bebenita6048d052016-08-25 14:40:54 -07003705#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003706 if (pbi->acct_enabled) {
David Barkerd971f402016-10-25 13:52:07 +01003707 td->bit_reader.accounting->last_tell_frac =
3708 aom_reader_tell_frac(&td->bit_reader);
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003709 }
Michael Bebenita6048d052016-08-25 14:40:54 -07003710#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003711
Yaowu Xuf883b422016-08-30 14:01:10 -07003712 av1_tile_set_col(&tile_info, cm, col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003713
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003714#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu73126c02017-02-08 22:37:47 -08003715 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
3716 if (!cm->dependent_horz_tiles || tile_row == 0 ||
3717 tile_info.tg_horz_boundary) {
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003718 av1_zero_above_context(cm, tile_info.mi_col_start,
3719 tile_info.mi_col_end);
3720 }
3721#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003722 av1_zero_above_context(cm, tile_info.mi_col_start, tile_info.mi_col_end);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003723#endif
Rupert Swarbrick6c545212017-09-01 17:17:25 +01003724#if CONFIG_LOOP_RESTORATION
3725 for (int p = 0; p < MAX_MB_PLANE; ++p) {
3726 set_default_wiener(td->xd.wiener_info + p);
3727 set_default_sgrproj(td->xd.sgrproj_info + p);
3728 }
3729#endif // CONFIG_LOOP_RESTORATION
Yaowu Xuc27fc142016-08-22 16:08:15 -07003730
David Barker5c06a642017-08-18 13:18:16 +01003731#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07003732 dec_setup_across_tile_boundary_info(cm, &tile_info);
David Barker5c06a642017-08-18 13:18:16 +01003733#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luof190a162017-07-13 16:16:56 -07003734
Yaowu Xuc27fc142016-08-22 16:08:15 -07003735 for (mi_row = tile_info.mi_row_start; mi_row < tile_info.mi_row_end;
3736 mi_row += cm->mib_size) {
3737 int mi_col;
3738
Yaowu Xuf883b422016-08-30 14:01:10 -07003739 av1_zero_left_context(&td->xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003740
3741 for (mi_col = tile_info.mi_col_start; mi_col < tile_info.mi_col_end;
3742 mi_col += cm->mib_size) {
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07003743#if CONFIG_NCOBMC_ADAPT_WEIGHT
3744 alloc_ncobmc_pred_buffer(&td->xd);
3745 set_sb_mi_boundaries(cm, &td->xd, mi_row, mi_col);
3746#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003747 decode_partition(pbi, &td->xd,
3748#if CONFIG_SUPERTX
3749 0,
3750#endif // CONFIG_SUPERTX
Jingning Hanea10ad42017-07-20 11:19:08 -07003751 mi_row, mi_col, &td->bit_reader, cm->sb_size);
Wei-Ting Lin3122b7d2017-08-30 17:26:58 -07003752#if NC_MODE_INFO && CONFIG_MOTION_VAR
Yue Chen9ab6d712017-01-12 15:50:46 -08003753 detoken_and_recon_sb(pbi, &td->xd, mi_row, mi_col, &td->bit_reader,
3754 cm->sb_size);
3755#endif
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07003756#if CONFIG_NCOBMC_ADAPT_WEIGHT
3757 free_ncobmc_pred_buffer(&td->xd);
3758#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003759 }
Angie Chiangd0916d92017-03-10 17:54:18 -08003760 aom_merge_corrupted_flag(&pbi->mb.corrupted, td->xd.corrupted);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003761 if (pbi->mb.corrupted)
Yaowu Xuf883b422016-08-30 14:01:10 -07003762 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003763 "Failed to decode tile data");
Yaowu Xuc27fc142016-08-22 16:08:15 -07003764 }
3765 }
3766
3767 assert(mi_row > 0);
3768
Ryan Lei6f8c1a72016-10-26 10:52:12 -07003769// When parallel deblocking is enabled, deblocking should not
3770// be interleaved with decoding. Instead, deblocking should be done
3771// after the entire frame is decoded.
Jingning Han52ece882017-04-07 14:58:25 -07003772#if !CONFIG_VAR_TX && !CONFIG_PARALLEL_DEBLOCKING && !CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07003773 // Loopfilter one tile row.
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08003774  // Note: If out-of-order tile decoding is used (for example, inv_row_order
3775  // = 1), the loopfiltering has to be done after all tile rows are decoded.
3776 if (!inv_row_order && cm->lf.filter_level && !cm->skip_loop_filter) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003777 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
Yaowu Xuf883b422016-08-30 14:01:10 -07003778 const int lf_start = AOMMAX(0, tile_info.mi_row_start - cm->mib_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003779 const int lf_end = tile_info.mi_row_end - cm->mib_size;
3780
3781 // Delay the loopfilter if the first tile row is only
3782 // a single superblock high.
3783 if (lf_end <= 0) continue;
3784
3785      // The bottom tile row is loop filtered after the whole frame is decoded.
3786 if (tile_info.mi_row_end >= cm->mi_rows) continue;
3787
3788 winterface->sync(&pbi->lf_worker);
3789 lf_data->start = lf_start;
3790 lf_data->stop = lf_end;
3791 if (pbi->max_threads > 1) {
3792 winterface->launch(&pbi->lf_worker);
3793 } else {
3794 winterface->execute(&pbi->lf_worker);
3795 }
3796 }
Ryan Lei6f8c1a72016-10-26 10:52:12 -07003797#endif // !CONFIG_VAR_TX && !CONFIG_PARALLEL_DEBLOCKING && !CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07003798
3799    // After loopfiltering, the last 7 rows of pixels in each superblock row may
3800 // still be changed by the longest loopfilter of the next superblock row.
3801 if (cm->frame_parallel_decode)
Yaowu Xuf883b422016-08-30 14:01:10 -07003802 av1_frameworker_broadcast(pbi->cur_buf, mi_row << cm->mib_size_log2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003803 }
3804
Jingning Han52ece882017-04-07 14:58:25 -07003805#if CONFIG_VAR_TX || CONFIG_CB4X4
Cheng Chene94df5c2017-07-19 17:25:33 -07003806// Loopfilter the whole frame.
Cheng Chen13fc8192017-08-19 11:49:28 -07003807#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07003808 if (cm->lf.filter_level[0] || cm->lf.filter_level[1]) {
Cheng Chene94df5c2017-07-19 17:25:33 -07003809 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
Cheng Chen179479f2017-08-04 10:56:39 -07003810 cm->lf.filter_level[0], cm->lf.filter_level[1], 0, 0);
Cheng Chene94df5c2017-07-19 17:25:33 -07003811 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
Cheng Chen179479f2017-08-04 10:56:39 -07003812 cm->lf.filter_level_u, cm->lf.filter_level_u, 1, 0);
Cheng Chene94df5c2017-07-19 17:25:33 -07003813 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
Cheng Chen179479f2017-08-04 10:56:39 -07003814 cm->lf.filter_level_v, cm->lf.filter_level_v, 2, 0);
Cheng Chene94df5c2017-07-19 17:25:33 -07003815 }
3816#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003817 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
3818 cm->lf.filter_level, 0, 0);
Cheng Chen13fc8192017-08-19 11:49:28 -07003819#endif // CONFIG_LOOPFILTER_LEVEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003820#else
Ryan Lei6f8c1a72016-10-26 10:52:12 -07003821#if CONFIG_PARALLEL_DEBLOCKING
3822  // Loopfilter all rows in the frame.
3823 if (cm->lf.filter_level && !cm->skip_loop_filter) {
3824 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
3825 winterface->sync(&pbi->lf_worker);
3826 lf_data->start = 0;
3827 lf_data->stop = cm->mi_rows;
3828 winterface->execute(&pbi->lf_worker);
3829 }
3830#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003831 // Loopfilter remaining rows in the frame.
3832 if (cm->lf.filter_level && !cm->skip_loop_filter) {
3833 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
3834 winterface->sync(&pbi->lf_worker);
3835 lf_data->start = lf_data->stop;
3836 lf_data->stop = cm->mi_rows;
3837 winterface->execute(&pbi->lf_worker);
3838 }
Ryan Lei6f8c1a72016-10-26 10:52:12 -07003839#endif // CONFIG_PARALLEL_DEBLOCKING
Yaowu Xuc27fc142016-08-22 16:08:15 -07003840#endif // CONFIG_VAR_TX || CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07003841 if (cm->frame_parallel_decode)
Yaowu Xuf883b422016-08-30 14:01:10 -07003842 av1_frameworker_broadcast(pbi->cur_buf, INT_MAX);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003843
3844#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003845 if (cm->large_scale_tile) {
3846 if (n_tiles == 1) {
3847#if CONFIG_ANS
3848 return data_end;
3849#else
3850 // Find the end of the single tile buffer
3851 return aom_reader_find_end(&pbi->tile_data->bit_reader);
3852#endif // CONFIG_ANS
3853 } else {
3854 // Return the end of the last tile buffer
3855 return tile_buffers[tile_rows - 1][tile_cols - 1].raw_data_end;
3856 }
3857 } else {
3858#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003859#if CONFIG_ANS
3860 return data_end;
3861#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003862 {
3863 // Get last tile data.
3864 TileData *const td = pbi->tile_data + tile_cols * tile_rows - 1;
Yaowu Xuf883b422016-08-30 14:01:10 -07003865 return aom_reader_find_end(&td->bit_reader);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003866 }
3867#endif // CONFIG_ANS
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003868#if CONFIG_EXT_TILE
3869 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003870#endif // CONFIG_EXT_TILE
3871}
3872
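// Worker hook for multi-threaded decoding: decodes a single tile and returns
// 0 if the tile data was found to be corrupted.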
3873static int tile_worker_hook(TileWorkerData *const tile_data,
3874 const TileInfo *const tile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003875 AV1Decoder *const pbi = tile_data->pbi;
3876 const AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003877 int mi_row, mi_col;
3878
3879 if (setjmp(tile_data->error_info.jmp)) {
3880 tile_data->error_info.setjmp = 0;
Angie Chiangd0916d92017-03-10 17:54:18 -08003881 aom_merge_corrupted_flag(&tile_data->xd.corrupted, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003882 return 0;
3883 }
3884
3885 tile_data->error_info.setjmp = 1;
3886 tile_data->xd.error_info = &tile_data->error_info;
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003887#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu73126c02017-02-08 22:37:47 -08003888 if (!cm->dependent_horz_tiles || tile->tg_horz_boundary) {
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003889 av1_zero_above_context(&pbi->common, tile->mi_col_start, tile->mi_col_end);
3890 }
3891#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003892 av1_zero_above_context(&pbi->common, tile->mi_col_start, tile->mi_col_end);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003893#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003894
3895 for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
3896 mi_row += cm->mib_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003897 av1_zero_left_context(&tile_data->xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003898
3899 for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
3900 mi_col += cm->mib_size) {
3901 decode_partition(pbi, &tile_data->xd,
3902#if CONFIG_SUPERTX
3903 0,
3904#endif
Jingning Hanea10ad42017-07-20 11:19:08 -07003905 mi_row, mi_col, &tile_data->bit_reader, cm->sb_size);
Wei-Ting Lin3122b7d2017-08-30 17:26:58 -07003906#if NC_MODE_INFO && CONFIG_MOTION_VAR
Yue Chen9ab6d712017-01-12 15:50:46 -08003907 detoken_and_recon_sb(pbi, &tile_data->xd, mi_row, mi_col,
3908 &tile_data->bit_reader, cm->sb_size);
3909#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003910 }
3911 }
3912 return !tile_data->xd.corrupted;
3913}
3914
3915// Sorts tile buffers in descending order of size.
3916static int compare_tile_buffers(const void *a, const void *b) {
3917 const TileBufferDec *const buf1 = (const TileBufferDec *)a;
3918 const TileBufferDec *const buf2 = (const TileBufferDec *)b;
3919 return (int)(buf2->size - buf1->size);
3920}
3921
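// Multi-threaded tile decoding: assigns tile columns to worker threads one
// tile row at a time (largest tile to the main thread) and then accumulates
// the per-thread frame counts. Returns a pointer just past the tile data.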
Yaowu Xuf883b422016-08-30 14:01:10 -07003922static const uint8_t *decode_tiles_mt(AV1Decoder *pbi, const uint8_t *data,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003923 const uint8_t *data_end) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003924 AV1_COMMON *const cm = &pbi->common;
3925 const AVxWorkerInterface *const winterface = aom_get_worker_interface();
Yaowu Xuc27fc142016-08-22 16:08:15 -07003926 const int tile_cols = cm->tile_cols;
3927 const int tile_rows = cm->tile_rows;
Yaowu Xuf883b422016-08-30 14:01:10 -07003928 const int num_workers = AOMMIN(pbi->max_threads & ~1, tile_cols);
clang-format67948d32016-09-07 22:40:40 -07003929 TileBufferDec(*const tile_buffers)[MAX_TILE_COLS] = pbi->tile_buffers;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003930#if CONFIG_EXT_TILE
Yaowu Xuf883b422016-08-30 14:01:10 -07003931 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003932 const int single_row = pbi->dec_tile_row >= 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07003933 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003934 const int single_col = pbi->dec_tile_col >= 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003935#endif // CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003936 int tile_rows_start;
3937 int tile_rows_end;
3938 int tile_cols_start;
3939 int tile_cols_end;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003940 int tile_row, tile_col;
3941 int i;
3942
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003943#if CONFIG_EXT_TILE
3944 if (cm->large_scale_tile) {
3945 tile_rows_start = single_row ? dec_tile_row : 0;
3946 tile_rows_end = single_row ? dec_tile_row + 1 : tile_rows;
3947 tile_cols_start = single_col ? dec_tile_col : 0;
3948 tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
3949 } else {
3950#endif // CONFIG_EXT_TILE
3951 tile_rows_start = 0;
3952 tile_rows_end = tile_rows;
3953 tile_cols_start = 0;
3954 tile_cols_end = tile_cols;
3955#if CONFIG_EXT_TILE
3956 }
3957#endif // CONFIG_EXT_TILE
3958
3959#if !CONFIG_ANS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003960 int final_worker = -1;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003961#endif // !CONFIG_ANS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003962
3963 assert(tile_rows <= MAX_TILE_ROWS);
3964 assert(tile_cols <= MAX_TILE_COLS);
3965
3966 assert(tile_cols * tile_rows > 1);
3967
Yaowu Xuc27fc142016-08-22 16:08:15 -07003968 // TODO(jzern): See if we can remove the restriction of passing in max
3969 // threads to the decoder.
3970 if (pbi->num_tile_workers == 0) {
3971 const int num_threads = pbi->max_threads & ~1;
3972 CHECK_MEM_ERROR(cm, pbi->tile_workers,
Yaowu Xuf883b422016-08-30 14:01:10 -07003973 aom_malloc(num_threads * sizeof(*pbi->tile_workers)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003974 // Ensure tile data offsets will be properly aligned. This may fail on
3975 // platforms without DECLARE_ALIGNED().
3976 assert((sizeof(*pbi->tile_worker_data) % 16) == 0);
3977 CHECK_MEM_ERROR(
3978 cm, pbi->tile_worker_data,
Yaowu Xuf883b422016-08-30 14:01:10 -07003979 aom_memalign(32, num_threads * sizeof(*pbi->tile_worker_data)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003980 CHECK_MEM_ERROR(cm, pbi->tile_worker_info,
Yaowu Xuf883b422016-08-30 14:01:10 -07003981 aom_malloc(num_threads * sizeof(*pbi->tile_worker_info)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003982 for (i = 0; i < num_threads; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003983 AVxWorker *const worker = &pbi->tile_workers[i];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003984 ++pbi->num_tile_workers;
3985
3986 winterface->init(worker);
3987 if (i < num_threads - 1 && !winterface->reset(worker)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003988 aom_internal_error(&cm->error, AOM_CODEC_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003989 "Tile decoder thread creation failed");
3990 }
3991 }
3992 }
3993
3994 // Reset tile decoding hook
3995 for (i = 0; i < num_workers; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003996 AVxWorker *const worker = &pbi->tile_workers[i];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003997 winterface->sync(worker);
Yaowu Xuf883b422016-08-30 14:01:10 -07003998 worker->hook = (AVxWorkerHook)tile_worker_hook;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003999 worker->data1 = &pbi->tile_worker_data[i];
4000 worker->data2 = &pbi->tile_worker_info[i];
4001 }
4002
4003 // Initialize thread frame counts.
4004 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
4005 for (i = 0; i < num_workers; ++i) {
4006 TileWorkerData *const twd = (TileWorkerData *)pbi->tile_workers[i].data1;
Yaowu Xuf883b422016-08-30 14:01:10 -07004007 av1_zero(twd->counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004008 }
4009 }
4010
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004011// Load tile data into tile_buffers
4012#if CONFIG_EXT_TILE
4013 if (cm->large_scale_tile)
4014 get_ls_tile_buffers(pbi, data, data_end, tile_buffers);
4015 else
4016#endif // CONFIG_EXT_TILE
4017 get_tile_buffers(pbi, data, data_end, tile_buffers);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004018
4019 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
4020 // Sort the buffers in this tile row based on size in descending order.
4021 qsort(&tile_buffers[tile_row][tile_cols_start],
4022 tile_cols_end - tile_cols_start, sizeof(tile_buffers[0][0]),
4023 compare_tile_buffers);
4024
4025      // Rearrange the tile buffers in this tile row so that, within each group of
4026      // workers, the largest (and presumably most difficult) tile is decoded in
4027 // the main thread. This should help minimize the number of instances
4028 // where the main thread is waiting for a worker to complete.
4029 {
4030 int group_start;
4031 for (group_start = tile_cols_start; group_start < tile_cols_end;
4032 group_start += num_workers) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004033 const int group_end = AOMMIN(group_start + num_workers, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004034 const TileBufferDec largest = tile_buffers[tile_row][group_start];
4035 memmove(&tile_buffers[tile_row][group_start],
4036 &tile_buffers[tile_row][group_start + 1],
4037 (group_end - group_start - 1) * sizeof(tile_buffers[0][0]));
4038 tile_buffers[tile_row][group_end - 1] = largest;
4039 }
4040 }
4041
4042 for (tile_col = tile_cols_start; tile_col < tile_cols_end;) {
4043 // Launch workers for individual columns
4044 for (i = 0; i < num_workers && tile_col < tile_cols_end;
4045 ++i, ++tile_col) {
4046 TileBufferDec *const buf = &tile_buffers[tile_row][tile_col];
Yaowu Xuf883b422016-08-30 14:01:10 -07004047 AVxWorker *const worker = &pbi->tile_workers[i];
Yaowu Xuc27fc142016-08-22 16:08:15 -07004048 TileWorkerData *const twd = (TileWorkerData *)worker->data1;
4049 TileInfo *const tile_info = (TileInfo *)worker->data2;
4050
4051 twd->pbi = pbi;
4052 twd->xd = pbi->mb;
4053 twd->xd.corrupted = 0;
4054 twd->xd.counts =
4055 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD
4056 ? &twd->counts
4057 : NULL;
Yaowu Xuf883b422016-08-30 14:01:10 -07004058 av1_zero(twd->dqcoeff);
4059 av1_tile_init(tile_info, cm, tile_row, buf->col);
4060 av1_tile_init(&twd->xd.tile, cm, tile_row, buf->col);
Yi Luof190a162017-07-13 16:16:56 -07004061
David Barker5c06a642017-08-18 13:18:16 +01004062#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07004063 dec_setup_across_tile_boundary_info(cm, tile_info);
David Barker5c06a642017-08-18 13:18:16 +01004064#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luof190a162017-07-13 16:16:56 -07004065
Yaowu Xuc27fc142016-08-22 16:08:15 -07004066 setup_bool_decoder(buf->data, data_end, buf->size, &cm->error,
Alex Converseeb780e72016-12-13 12:46:41 -08004067 &twd->bit_reader,
4068#if CONFIG_ANS && ANS_MAX_SYMBOLS
4069 1 << cm->ans_window_size_log2,
4070#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
4071 pbi->decrypt_cb, pbi->decrypt_state);
Yushin Cho77bba8d2016-11-04 16:36:56 -07004072 av1_init_macroblockd(cm, &twd->xd,
4073#if CONFIG_PVQ
4074 twd->pvq_ref_coeff,
4075#endif
Luc Trudeauf8164152017-04-11 16:20:51 -04004076#if CONFIG_CFL
4077 &twd->cfl,
4078#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07004079 twd->dqcoeff);
4080#if CONFIG_PVQ
Nathan E. Eggeab083972016-12-28 15:31:46 -05004081 daala_dec_init(cm, &twd->xd.daala_dec, &twd->bit_reader);
Yushin Choc49ef3a2017-03-13 17:27:25 -07004082 twd->xd.daala_dec.state.adapt = &twd->tctx.pvq_context;
Yushin Cho77bba8d2016-11-04 16:36:56 -07004083#endif
Yushin Chod767beb2017-03-24 10:15:47 -07004084 // Initialise the tile context from the frame context
4085 twd->tctx = *cm->fc;
4086 twd->xd.tile_ctx = &twd->tctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004087 twd->xd.plane[0].color_index_map = twd->color_index_map[0];
4088 twd->xd.plane[1].color_index_map = twd->color_index_map[1];
4089
4090 worker->had_error = 0;
4091 if (i == num_workers - 1 || tile_col == tile_cols_end - 1) {
4092 winterface->execute(worker);
4093 } else {
4094 winterface->launch(worker);
4095 }
4096
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004097#if !CONFIG_ANS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004098 if (tile_row == tile_rows - 1 && buf->col == tile_cols - 1) {
4099 final_worker = i;
4100 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004101#endif // !CONFIG_ANS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004102 }
4103
4104 // Sync all workers
4105 for (; i > 0; --i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004106 AVxWorker *const worker = &pbi->tile_workers[i - 1];
Yaowu Xuc27fc142016-08-22 16:08:15 -07004107 // TODO(jzern): The tile may have specific error data associated with
Yaowu Xuf883b422016-08-30 14:01:10 -07004108 // its aom_internal_error_info which could be propagated to the main
Yaowu Xuc27fc142016-08-22 16:08:15 -07004109 // info in cm. Additionally once the threads have been synced and an
4110 // error is detected, there's no point in continuing to decode tiles.
4111 pbi->mb.corrupted |= !winterface->sync(worker);
4112 }
4113 }
4114 }
4115
4116 // Accumulate thread frame counts.
4117 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
4118 for (i = 0; i < num_workers; ++i) {
4119 TileWorkerData *const twd = (TileWorkerData *)pbi->tile_workers[i].data1;
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08004120 av1_accumulate_frame_counts(&cm->counts, &twd->counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004121 }
4122 }
4123
4124#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004125 if (cm->large_scale_tile) {
4126 // Return the end of the last tile buffer
4127 return tile_buffers[tile_rows - 1][tile_cols - 1].raw_data_end;
4128 } else {
4129#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004130#if CONFIG_ANS
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004131 return data_end;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004132#else
4133 assert(final_worker != -1);
4134 {
4135 TileWorkerData *const twd =
4136 (TileWorkerData *)pbi->tile_workers[final_worker].data1;
Yaowu Xuf883b422016-08-30 14:01:10 -07004137 return aom_reader_find_end(&twd->bit_reader);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004138 }
4139#endif // CONFIG_ANS
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004140#if CONFIG_EXT_TILE
4141 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004142#endif // CONFIG_EXT_TILE
4143}
4144
4145static void error_handler(void *data) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004146 AV1_COMMON *const cm = (AV1_COMMON *)data;
4147 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME, "Truncated packet");
Yaowu Xuc27fc142016-08-22 16:08:15 -07004148}
4149
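// Reads bit depth, color space, color range and chroma subsampling from the
// uncompressed header.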
Yaowu Xuf883b422016-08-30 14:01:10 -07004150static void read_bitdepth_colorspace_sampling(AV1_COMMON *cm,
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02004151 struct aom_read_bit_buffer *rb,
4152 int allow_lowbitdepth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004153 if (cm->profile >= PROFILE_2) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004154 cm->bit_depth = aom_rb_read_bit(rb) ? AOM_BITS_12 : AOM_BITS_10;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004155 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07004156 cm->bit_depth = AOM_BITS_8;
Sebastien Alaiwan98378132017-01-04 11:23:09 +01004157 }
4158
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02004159#if CONFIG_HIGHBITDEPTH
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02004160 cm->use_highbitdepth = cm->bit_depth > AOM_BITS_8 || !allow_lowbitdepth;
James Zern91adea52017-06-15 23:27:26 -07004161#else
4162 (void)allow_lowbitdepth;
Sebastien Alaiwan98378132017-01-04 11:23:09 +01004163#endif
anorkin76fb1262017-03-22 15:12:12 -07004164#if CONFIG_COLORSPACE_HEADERS
4165 cm->color_space = aom_rb_read_literal(rb, 5);
4166 cm->transfer_function = aom_rb_read_literal(rb, 5);
4167#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004168 cm->color_space = aom_rb_read_literal(rb, 3);
anorkin76fb1262017-03-22 15:12:12 -07004169#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07004170 if (cm->color_space != AOM_CS_SRGB) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004171 // [16,235] (including xvycc) vs [0,255] range
Yaowu Xuf883b422016-08-30 14:01:10 -07004172 cm->color_range = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004173 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004174 cm->subsampling_x = aom_rb_read_bit(rb);
4175 cm->subsampling_y = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004176 if (cm->subsampling_x == 1 && cm->subsampling_y == 1)
Yaowu Xuf883b422016-08-30 14:01:10 -07004177 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004178 "4:2:0 color not supported in profile 1 or 3");
Yaowu Xuf883b422016-08-30 14:01:10 -07004179 if (aom_rb_read_bit(rb))
4180 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004181 "Reserved bit set");
4182 } else {
4183 cm->subsampling_y = cm->subsampling_x = 1;
4184 }
anorkin76fb1262017-03-22 15:12:12 -07004185#if CONFIG_COLORSPACE_HEADERS
4186 if (cm->subsampling_x == 1 && cm->subsampling_y == 1) {
4187 cm->chroma_sample_position = aom_rb_read_literal(rb, 2);
4188 }
4189#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004190 } else {
4191 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
4192 // Note if colorspace is SRGB then 4:4:4 chroma sampling is assumed.
4193 // 4:2:2 or 4:4:0 chroma sampling is not allowed.
4194 cm->subsampling_y = cm->subsampling_x = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07004195 if (aom_rb_read_bit(rb))
4196 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004197 "Reserved bit set");
4198 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07004199 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004200 "4:4:4 color not supported in profile 0 or 2");
4201 }
4202 }
4203}
4204
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004205#if CONFIG_REFERENCE_BUFFER
4206void read_sequence_header(SequenceHeader *seq_params) {
4207 /* Placeholder for actually reading from the bitstream */
4208 seq_params->frame_id_numbers_present_flag = FRAME_ID_NUMBERS_PRESENT_FLAG;
4209 seq_params->frame_id_length_minus7 = FRAME_ID_LENGTH_MINUS7;
4210 seq_params->delta_frame_id_length_minus2 = DELTA_FRAME_ID_LENGTH_MINUS2;
4211}
4212#endif
4213
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004214#if CONFIG_EXT_INTER
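// Reads the frame-level flags that enable the compound prediction tools
// (interintra and masked/wedge compound), where applicable.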
4215static void read_compound_tools(AV1_COMMON *cm,
4216 struct aom_read_bit_buffer *rb) {
4217 (void)cm;
4218 (void)rb;
4219#if CONFIG_INTERINTRA
4220 if (!frame_is_intra_only(cm) && cm->reference_mode != COMPOUND_REFERENCE) {
4221 cm->allow_interintra_compound = aom_rb_read_bit(rb);
4222 } else {
4223 cm->allow_interintra_compound = 0;
4224 }
4225#endif // CONFIG_INTERINTRA
4226#if CONFIG_WEDGE || CONFIG_COMPOUND_SEGMENT
Zoe Liu85b66462017-04-20 14:28:19 -07004227#if CONFIG_COMPOUND_SINGLEREF
4228 if (!frame_is_intra_only(cm)) {
4229#else // !CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004230 if (!frame_is_intra_only(cm) && cm->reference_mode != SINGLE_REFERENCE) {
Zoe Liu85b66462017-04-20 14:28:19 -07004231#endif // CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004232 cm->allow_masked_compound = aom_rb_read_bit(rb);
4233 } else {
4234 cm->allow_masked_compound = 0;
4235 }
4236#endif // CONFIG_WEDGE || CONFIG_COMPOUND_SEGMENT
4237}
4238#endif // CONFIG_EXT_INTER
4239
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07004240#if CONFIG_VAR_REFS
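// Marks reference buffers that duplicate an earlier reference as invalid; see
// the TODO below for the special handling of ALTREF_FRAME.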
4241static void check_valid_ref_frames(AV1_COMMON *cm) {
4242 MV_REFERENCE_FRAME ref_frame;
4243  // TODO(zoeliu): Handle ALTREF_FRAME the same way as the other reference
4244  // frames: the current encoder invalidates ALTREF when ALTREF is the same as
4245  // LAST, but invalidates all the other references when they are the same as
4246  // ALTREF.
4247 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
4248 RefBuffer *const ref_buf = &cm->frame_refs[ref_frame - LAST_FRAME];
4249
4250 if (ref_buf->idx != INVALID_IDX) {
4251 ref_buf->is_valid = 1;
4252
4253 MV_REFERENCE_FRAME ref;
4254 for (ref = LAST_FRAME; ref < ref_frame; ++ref) {
4255 RefBuffer *const buf = &cm->frame_refs[ref - LAST_FRAME];
4256 if (buf->is_valid && buf->idx == ref_buf->idx) {
4257 if (ref_frame != ALTREF_FRAME || ref == LAST_FRAME) {
4258 ref_buf->is_valid = 0;
4259 break;
4260 } else {
4261 buf->is_valid = 0;
4262 }
4263 }
4264 }
4265 } else {
4266 ref_buf->is_valid = 0;
4267 }
4268 }
4269}
4270#endif // CONFIG_VAR_REFS
4271
Sarah Parker3e579a62017-08-23 16:53:20 -07004272#if CONFIG_GLOBAL_MOTION
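// Reads the global motion model for one reference frame, coded differentially
// against the corresponding model of the previous frame. Returns 0 if the
// resulting warp has invalid shear parameters.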
4273static int read_global_motion_params(WarpedMotionParams *params,
4274 WarpedMotionParams *ref_params,
4275 struct aom_read_bit_buffer *rb,
4276 int allow_hp) {
4277 TransformationType type = aom_rb_read_bit(rb);
4278 if (type != IDENTITY) {
4279#if GLOBAL_TRANS_TYPES > 4
4280 type += aom_rb_read_literal(rb, GLOBAL_TYPE_BITS);
4281#else
4282 if (aom_rb_read_bit(rb))
4283 type = ROTZOOM;
4284 else
4285 type = aom_rb_read_bit(rb) ? TRANSLATION : AFFINE;
4286#endif // GLOBAL_TRANS_TYPES > 4
4287 }
4288
4289 int trans_bits;
4290 int trans_dec_factor;
4291 int trans_prec_diff;
4292 set_default_warp_params(params);
4293 params->wmtype = type;
4294 switch (type) {
4295 case HOMOGRAPHY:
4296 case HORTRAPEZOID:
4297 case VERTRAPEZOID:
4298 if (type != HORTRAPEZOID)
4299 params->wmmat[6] =
4300 aom_rb_read_signed_primitive_refsubexpfin(
4301 rb, GM_ROW3HOMO_MAX + 1, SUBEXPFIN_K,
4302 (ref_params->wmmat[6] >> GM_ROW3HOMO_PREC_DIFF)) *
4303 GM_ROW3HOMO_DECODE_FACTOR;
4304 if (type != VERTRAPEZOID)
4305 params->wmmat[7] =
4306 aom_rb_read_signed_primitive_refsubexpfin(
4307 rb, GM_ROW3HOMO_MAX + 1, SUBEXPFIN_K,
4308 (ref_params->wmmat[7] >> GM_ROW3HOMO_PREC_DIFF)) *
4309 GM_ROW3HOMO_DECODE_FACTOR;
4310 case AFFINE:
4311 case ROTZOOM:
4312 params->wmmat[2] = aom_rb_read_signed_primitive_refsubexpfin(
4313 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4314 (ref_params->wmmat[2] >> GM_ALPHA_PREC_DIFF) -
4315 (1 << GM_ALPHA_PREC_BITS)) *
4316 GM_ALPHA_DECODE_FACTOR +
4317 (1 << WARPEDMODEL_PREC_BITS);
4318 if (type != VERTRAPEZOID)
4319 params->wmmat[3] = aom_rb_read_signed_primitive_refsubexpfin(
4320 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4321 (ref_params->wmmat[3] >> GM_ALPHA_PREC_DIFF)) *
4322 GM_ALPHA_DECODE_FACTOR;
4323 if (type >= AFFINE) {
4324 if (type != HORTRAPEZOID)
4325 params->wmmat[4] = aom_rb_read_signed_primitive_refsubexpfin(
4326 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4327 (ref_params->wmmat[4] >> GM_ALPHA_PREC_DIFF)) *
4328 GM_ALPHA_DECODE_FACTOR;
4329 params->wmmat[5] = aom_rb_read_signed_primitive_refsubexpfin(
4330 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4331 (ref_params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
4332 (1 << GM_ALPHA_PREC_BITS)) *
4333 GM_ALPHA_DECODE_FACTOR +
4334 (1 << WARPEDMODEL_PREC_BITS);
4335 } else {
4336 params->wmmat[4] = -params->wmmat[3];
4337 params->wmmat[5] = params->wmmat[2];
4338 }
4339 // fallthrough intended
4340 case TRANSLATION:
4341 trans_bits = (type == TRANSLATION) ? GM_ABS_TRANS_ONLY_BITS - !allow_hp
4342 : GM_ABS_TRANS_BITS;
4343 trans_dec_factor = (type == TRANSLATION)
4344 ? GM_TRANS_ONLY_DECODE_FACTOR * (1 << !allow_hp)
4345 : GM_TRANS_DECODE_FACTOR;
4346 trans_prec_diff = (type == TRANSLATION)
4347 ? GM_TRANS_ONLY_PREC_DIFF + !allow_hp
4348 : GM_TRANS_PREC_DIFF;
4349 params->wmmat[0] = aom_rb_read_signed_primitive_refsubexpfin(
4350 rb, (1 << trans_bits) + 1, SUBEXPFIN_K,
4351 (ref_params->wmmat[0] >> trans_prec_diff)) *
4352 trans_dec_factor;
4353 params->wmmat[1] = aom_rb_read_signed_primitive_refsubexpfin(
4354 rb, (1 << trans_bits) + 1, SUBEXPFIN_K,
4355 (ref_params->wmmat[1] >> trans_prec_diff)) *
4356 trans_dec_factor;
4357 case IDENTITY: break;
4358 default: assert(0);
4359 }
4360 if (params->wmtype <= AFFINE) {
4361 int good_shear_params = get_shear_params(params);
4362 if (!good_shear_params) return 0;
4363 }
4364
4365 return 1;
4366}
4367
4368static void read_global_motion(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
4369 int frame;
4370 for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
4371 int good_params = read_global_motion_params(
4372 &cm->global_motion[frame], &cm->prev_frame->global_motion[frame], rb,
4373 cm->allow_high_precision_mv);
4374 if (!good_params)
4375 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
4376 "Invalid shear parameters for global motion.");
4377
4378 // TODO(sarahparker, debargha): The logic in the commented out code below
4379 // does not work currently and causes mismatches when resize is on. Fix it
4380 // before turning the optimization back on.
4381 /*
4382 YV12_BUFFER_CONFIG *ref_buf = get_ref_frame(cm, frame);
4383 if (cm->width == ref_buf->y_crop_width &&
4384 cm->height == ref_buf->y_crop_height) {
4385 read_global_motion_params(&cm->global_motion[frame],
4386 &cm->prev_frame->global_motion[frame], rb,
4387 cm->allow_high_precision_mv);
4388 } else {
4389 set_default_warp_params(&cm->global_motion[frame]);
4390 }
4391 */
4392 /*
4393 printf("Dec Ref %d [%d/%d]: %d %d %d %d\n",
4394 frame, cm->current_video_frame, cm->show_frame,
4395 cm->global_motion[frame].wmmat[0],
4396 cm->global_motion[frame].wmmat[1],
4397 cm->global_motion[frame].wmmat[2],
4398 cm->global_motion[frame].wmmat[3]);
4399 */
4400 }
4401 memcpy(cm->cur_frame->global_motion, cm->global_motion,
4402 TOTAL_REFS_PER_FRAME * sizeof(WarpedMotionParams));
4403}
4404#endif // CONFIG_GLOBAL_MOTION
4405
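// Parses the uncompressed frame header and sets up the frame-level decoder
// state; returns the size of the compressed header partition that follows.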
Yaowu Xuf883b422016-08-30 14:01:10 -07004406static size_t read_uncompressed_header(AV1Decoder *pbi,
4407 struct aom_read_bit_buffer *rb) {
4408 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004409 MACROBLOCKD *const xd = &pbi->mb;
4410 BufferPool *const pool = cm->buffer_pool;
4411 RefCntBuffer *const frame_bufs = pool->frame_bufs;
4412 int i, mask, ref_index = 0;
4413 size_t sz;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004414
4415#if CONFIG_REFERENCE_BUFFER
4416 /* TODO: Move outside frame loop or inside key-frame branch */
4417 read_sequence_header(&pbi->seq_params);
4418#endif
4419
Yaowu Xuc27fc142016-08-22 16:08:15 -07004420 cm->last_frame_type = cm->frame_type;
4421 cm->last_intra_only = cm->intra_only;
4422
4423#if CONFIG_EXT_REFS
4424 // NOTE: By default all coded frames to be used as a reference
4425 cm->is_reference_frame = 1;
4426#endif // CONFIG_EXT_REFS
4427
Yaowu Xuf883b422016-08-30 14:01:10 -07004428 if (aom_rb_read_literal(rb, 2) != AOM_FRAME_MARKER)
4429 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004430 "Invalid frame marker");
4431
Yaowu Xuf883b422016-08-30 14:01:10 -07004432 cm->profile = av1_read_profile(rb);
Sebastien Alaiwanb9c652a2017-05-03 15:44:28 +02004433
4434 const BITSTREAM_PROFILE MAX_SUPPORTED_PROFILE =
4435 CONFIG_HIGHBITDEPTH ? MAX_PROFILES : PROFILE_2;
4436
4437 if (cm->profile >= MAX_SUPPORTED_PROFILE)
Yaowu Xuf883b422016-08-30 14:01:10 -07004438 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004439 "Unsupported bitstream profile");
Yaowu Xuc27fc142016-08-22 16:08:15 -07004440
Yunqing Wangc2502b52017-07-19 17:44:18 -07004441#if CONFIG_EXT_TILE
4442 cm->large_scale_tile = aom_rb_read_literal(rb, 1);
4443#if CONFIG_REFERENCE_BUFFER
4444 if (cm->large_scale_tile) pbi->seq_params.frame_id_numbers_present_flag = 0;
4445#endif // CONFIG_REFERENCE_BUFFER
4446#endif // CONFIG_EXT_TILE
4447
Yaowu Xuf883b422016-08-30 14:01:10 -07004448 cm->show_existing_frame = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004449
4450 if (cm->show_existing_frame) {
Yaowu Xu415ba932016-12-27 11:17:32 -08004451 // Show an existing frame directly.
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004452 const int existing_frame_idx = aom_rb_read_literal(rb, 3);
4453 const int frame_to_show = cm->ref_frame_map[existing_frame_idx];
Yaowu Xu415ba932016-12-27 11:17:32 -08004454#if CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004455 if (pbi->seq_params.frame_id_numbers_present_flag) {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004456 int frame_id_length = pbi->seq_params.frame_id_length_minus7 + 7;
4457 int display_frame_id = aom_rb_read_literal(rb, frame_id_length);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004458      /* Compare display_frame_id with ref_frame_id and check that the frame
4459       * is still valid for referencing */
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004460 if (display_frame_id != cm->ref_frame_id[existing_frame_idx] ||
4461 cm->valid_for_referencing[existing_frame_idx] == 0)
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004462 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
4463 "Reference buffer frame ID mismatch");
4464 }
4465#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004466 lock_buffer_pool(pool);
4467 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
4468 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07004469 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004470 "Buffer %d does not contain a decoded frame",
4471 frame_to_show);
4472 }
4473 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
4474 unlock_buffer_pool(pool);
4475
Cheng Chen13fc8192017-08-19 11:49:28 -07004476#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07004477 cm->lf.filter_level[0] = 0;
4478 cm->lf.filter_level[1] = 0;
4479#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07004480 cm->lf.filter_level = 0;
Cheng Chen179479f2017-08-04 10:56:39 -07004481#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004482 cm->show_frame = 1;
4483 pbi->refresh_frame_flags = 0;
4484
4485 if (cm->frame_parallel_decode) {
4486 for (i = 0; i < REF_FRAMES; ++i)
4487 cm->next_ref_frame_map[i] = cm->ref_frame_map[i];
4488 }
4489
4490 return 0;
4491 }
4492
Yaowu Xuf883b422016-08-30 14:01:10 -07004493 cm->frame_type = (FRAME_TYPE)aom_rb_read_bit(rb);
4494 cm->show_frame = aom_rb_read_bit(rb);
4495 cm->error_resilient_mode = aom_rb_read_bit(rb);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004496#if CONFIG_REFERENCE_BUFFER
4497 if (pbi->seq_params.frame_id_numbers_present_flag) {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004498 int frame_id_length = pbi->seq_params.frame_id_length_minus7 + 7;
4499 int diff_len = pbi->seq_params.delta_frame_id_length_minus2 + 2;
4500 int prev_frame_id = 0;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004501 if (cm->frame_type != KEY_FRAME) {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004502 prev_frame_id = cm->current_frame_id;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004503 }
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004504 cm->current_frame_id = aom_rb_read_literal(rb, frame_id_length);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004505
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004506 if (cm->frame_type != KEY_FRAME) {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004507 int diff_frame_id;
4508 if (cm->current_frame_id > prev_frame_id) {
4509 diff_frame_id = cm->current_frame_id - prev_frame_id;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004510 } else {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004511 diff_frame_id =
4512 (1 << frame_id_length) + cm->current_frame_id - prev_frame_id;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004513 }
4514 /* Check current_frame_id for conformance */
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004515 if (prev_frame_id == cm->current_frame_id ||
4516 diff_frame_id >= (1 << (frame_id_length - 1))) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004517 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
4518 "Invalid value of current_frame_id");
4519 }
4520 }
4521 /* Check if some frames need to be marked as not valid for referencing */
4522 for (i = 0; i < REF_FRAMES; i++) {
4523 if (cm->frame_type == KEY_FRAME) {
4524 cm->valid_for_referencing[i] = 0;
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004525 } else if (cm->current_frame_id - (1 << diff_len) > 0) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004526 if (cm->ref_frame_id[i] > cm->current_frame_id ||
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004527 cm->ref_frame_id[i] < cm->current_frame_id - (1 << diff_len))
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004528 cm->valid_for_referencing[i] = 0;
4529 } else {
4530 if (cm->ref_frame_id[i] > cm->current_frame_id &&
4531 cm->ref_frame_id[i] <
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004532 (1 << frame_id_length) + cm->current_frame_id - (1 << diff_len))
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004533 cm->valid_for_referencing[i] = 0;
4534 }
4535 }
4536 }
4537#endif
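  // Illustrative example (values are hypothetical): with frame_id_length == 15,
  // a previous frame id of 32766 and a current id of 3 wrap around to
  //   diff_frame_id = (1 << 15) + 3 - 32766 = 5,
  // which passes the conformance check above because it is nonzero and smaller
  // than (1 << (frame_id_length - 1)); a repeated id, or a jump of half the id
  // space or more, is rejected as a corrupt frame.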
Yaowu Xuc27fc142016-08-22 16:08:15 -07004538 if (cm->frame_type == KEY_FRAME) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004539 if (!av1_read_sync_code(rb))
4540 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004541 "Invalid frame sync code");
4542
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02004543 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004544 pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1;
4545
4546 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
4547 cm->frame_refs[i].idx = INVALID_IDX;
4548 cm->frame_refs[i].buf = NULL;
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07004549#if CONFIG_VAR_REFS
4550 cm->frame_refs[i].is_valid = 0;
4551#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004552 }
4553
4554 setup_frame_size(cm, rb);
4555 if (pbi->need_resync) {
4556 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
4557 pbi->need_resync = 0;
4558 }
Alex Converseeb780e72016-12-13 12:46:41 -08004559#if CONFIG_ANS && ANS_MAX_SYMBOLS
4560 cm->ans_window_size_log2 = aom_rb_read_literal(rb, 4) + 8;
4561#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
hui su24f7b072016-10-12 11:36:24 -07004562 cm->allow_screen_content_tools = aom_rb_read_bit(rb);
Fangwen Fu930c51c2017-05-07 20:39:17 -07004563#if CONFIG_TEMPMV_SIGNALING
4564 cm->use_prev_frame_mvs = 0;
4565#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004566 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07004567 cm->intra_only = cm->show_frame ? 0 : aom_rb_read_bit(rb);
hui su24f7b072016-10-12 11:36:24 -07004568 if (cm->intra_only) cm->allow_screen_content_tools = aom_rb_read_bit(rb);
Thomas Daedea6a854b2017-06-22 17:49:11 -07004569#if CONFIG_TEMPMV_SIGNALING
4570 if (cm->intra_only || cm->error_resilient_mode) cm->use_prev_frame_mvs = 0;
4571#endif
4572#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
4573// The only way to reset all frame contexts to their default values is with a
4574// keyframe.
4575#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07004576 if (cm->error_resilient_mode) {
4577 cm->reset_frame_context = RESET_FRAME_CONTEXT_ALL;
4578 } else {
4579 if (cm->intra_only) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004580 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07004581 ? RESET_FRAME_CONTEXT_ALL
4582 : RESET_FRAME_CONTEXT_CURRENT;
4583 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07004584 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07004585 ? RESET_FRAME_CONTEXT_CURRENT
4586 : RESET_FRAME_CONTEXT_NONE;
4587 if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT)
Yaowu Xuf883b422016-08-30 14:01:10 -07004588 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07004589 ? RESET_FRAME_CONTEXT_ALL
4590 : RESET_FRAME_CONTEXT_CURRENT;
4591 }
4592 }
Thomas Daedea6a854b2017-06-22 17:49:11 -07004593#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004594
4595 if (cm->intra_only) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004596 if (!av1_read_sync_code(rb))
4597 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004598 "Invalid frame sync code");
4599
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02004600 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004601
Yaowu Xuf883b422016-08-30 14:01:10 -07004602 pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004603 setup_frame_size(cm, rb);
4604 if (pbi->need_resync) {
4605 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
4606 pbi->need_resync = 0;
4607 }
Alex Converseeb780e72016-12-13 12:46:41 -08004608#if CONFIG_ANS && ANS_MAX_SYMBOLS
4609 cm->ans_window_size_log2 = aom_rb_read_literal(rb, 4) + 8;
4610#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004611 } else if (pbi->need_resync != 1) { /* Skip if need resync */
Yaowu Xuf883b422016-08-30 14:01:10 -07004612 pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004613
4614#if CONFIG_EXT_REFS
4615 if (!pbi->refresh_frame_flags) {
4616 // NOTE: "pbi->refresh_frame_flags == 0" indicates that the coded frame
4617 // will not be used as a reference
4618 cm->is_reference_frame = 0;
4619 }
4620#endif // CONFIG_EXT_REFS
4621
4622 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004623 const int ref = aom_rb_read_literal(rb, REF_FRAMES_LOG2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004624 const int idx = cm->ref_frame_map[ref];
4625 RefBuffer *const ref_frame = &cm->frame_refs[i];
4626 ref_frame->idx = idx;
4627 ref_frame->buf = &frame_bufs[idx].buf;
Yaowu Xuf883b422016-08-30 14:01:10 -07004628 cm->ref_frame_sign_bias[LAST_FRAME + i] = aom_rb_read_bit(rb);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004629#if CONFIG_REFERENCE_BUFFER
4630 if (pbi->seq_params.frame_id_numbers_present_flag) {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004631 int frame_id_length = pbi->seq_params.frame_id_length_minus7 + 7;
4632 int diff_len = pbi->seq_params.delta_frame_id_length_minus2 + 2;
4633 int delta_frame_id_minus1 = aom_rb_read_literal(rb, diff_len);
4634 int ref_frame_id =
4635 ((cm->current_frame_id - (delta_frame_id_minus1 + 1) +
4636 (1 << frame_id_length)) %
4637 (1 << frame_id_length));
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004638        /* Compare the id derived from delta_frame_id_minus1 with the stored
4639         * reference id and check that the frame is still valid for referencing */
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004640 if (ref_frame_id != cm->ref_frame_id[ref] ||
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004641 cm->valid_for_referencing[ref] == 0)
4642 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
4643 "Reference buffer frame ID mismatch");
4644 }
4645#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004646 }
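      // Illustrative example (values are hypothetical): each reference id read
      // in the loop above is coded as a delta below the current id, modulo the
      // id space. With frame_id_length == 15, current_frame_id == 3 and
      // delta_frame_id_minus1 == 4, the expected id is
      //   (3 - 5 + (1 << 15)) % (1 << 15) == 32766,
      // and decoding fails unless the mapped slot still holds a frame with
      // exactly that id and the slot remains valid for referencing.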
4647
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07004648#if CONFIG_VAR_REFS
4649 check_valid_ref_frames(cm);
4650#endif // CONFIG_VAR_REFS
4651
Arild Fuldseth842e9b02016-09-02 13:00:05 +02004652#if CONFIG_FRAME_SIZE
4653 if (cm->error_resilient_mode == 0) {
4654 setup_frame_size_with_refs(cm, rb);
4655 } else {
4656 setup_frame_size(cm, rb);
4657 }
4658#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07004659 setup_frame_size_with_refs(cm, rb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02004660#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004661
Yaowu Xuf883b422016-08-30 14:01:10 -07004662 cm->allow_high_precision_mv = aom_rb_read_bit(rb);
Angie Chiang5678ad92016-11-21 09:38:40 -08004663 cm->interp_filter = read_frame_interp_filter(rb);
Fangwen Fu8d164de2016-12-14 13:40:54 -08004664#if CONFIG_TEMPMV_SIGNALING
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01004665 if (frame_might_use_prev_frame_mvs(cm))
Fangwen Fu8d164de2016-12-14 13:40:54 -08004666 cm->use_prev_frame_mvs = aom_rb_read_bit(rb);
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01004667 else
4668 cm->use_prev_frame_mvs = 0;
Fangwen Fu8d164de2016-12-14 13:40:54 -08004669#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004670 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
4671 RefBuffer *const ref_buf = &cm->frame_refs[i];
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02004672#if CONFIG_HIGHBITDEPTH
Yaowu Xuf883b422016-08-30 14:01:10 -07004673 av1_setup_scale_factors_for_frame(
Yaowu Xuc27fc142016-08-22 16:08:15 -07004674 &ref_buf->sf, ref_buf->buf->y_crop_width,
4675 ref_buf->buf->y_crop_height, cm->width, cm->height,
4676 cm->use_highbitdepth);
4677#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004678 av1_setup_scale_factors_for_frame(
Yaowu Xuc27fc142016-08-22 16:08:15 -07004679 &ref_buf->sf, ref_buf->buf->y_crop_width,
4680 ref_buf->buf->y_crop_height, cm->width, cm->height);
4681#endif
4682 }
4683 }
4684 }
Jingning Hanc723b342017-08-24 11:19:46 -07004685
4686#if CONFIG_MFMV
4687 if (cm->show_frame == 0) {
4688 cm->frame_offset = cm->current_video_frame + aom_rb_read_literal(rb, 4);
4689 } else {
4690 cm->frame_offset = cm->current_video_frame;
4691 }
4692#endif
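  // Illustrative note: under CONFIG_MFMV a frame that is not shown signals how
  // far ahead of the current display counter it sits, using a 4-bit offset.
  // With cm->current_video_frame == 100 (a hypothetical value), a shown frame
  // gets frame_offset == 100, while a hidden frame that reads the literal 3
  // gets frame_offset == 103, giving out-of-order (e.g. ALTREF-style) frames a
  // usable temporal position for the motion-field setup done later in
  // av1_setup_motion_field().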
4693
Fangwen Fu8d164de2016-12-14 13:40:54 -08004694#if CONFIG_TEMPMV_SIGNALING
4695 cm->cur_frame->intra_only = cm->frame_type == KEY_FRAME || cm->intra_only;
4696#endif
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004697
4698#if CONFIG_REFERENCE_BUFFER
4699 if (pbi->seq_params.frame_id_numbers_present_flag) {
4700    /* For each slot whose refresh bit is set, update its reference frame id
4701       and mark it as valid for referencing */
4702 int refresh_frame_flags =
4703 cm->frame_type == KEY_FRAME ? 0xFF : pbi->refresh_frame_flags;
4704 for (i = 0; i < REF_FRAMES; i++) {
4705 if ((refresh_frame_flags >> i) & 1) {
4706 cm->ref_frame_id[i] = cm->current_frame_id;
4707 cm->valid_for_referencing[i] = 1;
4708 }
4709 }
4710 }
4711#endif
4712
Yaowu Xuc27fc142016-08-22 16:08:15 -07004713 get_frame_new_buffer(cm)->bit_depth = cm->bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004714 get_frame_new_buffer(cm)->color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07004715#if CONFIG_COLORSPACE_HEADERS
4716 get_frame_new_buffer(cm)->transfer_function = cm->transfer_function;
4717 get_frame_new_buffer(cm)->chroma_sample_position = cm->chroma_sample_position;
4718#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004719 get_frame_new_buffer(cm)->color_range = cm->color_range;
4720 get_frame_new_buffer(cm)->render_width = cm->render_width;
4721 get_frame_new_buffer(cm)->render_height = cm->render_height;
4722
4723 if (pbi->need_resync) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004724 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004725 "Keyframe / intra-only frame required to reset decoder"
4726 " state");
4727 }
4728
4729 if (!cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004730 cm->refresh_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07004731 ? REFRESH_FRAME_CONTEXT_FORWARD
4732 : REFRESH_FRAME_CONTEXT_BACKWARD;
4733 } else {
4734 cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_FORWARD;
4735 }
Thomas Daededa4d8b92017-06-05 15:44:14 -07004736#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuf883b422016-08-30 14:01:10 -07004737 // This flag will be overridden by the call to av1_setup_past_independence
Yaowu Xuc27fc142016-08-22 16:08:15 -07004738 // below, forcing the use of context 0 for those frame types.
Yaowu Xuf883b422016-08-30 14:01:10 -07004739 cm->frame_context_idx = aom_rb_read_literal(rb, FRAME_CONTEXTS_LOG2);
Thomas Daededa4d8b92017-06-05 15:44:14 -07004740#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004741
4742 // Generate next_ref_frame_map.
4743 lock_buffer_pool(pool);
4744 for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
4745 if (mask & 1) {
4746 cm->next_ref_frame_map[ref_index] = cm->new_fb_idx;
4747 ++frame_bufs[cm->new_fb_idx].ref_count;
4748 } else {
4749 cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
4750 }
4751 // Current thread holds the reference frame.
4752 if (cm->ref_frame_map[ref_index] >= 0)
4753 ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
4754 ++ref_index;
4755 }
4756
4757 for (; ref_index < REF_FRAMES; ++ref_index) {
4758 cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
4759
4760 // Current thread holds the reference frame.
4761 if (cm->ref_frame_map[ref_index] >= 0)
4762 ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
4763 }
4764 unlock_buffer_pool(pool);
4765 pbi->hold_ref_buf = 1;
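  // Illustrative example (mask value is hypothetical): with
  // pbi->refresh_frame_flags == 0x05, the loops above walk the mask one bit at
  // a time, so slots 0 and 2 of next_ref_frame_map point at the newly coded
  // frame buffer (each adding a reference) while every other slot keeps its
  // previous mapping; every currently mapped buffer also gains one reference,
  // and pbi->hold_ref_buf records that those references must be released once
  // the frame has been decoded.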
4766
4767 if (frame_is_intra_only(cm) || cm->error_resilient_mode)
Yaowu Xuf883b422016-08-30 14:01:10 -07004768 av1_setup_past_independence(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004769
4770#if CONFIG_EXT_PARTITION
Yaowu Xuf883b422016-08-30 14:01:10 -07004771 set_sb_size(cm, aom_rb_read_bit(rb) ? BLOCK_128X128 : BLOCK_64X64);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004772#else
4773 set_sb_size(cm, BLOCK_64X64);
4774#endif // CONFIG_EXT_PARTITION
4775
4776 setup_loopfilter(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004777 setup_quantization(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004778 xd->bd = (int)cm->bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004779
hui su0d103572017-03-01 17:58:01 -08004780#if CONFIG_Q_ADAPT_PROBS
Yaowu Xuf883b422016-08-30 14:01:10 -07004781 av1_default_coef_probs(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004782 if (cm->frame_type == KEY_FRAME || cm->error_resilient_mode ||
4783 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL) {
4784 for (i = 0; i < FRAME_CONTEXTS; ++i) cm->frame_contexts[i] = *cm->fc;
4785 } else if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT) {
Thomas Daededa4d8b92017-06-05 15:44:14 -07004786#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
4787 if (cm->frame_refs[0].idx <= 0) {
4788 cm->frame_contexts[cm->frame_refs[0].idx] = *cm->fc;
4789 }
4790#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07004791 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
Thomas Daededa4d8b92017-06-05 15:44:14 -07004792#endif // CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuc27fc142016-08-22 16:08:15 -07004793 }
hui su0d103572017-03-01 17:58:01 -08004794#endif // CONFIG_Q_ADAPT_PROBS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004795
4796 setup_segmentation(cm, rb);
4797
Arild Fuldseth07441162016-08-15 15:07:52 +02004798#if CONFIG_DELTA_Q
4799 {
4800 struct segmentation *const seg = &cm->seg;
4801 int segment_quantizer_active = 0;
4802 for (i = 0; i < MAX_SEGMENTS; i++) {
4803 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
4804 segment_quantizer_active = 1;
4805 }
4806 }
4807
Thomas Daviesf6936102016-09-05 16:51:31 +01004808 cm->delta_q_res = 1;
Fangwen Fu231fe422017-04-24 17:52:29 -07004809#if CONFIG_EXT_DELTA_Q
4810 cm->delta_lf_res = 1;
Jonathan Matthewsa48b1e62017-09-01 14:58:47 +01004811 cm->delta_lf_present_flag = 0;
Fangwen Fu231fe422017-04-24 17:52:29 -07004812#endif
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01004813 if (segment_quantizer_active == 0 && cm->base_qindex > 0) {
Arild Fuldseth07441162016-08-15 15:07:52 +02004814 cm->delta_q_present_flag = aom_rb_read_bit(rb);
4815 } else {
4816 cm->delta_q_present_flag = 0;
4817 }
4818 if (cm->delta_q_present_flag) {
4819 xd->prev_qindex = cm->base_qindex;
Thomas Daviesf6936102016-09-05 16:51:31 +01004820 cm->delta_q_res = 1 << aom_rb_read_literal(rb, 2);
Fangwen Fu231fe422017-04-24 17:52:29 -07004821#if CONFIG_EXT_DELTA_Q
Frederic Barbier237c53e2017-06-20 16:57:27 +02004822 assert(!segment_quantizer_active);
Fangwen Fu231fe422017-04-24 17:52:29 -07004823 cm->delta_lf_present_flag = aom_rb_read_bit(rb);
4824 if (cm->delta_lf_present_flag) {
4825 xd->prev_delta_lf_from_base = 0;
4826 cm->delta_lf_res = 1 << aom_rb_read_literal(rb, 2);
Fangwen Fu231fe422017-04-24 17:52:29 -07004827 }
4828#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02004829 }
4830 }
4831#endif
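  // Illustrative example: the two-bit resolution fields above decode to a step
  // of 1 << aom_rb_read_literal(rb, 2), i.e. one of {1, 2, 4, 8}. With
  // cm->delta_q_res == 4 (a hypothetical value), per-superblock quantizer
  // deltas are later coded in units of 4 relative to xd->prev_qindex, which is
  // initialized above to cm->base_qindex.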
4832
Urvang Joshi454280d2016-10-14 16:51:44 -07004833 for (i = 0; i < MAX_SEGMENTS; ++i) {
4834 const int qindex = cm->seg.enabled
4835 ? av1_get_qindex(&cm->seg, i, cm->base_qindex)
4836 : cm->base_qindex;
4837 xd->lossless[i] = qindex == 0 && cm->y_dc_delta_q == 0 &&
4838 cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
4839 xd->qindex[i] = qindex;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004840 }
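  // Illustrative note: a segment counts as lossless above only when its
  // effective qindex is 0 and cm->y_dc_delta_q, cm->uv_dc_delta_q and
  // cm->uv_ac_delta_q are all 0. For example, with cm->base_qindex == 0 and
  // those deltas zero, xd->lossless[i] is 1 for every segment that does not
  // override the base quantizer, and all_lossless() below reports whether that
  // holds across the whole frame.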
Thomas Daedef636d5c2017-06-29 13:48:27 -07004841 cm->all_lossless = all_lossless(cm, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004842 setup_segmentation_dequant(cm);
Thomas Daedef636d5c2017-06-29 13:48:27 -07004843#if CONFIG_CDEF
4844 if (!cm->all_lossless) {
4845 setup_cdef(cm, rb);
4846 }
4847#endif
4848#if CONFIG_LOOP_RESTORATION
4849 decode_restoration_mode(cm, rb);
4850#endif // CONFIG_LOOP_RESTORATION
4851 cm->tx_mode = read_tx_mode(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004852 cm->reference_mode = read_frame_reference_mode(cm, rb);
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004853#if CONFIG_EXT_INTER
4854 read_compound_tools(cm, rb);
4855#endif // CONFIG_EXT_INTER
Yaowu Xuc27fc142016-08-22 16:08:15 -07004856
Sarah Parkere68a3e42017-02-16 14:03:24 -08004857#if CONFIG_EXT_TX
4858 cm->reduced_tx_set_used = aom_rb_read_bit(rb);
4859#endif // CONFIG_EXT_TX
4860
Sarah Parker3e579a62017-08-23 16:53:20 -07004861#if CONFIG_GLOBAL_MOTION
David Barkerd3bbfee2017-09-07 14:53:00 +01004862 if (!(frame_is_intra_only(cm) || cm->error_resilient_mode))
4863 read_global_motion(cm, rb);
Sarah Parker3e579a62017-08-23 16:53:20 -07004864#endif
4865
Yaowu Xuc27fc142016-08-22 16:08:15 -07004866 read_tile_info(pbi, rb);
Yaowu Xuf883b422016-08-30 14:01:10 -07004867 sz = aom_rb_read_literal(rb, 16);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004868
4869 if (sz == 0)
Yaowu Xuf883b422016-08-30 14:01:10 -07004870 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004871 "Invalid header size");
Yaowu Xuc27fc142016-08-22 16:08:15 -07004872 return sz;
4873}
4874
Yaowu Xuc27fc142016-08-22 16:08:15 -07004875#if CONFIG_SUPERTX
Yaowu Xuf883b422016-08-30 14:01:10 -07004876static void read_supertx_probs(FRAME_CONTEXT *fc, aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004877 int i, j;
Michael Bebenita6048d052016-08-25 14:40:54 -07004878 if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004879 for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
Jingning Hanfeb517c2016-12-21 16:02:07 -08004880 for (j = TX_8X8; j < TX_SIZES; ++j) {
Michael Bebenita6048d052016-08-25 14:40:54 -07004881 av1_diff_update_prob(r, &fc->supertx_prob[i][j], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004882 }
4883 }
4884 }
4885}
4886#endif // CONFIG_SUPERTX
4887
Yaowu Xuf883b422016-08-30 14:01:10 -07004888static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004889 size_t partition_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004890 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004891#if CONFIG_SUPERTX
4892 MACROBLOCKD *const xd = &pbi->mb;
4893#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07004894 aom_reader r;
Thomas Davies599395e2017-07-21 18:02:48 +01004895#if !CONFIG_NEW_MULTISYMBOL
4896 FRAME_CONTEXT *const fc = cm->fc;
Thomas Davies61e3e372017-04-04 16:10:23 +01004897 int i;
Thomas Davies599395e2017-07-21 18:02:48 +01004898#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004899
Alex Converse2cdf0d82016-12-13 13:53:09 -08004900#if CONFIG_ANS && ANS_MAX_SYMBOLS
Alex Converseeb780e72016-12-13 12:46:41 -08004901 r.window_size = 1 << cm->ans_window_size_log2;
Alex Converse2cdf0d82016-12-13 13:53:09 -08004902#endif
Alex Converse346440b2017-01-03 13:47:37 -08004903 if (aom_reader_init(&r, data, partition_size, pbi->decrypt_cb,
4904 pbi->decrypt_state))
Yaowu Xuf883b422016-08-30 14:01:10 -07004905 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004906 "Failed to allocate bool decoder 0");
Yaowu Xuc27fc142016-08-22 16:08:15 -07004907
Yue Chend6bdd462017-07-19 16:05:43 -07004908#if CONFIG_RECT_TX_EXT && (CONFIG_EXT_TX || CONFIG_VAR_TX)
Yue Chen56e226e2017-05-02 16:21:40 -07004909 if (cm->tx_mode == TX_MODE_SELECT)
4910 av1_diff_update_prob(&r, &fc->quarter_tx_size_prob, ACCT_STR);
Yue Chend6bdd462017-07-19 16:05:43 -07004911#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004912
Angie Chiang800df032017-03-22 11:14:12 -07004913#if CONFIG_LV_MAP
4914 av1_read_txb_probs(fc, cm->tx_mode, &r);
Angie Chiang800df032017-03-22 11:14:12 -07004915#endif // CONFIG_LV_MAP
4916
Thomas Davies985bfc32017-06-27 16:51:26 +01004917#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07004918#if CONFIG_VAR_TX
David Barker16c64e32017-08-23 16:54:59 +01004919 if (cm->tx_mode == TX_MODE_SELECT)
4920 for (i = 0; i < TXFM_PARTITION_CONTEXTS; ++i)
4921 av1_diff_update_prob(&r, &fc->txfm_partition_prob[i], ACCT_STR);
Yushin Cho77bba8d2016-11-04 16:36:56 -07004922#endif // CONFIG_VAR_TX
Thomas Davies61e3e372017-04-04 16:10:23 +01004923 for (i = 0; i < SKIP_CONTEXTS; ++i)
4924 av1_diff_update_prob(&r, &fc->skip_probs[i], ACCT_STR);
4925#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004926
Yaowu Xuc27fc142016-08-22 16:08:15 -07004927 if (frame_is_intra_only(cm)) {
Thomas Davies1bfb5ed2017-01-11 15:28:11 +00004928 av1_copy(cm->fc->kf_y_cdf, av1_kf_y_mode_cdf);
Alex Converse7c412ea2017-06-01 15:16:22 -07004929#if CONFIG_INTRABC
4930 if (cm->allow_screen_content_tools) {
4931 av1_diff_update_prob(&r, &fc->intrabc_prob, ACCT_STR);
4932 }
4933#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004934 } else {
Thomas Davies149eda52017-06-12 18:11:55 +01004935#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07004936 read_inter_mode_probs(fc, &r);
Thomas Davies149eda52017-06-12 18:11:55 +01004937#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004938
4939#if CONFIG_EXT_INTER
Yue Chen4d26acb2017-05-01 12:28:34 -07004940#if CONFIG_INTERINTRA
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004941 if (cm->reference_mode != COMPOUND_REFERENCE &&
4942 cm->allow_interintra_compound) {
Thomas Daviescff91712017-07-07 11:49:55 +01004943#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07004944 for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
4945 if (is_interintra_allowed_bsize_group(i)) {
Michael Bebenita6048d052016-08-25 14:40:54 -07004946 av1_diff_update_prob(&r, &fc->interintra_prob[i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004947 }
4948 }
Thomas Daviescff91712017-07-07 11:49:55 +01004949#endif
Thomas Daviescff91712017-07-07 11:49:55 +01004950#if CONFIG_WEDGE && !CONFIG_NEW_MULTISYMBOL
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01004951#if CONFIG_EXT_PARTITION_TYPES
4952 int block_sizes_to_update = BLOCK_SIZES_ALL;
4953#else
4954 int block_sizes_to_update = BLOCK_SIZES;
4955#endif
4956 for (i = 0; i < block_sizes_to_update; i++) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004957 if (is_interintra_allowed_bsize(i) && is_interintra_wedge_used(i)) {
Michael Bebenita6048d052016-08-25 14:40:54 -07004958 av1_diff_update_prob(&r, &fc->wedge_interintra_prob[i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004959 }
4960 }
Debargha Mukherjeeed057992017-05-07 05:15:06 -07004961#endif // CONFIG_WEDGE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004962 }
Yue Chen4d26acb2017-05-01 12:28:34 -07004963#endif // CONFIG_INTERINTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07004964#endif // CONFIG_EXT_INTER
4965
Thomas Daviesf6ad9352017-04-19 11:38:06 +01004966#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07004967 for (i = 0; i < INTRA_INTER_CONTEXTS; i++)
Michael Bebenita6048d052016-08-25 14:40:54 -07004968 av1_diff_update_prob(&r, &fc->intra_inter_prob[i], ACCT_STR);
Thomas Daviesf6ad9352017-04-19 11:38:06 +01004969#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004970
4971 if (cm->reference_mode != SINGLE_REFERENCE)
4972 setup_compound_reference_mode(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004973 read_frame_reference_mode_probs(cm, &r);
4974
Zoe Liu85b66462017-04-20 14:28:19 -07004975#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
4976 for (i = 0; i < COMP_INTER_MODE_CONTEXTS; i++)
4977 av1_diff_update_prob(&r, &fc->comp_inter_mode_prob[i], ACCT_STR);
4978#endif // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
4979
Thomas Davies599395e2017-07-21 18:02:48 +01004980#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07004981 for (i = 0; i < NMV_CONTEXTS; ++i)
4982 read_mv_probs(&fc->nmvc[i], cm->allow_high_precision_mv, &r);
Thomas Davies599395e2017-07-21 18:02:48 +01004983#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004984#if CONFIG_SUPERTX
4985 if (!xd->lossless[0]) read_supertx_probs(fc, &r);
4986#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004987 }
4988
Yaowu Xuf883b422016-08-30 14:01:10 -07004989 return aom_reader_has_error(&r);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004990}
Yaowu Xuc27fc142016-08-22 16:08:15 -07004991#ifdef NDEBUG
4992#define debug_check_frame_counts(cm) (void)0
4993#else // !NDEBUG
4994// Counts should only be incremented when frame_parallel_decoding_mode and
4995// error_resilient_mode are disabled.
Yaowu Xuf883b422016-08-30 14:01:10 -07004996static void debug_check_frame_counts(const AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004997 FRAME_COUNTS zero_counts;
Yaowu Xuf883b422016-08-30 14:01:10 -07004998 av1_zero(zero_counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004999 assert(cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD ||
5000 cm->error_resilient_mode);
Nathan E. Egge6bdc40f2017-06-18 19:02:23 -04005001#if CONFIG_ENTROPY_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07005002 assert(!memcmp(cm->counts.y_mode, zero_counts.y_mode,
5003 sizeof(cm->counts.y_mode)));
5004 assert(!memcmp(cm->counts.uv_mode, zero_counts.uv_mode,
5005 sizeof(cm->counts.uv_mode)));
Nathan E. Egge5bb3a742017-06-30 12:47:43 -04005006#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005007 assert(!memcmp(cm->counts.partition, zero_counts.partition,
5008 sizeof(cm->counts.partition)));
Thomas Daviesab780672017-02-01 12:07:29 +00005009 assert(!memcmp(cm->counts.blockz_count, zero_counts.blockz_count,
5010 sizeof(cm->counts.blockz_count)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005011 assert(!memcmp(cm->counts.switchable_interp, zero_counts.switchable_interp,
5012 sizeof(cm->counts.switchable_interp)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005013#if CONFIG_EXT_INTER
5014 assert(!memcmp(cm->counts.inter_compound_mode,
5015 zero_counts.inter_compound_mode,
5016 sizeof(cm->counts.inter_compound_mode)));
Debargha Mukherjeebcfb0e12017-05-11 20:09:16 -07005017#if CONFIG_INTERINTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07005018 assert(!memcmp(cm->counts.interintra, zero_counts.interintra,
5019 sizeof(cm->counts.interintra)));
Debargha Mukherjeebcfb0e12017-05-11 20:09:16 -07005020#if CONFIG_WEDGE
Yaowu Xuc27fc142016-08-22 16:08:15 -07005021 assert(!memcmp(cm->counts.wedge_interintra, zero_counts.wedge_interintra,
5022 sizeof(cm->counts.wedge_interintra)));
Debargha Mukherjeebcfb0e12017-05-11 20:09:16 -07005023#endif // CONFIG_WEDGE
5024#endif // CONFIG_INTERINTRA
Sarah Parker6fddd182016-11-10 20:57:20 -08005025 assert(!memcmp(cm->counts.compound_interinter,
5026 zero_counts.compound_interinter,
5027 sizeof(cm->counts.compound_interinter)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005028#endif // CONFIG_EXT_INTER
Yue Chencb60b182016-10-13 15:18:22 -07005029#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
5030 assert(!memcmp(cm->counts.motion_mode, zero_counts.motion_mode,
5031 sizeof(cm->counts.motion_mode)));
5032#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Wei-Ting Lin85a8f702017-06-22 13:55:15 -07005033#if CONFIG_NCOBMC_ADAPT_WEIGHT && CONFIG_MOTION_VAR
5034 assert(!memcmp(cm->counts.ncobmc_mode, zero_counts.ncobmc_mode,
5035 sizeof(cm->counts.ncobmc_mode)));
5036#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005037 assert(!memcmp(cm->counts.intra_inter, zero_counts.intra_inter,
5038 sizeof(cm->counts.intra_inter)));
Zoe Liu85b66462017-04-20 14:28:19 -07005039#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
5040 assert(!memcmp(cm->counts.comp_inter_mode, zero_counts.comp_inter_mode,
5041 sizeof(cm->counts.comp_inter_mode)));
5042#endif // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
Yaowu Xuc27fc142016-08-22 16:08:15 -07005043 assert(!memcmp(cm->counts.comp_inter, zero_counts.comp_inter,
5044 sizeof(cm->counts.comp_inter)));
Zoe Liuc082bbc2017-05-17 13:31:37 -07005045#if CONFIG_EXT_COMP_REFS
5046 assert(!memcmp(cm->counts.comp_ref_type, zero_counts.comp_ref_type,
5047 sizeof(cm->counts.comp_ref_type)));
5048 assert(!memcmp(cm->counts.uni_comp_ref, zero_counts.uni_comp_ref,
5049 sizeof(cm->counts.uni_comp_ref)));
5050#endif // CONFIG_EXT_COMP_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07005051 assert(!memcmp(cm->counts.single_ref, zero_counts.single_ref,
5052 sizeof(cm->counts.single_ref)));
5053 assert(!memcmp(cm->counts.comp_ref, zero_counts.comp_ref,
5054 sizeof(cm->counts.comp_ref)));
5055#if CONFIG_EXT_REFS
5056 assert(!memcmp(cm->counts.comp_bwdref, zero_counts.comp_bwdref,
5057 sizeof(cm->counts.comp_bwdref)));
5058#endif // CONFIG_EXT_REFS
5059 assert(!memcmp(&cm->counts.tx_size, &zero_counts.tx_size,
5060 sizeof(cm->counts.tx_size)));
5061 assert(!memcmp(cm->counts.skip, zero_counts.skip, sizeof(cm->counts.skip)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005062 assert(
5063 !memcmp(&cm->counts.mv[0], &zero_counts.mv[0], sizeof(cm->counts.mv[0])));
5064 assert(
5065 !memcmp(&cm->counts.mv[1], &zero_counts.mv[1], sizeof(cm->counts.mv[0])));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005066 assert(!memcmp(cm->counts.inter_ext_tx, zero_counts.inter_ext_tx,
5067 sizeof(cm->counts.inter_ext_tx)));
5068 assert(!memcmp(cm->counts.intra_ext_tx, zero_counts.intra_ext_tx,
5069 sizeof(cm->counts.intra_ext_tx)));
5070}
5071#endif // NDEBUG
5072
Yaowu Xuf883b422016-08-30 14:01:10 -07005073static struct aom_read_bit_buffer *init_read_bit_buffer(
5074 AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
5075 const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005076 rb->bit_offset = 0;
5077 rb->error_handler = error_handler;
5078 rb->error_handler_data = &pbi->common;
5079 if (pbi->decrypt_cb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005080 const int n = (int)AOMMIN(MAX_AV1_HEADER_SIZE, data_end - data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005081 pbi->decrypt_cb(pbi->decrypt_state, data, clear_data, n);
5082 rb->bit_buffer = clear_data;
5083 rb->bit_buffer_end = clear_data + n;
5084 } else {
5085 rb->bit_buffer = data;
5086 rb->bit_buffer_end = data_end;
5087 }
5088 return rb;
5089}
5090
5091//------------------------------------------------------------------------------
5092
Yaowu Xuf883b422016-08-30 14:01:10 -07005093int av1_read_sync_code(struct aom_read_bit_buffer *const rb) {
5094 return aom_rb_read_literal(rb, 8) == AV1_SYNC_CODE_0 &&
5095 aom_rb_read_literal(rb, 8) == AV1_SYNC_CODE_1 &&
5096 aom_rb_read_literal(rb, 8) == AV1_SYNC_CODE_2;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005097}
5098
Yaowu Xuf883b422016-08-30 14:01:10 -07005099void av1_read_frame_size(struct aom_read_bit_buffer *rb, int *width,
5100 int *height) {
5101 *width = aom_rb_read_literal(rb, 16) + 1;
5102 *height = aom_rb_read_literal(rb, 16) + 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005103}
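
// Illustrative example: frame dimensions are coded minus one in 16 bits each,
// so a literal pair of (1919, 1079) decodes to a 1920x1080 frame and the
// smallest representable size is 1x1.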
5104
Yaowu Xuf883b422016-08-30 14:01:10 -07005105BITSTREAM_PROFILE av1_read_profile(struct aom_read_bit_buffer *rb) {
5106 int profile = aom_rb_read_bit(rb);
5107 profile |= aom_rb_read_bit(rb) << 1;
5108 if (profile > 2) profile += aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005109 return (BITSTREAM_PROFILE)profile;
5110}
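
// Illustrative example: the profile is coded as two bits, low bit first, with
// a third bit appended only when those two decode to 3. So bit pairs 00, 10
// and 01 (in read order) map to profiles 0, 1 and 2, while the pair 11
// followed by one more bit yields profile 3 or 4; any profile the build does
// not support is rejected in read_uncompressed_header() above.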
5111
Yaowu Xu4ff59b52017-04-24 12:41:56 -07005112static void make_update_tile_list_dec(AV1Decoder *pbi, int tile_rows,
5113 int tile_cols, FRAME_CONTEXT *ec_ctxs[]) {
Thomas Davies028b57f2017-02-22 16:42:11 +00005114 int i;
5115 for (i = 0; i < tile_rows * tile_cols; ++i)
5116 ec_ctxs[i] = &pbi->tile_data[i].tctx;
5117}
Thomas Davies028b57f2017-02-22 16:42:11 +00005118
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07005119#if CONFIG_FRAME_SUPERRES
5120void superres_post_decode(AV1Decoder *pbi) {
5121 AV1_COMMON *const cm = &pbi->common;
5122 BufferPool *const pool = cm->buffer_pool;
5123
5124 if (av1_superres_unscaled(cm)) return;
5125
5126 lock_buffer_pool(pool);
5127 av1_superres_upscale(cm, pool);
5128 unlock_buffer_pool(pool);
5129}
5130#endif // CONFIG_FRAME_SUPERRES
5131
Yi Luo10e23002017-07-31 11:54:43 -07005132static void dec_setup_frame_boundary_info(AV1_COMMON *const cm) {
David Barker5c06a642017-08-18 13:18:16 +01005133// Note: When LOOPFILTERING_ACROSS_TILES is enabled, we need to clear the
5134// boundary information every frame, since the tile boundaries may
5135// change every frame (particularly when dependent-horztiles is also
5136// enabled); when it is disabled, the only information stored is the frame
5137// boundaries, which only depend on the frame size.
5138#if !CONFIG_LOOPFILTERING_ACROSS_TILES
5139 if (cm->width != cm->last_width || cm->height != cm->last_height)
5140#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
5141 {
Yi Luo10e23002017-07-31 11:54:43 -07005142 int row, col;
5143 for (row = 0; row < cm->mi_rows; ++row) {
5144 MODE_INFO *mi = cm->mi + row * cm->mi_stride;
5145 for (col = 0; col < cm->mi_cols; ++col) {
5146 mi->mbmi.boundary_info = 0;
5147 mi++;
5148 }
5149 }
5150 av1_setup_frame_boundary_info(cm);
5151 }
5152}
5153
Yaowu Xuf883b422016-08-30 14:01:10 -07005154void av1_decode_frame(AV1Decoder *pbi, const uint8_t *data,
5155 const uint8_t *data_end, const uint8_t **p_data_end) {
5156 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005157 MACROBLOCKD *const xd = &pbi->mb;
Yaowu Xuf883b422016-08-30 14:01:10 -07005158 struct aom_read_bit_buffer rb;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005159 int context_updated = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07005160 uint8_t clear_data[MAX_AV1_HEADER_SIZE];
Angie Chiangcb9a9eb2016-09-01 16:10:50 -07005161 size_t first_partition_size;
5162 YV12_BUFFER_CONFIG *new_fb;
David Barker40a42d42017-05-09 15:07:32 +01005163#if CONFIG_EXT_REFS || CONFIG_TEMPMV_SIGNALING
5164 RefBuffer *last_fb_ref_buf = &cm->frame_refs[LAST_FRAME - LAST_FRAME];
5165#endif // CONFIG_EXT_REFS || CONFIG_TEMPMV_SIGNALING
Angie Chiangcb9a9eb2016-09-01 16:10:50 -07005166
Yi Luof8e87b42017-04-14 17:20:27 -07005167#if CONFIG_ADAPT_SCAN
5168 av1_deliver_eob_threshold(cm, xd);
5169#endif
Angie Chiangcb9a9eb2016-09-01 16:10:50 -07005170#if CONFIG_BITSTREAM_DEBUG
5171 bitstream_queue_set_frame_read(cm->current_video_frame * 2 + cm->show_frame);
5172#endif
5173
Sarah Parker3e579a62017-08-23 16:53:20 -07005174#if CONFIG_GLOBAL_MOTION
5175 int i;
5176 for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
5177 set_default_warp_params(&cm->global_motion[i]);
5178 set_default_warp_params(&cm->cur_frame->global_motion[i]);
5179 }
5180 xd->global_motion = cm->global_motion;
5181#endif // CONFIG_GLOBAL_MOTION
5182
Angie Chiangcb9a9eb2016-09-01 16:10:50 -07005183 first_partition_size = read_uncompressed_header(
Yaowu Xuc27fc142016-08-22 16:08:15 -07005184 pbi, init_read_bit_buffer(pbi, &rb, data, data_end, clear_data));
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08005185
5186#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07005187  // If cm->single_tile_decoding == 0, independent decoding of a single tile
5188  // or a section of a frame is not allowed.
5189 if (!cm->single_tile_decoding &&
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08005190 (pbi->dec_tile_row >= 0 || pbi->dec_tile_col >= 0)) {
5191 pbi->dec_tile_row = -1;
5192 pbi->dec_tile_col = -1;
5193 }
5194#endif // CONFIG_EXT_TILE
5195
Thomas Davies72712e62016-11-09 12:17:51 +00005196 pbi->first_partition_size = first_partition_size;
5197 pbi->uncomp_hdr_size = aom_rb_bytes_read(&rb);
Angie Chiangcb9a9eb2016-09-01 16:10:50 -07005198 new_fb = get_frame_new_buffer(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005199 xd->cur_buf = new_fb;
Alex Conversee816b312017-05-01 09:51:24 -07005200#if CONFIG_INTRABC
5201#if CONFIG_HIGHBITDEPTH
5202 av1_setup_scale_factors_for_frame(
5203 &xd->sf_identity, xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
5204 xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
5205 cm->use_highbitdepth);
5206#else
5207 av1_setup_scale_factors_for_frame(
5208 &xd->sf_identity, xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
5209 xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height);
5210#endif // CONFIG_HIGHBITDEPTH
5211#endif // CONFIG_INTRABC
Yaowu Xuc27fc142016-08-22 16:08:15 -07005212
5213 if (!first_partition_size) {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01005214    // Showing an existing frame directly; no further frame data follows.
5215 *p_data_end = data + aom_rb_bytes_read(&rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005216 return;
5217 }
5218
Yaowu Xuf883b422016-08-30 14:01:10 -07005219 data += aom_rb_bytes_read(&rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005220 if (!read_is_valid(data, first_partition_size, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07005221 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005222 "Truncated packet or corrupt header length");
5223
Dengca8d24d2016-10-17 14:06:35 +08005224 cm->setup_mi(cm);
Dengca8d24d2016-10-17 14:06:35 +08005225
David Barker40a42d42017-05-09 15:07:32 +01005226#if CONFIG_EXT_REFS || CONFIG_TEMPMV_SIGNALING
Yaowu Xuc27fc142016-08-22 16:08:15 -07005227  // NOTE(zoeliu): cm->prev_frame can never point to a frame coded with
5228  //               show_existing_frame=1, nor to a frame that was not used as
5229  //               a reference. It is therefore possible that, by the time it
5230  //               is referred to here, the frame buffer it originally pointed
5231  //               to has already expired and been reassigned to the current,
5232  //               newly coded frame. Hence, we need to check whether this is
5233  //               the case, and if so, we have 2 choices:
5234  //               (1) Simply disable the use of previous frame mvs; or
5235  //               (2) Have cm->prev_frame point to one reference frame buffer,
5236  //                   e.g. LAST_FRAME.
David Barker40a42d42017-05-09 15:07:32 +01005237 if (!dec_is_ref_frame_buf(pbi, cm->prev_frame)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005238 // Reassign the LAST_FRAME buffer to cm->prev_frame.
David Barker40a42d42017-05-09 15:07:32 +01005239 cm->prev_frame = last_fb_ref_buf->idx != INVALID_IDX
5240 ? &cm->buffer_pool->frame_bufs[last_fb_ref_buf->idx]
5241 : NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005242 }
David Barker40a42d42017-05-09 15:07:32 +01005243#endif // CONFIG_EXT_REFS || CONFIG_TEMPMV_SIGNALING
5244
5245#if CONFIG_TEMPMV_SIGNALING
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01005246 if (cm->use_prev_frame_mvs) assert(frame_can_use_prev_frame_mvs(cm));
David Barker40a42d42017-05-09 15:07:32 +01005247#else
5248 cm->use_prev_frame_mvs = !cm->error_resilient_mode && cm->prev_frame &&
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07005249#if CONFIG_FRAME_SUPERRES
5250 cm->width == cm->last_width &&
5251 cm->height == cm->last_height &&
5252#else
David Barker40a42d42017-05-09 15:07:32 +01005253 cm->width == cm->prev_frame->buf.y_crop_width &&
5254 cm->height == cm->prev_frame->buf.y_crop_height &&
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07005255#endif // CONFIG_FRAME_SUPERRES
David Barker40a42d42017-05-09 15:07:32 +01005256 !cm->last_intra_only && cm->last_show_frame &&
5257 (cm->last_frame_type != KEY_FRAME);
5258#endif // CONFIG_TEMPMV_SIGNALING
Yaowu Xuc27fc142016-08-22 16:08:15 -07005259
Jingning Hanc723b342017-08-24 11:19:46 -07005260#if CONFIG_MFMV
5261 av1_setup_frame_buf_refs(cm);
Jingning Hanffbb0f92017-08-24 11:52:21 -07005262 av1_setup_motion_field(cm);
Jingning Hanc723b342017-08-24 11:19:46 -07005263#endif
5264
Yaowu Xuf883b422016-08-30 14:01:10 -07005265 av1_setup_block_planes(xd, cm->subsampling_x, cm->subsampling_y);
Thomas Daededa4d8b92017-06-05 15:44:14 -07005266#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
5267 if (cm->error_resilient_mode || frame_is_intra_only(cm)) {
5268 // use the default frame context values
5269 *cm->fc = cm->frame_contexts[FRAME_CONTEXT_DEFAULTS];
5270 cm->pre_fc = &cm->frame_contexts[FRAME_CONTEXT_DEFAULTS];
5271 } else {
5272 *cm->fc = cm->frame_contexts[cm->frame_refs[0].idx];
5273 cm->pre_fc = &cm->frame_contexts[cm->frame_refs[0].idx];
5274 }
5275#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07005276 *cm->fc = cm->frame_contexts[cm->frame_context_idx];
Thomas Daede10e1da92017-04-26 13:22:21 -07005277 cm->pre_fc = &cm->frame_contexts[cm->frame_context_idx];
Thomas Daededa4d8b92017-06-05 15:44:14 -07005278#endif // CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuc27fc142016-08-22 16:08:15 -07005279 if (!cm->fc->initialized)
Yaowu Xuf883b422016-08-30 14:01:10 -07005280 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005281 "Uninitialized entropy context.");
5282
Yaowu Xuf883b422016-08-30 14:01:10 -07005283 av1_zero(cm->counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005284
5285 xd->corrupted = 0;
5286 new_fb->corrupted = read_compressed_header(pbi, data, first_partition_size);
5287 if (new_fb->corrupted)
Yaowu Xuf883b422016-08-30 14:01:10 -07005288 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005289 "Decode failed. Frame data header is corrupted.");
5290
Rupert Swarbrick6c545212017-09-01 17:17:25 +01005291#if CONFIG_LOOP_RESTORATION
5292 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
5293 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
5294 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
5295 av1_alloc_restoration_buffers(cm);
5296 }
5297#endif
5298
Cheng Chen13fc8192017-08-19 11:49:28 -07005299#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07005300 if ((cm->lf.filter_level[0] || cm->lf.filter_level[1]) &&
5301 !cm->skip_loop_filter) {
5302 av1_loop_filter_frame_init(cm, cm->lf.filter_level[0],
5303 cm->lf.filter_level[1]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005304 }
Cheng Chen179479f2017-08-04 10:56:39 -07005305#else
5306 if (cm->lf.filter_level && !cm->skip_loop_filter) {
5307 av1_loop_filter_frame_init(cm, cm->lf.filter_level, cm->lf.filter_level);
5308 }
5309#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005310
5311 // If encoded in frame parallel mode, frame context is ready after decoding
5312 // the frame header.
5313 if (cm->frame_parallel_decode &&
5314 cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005315 AVxWorker *const worker = pbi->frame_worker_owner;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005316 FrameWorkerData *const frame_worker_data = worker->data1;
5317 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD) {
5318 context_updated = 1;
Thomas Daededa4d8b92017-06-05 15:44:14 -07005319#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
5320 cm->frame_contexts[cm->new_fb_idx] = *cm->fc;
5321#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07005322 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
Thomas Daededa4d8b92017-06-05 15:44:14 -07005323#endif // CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuc27fc142016-08-22 16:08:15 -07005324 }
Yaowu Xuf883b422016-08-30 14:01:10 -07005325 av1_frameworker_lock_stats(worker);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005326 pbi->cur_buf->row = -1;
5327 pbi->cur_buf->col = -1;
5328 frame_worker_data->frame_context_ready = 1;
5329 // Signal the main thread that context is ready.
Yaowu Xuf883b422016-08-30 14:01:10 -07005330 av1_frameworker_signal_stats(worker);
5331 av1_frameworker_unlock_stats(worker);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005332 }
5333
Yi Luo10e23002017-07-31 11:54:43 -07005334 dec_setup_frame_boundary_info(cm);
Yi Luof190a162017-07-13 16:16:56 -07005335
Jingning Han52ece882017-04-07 14:58:25 -07005336 if (pbi->max_threads > 1 && !CONFIG_CB4X4 &&
Yaowu Xuc27fc142016-08-22 16:08:15 -07005337#if CONFIG_EXT_TILE
Jingning Han52ece882017-04-07 14:58:25 -07005338 pbi->dec_tile_col < 0 && // Decoding all columns
Yaowu Xuc27fc142016-08-22 16:08:15 -07005339#endif // CONFIG_EXT_TILE
Jingning Han52ece882017-04-07 14:58:25 -07005340 cm->tile_cols > 1) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005341 // Multi-threaded tile decoder
5342 *p_data_end = decode_tiles_mt(pbi, data + first_partition_size, data_end);
5343 if (!xd->corrupted) {
5344 if (!cm->skip_loop_filter) {
Cheng Chen179479f2017-08-04 10:56:39 -07005345// If multiple threads are used to decode tiles, then we use those
5346// threads to do parallel loopfiltering.
Cheng Chen13fc8192017-08-19 11:49:28 -07005347#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07005348 av1_loop_filter_frame_mt(new_fb, cm, pbi->mb.plane,
5349 cm->lf.filter_level[0], cm->lf.filter_level[1],
5350 0, 0, pbi->tile_workers, pbi->num_tile_workers,
5351 &pbi->lf_row_sync);
5352#else
Yaowu Xuf883b422016-08-30 14:01:10 -07005353 av1_loop_filter_frame_mt(new_fb, cm, pbi->mb.plane, cm->lf.filter_level,
5354 0, 0, pbi->tile_workers, pbi->num_tile_workers,
5355 &pbi->lf_row_sync);
Cheng Chen13fc8192017-08-19 11:49:28 -07005356#endif // CONFIG_LOOPFILTER_LEVEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07005357 }
5358 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07005359 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005360 "Decode failed. Frame data is corrupted.");
5361 }
5362 } else {
5363 *p_data_end = decode_tiles(pbi, data + first_partition_size, data_end);
5364 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005365
Debargha Mukherjeee168a782017-08-31 12:30:10 -07005366 aom_extend_frame_borders(new_fb);
Jean-Marc Valin01435132017-02-18 14:12:53 -05005367#if CONFIG_CDEF
Thomas Daedef636d5c2017-06-29 13:48:27 -07005368 if (!cm->skip_loop_filter && !cm->all_lossless) {
Jean-Marc Valine9f77422017-03-22 17:09:51 -04005369 av1_cdef_frame(&pbi->cur_buf->buf, cm, &pbi->mb);
Steinar Midtskogen5d56f4d2016-09-25 09:23:16 +02005370 }
Debargha Mukherjee00c54332017-03-03 15:44:17 -08005371#endif // CONFIG_CDEF
5372
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07005373#if CONFIG_FRAME_SUPERRES
5374 superres_post_decode(pbi);
5375#endif // CONFIG_FRAME_SUPERRES
5376
Debargha Mukherjee00c54332017-03-03 15:44:17 -08005377#if CONFIG_LOOP_RESTORATION
5378 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
5379 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
5380 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
5381 av1_loop_restoration_frame(new_fb, cm, cm->rst_info, 7, 0, NULL);
5382 }
5383#endif // CONFIG_LOOP_RESTORATION
Thomas Daedef56859f2016-04-19 16:57:24 -07005384
Yaowu Xuc27fc142016-08-22 16:08:15 -07005385 if (!xd->corrupted) {
5386 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
Thomas Davies493623e2017-03-31 16:12:25 +01005387 FRAME_CONTEXT **tile_ctxs = aom_malloc(cm->tile_rows * cm->tile_cols *
5388 sizeof(&pbi->tile_data[0].tctx));
5389 aom_cdf_prob **cdf_ptrs =
5390 aom_malloc(cm->tile_rows * cm->tile_cols *
5391 sizeof(&pbi->tile_data[0].tctx.partition_cdf[0][0]));
Thomas Davies028b57f2017-02-22 16:42:11 +00005392 make_update_tile_list_dec(pbi, cm->tile_rows, cm->tile_cols, tile_ctxs);
hui sub53682f2017-08-01 17:09:18 -07005393#if CONFIG_LV_MAP
Yaowu Xuf883b422016-08-30 14:01:10 -07005394 av1_adapt_coef_probs(cm);
hui sub53682f2017-08-01 17:09:18 -07005395#endif // CONFIG_LV_MAP
Yaowu Xuf883b422016-08-30 14:01:10 -07005396 av1_adapt_intra_frame_probs(cm);
Thomas Davies493623e2017-03-31 16:12:25 +01005397 av1_average_tile_coef_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
Thomas Davies028b57f2017-02-22 16:42:11 +00005398 cm->tile_rows * cm->tile_cols);
Thomas Davies493623e2017-03-31 16:12:25 +01005399 av1_average_tile_intra_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
Thomas Davies028b57f2017-02-22 16:42:11 +00005400 cm->tile_rows * cm->tile_cols);
Yushin Chob188ea12017-03-13 13:45:23 -07005401#if CONFIG_PVQ
5402 av1_average_tile_pvq_cdfs(pbi->common.fc, tile_ctxs,
5403 cm->tile_rows * cm->tile_cols);
5404#endif // CONFIG_PVQ
hui suff0da2b2017-03-07 15:51:37 -08005405#if CONFIG_ADAPT_SCAN
5406 av1_adapt_scan_order(cm);
5407#endif // CONFIG_ADAPT_SCAN
Yaowu Xuc27fc142016-08-22 16:08:15 -07005408
5409 if (!frame_is_intra_only(cm)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005410 av1_adapt_inter_frame_probs(cm);
5411 av1_adapt_mv_probs(cm, cm->allow_high_precision_mv);
Thomas Davies028b57f2017-02-22 16:42:11 +00005412 av1_average_tile_inter_cdfs(&pbi->common, pbi->common.fc, tile_ctxs,
Thomas Davies493623e2017-03-31 16:12:25 +01005413 cdf_ptrs, cm->tile_rows * cm->tile_cols);
5414 av1_average_tile_mv_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
Thomas Davies028b57f2017-02-22 16:42:11 +00005415 cm->tile_rows * cm->tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005416 }
Thomas Davies493623e2017-03-31 16:12:25 +01005417 aom_free(tile_ctxs);
5418 aom_free(cdf_ptrs);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005419 } else {
5420 debug_check_frame_counts(cm);
5421 }
5422 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07005423 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005424 "Decode failed. Frame data is corrupted.");
5425 }
5426
Nathan E. Egge2cf03b12017-02-22 16:19:59 -05005427#if CONFIG_INSPECTION
5428 if (pbi->inspect_cb != NULL) {
5429 (*pbi->inspect_cb)(pbi, pbi->inspect_ctx);
5430 }
5431#endif
5432
Thomas Daededa4d8b92017-06-05 15:44:14 -07005433// For non-frame-parallel decoding, update the frame context here.
5434#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
Thomas Daedea6a854b2017-06-22 17:49:11 -07005435 if (!context_updated) cm->frame_contexts[cm->new_fb_idx] = *cm->fc;
Thomas Daededa4d8b92017-06-05 15:44:14 -07005436#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07005437 if (!cm->error_resilient_mode && !context_updated)
5438 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
Thomas Daededa4d8b92017-06-05 15:44:14 -07005439#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005440}