/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <stdlib.h>  // qsort()

#include "./aom_config.h"
#include "./aom_dsp_rtcd.h"
#include "./aom_scale_rtcd.h"
#include "./av1_rtcd.h"

#include "aom/aom_codec.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_dsp/binary_codes_reader.h"
#include "aom_dsp/bitreader.h"
#include "aom_dsp/bitreader_buffer.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem.h"
#include "aom_ports/mem_ops.h"
#include "aom_scale/aom_scale.h"
#include "aom_util/aom_thread.h"

#if CONFIG_BITSTREAM_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_BITSTREAM_DEBUG

#include "av1/common/alloccommon.h"
#if CONFIG_CDEF
#include "av1/common/cdef.h"
#endif
#if CONFIG_INSPECTION
#include "av1/decoder/inspection.h"
#endif
#include "av1/common/common.h"
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/idct.h"
#include "av1/common/mvref_common.h"
#include "av1/common/pred_common.h"
#include "av1/common/quant_common.h"
#include "av1/common/reconinter.h"
#include "av1/common/reconintra.h"
#if CONFIG_FRAME_SUPERRES
#include "av1/common/resize.h"
#endif  // CONFIG_FRAME_SUPERRES
#include "av1/common/seg_common.h"
#include "av1/common/thread_common.h"
#include "av1/common/tile_common.h"

#include "av1/decoder/decodeframe.h"
#include "av1/decoder/decodemv.h"
#include "av1/decoder/decoder.h"
#if CONFIG_LV_MAP
#include "av1/decoder/decodetxb.h"
#endif
#include "av1/decoder/detokenize.h"
#include "av1/decoder/dsubexp.h"
#include "av1/decoder/symbolrate.h"

#if CONFIG_WARPED_MOTION || CONFIG_GLOBAL_MOTION
#include "av1/common/warped_motion.h"
#endif  // CONFIG_WARPED_MOTION || CONFIG_GLOBAL_MOTION

#define MAX_AV1_HEADER_SIZE 80
#define ACCT_STR __func__

#if CONFIG_PVQ
#include "av1/common/partition.h"
#include "av1/common/pvq.h"
#include "av1/common/scan.h"
#include "av1/decoder/decint.h"
#include "av1/decoder/pvq_decoder.h"
#include "av1/encoder/encodemb.h"
#include "av1/encoder/hybrid_fwd_txfm.h"
#endif

#if CONFIG_CFL
#include "av1/common/cfl.h"
#endif

#if CONFIG_STRIPED_LOOP_RESTORATION && !CONFIG_LOOP_RESTORATION
#error "striped_loop_restoration requires loop_restoration"
#endif

#if CONFIG_LOOP_RESTORATION
static void loop_restoration_read_sb_coeffs(const AV1_COMMON *const cm,
                                            MACROBLOCKD *xd,
                                            aom_reader *const r, int plane,
                                            int rtile_idx);
#endif

static struct aom_read_bit_buffer *init_read_bit_buffer(
    AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
    const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]);
static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
                                  size_t partition_size);
static size_t read_uncompressed_header(AV1Decoder *pbi,
                                       struct aom_read_bit_buffer *rb);

static int is_compound_reference_allowed(const AV1_COMMON *cm) {
#if CONFIG_ONE_SIDED_COMPOUND  // Normative in decoder
  return !frame_is_intra_only(cm);
#else
  int i;
  if (frame_is_intra_only(cm)) return 0;
  for (i = 1; i < INTER_REFS_PER_FRAME; ++i)
    if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1]) return 1;

  return 0;
#endif  // CONFIG_ONE_SIDED_COMPOUND
}
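
/* is_compound_reference_allowed(): without CONFIG_ONE_SIDED_COMPOUND, compound
 * prediction is permitted only when at least one inter reference carries a
 * sign bias different from LAST_FRAME's, i.e. the frame has references on
 * both temporal sides. With CONFIG_ONE_SIDED_COMPOUND (normative in the
 * decoder) any non-intra frame may use compound prediction. */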

static void setup_compound_reference_mode(AV1_COMMON *cm) {
#if CONFIG_EXT_REFS
  cm->comp_fwd_ref[0] = LAST_FRAME;
  cm->comp_fwd_ref[1] = LAST2_FRAME;
  cm->comp_fwd_ref[2] = LAST3_FRAME;
  cm->comp_fwd_ref[3] = GOLDEN_FRAME;

  cm->comp_bwd_ref[0] = BWDREF_FRAME;
  cm->comp_bwd_ref[1] = ALTREF2_FRAME;
  cm->comp_bwd_ref[2] = ALTREF_FRAME;
#else  // !CONFIG_EXT_REFS
  if (cm->ref_frame_sign_bias[LAST_FRAME] ==
      cm->ref_frame_sign_bias[GOLDEN_FRAME]) {
    cm->comp_fixed_ref = ALTREF_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = GOLDEN_FRAME;
  } else if (cm->ref_frame_sign_bias[LAST_FRAME] ==
             cm->ref_frame_sign_bias[ALTREF_FRAME]) {
    cm->comp_fixed_ref = GOLDEN_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  } else {
    cm->comp_fixed_ref = LAST_FRAME;
    cm->comp_var_ref[0] = GOLDEN_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  }
#endif  // CONFIG_EXT_REFS
}

static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return len != 0 && len <= (size_t)(end - start);
}

static int decode_unsigned_max(struct aom_read_bit_buffer *rb, int max) {
  const int data = aom_rb_read_literal(rb, get_unsigned_bits(max));
  return data > max ? max : data;
}

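/* Usage sketch for decode_unsigned_max() (illustrative only; the bit buffer is
 * assumed to come from init_read_bit_buffer()). With max == 10,
 * get_unsigned_bits(10) == 4, so four bits are read and any raw value above
 * 10 is clamped to 10. Kept compiled out. */
#if 0
static int read_bounded_example(struct aom_read_bit_buffer *rb) {
  return decode_unsigned_max(rb, 10);
}
#endif
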
static TX_MODE read_tx_mode(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
#if CONFIG_TX64X64
  TX_MODE tx_mode;
#endif
  if (cm->all_lossless) return ONLY_4X4;
#if CONFIG_VAR_TX_NO_TX_MODE
  (void)rb;
  return TX_MODE_SELECT;
#else
#if CONFIG_TX64X64
  tx_mode = aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
  if (tx_mode == ALLOW_32X32) tx_mode += aom_rb_read_bit(rb);
  return tx_mode;
#else
  return aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
#endif  // CONFIG_TX64X64
#endif  // CONFIG_VAR_TX_NO_TX_MODE
}

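/* read_tx_mode() syntax: lossless frames force ONLY_4X4. Otherwise one bit
 * selects TX_MODE_SELECT; if it is clear, a two-bit literal picks the fixed
 * transform mode. With CONFIG_TX64X64 an extra bit promotes ALLOW_32X32 to the
 * next enum value (the 64x64-capable mode). */
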
#if !CONFIG_RESTRICT_COMPRESSED_HDR
static void read_inter_mode_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i;
  for (i = 0; i < NEWMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->newmv_prob[i], ACCT_STR);
  for (i = 0; i < ZEROMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->zeromv_prob[i], ACCT_STR);
  for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->refmv_prob[i], ACCT_STR);
  for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->drl_prob[i], ACCT_STR);
}
#endif

static REFERENCE_MODE read_frame_reference_mode(
    const AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  if (is_compound_reference_allowed(cm)) {
#if CONFIG_REF_ADAPT
    return aom_rb_read_bit(rb) ? REFERENCE_MODE_SELECT : SINGLE_REFERENCE;
#else
    return aom_rb_read_bit(rb)
               ? REFERENCE_MODE_SELECT
               : (aom_rb_read_bit(rb) ? COMPOUND_REFERENCE : SINGLE_REFERENCE);
#endif  // CONFIG_REF_ADAPT
  } else {
    return SINGLE_REFERENCE;
  }
}

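/* read_frame_reference_mode() syntax: the first bit selects
 * REFERENCE_MODE_SELECT (per-block signalling). Without CONFIG_REF_ADAPT a
 * second bit then distinguishes COMPOUND_REFERENCE from SINGLE_REFERENCE;
 * with CONFIG_REF_ADAPT the fallback is always SINGLE_REFERENCE. */
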
#if !CONFIG_RESTRICT_COMPRESSED_HDR
static void read_frame_reference_mode_probs(AV1_COMMON *cm, aom_reader *r) {
  FRAME_CONTEXT *const fc = cm->fc;
  int i;

  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    for (i = 0; i < COMP_INTER_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_inter_prob[i], ACCT_STR);

  if (cm->reference_mode != COMPOUND_REFERENCE) {
    for (i = 0; i < REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (SINGLE_REFS - 1); ++j) {
        av1_diff_update_prob(r, &fc->single_ref_prob[i][j], ACCT_STR);
      }
    }
  }

  if (cm->reference_mode != SINGLE_REFERENCE) {
#if CONFIG_EXT_COMP_REFS
    for (i = 0; i < COMP_REF_TYPE_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_ref_type_prob[i], ACCT_STR);

    for (i = 0; i < UNI_COMP_REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (UNIDIR_COMP_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->uni_comp_ref_prob[i][j], ACCT_STR);
    }
#endif  // CONFIG_EXT_COMP_REFS

    for (i = 0; i < REF_CONTEXTS; ++i) {
      int j;
#if CONFIG_EXT_REFS
      for (j = 0; j < (FWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_ref_prob[i][j], ACCT_STR);
      for (j = 0; j < (BWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_bwdref_prob[i][j], ACCT_STR);
#else
      for (j = 0; j < (COMP_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_ref_prob[i][j], ACCT_STR);
#endif  // CONFIG_EXT_REFS
    }
  }
}

static void update_mv_probs(aom_prob *p, int n, aom_reader *r) {
  int i;
  for (i = 0; i < n; ++i) av1_diff_update_prob(r, &p[i], ACCT_STR);
}

static void read_mv_probs(nmv_context *ctx, int allow_hp, aom_reader *r) {
  int i;
  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      nmv_component *const comp_ctx = &ctx->comps[i];
      update_mv_probs(&comp_ctx->class0_hp, 1, r);
      update_mv_probs(&comp_ctx->hp, 1, r);
    }
  }
}
#endif

static void inverse_transform_block(MACROBLOCKD *xd, int plane,
#if CONFIG_LGT
                                    PREDICTION_MODE mode,
#endif
                                    const TX_TYPE tx_type,
                                    const TX_SIZE tx_size, uint8_t *dst,
                                    int stride, int16_t scan_line, int eob) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  tran_low_t *const dqcoeff = pd->dqcoeff;
  av1_inverse_transform_block(xd, dqcoeff,
#if CONFIG_LGT
                              mode,
#endif
#if CONFIG_MRC_TX && SIGNAL_ANY_MRC_MASK
                              xd->mrc_mask,
#endif  // CONFIG_MRC_TX && SIGNAL_ANY_MRC_MASK
                              tx_type, tx_size, dst, stride, eob);
  memset(dqcoeff, 0, (scan_line + 1) * sizeof(dqcoeff[0]));
}

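/* inverse_transform_block() clears only the first (scan_line + 1) dequantized
 * coefficients after the inverse transform: positions beyond the last nonzero
 * scan index were never written for this block, so the partial memset is
 * enough to leave pd->dqcoeff zeroed for the next transform block. */
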
static int get_block_idx(const MACROBLOCKD *xd, int plane, int row, int col) {
  const int bsize = xd->mi[0]->mbmi.sb_type;
  const struct macroblockd_plane *pd = &xd->plane[plane];
#if CONFIG_CHROMA_SUB8X8
  const BLOCK_SIZE plane_bsize =
      AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
#elif CONFIG_CB4X4
  const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
#else
  const BLOCK_SIZE plane_bsize =
      get_plane_block_size(AOMMAX(BLOCK_8X8, bsize), pd);
#endif
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
  const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
  const uint8_t txh_unit = tx_size_high_unit[tx_size];
  return row * max_blocks_wide + col * txh_unit;
}

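/* get_block_idx() forms the block index used by the coefficient decoders as
 * row * max_blocks_wide + col * txh_unit: the row offset is measured against
 * the plane width in 4x4 units, and the column is scaled by the transform
 * height unit of the current transform size. */
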
#if CONFIG_PVQ
static int av1_pvq_decode_helper(MACROBLOCKD *xd, tran_low_t *ref_coeff,
                                 tran_low_t *dqcoeff, int16_t *quant, int pli,
                                 int bs, TX_TYPE tx_type, int xdec,
                                 PVQ_SKIP_TYPE ac_dc_coded) {
  unsigned int flags;  // used for daala's stream analyzer.
  int off;
  const int is_keyframe = 0;
  const int has_dc_skip = 1;
  int coeff_shift = 3 - av1_get_tx_scale(bs);
  int hbd_downshift = 0;
  int rounding_mask;
  // DC quantizer for PVQ
  int pvq_dc_quant;
  int lossless = (quant[0] == 0);
  const int blk_size = tx_size_wide[bs];
  int eob = 0;
  int i;
  od_dec_ctx *dec = &xd->daala_dec;
  int use_activity_masking = dec->use_activity_masking;
  DECLARE_ALIGNED(16, tran_low_t, dqcoeff_pvq[OD_TXSIZE_MAX * OD_TXSIZE_MAX]);
  DECLARE_ALIGNED(16, tran_low_t, ref_coeff_pvq[OD_TXSIZE_MAX * OD_TXSIZE_MAX]);

  od_coeff ref_int32[OD_TXSIZE_MAX * OD_TXSIZE_MAX];
  od_coeff out_int32[OD_TXSIZE_MAX * OD_TXSIZE_MAX];

  hbd_downshift = xd->bd - 8;

  od_raster_to_coding_order(ref_coeff_pvq, blk_size, tx_type, ref_coeff,
                            blk_size);

  assert(OD_COEFF_SHIFT >= 4);
  if (lossless)
    pvq_dc_quant = 1;
  else {
    if (use_activity_masking)
      pvq_dc_quant =
          OD_MAXI(1,
                  (quant[0] << (OD_COEFF_SHIFT - 3) >> hbd_downshift) *
                          dec->state.pvq_qm_q4[pli][od_qm_get_index(bs, 0)] >>
                      4);
    else
      pvq_dc_quant =
          OD_MAXI(1, quant[0] << (OD_COEFF_SHIFT - 3) >> hbd_downshift);
  }

  off = od_qm_offset(bs, xdec);

  // copy int16 inputs to int32
  for (i = 0; i < blk_size * blk_size; i++) {
    ref_int32[i] =
        AOM_SIGNED_SHL(ref_coeff_pvq[i], OD_COEFF_SHIFT - coeff_shift) >>
        hbd_downshift;
  }

  od_pvq_decode(dec, ref_int32, out_int32,
                OD_MAXI(1, quant[1] << (OD_COEFF_SHIFT - 3) >> hbd_downshift),
                pli, bs, OD_PVQ_BETA[use_activity_masking][pli][bs],
                is_keyframe, &flags, ac_dc_coded, dec->state.qm + off,
                dec->state.qm_inv + off);

  if (!has_dc_skip || out_int32[0]) {
    out_int32[0] =
        has_dc_skip + generic_decode(dec->r, &dec->state.adapt->model_dc[pli],
                                     &dec->state.adapt->ex_dc[pli][bs][0], 2,
                                     "dc:mag");
    if (out_int32[0]) out_int32[0] *= aom_read_bit(dec->r, "dc:sign") ? -1 : 1;
  }
  out_int32[0] = out_int32[0] * pvq_dc_quant + ref_int32[0];

  // copy int32 result back to int16
  assert(OD_COEFF_SHIFT > coeff_shift);
  rounding_mask = (1 << (OD_COEFF_SHIFT - coeff_shift - 1)) - 1;
  for (i = 0; i < blk_size * blk_size; i++) {
    out_int32[i] = AOM_SIGNED_SHL(out_int32[i], hbd_downshift);
    dqcoeff_pvq[i] = (out_int32[i] + (out_int32[i] < 0) + rounding_mask) >>
                     (OD_COEFF_SHIFT - coeff_shift);
  }

  od_coding_order_to_raster(dqcoeff, blk_size, tx_type, dqcoeff_pvq, blk_size);

  eob = blk_size * blk_size;

  return eob;
}

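/* av1_pvq_decode_helper() pipeline: the predicted (reference) coefficients are
 * reordered into PVQ coding order and widened to od_coeff precision, the DC
 * quantizer is derived from quant[0] (optionally shaped by the
 * activity-masking QM), od_pvq_decode() reconstructs the AC bands, the DC
 * magnitude is decoded with generic_decode() plus a sign bit, and the result
 * is rounded back to tran_low_t and returned in raster order. The eob is
 * reported as the full block size. */
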
static PVQ_SKIP_TYPE read_pvq_skip(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                   int plane, TX_SIZE tx_size) {
  // decode ac/dc coded flag. bit0: DC coded, bit1: AC coded
  // NOTE: we don't use 5 symbols for luma here in aom codebase,
  // since block partition is taken care of by aom.
  // So, only AC/DC skip info is coded
  const int ac_dc_coded = aom_read_symbol(
      xd->daala_dec.r,
      xd->daala_dec.state.adapt->skip_cdf[2 * tx_size + (plane != 0)], 4,
      "skip");
  if (ac_dc_coded < 0 || ac_dc_coded > 3) {
    aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
                       "Invalid PVQ Skip Type");
  }
  return ac_dc_coded;
}

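/* The 4-ary symbol decoded above is interpreted as bit flags, matching the
 * comment in read_pvq_skip(): bit 0 set means the DC coefficient is coded and
 * bit 1 set means the AC bands are coded, so a value of 0 skips the whole
 * block and 3 codes both DC and AC. */
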
static int av1_pvq_decode_helper2(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                  MB_MODE_INFO *const mbmi, int plane, int row,
                                  int col, TX_SIZE tx_size, TX_TYPE tx_type) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  // transform block size in pixels
  int tx_blk_size = tx_size_wide[tx_size];
  int i, j;
  tran_low_t *pvq_ref_coeff = pd->pvq_ref_coeff;
  const int diff_stride = tx_blk_size;
  int16_t *pred = pd->pred;
  tran_low_t *const dqcoeff = pd->dqcoeff;
  uint8_t *dst;
  int eob;
  const PVQ_SKIP_TYPE ac_dc_coded = read_pvq_skip(cm, xd, plane, tx_size);

  eob = 0;
  dst = &pd->dst.buf[4 * row * pd->dst.stride + 4 * col];

  if (ac_dc_coded) {
    int xdec = pd->subsampling_x;
    int seg_id = mbmi->segment_id;
    int16_t *quant;
    TxfmParam txfm_param;
    // ToDo(yaowu): correct this with optimal number from decoding process.
    const int max_scan_line = tx_size_2d[tx_size];
#if CONFIG_HIGHBITDEPTH
    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
      for (j = 0; j < tx_blk_size; j++)
        for (i = 0; i < tx_blk_size; i++)
          pred[diff_stride * j + i] =
              CONVERT_TO_SHORTPTR(dst)[pd->dst.stride * j + i];
    } else {
#endif
      for (j = 0; j < tx_blk_size; j++)
        for (i = 0; i < tx_blk_size; i++)
          pred[diff_stride * j + i] = dst[pd->dst.stride * j + i];
#if CONFIG_HIGHBITDEPTH
    }
#endif

    txfm_param.tx_type = tx_type;
    txfm_param.tx_size = tx_size;
    txfm_param.lossless = xd->lossless[seg_id];

#if CONFIG_HIGHBITDEPTH
    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
      txfm_param.bd = xd->bd;
      av1_highbd_fwd_txfm(pred, pvq_ref_coeff, diff_stride, &txfm_param);
    } else {
#endif  // CONFIG_HIGHBITDEPTH
      av1_fwd_txfm(pred, pvq_ref_coeff, diff_stride, &txfm_param);
#if CONFIG_HIGHBITDEPTH
    }
#endif  // CONFIG_HIGHBITDEPTH

    quant = &pd->seg_dequant[seg_id][0];  // aom's quantizer

    eob = av1_pvq_decode_helper(xd, pvq_ref_coeff, dqcoeff, quant, plane,
                                tx_size, tx_type, xdec, ac_dc_coded);

    inverse_transform_block(xd, plane, tx_type, tx_size, dst, pd->dst.stride,
                            max_scan_line, eob);
  }

  return eob;
}
#endif

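/* av1_pvq_decode_helper2() reconstructs one transform block in the PVQ path:
 * the just-computed spatial prediction is forward-transformed (high bit depth
 * aware) to obtain the reference coefficients, av1_pvq_decode_helper()
 * decodes the block relative to them, and the inverse transform writes the
 * result back over the prediction in the destination buffer. */
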
static void predict_and_reconstruct_intra_block(
    AV1_COMMON *cm, MACROBLOCKD *const xd, aom_reader *const r,
    MB_MODE_INFO *const mbmi, int plane, int row, int col, TX_SIZE tx_size) {
  PLANE_TYPE plane_type = get_plane_type(plane);
  const int block_idx = get_block_idx(xd, plane, row, col);
#if CONFIG_PVQ
  (void)r;
#endif
  av1_predict_intra_block_facade(cm, xd, plane, block_idx, col, row, tx_size);

  if (!mbmi->skip) {
#if !CONFIG_PVQ
    struct macroblockd_plane *const pd = &xd->plane[plane];
#if CONFIG_LV_MAP
    int16_t max_scan_line = 0;
    int eob;
    av1_read_coeffs_txb_facade(cm, xd, r, row, col, block_idx, plane,
                               pd->dqcoeff, tx_size, &max_scan_line, &eob);
    // tx_type will be read out in av1_read_coeffs_txb_facade
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
#else   // CONFIG_LV_MAP
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
    const SCAN_ORDER *scan_order = get_scan(cm, tx_size, tx_type, mbmi);
    int16_t max_scan_line = 0;
    const int eob =
        av1_decode_block_tokens(cm, xd, plane, scan_order, col, row, tx_size,
                                tx_type, &max_scan_line, r, mbmi->segment_id);
#endif  // CONFIG_LV_MAP
    if (eob) {
      uint8_t *dst =
          &pd->dst.buf[(row * pd->dst.stride + col) << tx_size_wide_log2[0]];
      inverse_transform_block(xd, plane,
#if CONFIG_LGT
                              mbmi->mode,
#endif
                              tx_type, tx_size, dst, pd->dst.stride,
                              max_scan_line, eob);
    }
#else   // !CONFIG_PVQ
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
    av1_pvq_decode_helper2(cm, xd, mbmi, plane, row, col, tx_size, tx_type);
#endif  // !CONFIG_PVQ
  }
#if CONFIG_CFL
  if (plane == AOM_PLANE_Y && xd->cfl->store_y) {
    cfl_store_tx(xd, row, col, tx_size, mbmi->sb_type);
  }
#endif  // CONFIG_CFL
}

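/* Per-transform-block intra decode order in
 * predict_and_reconstruct_intra_block(): spatial prediction first, then (when
 * the block is not skipped) coefficient decoding through either the LV_MAP or
 * the token path (or PVQ), an inverse transform if eob is nonzero, and
 * finally a CFL store of the reconstructed luma when chroma-from-luma needs
 * it. */
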
#if CONFIG_VAR_TX && !CONFIG_COEF_INTERLEAVE
static void decode_reconstruct_tx(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                  aom_reader *r, MB_MODE_INFO *const mbmi,
                                  int plane, BLOCK_SIZE plane_bsize,
                                  int blk_row, int blk_col, int block,
                                  TX_SIZE tx_size, int *eob_total) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
  const int tx_row = blk_row >> (1 - pd->subsampling_y);
  const int tx_col = blk_col >> (1 - pd->subsampling_x);
  const TX_SIZE plane_tx_size =
      plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
            : mbmi->inter_tx_size[tx_row][tx_col];
  // Scale to match transform block unit.
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (tx_size == plane_tx_size) {
    PLANE_TYPE plane_type = get_plane_type(plane);
#if CONFIG_LV_MAP
    int16_t max_scan_line = 0;
    int eob;
    av1_read_coeffs_txb_facade(cm, xd, r, blk_row, blk_col, block, plane,
                               pd->dqcoeff, tx_size, &max_scan_line, &eob);
    // tx_type will be read out in av1_read_coeffs_txb_facade
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, blk_row, blk_col, block, plane_tx_size);
#else   // CONFIG_LV_MAP
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, blk_row, blk_col, block, plane_tx_size);
    const SCAN_ORDER *sc = get_scan(cm, plane_tx_size, tx_type, mbmi);
    int16_t max_scan_line = 0;
    const int eob = av1_decode_block_tokens(
        cm, xd, plane, sc, blk_col, blk_row, plane_tx_size, tx_type,
        &max_scan_line, r, mbmi->segment_id);
#endif  // CONFIG_LV_MAP
    inverse_transform_block(xd, plane,
#if CONFIG_LGT
                            mbmi->mode,
#endif
                            tx_type, plane_tx_size,
                            &pd->dst.buf[(blk_row * pd->dst.stride + blk_col)
                                         << tx_size_wide_log2[0]],
                            pd->dst.stride, max_scan_line, eob);
    *eob_total += eob;
  } else {
#if CONFIG_RECT_TX_EXT
    int is_qttx = plane_tx_size == quarter_txsize_lookup[plane_bsize];
    const TX_SIZE sub_txs = is_qttx ? plane_tx_size : sub_tx_size_map[tx_size];
    if (is_qttx) assert(blk_row == 0 && blk_col == 0 && block == 0);
#else
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    assert(IMPLIES(tx_size <= TX_4X4, sub_txs == tx_size));
    assert(IMPLIES(tx_size > TX_4X4, sub_txs < tx_size));
#endif
    const int bsl = tx_size_wide_unit[sub_txs];
    int sub_step = tx_size_wide_unit[sub_txs] * tx_size_high_unit[sub_txs];
    int i;

    assert(bsl > 0);

    for (i = 0; i < 4; ++i) {
#if CONFIG_RECT_TX_EXT
      int is_wide_tx = tx_size_wide_unit[sub_txs] > tx_size_high_unit[sub_txs];
      const int offsetr =
          is_qttx ? (is_wide_tx ? i * tx_size_high_unit[sub_txs] : 0)
                  : blk_row + ((i >> 1) * bsl);
      const int offsetc =
          is_qttx ? (is_wide_tx ? 0 : i * tx_size_wide_unit[sub_txs])
                  : blk_col + (i & 0x01) * bsl;
#else
      const int offsetr = blk_row + (i >> 1) * bsl;
      const int offsetc = blk_col + (i & 0x01) * bsl;
#endif

      if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

      decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize, offsetr,
                            offsetc, block, sub_txs, eob_total);
      block += sub_step;
    }
  }
}
#endif  // CONFIG_VAR_TX

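/* decode_reconstruct_tx() walks the recursive transform tree: when the
 * current tx_size matches the signalled plane_tx_size the block is decoded
 * and inverse-transformed in place; otherwise it recurses into four children
 * of the next smaller transform size. Illustrative sketch of how the child
 * offsets are derived (hypothetical helper, kept compiled out): */
#if 0
static void visit_tx_children_sketch(int blk_row, int blk_col,
                                     TX_SIZE sub_txs) {
  const int bsl = tx_size_wide_unit[sub_txs];
  for (int i = 0; i < 4; ++i) {
    /* Child i covers the sub-block at (blk_row + (i >> 1) * bsl,
     * blk_col + (i & 0x01) * bsl), measured in 4x4 block units. */
    const int offsetr = blk_row + (i >> 1) * bsl;
    const int offsetc = blk_col + (i & 0x01) * bsl;
    (void)offsetr;
    (void)offsetc;
  }
}
#endif
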
#if !CONFIG_VAR_TX || CONFIG_SUPERTX || CONFIG_COEF_INTERLEAVE || \
    (!CONFIG_VAR_TX && CONFIG_EXT_TX && CONFIG_RECT_TX)
static int reconstruct_inter_block(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                   aom_reader *const r, int segment_id,
                                   int plane, int row, int col,
                                   TX_SIZE tx_size) {
  PLANE_TYPE plane_type = get_plane_type(plane);
  int block_idx = get_block_idx(xd, plane, row, col);
#if CONFIG_PVQ
  int eob;
  (void)r;
  (void)segment_id;
#else
  struct macroblockd_plane *const pd = &xd->plane[plane];
#endif

#if !CONFIG_PVQ
#if CONFIG_LV_MAP
  (void)segment_id;
  int16_t max_scan_line = 0;
  int eob;
  av1_read_coeffs_txb_facade(cm, xd, r, row, col, block_idx, plane, pd->dqcoeff,
                             tx_size, &max_scan_line, &eob);
  // tx_type will be read out in av1_read_coeffs_txb_facade
  const TX_TYPE tx_type =
      av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
#else   // CONFIG_LV_MAP
  int16_t max_scan_line = 0;
  const TX_TYPE tx_type =
      av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
  const SCAN_ORDER *scan_order =
      get_scan(cm, tx_size, tx_type, &xd->mi[0]->mbmi);
  const int eob =
      av1_decode_block_tokens(cm, xd, plane, scan_order, col, row, tx_size,
                              tx_type, &max_scan_line, r, segment_id);
#endif  // CONFIG_LV_MAP
  uint8_t *dst =
      &pd->dst.buf[(row * pd->dst.stride + col) << tx_size_wide_log2[0]];
  if (eob)
    inverse_transform_block(xd, plane,
#if CONFIG_LGT
                            xd->mi[0]->mbmi.mode,
#endif
                            tx_type, tx_size, dst, pd->dst.stride,
                            max_scan_line, eob);
#else
  const TX_TYPE tx_type =
      av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
  eob = av1_pvq_decode_helper2(cm, xd, &xd->mi[0]->mbmi, plane, row, col,
                               tx_size, tx_type);
#endif
  return eob;
}
#endif  // !CONFIG_VAR_TX || CONFIG_SUPERTX

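/* reconstruct_inter_block() mirrors the intra path for inter blocks: it
 * decodes the coefficients of one transform block (LV_MAP, token, or PVQ
 * path), inverse-transforms them onto the inter prediction when eob is
 * nonzero, and returns the eob so the caller can track whether any residual
 * was produced. */
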
static void set_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                        BLOCK_SIZE bsize, int mi_row, int mi_col, int bw,
                        int bh, int x_mis, int y_mis) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;
  const TileInfo *const tile = &xd->tile;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
  // TODO(slavarnway): Generate sb_type based on bwl and bhl, instead of
  // passing bsize from decode_partition().
  xd->mi[0]->mbmi.sb_type = bsize;
#if CONFIG_RD_DEBUG
  xd->mi[0]->mbmi.mi_row = mi_row;
  xd->mi[0]->mbmi.mi_col = mi_col;
#endif
#if CONFIG_CFL
  xd->cfl->mi_row = mi_row;
  xd->cfl->mi_col = mi_col;
#endif
  for (y = 0; y < y_mis; ++y)
    for (x = !y; x < x_mis; ++x) xd->mi[y * cm->mi_stride + x] = xd->mi[0];

  set_plane_n4(xd, bw, bh);
  set_skip_context(xd, mi_row, mi_col);

#if CONFIG_VAR_TX
  xd->max_tx_size = max_txsize_lookup[bsize];
#endif

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);

  av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
                       mi_col);
}

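/* set_offsets() points xd->mi at the mode-info grid entry for this block,
 * replicates that pointer across the x_mis * y_mis covered mi units, sets the
 * per-plane block geometry and skip context, records the distance to the
 * frame edges via set_mi_row_col(), and hooks the destination planes of the
 * new frame buffer up to xd. */
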
#if CONFIG_SUPERTX
static MB_MODE_INFO *set_offsets_extend(AV1_COMMON *const cm,
                                        MACROBLOCKD *const xd,
                                        const TileInfo *const tile,
                                        BLOCK_SIZE bsize_pred, int mi_row_pred,
                                        int mi_col_pred, int mi_row_ori,
                                        int mi_col_ori) {
  // Used in supertx
  // (mi_row_ori, mi_col_ori): location for mv
  // (mi_row_pred, mi_col_pred, bsize_pred): region to predict
  const int bw = mi_size_wide[bsize_pred];
  const int bh = mi_size_high[bsize_pred];
  const int offset = mi_row_ori * cm->mi_stride + mi_col_ori;
  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;
  set_mi_row_col(xd, tile, mi_row_pred, bh, mi_col_pred, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);

  xd->up_available = (mi_row_ori > tile->mi_row_start);
  xd->left_available = (mi_col_ori > tile->mi_col_start);

  set_plane_n4(xd, bw, bh);

  return &xd->mi[0]->mbmi;
}

#if CONFIG_SUPERTX
static MB_MODE_INFO *set_mb_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                    BLOCK_SIZE bsize, int mi_row, int mi_col,
                                    int bw, int bh, int x_mis, int y_mis) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  const TileInfo *const tile = &xd->tile;
  int x, y;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;
  xd->mi[0]->mbmi.sb_type = bsize;
  for (y = 0; y < y_mis; ++y)
    for (x = !y; x < x_mis; ++x) xd->mi[y * cm->mi_stride + x] = xd->mi[0];

  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);
  return &xd->mi[0]->mbmi;
}
#endif

static void set_offsets_topblock(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                 const TileInfo *const tile, BLOCK_SIZE bsize,
                                 int mi_row, int mi_col) {
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int offset = mi_row * cm->mi_stride + mi_col;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;

  set_plane_n4(xd, bw, bh);

  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);

  av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
                       mi_col);
}

static void set_param_topblock(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                               BLOCK_SIZE bsize, int mi_row, int mi_col,
                               int txfm, int skip) {
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = cm->mi + offset;

  for (y = 0; y < y_mis; ++y)
    for (x = 0; x < x_mis; ++x) {
      xd->mi[y * cm->mi_stride + x]->mbmi.skip = skip;
      xd->mi[y * cm->mi_stride + x]->mbmi.tx_type = txfm;
    }
#if CONFIG_VAR_TX
  xd->above_txfm_context = cm->above_txfm_context + mi_col;
  xd->left_txfm_context =
      xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
  set_txfm_ctxs(xd->mi[0]->mbmi.tx_size, bw, bh, skip, xd);
#endif
}

static void set_ref(AV1_COMMON *const cm, MACROBLOCKD *const xd, int idx,
                    int mi_row, int mi_col) {
  MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
#if CONFIG_COMPOUND_SINGLEREF
  RefBuffer *ref_buffer =
      has_second_ref(mbmi) ? &cm->frame_refs[mbmi->ref_frame[idx] - LAST_FRAME]
                           : &cm->frame_refs[mbmi->ref_frame[0] - LAST_FRAME];
#else
  RefBuffer *ref_buffer = &cm->frame_refs[mbmi->ref_frame[idx] - LAST_FRAME];
#endif  // CONFIG_COMPOUND_SINGLEREF
  xd->block_refs[idx] = ref_buffer;
  if (!av1_is_valid_scale(&ref_buffer->sf))
    aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                       "Invalid scale factors");
  av1_setup_pre_planes(xd, idx, ref_buffer->buf, mi_row, mi_col,
                       &ref_buffer->sf);
  aom_merge_corrupted_flag(&xd->corrupted, ref_buffer->buf->corrupted);
}

static void dec_predict_b_extend(
    AV1Decoder *const pbi, MACROBLOCKD *const xd, const TileInfo *const tile,
    int block, int mi_row_ori, int mi_col_ori, int mi_row_pred, int mi_col_pred,
    int mi_row_top, int mi_col_top, int plane, uint8_t *dst_buf, int dst_stride,
    BLOCK_SIZE bsize_top, BLOCK_SIZE bsize_pred, int b_sub8x8, int bextend) {
  // Used in supertx
  // (mi_row_ori, mi_col_ori): location for mv
  // (mi_row_pred, mi_col_pred, bsize_pred): region to predict
  // (mi_row_top, mi_col_top, bsize_top): region of the top partition size
  // block: sub location of sub8x8 blocks
  // b_sub8x8: 1: ori is sub8x8; 0: ori is not sub8x8
  // bextend: 1: region to predict is an extension of ori; 0: not
  int r = (mi_row_pred - mi_row_top) * MI_SIZE;
  int c = (mi_col_pred - mi_col_top) * MI_SIZE;
  const int mi_width_top = mi_size_wide[bsize_top];
  const int mi_height_top = mi_size_high[bsize_top];
  MB_MODE_INFO *mbmi;
  AV1_COMMON *const cm = &pbi->common;

  if (mi_row_pred < mi_row_top || mi_col_pred < mi_col_top ||
      mi_row_pred >= mi_row_top + mi_height_top ||
      mi_col_pred >= mi_col_top + mi_width_top || mi_row_pred >= cm->mi_rows ||
      mi_col_pred >= cm->mi_cols)
    return;

  mbmi = set_offsets_extend(cm, xd, tile, bsize_pred, mi_row_pred, mi_col_pred,
                            mi_row_ori, mi_col_ori);
  set_ref(cm, xd, 0, mi_row_pred, mi_col_pred);
  if (has_second_ref(&xd->mi[0]->mbmi)
#if CONFIG_COMPOUND_SINGLEREF
      || is_inter_singleref_comp_mode(xd->mi[0]->mbmi.mode)
#endif  // CONFIG_COMPOUND_SINGLEREF
      )
    set_ref(cm, xd, 1, mi_row_pred, mi_col_pred);
  if (!bextend) mbmi->tx_size = max_txsize_lookup[bsize_top];

  xd->plane[plane].dst.stride = dst_stride;
  xd->plane[plane].dst.buf =
      dst_buf + (r >> xd->plane[plane].subsampling_y) * dst_stride +
      (c >> xd->plane[plane].subsampling_x);

  if (!b_sub8x8)
    av1_build_inter_predictor_sb_extend(&pbi->common, xd, mi_row_ori,
                                        mi_col_ori, mi_row_pred, mi_col_pred,
                                        plane, bsize_pred);
  else
    av1_build_inter_predictor_sb_sub8x8_extend(
        &pbi->common, xd, mi_row_ori, mi_col_ori, mi_row_pred, mi_col_pred,
        plane, bsize_pred, block);
}

static void dec_extend_dir(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                           const TileInfo *const tile, int block,
                           BLOCK_SIZE bsize, BLOCK_SIZE top_bsize,
                           int mi_row_ori, int mi_col_ori, int mi_row,
                           int mi_col, int mi_row_top, int mi_col_top,
                           int plane, uint8_t *dst_buf, int dst_stride,
                           int dir) {
  // dir: 0-lower, 1-upper, 2-left, 3-right
  // 4-lowerleft, 5-upperleft, 6-lowerright, 7-upperright
  const int mi_width = mi_size_wide[bsize];
  const int mi_height = mi_size_high[bsize];
  int xss = xd->plane[1].subsampling_x;
  int yss = xd->plane[1].subsampling_y;
#if CONFIG_CB4X4
  const int unify_bsize = 1;
#else
  const int unify_bsize = 0;
#endif
  int b_sub8x8 = (bsize < BLOCK_8X8) && !unify_bsize ? 1 : 0;
  BLOCK_SIZE extend_bsize;
  int mi_row_pred, mi_col_pred;

  int wide_unit, high_unit;
  int i, j;
  int ext_offset = 0;

  if (dir == 0 || dir == 1) {
    extend_bsize =
        (mi_width == mi_size_wide[BLOCK_8X8] || bsize < BLOCK_8X8 || xss < yss)
            ? BLOCK_8X8
            : BLOCK_16X8;
#if CONFIG_CB4X4
    if (bsize < BLOCK_8X8) {
      extend_bsize = BLOCK_4X4;
      ext_offset = mi_size_wide[BLOCK_8X8];
    }
#endif

    wide_unit = mi_size_wide[extend_bsize];
    high_unit = mi_size_high[extend_bsize];

    mi_row_pred = mi_row + ((dir == 0) ? mi_height : -(mi_height + ext_offset));
    mi_col_pred = mi_col;

    for (j = 0; j < mi_height + ext_offset; j += high_unit)
      for (i = 0; i < mi_width + ext_offset; i += wide_unit)
        dec_predict_b_extend(pbi, xd, tile, block, mi_row_ori, mi_col_ori,
                             mi_row_pred + j, mi_col_pred + i, mi_row_top,
                             mi_col_top, plane, dst_buf, dst_stride, top_bsize,
                             extend_bsize, b_sub8x8, 1);
  } else if (dir == 2 || dir == 3) {
    extend_bsize =
        (mi_height == mi_size_high[BLOCK_8X8] || bsize < BLOCK_8X8 || yss < xss)
            ? BLOCK_8X8
            : BLOCK_8X16;
#if CONFIG_CB4X4
    if (bsize < BLOCK_8X8) {
      extend_bsize = BLOCK_4X4;
      ext_offset = mi_size_wide[BLOCK_8X8];
    }
#endif

    wide_unit = mi_size_wide[extend_bsize];
    high_unit = mi_size_high[extend_bsize];

    mi_row_pred = mi_row;
    mi_col_pred = mi_col + ((dir == 3) ? mi_width : -(mi_width + ext_offset));

    for (j = 0; j < mi_height + ext_offset; j += high_unit)
      for (i = 0; i < mi_width + ext_offset; i += wide_unit)
        dec_predict_b_extend(pbi, xd, tile, block, mi_row_ori, mi_col_ori,
                             mi_row_pred + j, mi_col_pred + i, mi_row_top,
                             mi_col_top, plane, dst_buf, dst_stride, top_bsize,
                             extend_bsize, b_sub8x8, 1);
  } else {
    extend_bsize = BLOCK_8X8;
#if CONFIG_CB4X4
    if (bsize < BLOCK_8X8) {
      extend_bsize = BLOCK_4X4;
      ext_offset = mi_size_wide[BLOCK_8X8];
    }
#endif
    wide_unit = mi_size_wide[extend_bsize];
    high_unit = mi_size_high[extend_bsize];

    mi_row_pred = mi_row + ((dir == 4 || dir == 6) ? mi_height
                                                   : -(mi_height + ext_offset));
    mi_col_pred =
        mi_col + ((dir == 6 || dir == 7) ? mi_width : -(mi_width + ext_offset));

    for (j = 0; j < mi_height + ext_offset; j += high_unit)
      for (i = 0; i < mi_width + ext_offset; i += wide_unit)
        dec_predict_b_extend(pbi, xd, tile, block, mi_row_ori, mi_col_ori,
                             mi_row_pred + j, mi_col_pred + i, mi_row_top,
                             mi_col_top, plane, dst_buf, dst_stride, top_bsize,
                             extend_bsize, b_sub8x8, 1);
  }
}

static void dec_extend_all(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                           const TileInfo *const tile, int block,
                           BLOCK_SIZE bsize, BLOCK_SIZE top_bsize,
                           int mi_row_ori, int mi_col_ori, int mi_row,
                           int mi_col, int mi_row_top, int mi_col_top,
                           int plane, uint8_t *dst_buf, int dst_stride) {
  for (int i = 0; i < 8; ++i) {
    dec_extend_dir(pbi, xd, tile, block, bsize, top_bsize, mi_row_ori,
                   mi_col_ori, mi_row, mi_col, mi_row_top, mi_col_top, plane,
                   dst_buf, dst_stride, i);
  }
}

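/* dec_extend_all() simply invokes dec_extend_dir() for all eight neighbouring
 * directions (the four edges and four corners listed in that function's
 * comment), so the prediction of a block is extended outward toward the
 * surrounding top-partition region used by supertx. */
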
Yaowu Xuf883b422016-08-30 14:01:10 -0700998static void dec_predict_sb_complex(AV1Decoder *const pbi, MACROBLOCKD *const xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700999 const TileInfo *const tile, int mi_row,
1000 int mi_col, int mi_row_top, int mi_col_top,
1001 BLOCK_SIZE bsize, BLOCK_SIZE top_bsize,
1002 uint8_t *dst_buf[3], int dst_stride[3]) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001003 const AV1_COMMON *const cm = &pbi->common;
Jingning Han5b7706a2016-12-21 09:55:10 -08001004 const int hbs = mi_size_wide[bsize] / 2;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001005 const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
1006 const BLOCK_SIZE subsize = get_subsize(bsize, partition);
1007#if CONFIG_EXT_PARTITION_TYPES
1008 const BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
1009#endif
1010 int i;
1011 const int mi_offset = mi_row * cm->mi_stride + mi_col;
1012 uint8_t *dst_buf1[3], *dst_buf2[3], *dst_buf3[3];
Jingning Hanfeb517c2016-12-21 16:02:07 -08001013#if CONFIG_CB4X4
1014 const int unify_bsize = 1;
1015#else
1016 const int unify_bsize = 0;
1017#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001018
1019 DECLARE_ALIGNED(16, uint8_t, tmp_buf1[MAX_MB_PLANE * MAX_TX_SQUARE * 2]);
1020 DECLARE_ALIGNED(16, uint8_t, tmp_buf2[MAX_MB_PLANE * MAX_TX_SQUARE * 2]);
1021 DECLARE_ALIGNED(16, uint8_t, tmp_buf3[MAX_MB_PLANE * MAX_TX_SQUARE * 2]);
1022 int dst_stride1[3] = { MAX_TX_SIZE, MAX_TX_SIZE, MAX_TX_SIZE };
1023 int dst_stride2[3] = { MAX_TX_SIZE, MAX_TX_SIZE, MAX_TX_SIZE };
1024 int dst_stride3[3] = { MAX_TX_SIZE, MAX_TX_SIZE, MAX_TX_SIZE };
1025
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02001026#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07001027 if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
1028 int len = sizeof(uint16_t);
1029 dst_buf1[0] = CONVERT_TO_BYTEPTR(tmp_buf1);
1030 dst_buf1[1] = CONVERT_TO_BYTEPTR(tmp_buf1 + MAX_TX_SQUARE * len);
1031 dst_buf1[2] = CONVERT_TO_BYTEPTR(tmp_buf1 + 2 * MAX_TX_SQUARE * len);
1032 dst_buf2[0] = CONVERT_TO_BYTEPTR(tmp_buf2);
1033 dst_buf2[1] = CONVERT_TO_BYTEPTR(tmp_buf2 + MAX_TX_SQUARE * len);
1034 dst_buf2[2] = CONVERT_TO_BYTEPTR(tmp_buf2 + 2 * MAX_TX_SQUARE * len);
1035 dst_buf3[0] = CONVERT_TO_BYTEPTR(tmp_buf3);
1036 dst_buf3[1] = CONVERT_TO_BYTEPTR(tmp_buf3 + MAX_TX_SQUARE * len);
1037 dst_buf3[2] = CONVERT_TO_BYTEPTR(tmp_buf3 + 2 * MAX_TX_SQUARE * len);
1038 } else {
1039#endif
1040 dst_buf1[0] = tmp_buf1;
1041 dst_buf1[1] = tmp_buf1 + MAX_TX_SQUARE;
1042 dst_buf1[2] = tmp_buf1 + 2 * MAX_TX_SQUARE;
1043 dst_buf2[0] = tmp_buf2;
1044 dst_buf2[1] = tmp_buf2 + MAX_TX_SQUARE;
1045 dst_buf2[2] = tmp_buf2 + 2 * MAX_TX_SQUARE;
1046 dst_buf3[0] = tmp_buf3;
1047 dst_buf3[1] = tmp_buf3 + MAX_TX_SQUARE;
1048 dst_buf3[2] = tmp_buf3 + 2 * MAX_TX_SQUARE;
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02001049#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07001050 }
1051#endif
1052
1053 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
1054
1055 xd->mi = cm->mi_grid_visible + mi_offset;
1056 xd->mi[0] = cm->mi + mi_offset;
1057
1058 for (i = 0; i < MAX_MB_PLANE; i++) {
1059 xd->plane[i].dst.buf = dst_buf[i];
1060 xd->plane[i].dst.stride = dst_stride[i];
1061 }
1062
1063 switch (partition) {
1064 case PARTITION_NONE:
1065 assert(bsize < top_bsize);
Yue Chen8e689e42017-06-02 10:56:10 -07001066 for (i = 0; i < MAX_MB_PLANE; i++) {
1067 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1068 mi_row_top, mi_col_top, i, dst_buf[i],
1069 dst_stride[i], top_bsize, bsize, 0, 0);
1070 dec_extend_all(pbi, xd, tile, 0, bsize, top_bsize, mi_row, mi_col,
1071 mi_row, mi_col, mi_row_top, mi_col_top, i, dst_buf[i],
1072 dst_stride[i]);
1073 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001074 break;
1075 case PARTITION_HORZ:
Jingning Hanfeb517c2016-12-21 16:02:07 -08001076 if (bsize == BLOCK_8X8 && !unify_bsize) {
Yue Chen8e689e42017-06-02 10:56:10 -07001077 for (i = 0; i < MAX_MB_PLANE; i++) {
1078          // For sub8x8, predict in 8x8 units
1079 // First half
1080 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1081 mi_row_top, mi_col_top, i, dst_buf[i],
1082 dst_stride[i], top_bsize, BLOCK_8X8, 1, 0);
1083 if (bsize < top_bsize)
1084 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1085 mi_row, mi_col, mi_row_top, mi_col_top, i,
1086 dst_buf[i], dst_stride[i]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001087
Yue Chen8e689e42017-06-02 10:56:10 -07001088 // Second half
1089 dec_predict_b_extend(pbi, xd, tile, 2, mi_row, mi_col, mi_row, mi_col,
1090 mi_row_top, mi_col_top, i, dst_buf1[i],
1091 dst_stride1[i], top_bsize, BLOCK_8X8, 1, 1);
1092 if (bsize < top_bsize)
1093 dec_extend_all(pbi, xd, tile, 2, subsize, top_bsize, mi_row, mi_col,
1094 mi_row, mi_col, mi_row_top, mi_col_top, i,
1095 dst_buf1[i], dst_stride1[i]);
1096 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001097
1098 // weighted average to smooth the boundary
1099 xd->plane[0].dst.buf = dst_buf[0];
1100 xd->plane[0].dst.stride = dst_stride[0];
Yaowu Xuf883b422016-08-30 14:01:10 -07001101 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001102 xd, dst_buf[0], dst_stride[0], dst_buf1[0], dst_stride1[0], mi_row,
1103 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
1104 0);
1105 } else {
Yue Chen8e689e42017-06-02 10:56:10 -07001106 for (i = 0; i < MAX_MB_PLANE; i++) {
1107#if CONFIG_CB4X4
1108 const struct macroblockd_plane *pd = &xd->plane[i];
1109 int handle_chroma_sub8x8 = need_handle_chroma_sub8x8(
1110 subsize, pd->subsampling_x, pd->subsampling_y);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001111
Yue Chen8e689e42017-06-02 10:56:10 -07001112 if (handle_chroma_sub8x8) {
1113 int mode_offset_row = CONFIG_CHROMA_SUB8X8 ? hbs : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001114
Yue Chen8e689e42017-06-02 10:56:10 -07001115 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + mode_offset_row,
1116 mi_col, mi_row, mi_col, mi_row_top, mi_col_top,
1117 i, dst_buf[i], dst_stride[i], top_bsize, bsize,
1118 0, 0);
1119 if (bsize < top_bsize)
1120 dec_extend_all(pbi, xd, tile, 0, bsize, top_bsize,
1121 mi_row + mode_offset_row, mi_col, mi_row, mi_col,
1122 mi_row_top, mi_col_top, i, dst_buf[i],
1123 dst_stride[i]);
1124 } else {
1125#endif
1126 // First half
1127 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row,
1128 mi_col, mi_row_top, mi_col_top, i, dst_buf[i],
1129 dst_stride[i], top_bsize, subsize, 0, 0);
1130 if (bsize < top_bsize)
1131 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1132 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1133 dst_buf[i], dst_stride[i]);
1134 else
1135 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1136 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1137 dst_buf[i], dst_stride[i], 0);
1138
1139 if (mi_row + hbs < cm->mi_rows) {
1140 // Second half
1141 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col,
1142 mi_row + hbs, mi_col, mi_row_top, mi_col_top,
1143 i, dst_buf1[i], dst_stride1[i], top_bsize,
1144 subsize, 0, 0);
1145 if (bsize < top_bsize)
1146 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize,
1147 mi_row + hbs, mi_col, mi_row + hbs, mi_col,
1148 mi_row_top, mi_col_top, i, dst_buf1[i],
1149 dst_stride1[i]);
1150 else
1151 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize,
1152 mi_row + hbs, mi_col, mi_row + hbs, mi_col,
1153 mi_row_top, mi_col_top, i, dst_buf1[i],
1154 dst_stride1[i], 1);
1155
1156 // weighted average to smooth the boundary
1157 xd->plane[i].dst.buf = dst_buf[i];
1158 xd->plane[i].dst.stride = dst_stride[i];
1159 av1_build_masked_inter_predictor_complex(
1160 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i],
1161 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1162 PARTITION_HORZ, i);
1163 }
1164#if CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07001165 }
Yue Chen8e689e42017-06-02 10:56:10 -07001166#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001167 }
1168 }
1169 break;
1170 case PARTITION_VERT:
Jingning Hanfeb517c2016-12-21 16:02:07 -08001171 if (bsize == BLOCK_8X8 && !unify_bsize) {
Yue Chen8e689e42017-06-02 10:56:10 -07001172 for (i = 0; i < MAX_MB_PLANE; i++) {
1173 // First half
1174 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1175 mi_row_top, mi_col_top, i, dst_buf[i],
1176 dst_stride[i], top_bsize, BLOCK_8X8, 1, 0);
1177 if (bsize < top_bsize)
1178 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1179 mi_row, mi_col, mi_row_top, mi_col_top, i,
1180 dst_buf[i], dst_stride[i]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001181
Yue Chen8e689e42017-06-02 10:56:10 -07001182 // Second half
1183 dec_predict_b_extend(pbi, xd, tile, 1, mi_row, mi_col, mi_row, mi_col,
1184 mi_row_top, mi_col_top, i, dst_buf1[i],
1185 dst_stride1[i], top_bsize, BLOCK_8X8, 1, 1);
1186 if (bsize < top_bsize)
1187 dec_extend_all(pbi, xd, tile, 1, subsize, top_bsize, mi_row, mi_col,
1188 mi_row, mi_col, mi_row_top, mi_col_top, i,
1189 dst_buf1[i], dst_stride1[i]);
1190 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001191
1192 // Smooth
1193 xd->plane[0].dst.buf = dst_buf[0];
1194 xd->plane[0].dst.stride = dst_stride[0];
Yaowu Xuf883b422016-08-30 14:01:10 -07001195 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001196 xd, dst_buf[0], dst_stride[0], dst_buf1[0], dst_stride1[0], mi_row,
1197 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
1198 0);
1199 } else {
Yue Chen8e689e42017-06-02 10:56:10 -07001200 for (i = 0; i < MAX_MB_PLANE; i++) {
1201#if CONFIG_CB4X4
1202 const struct macroblockd_plane *pd = &xd->plane[i];
1203 int handle_chroma_sub8x8 = need_handle_chroma_sub8x8(
1204 subsize, pd->subsampling_x, pd->subsampling_y);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001205
Yue Chen8e689e42017-06-02 10:56:10 -07001206 if (handle_chroma_sub8x8) {
1207 int mode_offset_col = CONFIG_CHROMA_SUB8X8 ? hbs : 0;
1208 assert(i > 0 && bsize == BLOCK_8X8);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001209
Yue Chen8e689e42017-06-02 10:56:10 -07001210 dec_predict_b_extend(pbi, xd, tile, 0, mi_row,
1211 mi_col + mode_offset_col, mi_row, mi_col,
1212 mi_row_top, mi_col_top, i, dst_buf[i],
1213 dst_stride[i], top_bsize, bsize, 0, 0);
1214 if (bsize < top_bsize)
1215 dec_extend_all(pbi, xd, tile, 0, bsize, top_bsize, mi_row,
1216 mi_col + mode_offset_col, mi_row, mi_col,
1217 mi_row_top, mi_col_top, i, dst_buf[i],
1218 dst_stride[i]);
1219 } else {
1220#endif
1221 // First half
1222 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row,
1223 mi_col, mi_row_top, mi_col_top, i, dst_buf[i],
1224 dst_stride[i], top_bsize, subsize, 0, 0);
1225 if (bsize < top_bsize)
1226 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1227 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1228 dst_buf[i], dst_stride[i]);
1229 else
1230 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1231 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1232 dst_buf[i], dst_stride[i], 3);
1233
1234 // Second half
1235 if (mi_col + hbs < cm->mi_cols) {
1236 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs,
1237 mi_row, mi_col + hbs, mi_row_top, mi_col_top,
1238 i, dst_buf1[i], dst_stride1[i], top_bsize,
1239 subsize, 0, 0);
1240 if (bsize < top_bsize)
1241 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1242 mi_col + hbs, mi_row, mi_col + hbs, mi_row_top,
1243 mi_col_top, i, dst_buf1[i], dst_stride1[i]);
1244 else
1245 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1246 mi_col + hbs, mi_row, mi_col + hbs, mi_row_top,
1247 mi_col_top, i, dst_buf1[i], dst_stride1[i], 2);
1248
1249 // Smooth
1250 xd->plane[i].dst.buf = dst_buf[i];
1251 xd->plane[i].dst.stride = dst_stride[i];
1252 av1_build_masked_inter_predictor_complex(
1253 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i],
1254 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1255 PARTITION_VERT, i);
1256 }
1257#if CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07001258 }
Yue Chen8e689e42017-06-02 10:56:10 -07001259#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001260 }
1261 }
1262 break;
1263 case PARTITION_SPLIT:
Jingning Hanfeb517c2016-12-21 16:02:07 -08001264 if (bsize == BLOCK_8X8 && !unify_bsize) {
Yue Chen8e689e42017-06-02 10:56:10 -07001265 for (i = 0; i < MAX_MB_PLANE; i++) {
1266 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1267 mi_row_top, mi_col_top, i, dst_buf[i],
1268 dst_stride[i], top_bsize, BLOCK_8X8, 1, 0);
1269 dec_predict_b_extend(pbi, xd, tile, 1, mi_row, mi_col, mi_row, mi_col,
1270 mi_row_top, mi_col_top, i, dst_buf1[i],
1271 dst_stride1[i], top_bsize, BLOCK_8X8, 1, 1);
1272 dec_predict_b_extend(pbi, xd, tile, 2, mi_row, mi_col, mi_row, mi_col,
1273 mi_row_top, mi_col_top, i, dst_buf2[i],
1274 dst_stride2[i], top_bsize, BLOCK_8X8, 1, 1);
1275 dec_predict_b_extend(pbi, xd, tile, 3, mi_row, mi_col, mi_row, mi_col,
1276 mi_row_top, mi_col_top, i, dst_buf3[i],
1277 dst_stride3[i], top_bsize, BLOCK_8X8, 1, 1);
1278 if (bsize < top_bsize) {
1279 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1280 mi_row, mi_col, mi_row_top, mi_col_top, i,
1281 dst_buf[i], dst_stride[i]);
1282 dec_extend_all(pbi, xd, tile, 1, subsize, top_bsize, mi_row, mi_col,
1283 mi_row, mi_col, mi_row_top, mi_col_top, i,
1284 dst_buf1[i], dst_stride1[i]);
1285 dec_extend_all(pbi, xd, tile, 2, subsize, top_bsize, mi_row, mi_col,
1286 mi_row, mi_col, mi_row_top, mi_col_top, i,
1287 dst_buf2[i], dst_stride2[i]);
1288 dec_extend_all(pbi, xd, tile, 3, subsize, top_bsize, mi_row, mi_col,
1289 mi_row, mi_col, mi_row_top, mi_col_top, i,
1290 dst_buf3[i], dst_stride3[i]);
1291 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001292 }
Yue Chen8e689e42017-06-02 10:56:10 -07001293#if CONFIG_CB4X4
1294 } else if (bsize == BLOCK_8X8) {
1295 for (i = 0; i < MAX_MB_PLANE; i++) {
1296 const struct macroblockd_plane *pd = &xd->plane[i];
1297 int handle_chroma_sub8x8 = need_handle_chroma_sub8x8(
1298 subsize, pd->subsampling_x, pd->subsampling_y);
1299
1300 if (handle_chroma_sub8x8) {
1301 int mode_offset_row =
1302 CONFIG_CHROMA_SUB8X8 && mi_row + hbs < cm->mi_rows ? hbs : 0;
1303 int mode_offset_col =
1304 CONFIG_CHROMA_SUB8X8 && mi_col + hbs < cm->mi_cols ? hbs : 0;
1305
1306 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + mode_offset_row,
1307 mi_col + mode_offset_col, mi_row, mi_col,
1308 mi_row_top, mi_col_top, i, dst_buf[i],
1309 dst_stride[i], top_bsize, BLOCK_8X8, 0, 0);
1310 if (bsize < top_bsize)
1311 dec_extend_all(pbi, xd, tile, 0, BLOCK_8X8, top_bsize,
1312 mi_row + mode_offset_row, mi_col + mode_offset_col,
1313 mi_row, mi_col, mi_row_top, mi_col_top, i,
1314 dst_buf[i], dst_stride[i]);
1315 } else {
1316 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row,
1317 mi_col, mi_row_top, mi_col_top, i, dst_buf[i],
1318 dst_stride[i], top_bsize, subsize, 0, 0);
1319 if (mi_row < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1320 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs,
1321 mi_row, mi_col + hbs, mi_row_top, mi_col_top,
1322 i, dst_buf1[i], dst_stride1[i], top_bsize,
1323 subsize, 0, 0);
1324 if (mi_row + hbs < cm->mi_rows && mi_col < cm->mi_cols)
1325 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col,
1326 mi_row + hbs, mi_col, mi_row_top, mi_col_top,
1327 i, dst_buf2[i], dst_stride2[i], top_bsize,
1328 subsize, 0, 0);
1329 if (mi_row + hbs < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1330 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col + hbs,
1331 mi_row + hbs, mi_col + hbs, mi_row_top,
1332 mi_col_top, i, dst_buf3[i], dst_stride3[i],
1333 top_bsize, subsize, 0, 0);
1334
1335 if (bsize < top_bsize) {
1336 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1337 mi_col, mi_row, mi_col, mi_row_top, mi_col_top, i,
1338 dst_buf[i], dst_stride[i]);
1339 if (mi_row < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1340 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1341 mi_col + hbs, mi_row, mi_col + hbs, mi_row_top,
1342 mi_col_top, i, dst_buf1[i], dst_stride1[i]);
1343 if (mi_row + hbs < cm->mi_rows && mi_col < cm->mi_cols)
1344 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize,
1345 mi_row + hbs, mi_col, mi_row + hbs, mi_col,
1346 mi_row_top, mi_col_top, i, dst_buf2[i],
1347 dst_stride2[i]);
1348 if (mi_row + hbs < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1349 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize,
1350 mi_row + hbs, mi_col + hbs, mi_row + hbs,
1351 mi_col + hbs, mi_row_top, mi_col_top, i,
1352 dst_buf3[i], dst_stride3[i]);
1353 }
1354 }
1355 }
1356#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001357 } else {
1358 dec_predict_sb_complex(pbi, xd, tile, mi_row, mi_col, mi_row_top,
1359 mi_col_top, subsize, top_bsize, dst_buf,
1360 dst_stride);
1361 if (mi_row < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1362 dec_predict_sb_complex(pbi, xd, tile, mi_row, mi_col + hbs,
1363 mi_row_top, mi_col_top, subsize, top_bsize,
1364 dst_buf1, dst_stride1);
1365 if (mi_row + hbs < cm->mi_rows && mi_col < cm->mi_cols)
1366 dec_predict_sb_complex(pbi, xd, tile, mi_row + hbs, mi_col,
1367 mi_row_top, mi_col_top, subsize, top_bsize,
1368 dst_buf2, dst_stride2);
1369 if (mi_row + hbs < cm->mi_rows && mi_col + hbs < cm->mi_cols)
1370 dec_predict_sb_complex(pbi, xd, tile, mi_row + hbs, mi_col + hbs,
1371 mi_row_top, mi_col_top, subsize, top_bsize,
1372 dst_buf3, dst_stride3);
1373 }
1374 for (i = 0; i < MAX_MB_PLANE; i++) {
Yue Chen8e689e42017-06-02 10:56:10 -07001375#if CONFIG_CB4X4
1376 const struct macroblockd_plane *pd = &xd->plane[i];
1377 int handle_chroma_sub8x8 = need_handle_chroma_sub8x8(
1378 subsize, pd->subsampling_x, pd->subsampling_y);
1379 if (handle_chroma_sub8x8) continue; // Skip <4x4 chroma smoothing
1380#else
Jingning Han24f24a52016-12-27 10:13:28 -08001381 if (bsize == BLOCK_8X8 && i != 0)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001382 continue; // Skip <4x4 chroma smoothing
Jingning Han9e0976a2016-12-27 17:52:42 -08001383#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001384 if (mi_row < cm->mi_rows && mi_col + hbs < cm->mi_cols) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001385 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001386 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i],
1387 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1388 PARTITION_VERT, i);
1389 if (mi_row + hbs < cm->mi_rows) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001390 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001391 xd, dst_buf2[i], dst_stride2[i], dst_buf3[i], dst_stride3[i],
1392 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1393 PARTITION_VERT, i);
Yaowu Xuf883b422016-08-30 14:01:10 -07001394 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001395 xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i],
1396 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1397 PARTITION_HORZ, i);
1398 }
1399 } else if (mi_row + hbs < cm->mi_rows && mi_col < cm->mi_cols) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001400 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001401 xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i],
1402 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1403 PARTITION_HORZ, i);
1404 }
1405 }
1406 break;
1407#if CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01001408#if CONFIG_EXT_PARTITION_TYPES_AB
1409#error HORZ/VERT_A/B partitions not yet updated in superres code
1410#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001411 case PARTITION_HORZ_A:
1412 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1413 mi_row_top, mi_col_top, dst_buf, dst_stride,
1414 top_bsize, bsize2, 0, 0);
1415 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col,
1416 mi_row_top, mi_col_top, dst_buf, dst_stride);
1417
1418 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs, mi_row,
1419 mi_col + hbs, mi_row_top, mi_col_top, dst_buf1,
1420 dst_stride1, top_bsize, bsize2, 0, 0);
1421 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col + hbs,
1422 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1423
1424 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col, mi_row + hbs,
1425 mi_col, mi_row_top, mi_col_top, dst_buf2,
1426 dst_stride2, top_bsize, subsize, 0, 0);
1427 if (bsize < top_bsize)
1428 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row + hbs,
1429 mi_col, mi_row_top, mi_col_top, dst_buf2, dst_stride2);
1430 else
1431 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row + hbs,
1432 mi_col, mi_row_top, mi_col_top, dst_buf2, dst_stride2,
1433 1);
1434
1435 for (i = 0; i < MAX_MB_PLANE; i++) {
1436 xd->plane[i].dst.buf = dst_buf[i];
1437 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001438 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001439 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1440 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
1441 i);
1442 }
1443 for (i = 0; i < MAX_MB_PLANE; i++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001444 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001445 xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i], mi_row,
1446 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
1447 i);
1448 }
1449 break;
1450 case PARTITION_VERT_A:
1451
1452 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1453 mi_row_top, mi_col_top, dst_buf, dst_stride,
1454 top_bsize, bsize2, 0, 0);
1455 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col,
1456 mi_row_top, mi_col_top, dst_buf, dst_stride);
1457
1458 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col, mi_row + hbs,
1459 mi_col, mi_row_top, mi_col_top, dst_buf1,
1460 dst_stride1, top_bsize, bsize2, 0, 0);
1461 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs, mi_col,
1462 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1463
1464 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs, mi_row,
1465 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1466 dst_stride2, top_bsize, subsize, 0, 0);
1467 if (bsize < top_bsize)
1468 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1469 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1470 dst_stride2);
1471 else
1472 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row,
1473 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1474 dst_stride2, 2);
1475
1476 for (i = 0; i < MAX_MB_PLANE; i++) {
1477 xd->plane[i].dst.buf = dst_buf[i];
1478 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001479 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001480 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1481 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
1482 i);
1483 }
1484 for (i = 0; i < MAX_MB_PLANE; i++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001485 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001486 xd, dst_buf[i], dst_stride[i], dst_buf2[i], dst_stride2[i], mi_row,
1487 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
1488 i);
1489 }
1490 break;
1491 case PARTITION_HORZ_B:
1492 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1493 mi_row_top, mi_col_top, dst_buf, dst_stride,
1494 top_bsize, subsize, 0, 0);
1495 if (bsize < top_bsize)
1496 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1497 mi_row_top, mi_col_top, dst_buf, dst_stride);
1498 else
1499 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1500 mi_row_top, mi_col_top, dst_buf, dst_stride, 0);
1501
1502 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col, mi_row + hbs,
1503 mi_col, mi_row_top, mi_col_top, dst_buf1,
1504 dst_stride1, top_bsize, bsize2, 0, 0);
1505 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs, mi_col,
1506 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1507
1508 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col + hbs,
1509 mi_row + hbs, mi_col + hbs, mi_row_top, mi_col_top,
1510 dst_buf2, dst_stride2, top_bsize, bsize2, 0, 0);
1511 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs,
1512 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1513 dst_stride2);
1514
1515 for (i = 0; i < MAX_MB_PLANE; i++) {
1516 xd->plane[i].dst.buf = dst_buf1[i];
1517 xd->plane[i].dst.stride = dst_stride1[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001518 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001519 xd, dst_buf1[i], dst_stride1[i], dst_buf2[i], dst_stride2[i],
1520 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1521 PARTITION_VERT, i);
1522 }
1523 for (i = 0; i < MAX_MB_PLANE; i++) {
1524 xd->plane[i].dst.buf = dst_buf[i];
1525 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001526 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001527 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1528 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_HORZ,
1529 i);
1530 }
1531 break;
1532 case PARTITION_VERT_B:
1533 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col, mi_row, mi_col,
1534 mi_row_top, mi_col_top, dst_buf, dst_stride,
1535 top_bsize, subsize, 0, 0);
1536 if (bsize < top_bsize)
1537 dec_extend_all(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1538 mi_row_top, mi_col_top, dst_buf, dst_stride);
1539 else
1540 dec_extend_dir(pbi, xd, tile, 0, subsize, top_bsize, mi_row, mi_col,
1541 mi_row_top, mi_col_top, dst_buf, dst_stride, 3);
1542
1543 dec_predict_b_extend(pbi, xd, tile, 0, mi_row, mi_col + hbs, mi_row,
1544 mi_col + hbs, mi_row_top, mi_col_top, dst_buf1,
1545 dst_stride1, top_bsize, bsize2, 0, 0);
1546 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row, mi_col + hbs,
1547 mi_row_top, mi_col_top, dst_buf1, dst_stride1);
1548
1549 dec_predict_b_extend(pbi, xd, tile, 0, mi_row + hbs, mi_col + hbs,
1550 mi_row + hbs, mi_col + hbs, mi_row_top, mi_col_top,
1551 dst_buf2, dst_stride2, top_bsize, bsize2, 0, 0);
1552 dec_extend_all(pbi, xd, tile, 0, bsize2, top_bsize, mi_row + hbs,
1553 mi_col + hbs, mi_row_top, mi_col_top, dst_buf2,
1554 dst_stride2);
1555
1556 for (i = 0; i < MAX_MB_PLANE; i++) {
1557 xd->plane[i].dst.buf = dst_buf1[i];
1558 xd->plane[i].dst.stride = dst_stride1[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001559 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001560 xd, dst_buf1[i], dst_stride1[i], dst_buf2[i], dst_stride2[i],
1561 mi_row, mi_col, mi_row_top, mi_col_top, bsize, top_bsize,
1562 PARTITION_HORZ, i);
1563 }
1564 for (i = 0; i < MAX_MB_PLANE; i++) {
1565 xd->plane[i].dst.buf = dst_buf[i];
1566 xd->plane[i].dst.stride = dst_stride[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07001567 av1_build_masked_inter_predictor_complex(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001568 xd, dst_buf[i], dst_stride[i], dst_buf1[i], dst_stride1[i], mi_row,
1569 mi_col, mi_row_top, mi_col_top, bsize, top_bsize, PARTITION_VERT,
1570 i);
1571 }
1572 break;
1573#endif // CONFIG_EXT_PARTITION_TYPES
1574 default: assert(0);
1575 }
1576}
1577
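// Propagate a single segment id to every mi covered by the supertx block:
// the minimum segment_id inside the block (or 0 if segmentation is disabled)
// is written to each mi's segment_id_supertx.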
Yaowu Xu4ff59b52017-04-24 12:41:56 -07001578static void set_segment_id_supertx(const AV1_COMMON *const cm, int mi_row,
1579 int mi_col, BLOCK_SIZE bsize) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001580 const struct segmentation *seg = &cm->seg;
Jingning Han5b7706a2016-12-21 09:55:10 -08001581 const int miw = AOMMIN(mi_size_wide[bsize], cm->mi_cols - mi_col);
1582 const int mih = AOMMIN(mi_size_high[bsize], cm->mi_rows - mi_row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001583 const int mi_offset = mi_row * cm->mi_stride + mi_col;
1584 MODE_INFO **const mip = cm->mi_grid_visible + mi_offset;
1585 int r, c;
1586 int seg_id_supertx = MAX_SEGMENTS;
1587
1588 if (!seg->enabled) {
1589 seg_id_supertx = 0;
1590 } else {
1591 // Find the minimum segment_id
1592 for (r = 0; r < mih; r++)
1593 for (c = 0; c < miw; c++)
1594 seg_id_supertx =
Yaowu Xuf883b422016-08-30 14:01:10 -07001595 AOMMIN(mip[r * cm->mi_stride + c]->mbmi.segment_id, seg_id_supertx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001596 assert(0 <= seg_id_supertx && seg_id_supertx < MAX_SEGMENTS);
1597 }
1598
1599  // Assign the segment_id back to segment_id_supertx
1600 for (r = 0; r < mih; r++)
1601 for (c = 0; c < miw; c++)
1602 mip[r * cm->mi_stride + c]->mbmi.segment_id_supertx = seg_id_supertx;
1603}
1604#endif // CONFIG_SUPERTX
1605
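// Read the mode info (MB_MODE_INFO) for one block. Token decoding and
// reconstruction are done separately in decode_token_and_recon_block().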
Yue Chen64550b62017-01-12 12:18:22 -08001606static void decode_mbmi_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001607#if CONFIG_SUPERTX
Yue Chen64550b62017-01-12 12:18:22 -08001608 int supertx_enabled,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001609#endif // CONFIG_SUPERTX
Yue Chen64550b62017-01-12 12:18:22 -08001610 int mi_row, int mi_col, aom_reader *r,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001611#if CONFIG_EXT_PARTITION_TYPES
Yue Chen64550b62017-01-12 12:18:22 -08001612 PARTITION_TYPE partition,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001613#endif // CONFIG_EXT_PARTITION_TYPES
Yue Chen64550b62017-01-12 12:18:22 -08001614 BLOCK_SIZE bsize) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001615 AV1_COMMON *const cm = &pbi->common;
Jingning Han85dc03f2016-12-06 16:03:10 -08001616 const int bw = mi_size_wide[bsize];
1617 const int bh = mi_size_high[bsize];
Yaowu Xuf883b422016-08-30 14:01:10 -07001618 const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
1619 const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);
Nathan E. Eggeebbd4792016-10-05 19:30:15 -04001620
Michael Bebenita6048d052016-08-25 14:40:54 -07001621#if CONFIG_ACCOUNTING
1622 aom_accounting_set_context(&pbi->accounting, mi_col, mi_row);
1623#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001624#if CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07001625 if (supertx_enabled) {
Yue Chen64550b62017-01-12 12:18:22 -08001626 set_mb_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001627 } else {
Yue Chen64550b62017-01-12 12:18:22 -08001628 set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001629 }
1630#if CONFIG_EXT_PARTITION_TYPES
1631 xd->mi[0]->mbmi.partition = partition;
1632#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001633 av1_read_mode_info(pbi, xd, supertx_enabled, mi_row, mi_col, r, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001634#else
Yue Chen64550b62017-01-12 12:18:22 -08001635 set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001636#if CONFIG_EXT_PARTITION_TYPES
1637 xd->mi[0]->mbmi.partition = partition;
1638#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001639 av1_read_mode_info(pbi, xd, mi_row, mi_col, r, x_mis, y_mis);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001640#endif // CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07001641 if (bsize >= BLOCK_8X8 && (cm->subsampling_x || cm->subsampling_y)) {
1642 const BLOCK_SIZE uv_subsize =
1643 ss_size_lookup[bsize][cm->subsampling_x][cm->subsampling_y];
1644 if (uv_subsize == BLOCK_INVALID)
Yaowu Xuf883b422016-08-30 14:01:10 -07001645 aom_internal_error(xd->error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001646 "Invalid block size.");
1647 }
1648
1649#if CONFIG_SUPERTX
Yue Chen64550b62017-01-12 12:18:22 -08001650 xd->mi[0]->mbmi.segment_id_supertx = MAX_SEGMENTS;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001651#endif // CONFIG_SUPERTX
1652
Angie Chiangd0916d92017-03-10 17:54:18 -08001653 int reader_corrupted_flag = aom_reader_has_error(r);
1654 aom_merge_corrupted_flag(&xd->corrupted, reader_corrupted_flag);
Yue Chen64550b62017-01-12 12:18:22 -08001655}
1656
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07001657#if CONFIG_NCOBMC_ADAPT_WEIGHT
1658static void set_mode_info_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
1659 int mi_row, int mi_col) {
1660 const int offset = mi_row * cm->mi_stride + mi_col;
1661 xd->mi = cm->mi_grid_visible + offset;
1662 xd->mi[0] = &cm->mi[offset];
1663}
1664
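// Build the extended-neighbor predictions and the original-block prediction
// into cm->ncobmcaw_buf, then combine them per plane with
// build_ncobmc_intrpl_pred() according to the given NCOBMC mode.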
1665static void get_ncobmc_recon(AV1_COMMON *const cm, MACROBLOCKD *xd, int mi_row,
1666 int mi_col, int bsize, int mode) {
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07001667 uint8_t *pred_buf[4][MAX_MB_PLANE];
1668 int pred_stride[MAX_MB_PLANE] = { MAX_SB_SIZE, MAX_SB_SIZE, MAX_SB_SIZE };
1669  // target block position in pixels
1670 int pxl_row = mi_row << MI_SIZE_LOG2;
1671 int pxl_col = mi_col << MI_SIZE_LOG2;
1672
1673 int plane;
1674#if CONFIG_HIGHBITDEPTH
1675 if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
1676 int len = sizeof(uint16_t);
Debargha Mukherjee5d108a32017-10-05 19:47:08 -07001677 ASSIGN_ALIGNED_PTRS_HBD(pred_buf[0], cm->ncobmcaw_buf[0], MAX_SB_SQUARE,
1678 len);
1679 ASSIGN_ALIGNED_PTRS_HBD(pred_buf[1], cm->ncobmcaw_buf[1], MAX_SB_SQUARE,
1680 len);
1681 ASSIGN_ALIGNED_PTRS_HBD(pred_buf[2], cm->ncobmcaw_buf[2], MAX_SB_SQUARE,
1682 len);
1683 ASSIGN_ALIGNED_PTRS_HBD(pred_buf[3], cm->ncobmcaw_buf[3], MAX_SB_SQUARE,
1684 len);
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07001685 } else {
1686#endif // CONFIG_HIGHBITDEPTH
Debargha Mukherjee5d108a32017-10-05 19:47:08 -07001687 ASSIGN_ALIGNED_PTRS(pred_buf[0], cm->ncobmcaw_buf[0], MAX_SB_SQUARE);
1688 ASSIGN_ALIGNED_PTRS(pred_buf[1], cm->ncobmcaw_buf[1], MAX_SB_SQUARE);
1689 ASSIGN_ALIGNED_PTRS(pred_buf[2], cm->ncobmcaw_buf[2], MAX_SB_SQUARE);
1690 ASSIGN_ALIGNED_PTRS(pred_buf[3], cm->ncobmcaw_buf[3], MAX_SB_SQUARE);
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07001691#if CONFIG_HIGHBITDEPTH
1692 }
1693#endif
1694 av1_get_ext_blk_preds(cm, xd, bsize, mi_row, mi_col, pred_buf, pred_stride);
1695 av1_get_ori_blk_pred(cm, xd, bsize, mi_row, mi_col, pred_buf[3], pred_stride);
1696 for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
1697 build_ncobmc_intrpl_pred(cm, xd, plane, pxl_row, pxl_col, bsize, pred_buf,
1698 pred_stride, mode);
1699 }
1700}
1701
1702static void av1_get_ncobmc_recon(AV1_COMMON *const cm, MACROBLOCKD *const xd,
1703 int bsize, const int mi_row, const int mi_col,
1704 const NCOBMC_MODE modes) {
1705 const int mi_width = mi_size_wide[bsize];
1706 const int mi_height = mi_size_high[bsize];
1707
1708 assert(bsize >= BLOCK_8X8);
1709
1710 reset_xd_boundary(xd, mi_row, mi_height, mi_col, mi_width, cm->mi_rows,
1711 cm->mi_cols);
1712 get_ncobmc_recon(cm, xd, mi_row, mi_col, bsize, modes);
1713}
1714
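// Reconstruct the NCOBMC_ADAPT_WEIGHT interpolated prediction. Rectangular
// blocks are processed as two square halves, each using its own ncobmc_mode;
// the mode info pointer and destination planes are restored afterwards.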
1715static void recon_ncobmc_intrpl_pred(AV1_COMMON *const cm,
1716 MACROBLOCKD *const xd, int mi_row,
1717 int mi_col, BLOCK_SIZE bsize) {
1718 MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
1719 const int mi_width = mi_size_wide[bsize];
1720 const int mi_height = mi_size_high[bsize];
1721 const int hbs = AOMMAX(mi_size_wide[bsize] / 2, mi_size_high[bsize] / 2);
1722 const BLOCK_SIZE sqr_blk = bsize_2_sqr_bsize[bsize];
1723 if (mi_width > mi_height) {
1724 // horizontal partition
1725 av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
1726 xd->mi += hbs;
1727 av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col + hbs,
1728 mbmi->ncobmc_mode[1]);
1729 } else if (mi_height > mi_width) {
1730 // vertical partition
1731 av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
1732 xd->mi += hbs * xd->mi_stride;
1733 av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row + hbs, mi_col,
1734 mbmi->ncobmc_mode[1]);
1735 } else {
1736 av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
1737 }
1738 set_mode_info_offsets(cm, xd, mi_row, mi_col);
1739 // restore dst buffer and mode info
1740 av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
1741 mi_col);
1742}
1743#endif // CONFIG_NCOBMC_ADAPT_WEIGHT
1744
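// Decode the coefficients and reconstruct one block: set up the per-segment
// dequantizers when delta-Q is present, build the intra or inter prediction,
// then read and add the residual for every plane.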
Yue Chen64550b62017-01-12 12:18:22 -08001745static void decode_token_and_recon_block(AV1Decoder *const pbi,
1746 MACROBLOCKD *const xd, int mi_row,
1747 int mi_col, aom_reader *r,
1748 BLOCK_SIZE bsize) {
1749 AV1_COMMON *const cm = &pbi->common;
1750 const int bw = mi_size_wide[bsize];
1751 const int bh = mi_size_high[bsize];
1752 const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
1753 const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);
Yue Chen64550b62017-01-12 12:18:22 -08001754
Angie Chiang44701f22017-02-27 10:36:44 -08001755 set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
1756 MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
Luc Trudeaub05eeae2017-08-18 15:14:30 -04001757#if CONFIG_CFL && CONFIG_CHROMA_SUB8X8
1758 CFL_CTX *const cfl = xd->cfl;
1759 cfl->is_chroma_reference = is_chroma_reference(
1760 mi_row, mi_col, bsize, cfl->subsampling_x, cfl->subsampling_y);
1761#endif // CONFIG_CFL && CONFIG_CHROMA_SUB8X8
Yue Chen19e7aa82016-11-30 14:05:39 -08001762
Arild Fuldseth07441162016-08-15 15:07:52 +02001763 if (cm->delta_q_present_flag) {
1764 int i;
1765 for (i = 0; i < MAX_SEGMENTS; i++) {
Fangwen Fu6160df22017-04-24 09:45:51 -07001766#if CONFIG_EXT_DELTA_Q
Cheng Chen49d30e62017-08-28 20:59:27 -07001767 const int current_qindex =
1768 av1_get_qindex(&cm->seg, i, xd->current_qindex);
Fangwen Fu6160df22017-04-24 09:45:51 -07001769#else
Cheng Chen49d30e62017-08-28 20:59:27 -07001770 const int current_qindex = xd->current_qindex;
1771#endif // CONFIG_EXT_DELTA_Q
1772 int j;
1773 for (j = 0; j < MAX_MB_PLANE; ++j) {
1774 const int dc_delta_q = j == 0 ? cm->y_dc_delta_q : cm->uv_dc_delta_q;
1775 const int ac_delta_q = j == 0 ? 0 : cm->uv_ac_delta_q;
1776
1777 xd->plane[j].seg_dequant[i][0] =
1778 av1_dc_quant(current_qindex, dc_delta_q, cm->bit_depth);
1779 xd->plane[j].seg_dequant[i][1] =
1780 av1_ac_quant(current_qindex, ac_delta_q, cm->bit_depth);
1781 }
Arild Fuldseth07441162016-08-15 15:07:52 +02001782 }
1783 }
Arild Fuldseth07441162016-08-15 15:07:52 +02001784
Jingning Han41bb3392016-12-14 10:46:48 -08001785#if CONFIG_CB4X4
Timothy B. Terriberrya2d5cde2017-05-10 18:33:50 -07001786 if (mbmi->skip) av1_reset_skip_context(xd, mi_row, mi_col, bsize);
Jingning Han41bb3392016-12-14 10:46:48 -08001787#else
Timothy B. Terriberrya2d5cde2017-05-10 18:33:50 -07001788 if (mbmi->skip) {
1789 av1_reset_skip_context(xd, mi_row, mi_col, AOMMAX(BLOCK_8X8, bsize));
1790 }
Jingning Han41bb3392016-12-14 10:46:48 -08001791#endif
Jingning Hand39cc722016-12-02 14:03:26 -08001792
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001793#if CONFIG_COEF_INTERLEAVE
1794 {
1795 const struct macroblockd_plane *const pd_y = &xd->plane[0];
1796 const struct macroblockd_plane *const pd_c = &xd->plane[1];
1797 const TX_SIZE tx_log2_y = mbmi->tx_size;
hui su0c6244b2017-07-12 17:11:43 -07001798 const TX_SIZE tx_log2_c = av1_get_uv_tx_size(mbmi, pd_c);
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001799 const int tx_sz_y = (1 << tx_log2_y);
1800 const int tx_sz_c = (1 << tx_log2_c);
1801 const int num_4x4_w_y = pd_y->n4_w;
1802 const int num_4x4_h_y = pd_y->n4_h;
1803 const int num_4x4_w_c = pd_c->n4_w;
1804 const int num_4x4_h_c = pd_c->n4_h;
1805 const int max_4x4_w_y = get_max_4x4_size(num_4x4_w_y, xd->mb_to_right_edge,
1806 pd_y->subsampling_x);
1807 const int max_4x4_h_y = get_max_4x4_size(num_4x4_h_y, xd->mb_to_bottom_edge,
1808 pd_y->subsampling_y);
1809 const int max_4x4_w_c = get_max_4x4_size(num_4x4_w_c, xd->mb_to_right_edge,
1810 pd_c->subsampling_x);
1811 const int max_4x4_h_c = get_max_4x4_size(num_4x4_h_c, xd->mb_to_bottom_edge,
1812 pd_c->subsampling_y);
1813
1814 // The max_4x4_w/h may be smaller than tx_sz under some corner cases,
1815    // i.e. when the SB is split by tile boundaries.
1816 const int tu_num_w_y = (max_4x4_w_y + tx_sz_y - 1) / tx_sz_y;
1817 const int tu_num_h_y = (max_4x4_h_y + tx_sz_y - 1) / tx_sz_y;
1818 const int tu_num_w_c = (max_4x4_w_c + tx_sz_c - 1) / tx_sz_c;
1819 const int tu_num_h_c = (max_4x4_h_c + tx_sz_c - 1) / tx_sz_c;
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001820 const int tu_num_c = tu_num_w_c * tu_num_h_c;
1821
1822 if (!is_inter_block(mbmi)) {
1823 int tu_idx_c = 0;
1824 int row_y, col_y, row_c, col_c;
1825 int plane;
1826
Yushin Choa8810392017-09-06 15:16:14 -07001827// TODO(anybody): remove this flag when PVQ supports the palette coding tool
1828#if !CONFIG_PVQ
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001829 for (plane = 0; plane <= 1; ++plane) {
1830 if (mbmi->palette_mode_info.palette_size[plane])
1831 av1_decode_palette_tokens(xd, plane, r);
1832 }
Yushin Choa8810392017-09-06 15:16:14 -07001833#endif // !CONFIG_PVQ
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001834
1835 for (row_y = 0; row_y < tu_num_h_y; row_y++) {
1836 for (col_y = 0; col_y < tu_num_w_y; col_y++) {
1837 // luma
1838 predict_and_reconstruct_intra_block(
1839 cm, xd, r, mbmi, 0, row_y * tx_sz_y, col_y * tx_sz_y, tx_log2_y);
1840 // chroma
1841 if (tu_idx_c < tu_num_c) {
1842 row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
1843 col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
1844 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 1, row_c,
1845 col_c, tx_log2_c);
1846 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 2, row_c,
1847 col_c, tx_log2_c);
1848 tu_idx_c++;
1849 }
1850 }
1851 }
1852
1853      // In the 422 case, it's possible that Chroma has more TUs than Luma
1854 while (tu_idx_c < tu_num_c) {
1855 row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
1856 col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
1857 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 1, row_c, col_c,
1858 tx_log2_c);
1859 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 2, row_c, col_c,
1860 tx_log2_c);
1861 tu_idx_c++;
1862 }
1863 } else {
1864 // Prediction
Jingning Hanc44009c2017-05-06 11:36:49 -07001865 av1_build_inter_predictors_sb(cm, xd, mi_row, mi_col, NULL,
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001866 AOMMAX(bsize, BLOCK_8X8));
1867
1868 // Reconstruction
1869 if (!mbmi->skip) {
1870 int eobtotal = 0;
1871 int tu_idx_c = 0;
1872 int row_y, col_y, row_c, col_c;
1873
1874 for (row_y = 0; row_y < tu_num_h_y; row_y++) {
1875 for (col_y = 0; col_y < tu_num_w_y; col_y++) {
1876 // luma
1877 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id, 0,
1878 row_y * tx_sz_y,
1879 col_y * tx_sz_y, tx_log2_y);
1880 // chroma
1881 if (tu_idx_c < tu_num_c) {
1882 row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
1883 col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
1884 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id,
1885 1, row_c, col_c, tx_log2_c);
1886 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id,
1887 2, row_c, col_c, tx_log2_c);
1888 tu_idx_c++;
1889 }
1890 }
1891 }
1892
1893        // In the 422 case, it's possible that Chroma has more TUs than Luma
1894 while (tu_idx_c < tu_num_c) {
1895 row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
1896 col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
1897 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id, 1,
1898 row_c, col_c, tx_log2_c);
1899 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id, 2,
1900 row_c, col_c, tx_log2_c);
1901 tu_idx_c++;
1902 }
1903
Alex Converse64d7ef62017-03-22 18:09:16 -07001904 // TODO(CONFIG_COEF_INTERLEAVE owners): bring eob == 0 corner case
1905        // into line with the default configuration
1906 if (bsize >= BLOCK_8X8 && eobtotal == 0) mbmi->skip = 1;
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001907 }
1908 }
1909 }
Angie Chiang133733c2017-03-17 12:50:20 -07001910#else // CONFIG_COEF_INTERLEAVE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001911 if (!is_inter_block(mbmi)) {
1912 int plane;
Yushin Choa8810392017-09-06 15:16:14 -07001913
1914// TODO(anybody): remove this flag when PVQ supports the palette coding tool
1915#if !CONFIG_PVQ
Yaowu Xuc27fc142016-08-22 16:08:15 -07001916 for (plane = 0; plane <= 1; ++plane) {
1917 if (mbmi->palette_mode_info.palette_size[plane])
Yaowu Xuf883b422016-08-30 14:01:10 -07001918 av1_decode_palette_tokens(xd, plane, r);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001919 }
Yushin Choa8810392017-09-06 15:16:14 -07001920#endif // #if !CONFIG_PVQ
1921
Yaowu Xuc27fc142016-08-22 16:08:15 -07001922 for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
1923 const struct macroblockd_plane *const pd = &xd->plane[plane];
hui su0c6244b2017-07-12 17:11:43 -07001924 const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
Jingning Han2d64f122016-10-21 12:44:29 -07001925 const int stepr = tx_size_high_unit[tx_size];
1926 const int stepc = tx_size_wide_unit[tx_size];
Timothy B. Terriberry81ec2612017-04-26 16:53:47 -07001927#if CONFIG_CHROMA_SUB8X8
Jingning Hanc20dc8e2017-02-17 15:37:28 -08001928 const BLOCK_SIZE plane_bsize =
1929 AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
Timothy B. Terriberry81ec2612017-04-26 16:53:47 -07001930#elif CONFIG_CB4X4
1931 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
Jingning Han41bb3392016-12-14 10:46:48 -08001932#else
Jingning Hanbafee8d2016-12-02 10:25:03 -08001933 const BLOCK_SIZE plane_bsize =
1934 get_plane_block_size(AOMMAX(BLOCK_8X8, bsize), pd);
Jingning Han41bb3392016-12-14 10:46:48 -08001935#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001936 int row, col;
Jingning Hanbafee8d2016-12-02 10:25:03 -08001937 const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
1938 const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
Jingning Hanc20dc8e2017-02-17 15:37:28 -08001939#if CONFIG_CB4X4
Jingning Hand3a64432017-04-06 17:04:17 -07001940 if (!is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
1941 pd->subsampling_y))
Jingning Hanc20dc8e2017-02-17 15:37:28 -08001942 continue;
1943#endif
Jingning Han5b701742017-07-19 14:39:07 -07001944 int blk_row, blk_col;
1945 const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
1946 int mu_blocks_wide =
1947 block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
1948 int mu_blocks_high =
1949 block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
1950 mu_blocks_wide = AOMMIN(max_blocks_wide, mu_blocks_wide);
1951 mu_blocks_high = AOMMIN(max_blocks_high, mu_blocks_high);
Jingning Hanc20dc8e2017-02-17 15:37:28 -08001952
Jingning Han5b701742017-07-19 14:39:07 -07001953 for (row = 0; row < max_blocks_high; row += mu_blocks_high) {
Luc Trudeauda9397a2017-07-21 12:00:22 -04001954 const int unit_height = AOMMIN(mu_blocks_high + row, max_blocks_high);
Jingning Han5b701742017-07-19 14:39:07 -07001955 for (col = 0; col < max_blocks_wide; col += mu_blocks_wide) {
Jingning Han5b701742017-07-19 14:39:07 -07001956 const int unit_width = AOMMIN(mu_blocks_wide + col, max_blocks_wide);
1957
1958 for (blk_row = row; blk_row < unit_height; blk_row += stepr)
1959 for (blk_col = col; blk_col < unit_width; blk_col += stepc)
1960 predict_and_reconstruct_intra_block(cm, xd, r, mbmi, plane,
1961 blk_row, blk_col, tx_size);
1962 }
1963 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001964 }
1965 } else {
Yue Chen9ab6d712017-01-12 15:50:46 -08001966 int ref;
1967
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02001968#if CONFIG_COMPOUND_SINGLEREF
Yushin Cho127c5832017-07-28 16:39:04 -07001969 for (ref = 0; ref < 1 + is_inter_anyref_comp_mode(mbmi->mode); ++ref)
1970#else
1971 for (ref = 0; ref < 1 + has_second_ref(mbmi); ++ref)
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02001972#endif // CONFIG_COMPOUND_SINGLEREF
Yushin Cho127c5832017-07-28 16:39:04 -07001973 {
Zoe Liu85b66462017-04-20 14:28:19 -07001974 const MV_REFERENCE_FRAME frame =
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02001975#if CONFIG_COMPOUND_SINGLEREF
Zoe Liu85b66462017-04-20 14:28:19 -07001976 has_second_ref(mbmi) ? mbmi->ref_frame[ref] : mbmi->ref_frame[0];
1977#else
Yushin Cho127c5832017-07-28 16:39:04 -07001978 mbmi->ref_frame[ref];
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02001979#endif // CONFIG_COMPOUND_SINGLEREF
Alex Converse28744302017-04-13 14:46:22 -07001980 if (frame < LAST_FRAME) {
1981#if CONFIG_INTRABC
1982 assert(is_intrabc_block(mbmi));
1983 assert(frame == INTRA_FRAME);
1984 assert(ref == 0);
1985#else
1986 assert(0);
1987#endif // CONFIG_INTRABC
1988 } else {
1989 RefBuffer *ref_buf = &cm->frame_refs[frame - LAST_FRAME];
Yue Chen9ab6d712017-01-12 15:50:46 -08001990
Alex Converse28744302017-04-13 14:46:22 -07001991 xd->block_refs[ref] = ref_buf;
1992 if ((!av1_is_valid_scale(&ref_buf->sf)))
1993 aom_internal_error(xd->error_info, AOM_CODEC_UNSUP_BITSTREAM,
1994 "Reference frame has invalid dimensions");
1995 av1_setup_pre_planes(xd, ref, ref_buf->buf, mi_row, mi_col,
1996 &ref_buf->sf);
1997 }
Yue Chen9ab6d712017-01-12 15:50:46 -08001998 }
Yue Chen69f18e12016-09-08 14:48:15 -07001999
Jingning Han41bb3392016-12-14 10:46:48 -08002000#if CONFIG_CB4X4
Jingning Hanc44009c2017-05-06 11:36:49 -07002001 av1_build_inter_predictors_sb(cm, xd, mi_row, mi_col, NULL, bsize);
Jingning Han41bb3392016-12-14 10:46:48 -08002002#else
Jingning Hanc44009c2017-05-06 11:36:49 -07002003 av1_build_inter_predictors_sb(cm, xd, mi_row, mi_col, NULL,
Jingning Han41bb3392016-12-14 10:46:48 -08002004 AOMMAX(bsize, BLOCK_8X8));
2005#endif
Sarah Parker4c10a3c2017-04-10 19:37:59 -07002006
Yue Chencb60b182016-10-13 15:18:22 -07002007#if CONFIG_MOTION_VAR
2008 if (mbmi->motion_mode == OBMC_CAUSAL) {
Yue Chenf27b1602017-01-13 11:11:43 -08002009#if CONFIG_NCOBMC
2010 av1_build_ncobmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
2011#else
Yue Chen894fcce2016-10-21 16:50:52 -07002012 av1_build_obmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
Yue Chenf27b1602017-01-13 11:11:43 -08002013#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002014 }
Yue Chencb60b182016-10-13 15:18:22 -07002015#endif // CONFIG_MOTION_VAR
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07002016#if CONFIG_NCOBMC_ADAPT_WEIGHT
2017 if (mbmi->motion_mode == NCOBMC_ADAPT_WEIGHT) {
2018 int plane;
2019 recon_ncobmc_intrpl_pred(cm, xd, mi_row, mi_col, bsize);
2020 for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
2021 get_pred_from_intrpl_buf(xd, mi_row, mi_col, bsize, plane);
2022 }
2023 }
2024#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002025 // Reconstruction
2026 if (!mbmi->skip) {
2027 int eobtotal = 0;
2028 int plane;
2029
2030 for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
2031 const struct macroblockd_plane *const pd = &xd->plane[plane];
Timothy B. Terriberry81ec2612017-04-26 16:53:47 -07002032#if CONFIG_CHROMA_SUB8X8
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002033 const BLOCK_SIZE plane_bsize =
2034 AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
Timothy B. Terriberry81ec2612017-04-26 16:53:47 -07002035#elif CONFIG_CB4X4
2036 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
Jingning Han41bb3392016-12-14 10:46:48 -08002037#else
Yushin Cho127c5832017-07-28 16:39:04 -07002038 const BLOCK_SIZE plane_bsize =
2039 get_plane_block_size(AOMMAX(BLOCK_8X8, bsize), pd);
Jingning Han41bb3392016-12-14 10:46:48 -08002040#endif
Jingning Hanbafee8d2016-12-02 10:25:03 -08002041 const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
2042 const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002043 int row, col;
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002044
2045#if CONFIG_CB4X4
Jingning Hand3a64432017-04-06 17:04:17 -07002046 if (!is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
2047 pd->subsampling_y))
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002048 continue;
2049#endif
2050
Yaowu Xuc27fc142016-08-22 16:08:15 -07002051#if CONFIG_VAR_TX
Jingning Hanc2b797f2017-07-19 09:37:11 -07002052 const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
2053 int mu_blocks_wide =
2054 block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
2055 int mu_blocks_high =
2056 block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
2057
2058 mu_blocks_wide = AOMMIN(max_blocks_wide, mu_blocks_wide);
2059 mu_blocks_high = AOMMIN(max_blocks_high, mu_blocks_high);
2060
Rupert Swarbrick4e7b7d62017-09-28 17:30:44 +01002061 const TX_SIZE max_tx_size = get_vartx_max_txsize(
2062 mbmi, plane_bsize, pd->subsampling_x || pd->subsampling_y);
Jingning Hanf64062f2016-11-02 16:22:18 -07002063 const int bh_var_tx = tx_size_high_unit[max_tx_size];
2064 const int bw_var_tx = tx_size_wide_unit[max_tx_size];
Jingning Hana65f3052017-06-23 10:52:05 -07002065 int block = 0;
2066 int step =
2067 tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];
Jingning Hanc2b797f2017-07-19 09:37:11 -07002068
2069 for (row = 0; row < max_blocks_high; row += mu_blocks_high) {
2070 for (col = 0; col < max_blocks_wide; col += mu_blocks_wide) {
2071 int blk_row, blk_col;
2072 const int unit_height =
2073 AOMMIN(mu_blocks_high + row, max_blocks_high);
2074 const int unit_width =
2075 AOMMIN(mu_blocks_wide + col, max_blocks_wide);
2076 for (blk_row = row; blk_row < unit_height; blk_row += bh_var_tx) {
2077 for (blk_col = col; blk_col < unit_width; blk_col += bw_var_tx) {
2078 decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize,
2079 blk_row, blk_col, block, max_tx_size,
2080 &eobtotal);
2081 block += step;
2082 }
2083 }
Jingning Hana65f3052017-06-23 10:52:05 -07002084 }
2085 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002086#else
hui su0c6244b2017-07-12 17:11:43 -07002087 const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
Jingning Han2d64f122016-10-21 12:44:29 -07002088 const int stepr = tx_size_high_unit[tx_size];
2089 const int stepc = tx_size_wide_unit[tx_size];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002090 for (row = 0; row < max_blocks_high; row += stepr)
2091 for (col = 0; col < max_blocks_wide; col += stepc)
Angie Chiangff6d8902016-10-21 11:02:09 -07002092 eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id,
2093 plane, row, col, tx_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002094#endif
2095 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002096 }
2097 }
Luc Trudeaub05eeae2017-08-18 15:14:30 -04002098#if CONFIG_CFL && CONFIG_CHROMA_SUB8X8
2099 if (mbmi->uv_mode != UV_CFL_PRED) {
2100#if CONFIG_DEBUG
2101 if (cfl->is_chroma_reference) {
2102 cfl_clear_sub8x8_val(cfl);
2103 }
2104#endif
2105 if (!cfl->is_chroma_reference && is_inter_block(mbmi)) {
2106 cfl_store_block(xd, mbmi->sb_type, mbmi->tx_size);
2107 }
2108 }
2109#endif // CONFIG_CFL && CONFIG_CHROMA_SUB8X8
Angie Chiang133733c2017-03-17 12:50:20 -07002110#endif // CONFIG_COEF_INTERLEAVE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002111
Angie Chiangd0916d92017-03-10 17:54:18 -08002112 int reader_corrupted_flag = aom_reader_has_error(r);
2113 aom_merge_corrupted_flag(&xd->corrupted, reader_corrupted_flag);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002114}
2115
Wei-Ting Lin3122b7d2017-08-30 17:26:58 -07002116#if NC_MODE_INFO && CONFIG_MOTION_VAR
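// Second decoding pass used when mode info for the whole superblock is read
// first: walk the partition tree again and run token decoding and
// reconstruction for each block.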
Yue Chen9ab6d712017-01-12 15:50:46 -08002117static void detoken_and_recon_sb(AV1Decoder *const pbi, MACROBLOCKD *const xd,
2118 int mi_row, int mi_col, aom_reader *r,
2119 BLOCK_SIZE bsize) {
2120 AV1_COMMON *const cm = &pbi->common;
2121 const int hbs = mi_size_wide[bsize] >> 1;
2122#if CONFIG_CB4X4
2123 const int unify_bsize = 1;
2124#else
2125 const int unify_bsize = 0;
2126#endif
2127#if CONFIG_EXT_PARTITION_TYPES
2128 BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
2129#endif
2130 PARTITION_TYPE partition;
2131 BLOCK_SIZE subsize;
2132 const int has_rows = (mi_row + hbs) < cm->mi_rows;
2133 const int has_cols = (mi_col + hbs) < cm->mi_cols;
2134
2135 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
2136
2137 partition = get_partition(cm, mi_row, mi_col, bsize);
2138 subsize = subsize_lookup[partition][bsize];
2139
2140 if (!hbs && !unify_bsize) {
2141 xd->bmode_blocks_wl = 1 >> !!(partition & PARTITION_VERT);
2142 xd->bmode_blocks_hl = 1 >> !!(partition & PARTITION_HORZ);
2143 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2144 } else {
2145 switch (partition) {
2146 case PARTITION_NONE:
2147 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize);
2148 break;
2149 case PARTITION_HORZ:
2150 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2151 if (has_rows)
2152 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r,
2153 subsize);
2154 break;
2155 case PARTITION_VERT:
2156 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2157 if (has_cols)
2158 decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r,
2159 subsize);
2160 break;
2161 case PARTITION_SPLIT:
2162 detoken_and_recon_sb(pbi, xd, mi_row, mi_col, r, subsize);
2163 detoken_and_recon_sb(pbi, xd, mi_row, mi_col + hbs, r, subsize);
2164 detoken_and_recon_sb(pbi, xd, mi_row + hbs, mi_col, r, subsize);
2165 detoken_and_recon_sb(pbi, xd, mi_row + hbs, mi_col + hbs, r, subsize);
2166 break;
2167#if CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01002168#if CONFIG_EXT_PARTITION_TYPES_AB
2169#error NC_MODE_INFO+MOTION_VAR not yet supported for new HORZ/VERT_AB partitions
2170#endif
Yue Chen9ab6d712017-01-12 15:50:46 -08002171 case PARTITION_HORZ_A:
2172 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize2);
2173 decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, bsize2);
2174 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, subsize);
2175 break;
2176 case PARTITION_HORZ_B:
2177 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2178 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, bsize2);
2179 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col + hbs, r,
2180 bsize2);
2181 break;
2182 case PARTITION_VERT_A:
2183 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize2);
2184 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, bsize2);
2185 decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, subsize);
2186 break;
2187 case PARTITION_VERT_B:
2188 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
2189 decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, bsize2);
2190 decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col + hbs, r,
2191 bsize2);
2192 break;
2193#endif
2194 default: assert(0 && "Invalid partition type");
2195 }
2196 }
2197}
2198#endif
2199
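// Decode one block: read its mode info and, unless reconstruction is
// deferred (SUPERTX or the two-pass NC_MODE_INFO path), immediately decode
// the tokens and reconstruct it.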
Yue Chen64550b62017-01-12 12:18:22 -08002200static void decode_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
2201#if CONFIG_SUPERTX
2202 int supertx_enabled,
2203#endif // CONFIG_SUPERTX
2204 int mi_row, int mi_col, aom_reader *r,
2205#if CONFIG_EXT_PARTITION_TYPES
2206 PARTITION_TYPE partition,
2207#endif // CONFIG_EXT_PARTITION_TYPES
2208 BLOCK_SIZE bsize) {
2209 decode_mbmi_block(pbi, xd,
2210#if CONFIG_SUPERTX
2211 supertx_enabled,
2212#endif
2213 mi_row, mi_col, r,
2214#if CONFIG_EXT_PARTITION_TYPES
2215 partition,
2216#endif
2217 bsize);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07002218
Wei-Ting Lin3122b7d2017-08-30 17:26:58 -07002219#if !(CONFIG_MOTION_VAR && NC_MODE_INFO)
Yue Chen64550b62017-01-12 12:18:22 -08002220#if CONFIG_SUPERTX
2221 if (!supertx_enabled)
2222#endif // CONFIG_SUPERTX
2223 decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize);
Yue Chen9ab6d712017-01-12 15:50:46 -08002224#endif
Yue Chen64550b62017-01-12 12:18:22 -08002225}
2226
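// Reads the partition type for the block at (mi_row, mi_col). When the block
// extends past the bottom or right edge of the frame only a subset of
// partitions is valid, so a reduced binary CDF is gathered from the full
// partition CDF before reading; when both halves are outside, PARTITION_SPLIT
// is inferred.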
Yaowu Xuf883b422016-08-30 14:01:10 -07002227static PARTITION_TYPE read_partition(AV1_COMMON *cm, MACROBLOCKD *xd,
2228 int mi_row, int mi_col, aom_reader *r,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002229 int has_rows, int has_cols,
Jingning Han1beb0102016-12-07 11:08:30 -08002230 BLOCK_SIZE bsize) {
Alex Converse55c6bde2017-01-12 15:55:31 -08002231#if CONFIG_UNPOISON_PARTITION_CTX
2232 const int ctx =
2233 partition_plane_context(xd, mi_row, mi_col, has_rows, has_cols, bsize);
Alex Converse55c6bde2017-01-12 15:55:31 -08002234#else
Jingning Han1beb0102016-12-07 11:08:30 -08002235 const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
Alex Converse55c6bde2017-01-12 15:55:31 -08002236#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002237 PARTITION_TYPE p;
Thomas Daviesc2ec0e42017-01-11 16:27:27 +00002238 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
2239 (void)cm;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002240
Jingning Han5fe79db2017-03-27 15:10:30 -07002241 aom_cdf_prob *partition_cdf = (ctx >= 0) ? ec_ctx->partition_cdf[ctx] : NULL;
Jingning Han5fe79db2017-03-27 15:10:30 -07002242
Rupert Swarbrickb95cf122017-07-31 16:51:12 +01002243 if (has_rows && has_cols) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002244#if CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrick2fa6e1c2017-09-11 12:38:10 +01002245 const int num_partition_types =
2246 (mi_width_log2_lookup[bsize] > mi_width_log2_lookup[BLOCK_8X8])
2247 ? EXT_PARTITION_TYPES
2248 : PARTITION_TYPES;
Alex Converse57795a42017-03-14 12:18:25 -07002249#else
Rupert Swarbrick2fa6e1c2017-09-11 12:38:10 +01002250 const int num_partition_types = PARTITION_TYPES;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002251#endif // CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrick2fa6e1c2017-09-11 12:38:10 +01002252 p = (PARTITION_TYPE)aom_read_symbol(r, partition_cdf, num_partition_types,
2253 ACCT_STR);
Rupert Swarbrickb95cf122017-07-31 16:51:12 +01002254 } else if (!has_rows && has_cols) {
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07002255 assert(bsize > BLOCK_8X8);
2256 aom_cdf_prob cdf[2];
2257 partition_gather_vert_alike(cdf, partition_cdf);
2258 assert(cdf[1] == AOM_ICDF(CDF_PROB_TOP));
2259 p = aom_read_cdf(r, cdf, 2, ACCT_STR) ? PARTITION_SPLIT : PARTITION_HORZ;
2260 // Bottom half of the block is outside the frame: only SPLIT vs HORZ is coded
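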
Rupert Swarbrickb95cf122017-07-31 16:51:12 +01002261 } else if (has_rows && !has_cols) {
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07002262 assert(bsize > BLOCK_8X8);
2263 aom_cdf_prob cdf[2];
2264 partition_gather_horz_alike(cdf, partition_cdf);
2265 assert(cdf[1] == AOM_ICDF(CDF_PROB_TOP));
2266 p = aom_read_cdf(r, cdf, 2, ACCT_STR) ? PARTITION_SPLIT : PARTITION_VERT;
Rupert Swarbrickb95cf122017-07-31 16:51:12 +01002267 } else {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002268 p = PARTITION_SPLIT;
Rupert Swarbrickb95cf122017-07-31 16:51:12 +01002269 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002270
Yaowu Xuc27fc142016-08-22 16:08:15 -07002271 return p;
2272}
2273
2274#if CONFIG_SUPERTX
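// Reads the skip flag used for a supertx superblock; segments with
// SEG_LVL_SKIP active are treated as skipped without reading a bit.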
Yaowu Xuf883b422016-08-30 14:01:10 -07002275static int read_skip(AV1_COMMON *cm, const MACROBLOCKD *xd, int segment_id,
2276 aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002277 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
2278 return 1;
2279 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002280 const int ctx = av1_get_skip_context(xd);
Thomas Davies61e3e372017-04-04 16:10:23 +01002281#if CONFIG_NEW_MULTISYMBOL
Thomas Davies61e3e372017-04-04 16:10:23 +01002282 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Thomas Davies61e3e372017-04-04 16:10:23 +01002283 const int skip = aom_read_symbol(r, ec_ctx->skip_cdfs[ctx], 2, ACCT_STR);
2284#else
Michael Bebenita6048d052016-08-25 14:40:54 -07002285 const int skip = aom_read(r, cm->fc->skip_probs[ctx], ACCT_STR);
Thomas Davies61e3e372017-04-04 16:10:23 +01002286#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002287 FRAME_COUNTS *counts = xd->counts;
2288 if (counts) ++counts->skip[ctx][skip];
2289 return skip;
2290 }
2291}
2292#endif // CONFIG_SUPERTX
2293
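// Recursively decodes one node of the partition tree: reads the partition
// type, decodes each child block (or recurses into sub-partitions), then
// parses the superblock-level syntax that follows where enabled (supertx,
// per-superblock loop filter level, CDEF strengths, loop-restoration
// coefficients).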
2294// TODO(slavarnway): eliminate bsize and subsize in future commits
Yaowu Xuf883b422016-08-30 14:01:10 -07002295static void decode_partition(AV1Decoder *const pbi, MACROBLOCKD *const xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002296#if CONFIG_SUPERTX
2297 int supertx_enabled,
2298#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07002299 int mi_row, int mi_col, aom_reader *r,
Jingning Hanea10ad42017-07-20 11:19:08 -07002300 BLOCK_SIZE bsize) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002301 AV1_COMMON *const cm = &pbi->common;
Jingning Hanff17e162016-12-07 17:58:18 -08002302 const int num_8x8_wh = mi_size_wide[bsize];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002303 const int hbs = num_8x8_wh >> 1;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01002304#if CONFIG_EXT_PARTITION_TYPES && CONFIG_EXT_PARTITION_TYPES_AB
2305 const int qbs = num_8x8_wh >> 2;
2306#endif
Jingning Han41bb3392016-12-14 10:46:48 -08002307#if CONFIG_CB4X4
2308 const int unify_bsize = 1;
2309#else
2310 const int unify_bsize = 0;
2311#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002312 PARTITION_TYPE partition;
2313 BLOCK_SIZE subsize;
2314#if CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002315 const int quarter_step = num_8x8_wh / 4;
2316 int i;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01002317#if !CONFIG_EXT_PARTITION_TYPES_AB
2318 BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
2319#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002320#endif
2321 const int has_rows = (mi_row + hbs) < cm->mi_rows;
2322 const int has_cols = (mi_col + hbs) < cm->mi_cols;
2323#if CONFIG_SUPERTX
2324 const int read_token = !supertx_enabled;
2325 int skip = 0;
Jingning Han2511c662016-12-22 11:57:34 -08002326 TX_SIZE supertx_size = max_txsize_lookup[bsize];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002327 const TileInfo *const tile = &xd->tile;
2328 int txfm = DCT_DCT;
2329#endif // CONFIG_SUPERTX
2330
2331 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
2332
Jingning Hancd959762017-03-27 14:49:59 -07002333 partition = (bsize < BLOCK_8X8) ? PARTITION_NONE
2334 : read_partition(cm, xd, mi_row, mi_col, r,
2335 has_rows, has_cols, bsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002336 subsize = subsize_lookup[partition][bsize]; // get_subsize(bsize, partition);
Yushin Cho77bba8d2016-11-04 16:36:56 -07002337
Rupert Swarbrick415c8f12017-10-09 16:26:23 +01002338 // Check the bitstream is conformant: if there is subsampling on the
2339 // chroma planes, subsize must subsample to a valid block size.
2340 const struct macroblockd_plane *const pd_u = &xd->plane[1];
2341 if (get_plane_block_size(subsize, pd_u) == BLOCK_INVALID) {
2342 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2343 "Block size %dx%d invalid with this subsampling mode",
2344 block_size_wide[subsize], block_size_high[subsize]);
2345 }
2346
Yushin Cho77bba8d2016-11-04 16:36:56 -07002347#if CONFIG_PVQ
2348 assert(partition < PARTITION_TYPES);
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002349 assert(subsize < BLOCK_SIZES_ALL);
Yushin Cho77bba8d2016-11-04 16:36:56 -07002350#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002351#if CONFIG_SUPERTX
2352 if (!frame_is_intra_only(cm) && partition != PARTITION_NONE &&
2353 bsize <= MAX_SUPERTX_BLOCK_SIZE && !supertx_enabled && !xd->lossless[0]) {
2354 const int supertx_context = partition_supertx_context_lookup[partition];
Michael Bebenita6048d052016-08-25 14:40:54 -07002355 supertx_enabled = aom_read(
2356 r, cm->fc->supertx_prob[supertx_context][supertx_size], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002357 if (xd->counts)
2358 xd->counts->supertx[supertx_context][supertx_size][supertx_enabled]++;
2359#if CONFIG_VAR_TX
2360 if (supertx_enabled) xd->supertx_size = supertx_size;
2361#endif
2362 }
2363#endif // CONFIG_SUPERTX
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002364
2365#if CONFIG_SUPERTX
2366#define DEC_BLOCK_STX_ARG supertx_enabled,
2367#else
2368#define DEC_BLOCK_STX_ARG
2369#endif
2370#if CONFIG_EXT_PARTITION_TYPES
2371#define DEC_BLOCK_EPT_ARG partition,
2372#else
2373#define DEC_BLOCK_EPT_ARG
2374#endif
2375#define DEC_BLOCK(db_r, db_c, db_subsize) \
2376 decode_block(pbi, xd, DEC_BLOCK_STX_ARG(db_r), (db_c), r, \
2377 DEC_BLOCK_EPT_ARG(db_subsize))
2378#define DEC_PARTITION(db_r, db_c, db_subsize) \
2379 decode_partition(pbi, xd, DEC_BLOCK_STX_ARG(db_r), (db_c), r, (db_subsize))
2380
Jingning Han41bb3392016-12-14 10:46:48 -08002381 if (!hbs && !unify_bsize) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002382 // calculate bmode block dimensions (log 2)
2383 xd->bmode_blocks_wl = 1 >> !!(partition & PARTITION_VERT);
2384 xd->bmode_blocks_hl = 1 >> !!(partition & PARTITION_HORZ);
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002385 DEC_BLOCK(mi_row, mi_col, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002386 } else {
2387 switch (partition) {
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002388 case PARTITION_NONE: DEC_BLOCK(mi_row, mi_col, subsize); break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002389 case PARTITION_HORZ:
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002390 DEC_BLOCK(mi_row, mi_col, subsize);
2391 if (has_rows) DEC_BLOCK(mi_row + hbs, mi_col, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002392 break;
2393 case PARTITION_VERT:
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002394 DEC_BLOCK(mi_row, mi_col, subsize);
2395 if (has_cols) DEC_BLOCK(mi_row, mi_col + hbs, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002396 break;
2397 case PARTITION_SPLIT:
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002398 DEC_PARTITION(mi_row, mi_col, subsize);
2399 DEC_PARTITION(mi_row, mi_col + hbs, subsize);
2400 DEC_PARTITION(mi_row + hbs, mi_col, subsize);
2401 DEC_PARTITION(mi_row + hbs, mi_col + hbs, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002402 break;
2403#if CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01002404#if CONFIG_EXT_PARTITION_TYPES_AB
2405 case PARTITION_HORZ_A:
2406 DEC_BLOCK(mi_row, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
2407 DEC_BLOCK(mi_row + qbs, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
2408 DEC_BLOCK(mi_row + hbs, mi_col, subsize);
2409 break;
2410 case PARTITION_HORZ_B:
2411 DEC_BLOCK(mi_row, mi_col, subsize);
2412 DEC_BLOCK(mi_row + hbs, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
2413 if (mi_row + 3 * qbs < cm->mi_rows)
2414 DEC_BLOCK(mi_row + 3 * qbs, mi_col,
2415 get_subsize(bsize, PARTITION_HORZ_4));
2416 break;
2417 case PARTITION_VERT_A:
2418 DEC_BLOCK(mi_row, mi_col, get_subsize(bsize, PARTITION_VERT_4));
2419 DEC_BLOCK(mi_row, mi_col + qbs, get_subsize(bsize, PARTITION_VERT_4));
2420 DEC_BLOCK(mi_row, mi_col + hbs, subsize);
2421 break;
2422 case PARTITION_VERT_B:
2423 DEC_BLOCK(mi_row, mi_col, subsize);
2424 DEC_BLOCK(mi_row, mi_col + hbs, get_subsize(bsize, PARTITION_VERT_4));
2425 if (mi_col + 3 * qbs < cm->mi_cols)
2426 DEC_BLOCK(mi_row, mi_col + 3 * qbs,
2427 get_subsize(bsize, PARTITION_VERT_4));
2428 break;
2429#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07002430 case PARTITION_HORZ_A:
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002431 DEC_BLOCK(mi_row, mi_col, bsize2);
2432 DEC_BLOCK(mi_row, mi_col + hbs, bsize2);
2433 DEC_BLOCK(mi_row + hbs, mi_col, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002434 break;
2435 case PARTITION_HORZ_B:
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002436 DEC_BLOCK(mi_row, mi_col, subsize);
2437 DEC_BLOCK(mi_row + hbs, mi_col, bsize2);
2438 DEC_BLOCK(mi_row + hbs, mi_col + hbs, bsize2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002439 break;
2440 case PARTITION_VERT_A:
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002441 DEC_BLOCK(mi_row, mi_col, bsize2);
2442 DEC_BLOCK(mi_row + hbs, mi_col, bsize2);
2443 DEC_BLOCK(mi_row, mi_col + hbs, subsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002444 break;
2445 case PARTITION_VERT_B:
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002446 DEC_BLOCK(mi_row, mi_col, subsize);
2447 DEC_BLOCK(mi_row, mi_col + hbs, bsize2);
2448 DEC_BLOCK(mi_row + hbs, mi_col + hbs, bsize2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002449 break;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01002450#endif
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002451 case PARTITION_HORZ_4:
2452 for (i = 0; i < 4; ++i) {
2453 int this_mi_row = mi_row + i * quarter_step;
2454 if (i > 0 && this_mi_row >= cm->mi_rows) break;
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002455 DEC_BLOCK(this_mi_row, mi_col, subsize);
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002456 }
2457 break;
2458 case PARTITION_VERT_4:
2459 for (i = 0; i < 4; ++i) {
2460 int this_mi_col = mi_col + i * quarter_step;
2461 if (i > 0 && this_mi_col >= cm->mi_cols) break;
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002462 DEC_BLOCK(mi_row, this_mi_col, subsize);
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002463 }
2464 break;
2465#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xuc27fc142016-08-22 16:08:15 -07002466 default: assert(0 && "Invalid partition type");
2467 }
2468 }
2469
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01002470#undef DEC_PARTITION
2471#undef DEC_BLOCK
2472#undef DEC_BLOCK_EPT_ARG
2473#undef DEC_BLOCK_STX_ARG
2474
Yaowu Xuc27fc142016-08-22 16:08:15 -07002475#if CONFIG_SUPERTX
2476 if (supertx_enabled && read_token) {
2477 uint8_t *dst_buf[3];
2478 int dst_stride[3], i;
2479 int offset = mi_row * cm->mi_stride + mi_col;
2480
2481 set_segment_id_supertx(cm, mi_row, mi_col, bsize);
2482
David Barker3aec8d62017-01-31 14:55:32 +00002483 if (cm->delta_q_present_flag) {
2484 for (i = 0; i < MAX_SEGMENTS; i++) {
Cheng Chen49d30e62017-08-28 20:59:27 -07002485 int j;
2486 for (j = 0; j < MAX_MB_PLANE; ++j) {
2487 const int dc_delta_q = j == 0 ? cm->y_dc_delta_q : cm->uv_dc_delta_q;
2488 const int ac_delta_q = j == 0 ? 0 : cm->uv_ac_delta_q;
2489
2490 xd->plane[j].seg_dequant[i][0] =
2491 av1_dc_quant(xd->current_qindex, dc_delta_q, cm->bit_depth);
2492 xd->plane[j].seg_dequant[i][1] =
2493 av1_ac_quant(xd->current_qindex, ac_delta_q, cm->bit_depth);
2494 }
David Barker3aec8d62017-01-31 14:55:32 +00002495 }
2496 }
David Barker3aec8d62017-01-31 14:55:32 +00002497
Yaowu Xuc27fc142016-08-22 16:08:15 -07002498 xd->mi = cm->mi_grid_visible + offset;
2499 xd->mi[0] = cm->mi + offset;
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002500 set_mi_row_col(xd, tile, mi_row, mi_size_high[bsize], mi_col,
2501 mi_size_wide[bsize],
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002502#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002503 cm->dependent_horz_tiles,
2504#endif // CONFIG_DEPENDENT_HORZTILES
2505 cm->mi_rows, cm->mi_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002506 set_skip_context(xd, mi_row, mi_col);
2507 skip = read_skip(cm, xd, xd->mi[0]->mbmi.segment_id_supertx, r);
2508 if (skip) {
Timothy B. Terriberrya2d5cde2017-05-10 18:33:50 -07002509 av1_reset_skip_context(xd, mi_row, mi_col, bsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002510 } else {
Rupert Swarbrick580943a2017-06-23 14:51:38 +01002511 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002512#if CONFIG_EXT_TX
Sarah Parkere68a3e42017-02-16 14:03:24 -08002513 if (get_ext_tx_types(supertx_size, bsize, 1, cm->reduced_tx_set_used) >
2514 1) {
2515 const int eset =
2516 get_ext_tx_set(supertx_size, bsize, 1, cm->reduced_tx_set_used);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002517 if (eset > 0) {
Hui Suddbcde22017-09-18 17:22:02 -07002518 const TxSetType tx_set_type = get_ext_tx_set_type(
2519 supertx_size, bsize, 1, cm->reduced_tx_set_used);
Rupert Swarbrick580943a2017-06-23 14:51:38 +01002520 const int packed_sym =
2521 aom_read_symbol(r, ec_ctx->inter_ext_tx_cdf[eset][supertx_size],
Hui Suddbcde22017-09-18 17:22:02 -07002522 av1_num_ext_tx_set[tx_set_type], ACCT_STR);
2523 txfm = av1_ext_tx_inv[tx_set_type][packed_sym];
Hui Su98b0b3e2017-09-19 13:54:02 -07002524#if CONFIG_ENTROPY_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07002525 if (xd->counts) ++xd->counts->inter_ext_tx[eset][supertx_size][txfm];
Hui Su98b0b3e2017-09-19 13:54:02 -07002526#endif // CONFIG_ENTROPY_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07002527 }
2528 }
2529#else
2530 if (supertx_size < TX_32X32) {
Rupert Swarbrick580943a2017-06-23 14:51:38 +01002531 txfm = aom_read_symbol(r, ec_ctx->inter_ext_tx_cdf[supertx_size],
2532 TX_TYPES, ACCT_STR);
Hui Su98b0b3e2017-09-19 13:54:02 -07002533#if CONFIG_ENTROPY_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07002534 if (xd->counts) ++xd->counts->inter_ext_tx[supertx_size][txfm];
Hui Su98b0b3e2017-09-19 13:54:02 -07002535#endif // CONFIG_ENTROPY_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07002536 }
2537#endif // CONFIG_EXT_TX
2538 }
2539
Jingning Han91d9a792017-04-18 12:01:52 -07002540 av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
2541 mi_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002542 for (i = 0; i < MAX_MB_PLANE; i++) {
2543 dst_buf[i] = xd->plane[i].dst.buf;
2544 dst_stride[i] = xd->plane[i].dst.stride;
2545 }
2546 dec_predict_sb_complex(pbi, xd, tile, mi_row, mi_col, mi_row, mi_col, bsize,
2547 bsize, dst_buf, dst_stride);
2548
2549 if (!skip) {
2550 int eobtotal = 0;
2551 MB_MODE_INFO *mbmi;
2552 set_offsets_topblock(cm, xd, tile, bsize, mi_row, mi_col);
2553 mbmi = &xd->mi[0]->mbmi;
2554 mbmi->tx_type = txfm;
2555 assert(mbmi->segment_id_supertx != MAX_SEGMENTS);
2556 for (i = 0; i < MAX_MB_PLANE; ++i) {
2557 const struct macroblockd_plane *const pd = &xd->plane[i];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002558 int row, col;
hui su0c6244b2017-07-12 17:11:43 -07002559 const TX_SIZE tx_size = av1_get_tx_size(i, xd);
Jingning Han5b7706a2016-12-21 09:55:10 -08002560 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
Jingning Han32b20282016-10-28 15:42:44 -07002561 const int stepr = tx_size_high_unit[tx_size];
2562 const int stepc = tx_size_wide_unit[tx_size];
Jingning Han5b7706a2016-12-21 09:55:10 -08002563 const int max_blocks_wide = max_block_wide(xd, plane_bsize, i);
2564 const int max_blocks_high = max_block_high(xd, plane_bsize, i);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002565
2566 for (row = 0; row < max_blocks_high; row += stepr)
2567 for (col = 0; col < max_blocks_wide; col += stepc)
Angie Chiangff6d8902016-10-21 11:02:09 -07002568 eobtotal += reconstruct_inter_block(
2569 cm, xd, r, mbmi->segment_id_supertx, i, row, col, tx_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002570 }
Jingning Han24f24a52016-12-27 10:13:28 -08002571 if ((unify_bsize || !(subsize < BLOCK_8X8)) && eobtotal == 0) skip = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002572 }
2573 set_param_topblock(cm, xd, bsize, mi_row, mi_col, txfm, skip);
2574 }
2575#endif // CONFIG_SUPERTX
2576
2577#if CONFIG_EXT_PARTITION_TYPES
Alex Converseffabff32017-03-27 09:52:19 -07002578 update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002579#else
2580 // update partition context
2581 if (bsize >= BLOCK_8X8 &&
2582 (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
Jingning Han1beb0102016-12-07 11:08:30 -08002583 update_partition_context(xd, mi_row, mi_col, subsize, bsize);
David Barkerf8935c92016-10-26 14:54:06 +01002584#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xud71be782016-10-14 08:47:03 -07002585
Cheng Chenf572cd32017-08-25 18:34:51 -07002586#if CONFIG_LPF_SB
2587 if (bsize == cm->sb_size) {
Cheng Chena4b27de2017-08-31 16:05:19 -07002588 int filt_lvl;
2589 if (mi_row == 0 && mi_col == 0) {
2590 filt_lvl = aom_read_literal(r, 6, ACCT_STR);
Cheng Chen41d37c22017-09-08 19:00:21 -07002591 cm->mi_grid_visible[0]->mbmi.reuse_sb_lvl = 0;
2592 cm->mi_grid_visible[0]->mbmi.delta = 0;
2593 cm->mi_grid_visible[0]->mbmi.sign = 0;
Cheng Chena4b27de2017-08-31 16:05:19 -07002594 } else {
2595 int prev_mi_row, prev_mi_col;
2596 if (mi_col - MAX_MIB_SIZE < 0) {
2597 prev_mi_row = mi_row - MAX_MIB_SIZE;
2598 prev_mi_col = mi_col;
2599 } else {
2600 prev_mi_row = mi_row;
2601 prev_mi_col = mi_col - MAX_MIB_SIZE;
2602 }
Cheng Chenc7855b12017-09-05 10:49:08 -07002603
Cheng Chen41d37c22017-09-08 19:00:21 -07002604 MB_MODE_INFO *curr_mbmi =
2605 &cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi;
2606 MB_MODE_INFO *prev_mbmi =
2607 &cm->mi_grid_visible[prev_mi_row * cm->mi_stride + prev_mi_col]->mbmi;
2608 const uint8_t prev_lvl = prev_mbmi->filt_lvl;
Cheng Chena4b27de2017-08-31 16:05:19 -07002609
Cheng Chen41d37c22017-09-08 19:00:21 -07002610 const int reuse_ctx = prev_mbmi->reuse_sb_lvl;
2611 const int reuse_prev_lvl = aom_read_symbol(
2612 r, xd->tile_ctx->lpf_reuse_cdf[reuse_ctx], 2, ACCT_STR);
2613 curr_mbmi->reuse_sb_lvl = reuse_prev_lvl;
2614
Cheng Chenc7855b12017-09-05 10:49:08 -07002615 if (reuse_prev_lvl) {
Cheng Chena4b27de2017-08-31 16:05:19 -07002616 filt_lvl = prev_lvl;
Cheng Chen41d37c22017-09-08 19:00:21 -07002617 curr_mbmi->delta = 0;
2618 curr_mbmi->sign = 0;
Cheng Chenc7855b12017-09-05 10:49:08 -07002619 } else {
Cheng Chen41d37c22017-09-08 19:00:21 -07002620 const int delta_ctx = prev_mbmi->delta;
2621 unsigned int delta = aom_read_symbol(
2622 r, xd->tile_ctx->lpf_delta_cdf[delta_ctx], DELTA_RANGE, ACCT_STR);
2623 curr_mbmi->delta = delta;
Cheng Chenf89ca3e2017-09-07 14:47:47 -07002624 delta *= LPF_STEP;
Cheng Chenc7855b12017-09-05 10:49:08 -07002625
2626 if (delta) {
Cheng Chen41d37c22017-09-08 19:00:21 -07002627 const int sign_ctx = prev_mbmi->sign;
2628 const int sign = aom_read_symbol(
2629 r, xd->tile_ctx->lpf_sign_cdf[reuse_ctx][sign_ctx], 2, ACCT_STR);
2630 curr_mbmi->sign = sign;
Cheng Chenc7855b12017-09-05 10:49:08 -07002631 filt_lvl = sign ? prev_lvl + delta : prev_lvl - delta;
2632 } else {
2633 filt_lvl = prev_lvl;
Cheng Chen41d37c22017-09-08 19:00:21 -07002634 curr_mbmi->sign = 0;
Cheng Chenc7855b12017-09-05 10:49:08 -07002635 }
Cheng Chena4b27de2017-08-31 16:05:19 -07002636 }
2637 }
Cheng Chen5589d712017-09-05 12:03:25 -07002638
2639 av1_loop_filter_sb_level_init(cm, mi_row, mi_col, filt_lvl);
Cheng Chenf572cd32017-08-25 18:34:51 -07002640 }
2641#endif
2642
Jean-Marc Valin01435132017-02-18 14:12:53 -05002643#if CONFIG_CDEF
Jingning Handf068332017-05-09 09:03:17 -07002644 if (bsize == cm->sb_size) {
Cheng Chenf5bdeac2017-07-24 14:31:30 -07002645 int width_step = mi_size_wide[BLOCK_64X64];
2646 int height_step = mi_size_high[BLOCK_64X64];
2647 int w, h;
2648 for (h = 0; (h < mi_size_high[cm->sb_size]) && (mi_row + h < cm->mi_rows);
2649 h += height_step) {
2650 for (w = 0; (w < mi_size_wide[cm->sb_size]) && (mi_col + w < cm->mi_cols);
2651 w += width_step) {
2652 if (!cm->all_lossless && !sb_all_skip(cm, mi_row + h, mi_col + w))
2653 cm->mi_grid_visible[(mi_row + h) * cm->mi_stride + (mi_col + w)]
2654 ->mbmi.cdef_strength =
2655 aom_read_literal(r, cm->cdef_bits, ACCT_STR);
2656 else
2657 cm->mi_grid_visible[(mi_row + h) * cm->mi_stride + (mi_col + w)]
2658 ->mbmi.cdef_strength = -1;
2659 }
Yaowu Xud71be782016-10-14 08:47:03 -07002660 }
2661 }
Jean-Marc Valin01435132017-02-18 14:12:53 -05002662#endif // CONFIG_CDEF
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002663#if CONFIG_LOOP_RESTORATION
2664 for (int plane = 0; plane < MAX_MB_PLANE; ++plane) {
2665 int rcol0, rcol1, rrow0, rrow1, nhtiles;
2666 if (av1_loop_restoration_corners_in_sb(cm, plane, mi_row, mi_col, bsize,
2667 &rcol0, &rcol1, &rrow0, &rrow1,
2668 &nhtiles)) {
2669 for (int rrow = rrow0; rrow < rrow1; ++rrow) {
2670 for (int rcol = rcol0; rcol < rcol1; ++rcol) {
2671 int rtile_idx = rcol + rrow * nhtiles;
2672 loop_restoration_read_sb_coeffs(cm, xd, r, plane, rtile_idx);
2673 }
2674 }
2675 }
2676 }
2677#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002678}
2679
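// Initializes the entropy decoder for one tile after validating that the
// advertised tile size does not extend past the end of the buffer.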
Yaowu Xuc27fc142016-08-22 16:08:15 -07002680static void setup_bool_decoder(const uint8_t *data, const uint8_t *data_end,
2681 const size_t read_size,
Yaowu Xuf883b422016-08-30 14:01:10 -07002682 struct aom_internal_error_info *error_info,
Alex Converseeb780e72016-12-13 12:46:41 -08002683 aom_reader *r,
2684#if CONFIG_ANS && ANS_MAX_SYMBOLS
2685 int window_size,
2686#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
2687 aom_decrypt_cb decrypt_cb, void *decrypt_state) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002688 // Validate the calculated partition length. If the buffer
2689 // described by the partition can't be fully read, throw an error
2690 // (truncated packet or corrupt tile length).
2691 if (!read_is_valid(data, read_size, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07002692 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002693 "Truncated packet or corrupt tile length");
2694
Alex Converse2cdf0d82016-12-13 13:53:09 -08002695#if CONFIG_ANS && ANS_MAX_SYMBOLS
Alex Converseeb780e72016-12-13 12:46:41 -08002696 r->window_size = window_size;
Alex Converse2cdf0d82016-12-13 13:53:09 -08002697#endif
Alex Converse346440b2017-01-03 13:47:37 -08002698 if (aom_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
Yaowu Xuf883b422016-08-30 14:01:10 -07002699 aom_internal_error(error_info, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002700 "Failed to allocate bool decoder %d", 1);
2701}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002702
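// Parses the segmentation syntax from the uncompressed header: the enabled
// flag, the map/temporal update flags and, when present, the per-segment
// feature data.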
Yaowu Xuf883b422016-08-30 14:01:10 -07002703static void setup_segmentation(AV1_COMMON *const cm,
2704 struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002705 struct segmentation *const seg = &cm->seg;
2706 int i, j;
2707
2708 seg->update_map = 0;
2709 seg->update_data = 0;
Ryandd8df162017-09-27 15:40:13 -07002710 seg->temporal_update = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002711
Yaowu Xuf883b422016-08-30 14:01:10 -07002712 seg->enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002713 if (!seg->enabled) return;
2714
2715 // Segmentation map update
2716 if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
2717 seg->update_map = 1;
2718 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002719 seg->update_map = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002720 }
2721 if (seg->update_map) {
2722 if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
2723 seg->temporal_update = 0;
2724 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002725 seg->temporal_update = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002726 }
2727 }
2728
2729 // Segmentation data update
Yaowu Xuf883b422016-08-30 14:01:10 -07002730 seg->update_data = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002731 if (seg->update_data) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002732 seg->abs_delta = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002733
Yaowu Xuf883b422016-08-30 14:01:10 -07002734 av1_clearall_segfeatures(seg);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002735
2736 for (i = 0; i < MAX_SEGMENTS; i++) {
2737 for (j = 0; j < SEG_LVL_MAX; j++) {
2738 int data = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002739 const int feature_enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002740 if (feature_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002741 av1_enable_segfeature(seg, i, j);
2742 data = decode_unsigned_max(rb, av1_seg_feature_data_max(j));
2743 if (av1_is_segfeature_signed(j))
2744 data = aom_rb_read_bit(rb) ? -data : data;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002745 }
Yaowu Xuf883b422016-08-30 14:01:10 -07002746 av1_set_segdata(seg, i, j, data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002747 }
2748 }
2749 }
2750}
2751
2752#if CONFIG_LOOP_RESTORATION
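// Reads the frame-level restoration type for each plane and the restoration
// tile sizes from the uncompressed header.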
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002753static void decode_restoration_mode(AV1_COMMON *cm,
2754 struct aom_read_bit_buffer *rb) {
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002755 int p;
2756 RestorationInfo *rsi = &cm->rst_info[0];
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002757 if (aom_rb_read_bit(rb)) {
Debargha Mukherjeeb3c43bc2017-02-01 13:09:03 -08002758 rsi->frame_restoration_type =
2759 aom_rb_read_bit(rb) ? RESTORE_SGRPROJ : RESTORE_WIENER;
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002760 } else {
2761 rsi->frame_restoration_type =
2762 aom_rb_read_bit(rb) ? RESTORE_SWITCHABLE : RESTORE_NONE;
2763 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002764 for (p = 1; p < MAX_MB_PLANE; ++p) {
Debargha Mukherjeed23ceea2017-05-18 20:33:52 -07002765 rsi = &cm->rst_info[p];
2766 if (aom_rb_read_bit(rb)) {
2767 rsi->frame_restoration_type =
2768 aom_rb_read_bit(rb) ? RESTORE_SGRPROJ : RESTORE_WIENER;
2769 } else {
2770 rsi->frame_restoration_type = RESTORE_NONE;
2771 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002772 }
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08002773
2774 cm->rst_info[0].restoration_tilesize = RESTORATION_TILESIZE_MAX;
2775 cm->rst_info[1].restoration_tilesize = RESTORATION_TILESIZE_MAX;
2776 cm->rst_info[2].restoration_tilesize = RESTORATION_TILESIZE_MAX;
2777 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
2778 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
2779 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
2780 rsi = &cm->rst_info[0];
2781 rsi->restoration_tilesize >>= aom_rb_read_bit(rb);
2782 if (rsi->restoration_tilesize != RESTORATION_TILESIZE_MAX) {
2783 rsi->restoration_tilesize >>= aom_rb_read_bit(rb);
2784 }
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08002785 }
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07002786 int s = AOMMIN(cm->subsampling_x, cm->subsampling_y);
2787 if (s && (cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
2788 cm->rst_info[2].frame_restoration_type != RESTORE_NONE)) {
2789 cm->rst_info[1].restoration_tilesize =
2790 cm->rst_info[0].restoration_tilesize >> (aom_rb_read_bit(rb) * s);
2791 } else {
2792 cm->rst_info[1].restoration_tilesize = cm->rst_info[0].restoration_tilesize;
2793 }
2794 cm->rst_info[2].restoration_tilesize = cm->rst_info[1].restoration_tilesize;
Debargha Mukherjee7a5587a2017-08-31 07:41:30 -07002795
2796 cm->rst_info[0].procunit_width = cm->rst_info[0].procunit_height =
2797 RESTORATION_PROC_UNIT_SIZE;
2798 cm->rst_info[1].procunit_width = cm->rst_info[2].procunit_width =
2799 RESTORATION_PROC_UNIT_SIZE >> cm->subsampling_x;
2800 cm->rst_info[1].procunit_height = cm->rst_info[2].procunit_height =
2801 RESTORATION_PROC_UNIT_SIZE >> cm->subsampling_y;
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002802}
2803
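// Reads one Wiener filter. The taps are symmetric, so only half of them are
// coded (the outermost tap is forced to zero for the smaller chroma window),
// each as a subexponential delta against the previously decoded filter; the
// centre tap is derived from the coded taps rather than signalled. The
// decoded filter then becomes the new reference.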
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07002804static void read_wiener_filter(int wiener_win, WienerInfo *wiener_info,
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002805 WienerInfo *ref_wiener_info, aom_reader *rb) {
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07002806 if (wiener_win == WIENER_WIN)
2807 wiener_info->vfilter[0] = wiener_info->vfilter[WIENER_WIN - 1] =
2808 aom_read_primitive_refsubexpfin(
2809 rb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
2810 WIENER_FILT_TAP0_SUBEXP_K,
2811 ref_wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV, ACCT_STR) +
2812 WIENER_FILT_TAP0_MINV;
2813 else
2814 wiener_info->vfilter[0] = wiener_info->vfilter[WIENER_WIN - 1] = 0;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002815 wiener_info->vfilter[1] = wiener_info->vfilter[WIENER_WIN - 2] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002816 aom_read_primitive_refsubexpfin(
2817 rb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
2818 WIENER_FILT_TAP1_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002819 ref_wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002820 WIENER_FILT_TAP1_MINV;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002821 wiener_info->vfilter[2] = wiener_info->vfilter[WIENER_WIN - 3] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002822 aom_read_primitive_refsubexpfin(
2823 rb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
2824 WIENER_FILT_TAP2_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002825 ref_wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002826 WIENER_FILT_TAP2_MINV;
David Barker1e8e6b92017-01-13 13:45:51 +00002827 // The central element has an implicit +WIENER_FILT_STEP
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002828 wiener_info->vfilter[WIENER_HALFWIN] =
David Barker1e8e6b92017-01-13 13:45:51 +00002829 -2 * (wiener_info->vfilter[0] + wiener_info->vfilter[1] +
2830 wiener_info->vfilter[2]);
2831
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07002832 if (wiener_win == WIENER_WIN)
2833 wiener_info->hfilter[0] = wiener_info->hfilter[WIENER_WIN - 1] =
2834 aom_read_primitive_refsubexpfin(
2835 rb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
2836 WIENER_FILT_TAP0_SUBEXP_K,
2837 ref_wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV, ACCT_STR) +
2838 WIENER_FILT_TAP0_MINV;
2839 else
2840 wiener_info->hfilter[0] = wiener_info->hfilter[WIENER_WIN - 1] = 0;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002841 wiener_info->hfilter[1] = wiener_info->hfilter[WIENER_WIN - 2] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002842 aom_read_primitive_refsubexpfin(
2843 rb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
2844 WIENER_FILT_TAP1_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002845 ref_wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002846 WIENER_FILT_TAP1_MINV;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002847 wiener_info->hfilter[2] = wiener_info->hfilter[WIENER_WIN - 3] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002848 aom_read_primitive_refsubexpfin(
2849 rb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
2850 WIENER_FILT_TAP2_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002851 ref_wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002852 WIENER_FILT_TAP2_MINV;
David Barker1e8e6b92017-01-13 13:45:51 +00002853 // The central element has an implicit +WIENER_FILT_STEP
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002854 wiener_info->hfilter[WIENER_HALFWIN] =
David Barker1e8e6b92017-01-13 13:45:51 +00002855 -2 * (wiener_info->hfilter[0] + wiener_info->hfilter[1] +
2856 wiener_info->hfilter[2]);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002857 memcpy(ref_wiener_info, wiener_info, sizeof(*wiener_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002858}
2859
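// Reads a self-guided projection (sgrproj) filter: the parameter-set index
// plus two projection coefficients coded relative to the previously decoded
// filter, which is then updated as the new reference.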
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002860static void read_sgrproj_filter(SgrprojInfo *sgrproj_info,
2861 SgrprojInfo *ref_sgrproj_info, aom_reader *rb) {
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002862 sgrproj_info->ep = aom_read_literal(rb, SGRPROJ_PARAMS_BITS, ACCT_STR);
2863 sgrproj_info->xqd[0] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002864 aom_read_primitive_refsubexpfin(
2865 rb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1, SGRPROJ_PRJ_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002866 ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0, ACCT_STR) +
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002867 SGRPROJ_PRJ_MIN0;
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002868 sgrproj_info->xqd[1] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002869 aom_read_primitive_refsubexpfin(
2870 rb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1, SGRPROJ_PRJ_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07002871 ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1, ACCT_STR) +
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002872 SGRPROJ_PRJ_MIN1;
2873 memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002874}
2875
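// Reads the restoration type and coefficients for one restoration tile of
// the given plane, according to the frame-level restoration type
// (switchable, Wiener-only or sgrproj-only).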
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002876static void loop_restoration_read_sb_coeffs(const AV1_COMMON *const cm,
2877 MACROBLOCKD *xd,
2878 aom_reader *const r, int plane,
2879 int rtile_idx) {
2880 const RestorationInfo *rsi = cm->rst_info + plane;
2881 if (rsi->frame_restoration_type == RESTORE_NONE) return;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002882
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002883 const int wiener_win = (plane > 0) ? WIENER_WIN_CHROMA : WIENER_WIN;
2884 WienerInfo *wiener_info = xd->wiener_info + plane;
2885 SgrprojInfo *sgrproj_info = xd->sgrproj_info + plane;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002886
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002887 if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) {
2888 assert(plane == 0);
2889 rsi->restoration_type[rtile_idx] =
2890 aom_read_tree(r, av1_switchable_restore_tree,
2891 cm->fc->switchable_restore_prob, ACCT_STR);
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002892
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002893 if (rsi->restoration_type[rtile_idx] == RESTORE_WIENER) {
2894 read_wiener_filter(wiener_win, &rsi->wiener_info[rtile_idx], wiener_info,
2895 r);
2896 } else if (rsi->restoration_type[rtile_idx] == RESTORE_SGRPROJ) {
2897 read_sgrproj_filter(&rsi->sgrproj_info[rtile_idx], sgrproj_info, r);
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002898 }
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002899 } else if (rsi->frame_restoration_type == RESTORE_WIENER) {
2900 if (aom_read(r, RESTORE_NONE_WIENER_PROB, ACCT_STR)) {
2901 rsi->restoration_type[rtile_idx] = RESTORE_WIENER;
2902 read_wiener_filter(wiener_win, &rsi->wiener_info[rtile_idx], wiener_info,
2903 r);
2904 } else {
2905 rsi->restoration_type[rtile_idx] = RESTORE_NONE;
2906 }
2907 } else if (rsi->frame_restoration_type == RESTORE_SGRPROJ) {
2908 if (aom_read(r, RESTORE_NONE_SGRPROJ_PROB, ACCT_STR)) {
2909 rsi->restoration_type[rtile_idx] = RESTORE_SGRPROJ;
2910 read_sgrproj_filter(&rsi->sgrproj_info[rtile_idx], sgrproj_info, r);
2911 } else {
2912 rsi->restoration_type[rtile_idx] = RESTORE_NONE;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002913 }
2914 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002915}
2916#endif // CONFIG_LOOP_RESTORATION
2917
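// Parses the loop filter parameters: the filter level(s) (unless they are
// signalled per superblock under CONFIG_LPF_SB), sharpness, and the optional
// per-reference and per-mode delta updates.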
Yaowu Xuf883b422016-08-30 14:01:10 -07002918static void setup_loopfilter(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002919 struct loopfilter *lf = &cm->lf;
Cheng Chenf572cd32017-08-25 18:34:51 -07002920#if !CONFIG_LPF_SB
Cheng Chen13fc8192017-08-19 11:49:28 -07002921#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07002922 lf->filter_level[0] = aom_rb_read_literal(rb, 6);
2923 lf->filter_level[1] = aom_rb_read_literal(rb, 6);
2924 if (lf->filter_level[0] || lf->filter_level[1]) {
Cheng Chene94df5c2017-07-19 17:25:33 -07002925 lf->filter_level_u = aom_rb_read_literal(rb, 6);
2926 lf->filter_level_v = aom_rb_read_literal(rb, 6);
2927 }
Cheng Chen179479f2017-08-04 10:56:39 -07002928#else
2929 lf->filter_level = aom_rb_read_literal(rb, 6);
Cheng Chene94df5c2017-07-19 17:25:33 -07002930#endif
Cheng Chenf572cd32017-08-25 18:34:51 -07002931#endif // CONFIG_LPF_SB
Yaowu Xuf883b422016-08-30 14:01:10 -07002932 lf->sharpness_level = aom_rb_read_literal(rb, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002933
2934 // Read in loop filter deltas applied at the MB level based on mode or ref
2935 // frame.
2936 lf->mode_ref_delta_update = 0;
2937
Yaowu Xuf883b422016-08-30 14:01:10 -07002938 lf->mode_ref_delta_enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002939 if (lf->mode_ref_delta_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002940 lf->mode_ref_delta_update = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002941 if (lf->mode_ref_delta_update) {
2942 int i;
2943
2944 for (i = 0; i < TOTAL_REFS_PER_FRAME; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07002945 if (aom_rb_read_bit(rb))
2946 lf->ref_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002947
2948 for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07002949 if (aom_rb_read_bit(rb))
2950 lf->mode_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002951 }
2952 }
2953}
2954
Jean-Marc Valin01435132017-02-18 14:12:53 -05002955#if CONFIG_CDEF
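// Parses the CDEF parameters: damping, the number of signalled strength
// presets, and the luma/chroma strength for each preset.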
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01002956static void setup_cdef(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002957 int i;
Steinar Midtskogen59782122017-07-20 08:49:43 +02002958#if CONFIG_CDEF_SINGLEPASS
2959 cm->cdef_pri_damping = cm->cdef_sec_damping = aom_rb_read_literal(rb, 2) + 3;
2960#else
Steinar Midtskogen94de0aa2017-08-02 10:30:12 +02002961 cm->cdef_pri_damping = aom_rb_read_literal(rb, 1) + 5;
2962 cm->cdef_sec_damping = aom_rb_read_literal(rb, 2) + 3;
Steinar Midtskogen59782122017-07-20 08:49:43 +02002963#endif
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002964 cm->cdef_bits = aom_rb_read_literal(rb, 2);
2965 cm->nb_cdef_strengths = 1 << cm->cdef_bits;
2966 for (i = 0; i < cm->nb_cdef_strengths; i++) {
2967 cm->cdef_strengths[i] = aom_rb_read_literal(rb, CDEF_STRENGTH_BITS);
Steinar Midtskogen1c1161f2017-09-08 15:03:51 +02002968 cm->cdef_uv_strengths[i] = cm->subsampling_x == cm->subsampling_y
2969 ? aom_rb_read_literal(rb, CDEF_STRENGTH_BITS)
2970 : 0;
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002971 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002972}
Jean-Marc Valin01435132017-02-18 14:12:53 -05002973#endif // CONFIG_CDEF
Yaowu Xuc27fc142016-08-22 16:08:15 -07002974
Yaowu Xuf883b422016-08-30 14:01:10 -07002975static INLINE int read_delta_q(struct aom_read_bit_buffer *rb) {
2976 return aom_rb_read_bit(rb) ? aom_rb_read_inv_signed_literal(rb, 6) : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002977}
2978
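// Reads the base quantizer index and the DC/AC delta-q values (plus the
// quantization-matrix level range when CONFIG_AOM_QM is enabled).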
Yaowu Xuf883b422016-08-30 14:01:10 -07002979static void setup_quantization(AV1_COMMON *const cm,
2980 struct aom_read_bit_buffer *rb) {
2981 cm->base_qindex = aom_rb_read_literal(rb, QINDEX_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002982 cm->y_dc_delta_q = read_delta_q(rb);
2983 cm->uv_dc_delta_q = read_delta_q(rb);
2984 cm->uv_ac_delta_q = read_delta_q(rb);
2985 cm->dequant_bit_depth = cm->bit_depth;
2986#if CONFIG_AOM_QM
Yaowu Xuf883b422016-08-30 14:01:10 -07002987 cm->using_qmatrix = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002988 if (cm->using_qmatrix) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002989 cm->min_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
2990 cm->max_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002991 } else {
2992 cm->min_qmlevel = 0;
2993 cm->max_qmlevel = 0;
2994 }
2995#endif
2996}
2997
Alex Converse05a3e7d2017-05-16 12:20:07 -07002998// Build y/uv dequant values based on segmentation.
Yaowu Xuf883b422016-08-30 14:01:10 -07002999static void setup_segmentation_dequant(AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003000#if CONFIG_AOM_QM
Alex Converse05a3e7d2017-05-16 12:20:07 -07003001 const int using_qm = cm->using_qmatrix;
3002 const int minqm = cm->min_qmlevel;
3003 const int maxqm = cm->max_qmlevel;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003004#endif
Alex Converse05a3e7d2017-05-16 12:20:07 -07003005 // When segmentation is disabled, only the first value is used. The
3006 // remaining are don't cares.
3007 const int max_segments = cm->seg.enabled ? MAX_SEGMENTS : 1;
3008 for (int i = 0; i < max_segments; ++i) {
3009 const int qindex = av1_get_qindex(&cm->seg, i, cm->base_qindex);
3010 cm->y_dequant[i][0] = av1_dc_quant(qindex, cm->y_dc_delta_q, cm->bit_depth);
3011 cm->y_dequant[i][1] = av1_ac_quant(qindex, 0, cm->bit_depth);
3012 cm->uv_dequant[i][0] =
Yaowu Xuf883b422016-08-30 14:01:10 -07003013 av1_dc_quant(qindex, cm->uv_dc_delta_q, cm->bit_depth);
Alex Converse05a3e7d2017-05-16 12:20:07 -07003014 cm->uv_dequant[i][1] =
Yaowu Xuf883b422016-08-30 14:01:10 -07003015 av1_ac_quant(qindex, cm->uv_ac_delta_q, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003016#if CONFIG_AOM_QM
Alex Converse05a3e7d2017-05-16 12:20:07 -07003017 const int lossless = qindex == 0 && cm->y_dc_delta_q == 0 &&
3018 cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
3019 // NB: depends on base index so there is only 1 set per frame
Yaowu Xuc27fc142016-08-22 16:08:15 -07003020 // No quant weighting when lossless or signalled not using QM
Alex Converse05a3e7d2017-05-16 12:20:07 -07003021 const int qmlevel = (lossless || using_qm == 0)
3022 ? NUM_QM_LEVELS - 1
3023 : aom_get_qmlevel(cm->base_qindex, minqm, maxqm);
Thomas Davies6675adf2017-05-04 17:39:21 +01003024 for (int j = 0; j < TX_SIZES_ALL; ++j) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003025 cm->y_iqmatrix[i][1][j] = aom_iqmatrix(cm, qmlevel, 0, j, 1);
3026 cm->y_iqmatrix[i][0][j] = aom_iqmatrix(cm, qmlevel, 0, j, 0);
3027 cm->uv_iqmatrix[i][1][j] = aom_iqmatrix(cm, qmlevel, 1, j, 1);
3028 cm->uv_iqmatrix[i][0][j] = aom_iqmatrix(cm, qmlevel, 1, j, 0);
3029 }
Alex Converse05a3e7d2017-05-16 12:20:07 -07003030#endif // CONFIG_AOM_QM
Yaowu Xuc27fc142016-08-22 16:08:15 -07003031#if CONFIG_NEW_QUANT
Alex Converse05a3e7d2017-05-16 12:20:07 -07003032 for (int dq = 0; dq < QUANT_PROFILES; dq++) {
3033 for (int b = 0; b < COEF_BANDS; ++b) {
3034 av1_get_dequant_val_nuq(cm->y_dequant[i][b != 0], b,
3035 cm->y_dequant_nuq[i][dq][b], NULL, dq);
3036 av1_get_dequant_val_nuq(cm->uv_dequant[i][b != 0], b,
3037 cm->uv_dequant_nuq[i][dq][b], NULL, dq);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003038 }
3039 }
3040#endif // CONFIG_NEW_QUANT
3041 }
3042}
3043
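// Reads the frame-level interpolation filter: SWITCHABLE, or an explicit
// filter index.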
Angie Chiang5678ad92016-11-21 09:38:40 -08003044static InterpFilter read_frame_interp_filter(struct aom_read_bit_buffer *rb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003045 return aom_rb_read_bit(rb) ? SWITCHABLE
Angie Chiang6305abe2016-10-24 12:24:44 -07003046 : aom_rb_read_literal(rb, LOG_SWITCHABLE_FILTERS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003047}
3048
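// Reads the optional render size; when absent it defaults to the (upscaled)
// coded frame size.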
Yaowu Xuf883b422016-08-30 14:01:10 -07003049static void setup_render_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003050#if CONFIG_FRAME_SUPERRES
3051 cm->render_width = cm->superres_upscaled_width;
3052 cm->render_height = cm->superres_upscaled_height;
3053#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003054 cm->render_width = cm->width;
3055 cm->render_height = cm->height;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003056#endif // CONFIG_FRAME_SUPERRES
Yaowu Xuf883b422016-08-30 14:01:10 -07003057 if (aom_rb_read_bit(rb))
3058 av1_read_frame_size(rb, &cm->render_width, &cm->render_height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003059}
3060
Fergus Simpsond91c8c92017-04-07 12:12:00 -07003061#if CONFIG_FRAME_SUPERRES
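// Reads the optional superres scale denominator and, when signalled, reduces
// the coded frame size from the (upscaled) size accordingly.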
Fergus Simpsone7508412017-03-14 18:14:09 -07003062// TODO(afergs): make "struct aom_read_bit_buffer *const rb"?
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003063static void setup_superres(AV1_COMMON *const cm, struct aom_read_bit_buffer *rb,
3064 int *width, int *height) {
3065 cm->superres_upscaled_width = *width;
3066 cm->superres_upscaled_height = *height;
Fergus Simpsone7508412017-03-14 18:14:09 -07003067 if (aom_rb_read_bit(rb)) {
Urvang Joshide71d142017-10-05 12:12:15 -07003068 cm->superres_scale_denominator =
Fergus Simpsone7508412017-03-14 18:14:09 -07003069 (uint8_t)aom_rb_read_literal(rb, SUPERRES_SCALE_BITS);
Urvang Joshide71d142017-10-05 12:12:15 -07003070 cm->superres_scale_denominator += SUPERRES_SCALE_DENOMINATOR_MIN;
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003071 // Don't edit cm->width or cm->height directly, or the buffers won't get
3072 // resized correctly
Urvang Joshide71d142017-10-05 12:12:15 -07003073 av1_calculate_scaled_size(width, height, cm->superres_scale_denominator);
Fergus Simpsone7508412017-03-14 18:14:09 -07003074 } else {
3075 // 1:1 scaling, i.e. no scaling; scale not provided
Urvang Joshide71d142017-10-05 12:12:15 -07003076 cm->superres_scale_denominator = SCALE_NUMERATOR;
Fergus Simpsone7508412017-03-14 18:14:09 -07003077 }
3078}
Fergus Simpsond91c8c92017-04-07 12:12:00 -07003079#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07003080
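// Checks the frame size against the compile-time decode limit, (re)allocates
// the mode-info and context buffers when the size changes, and keeps the
// motion-vector buffer of the current frame in sync.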
Yaowu Xuf883b422016-08-30 14:01:10 -07003081static void resize_context_buffers(AV1_COMMON *cm, int width, int height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003082#if CONFIG_SIZE_LIMIT
3083 if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
Yaowu Xuf883b422016-08-30 14:01:10 -07003084 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003085 "Dimensions of %dx%d beyond allowed size of %dx%d.",
3086 width, height, DECODE_WIDTH_LIMIT, DECODE_HEIGHT_LIMIT);
3087#endif
3088 if (cm->width != width || cm->height != height) {
3089 const int new_mi_rows =
3090 ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
3091 const int new_mi_cols =
3092 ALIGN_POWER_OF_TWO(width, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
3093
Yaowu Xuf883b422016-08-30 14:01:10 -07003094 // Allocations in av1_alloc_context_buffers() depend on individual
Yaowu Xuc27fc142016-08-22 16:08:15 -07003095 // dimensions as well as the overall size.
3096 if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003097 if (av1_alloc_context_buffers(cm, width, height))
3098 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003099 "Failed to allocate context buffers");
3100 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003101 av1_set_mb_mi(cm, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003102 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003103 av1_init_context_buffers(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003104 cm->width = width;
3105 cm->height = height;
3106 }
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01003107
3108 ensure_mv_buffer(cm->cur_frame, cm);
3109 cm->cur_frame->width = cm->width;
3110 cm->cur_frame->height = cm->height;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003111}
3112
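// Reads the frame size (plus superres and render sizes), resizes the context
// buffers and reallocates the output frame buffer to match.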
Yaowu Xuf883b422016-08-30 14:01:10 -07003113static void setup_frame_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003114 int width, height;
3115 BufferPool *const pool = cm->buffer_pool;
Yaowu Xuf883b422016-08-30 14:01:10 -07003116 av1_read_frame_size(rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003117#if CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003118 setup_superres(cm, rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003119#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003120 setup_render_size(cm, rb);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003121 resize_context_buffers(cm, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003122
3123 lock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07003124 if (aom_realloc_frame_buffer(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003125 get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
3126 cm->subsampling_y,
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02003127#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07003128 cm->use_highbitdepth,
3129#endif
Yaowu Xu671f2bd2016-09-30 15:07:57 -07003130 AOM_BORDER_IN_PIXELS, cm->byte_alignment,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003131 &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
3132 pool->cb_priv)) {
3133 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07003134 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003135 "Failed to allocate frame buffer");
3136 }
3137 unlock_buffer_pool(pool);
3138
3139 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
3140 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
3141 pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
3142 pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07003143#if CONFIG_COLORSPACE_HEADERS
3144 pool->frame_bufs[cm->new_fb_idx].buf.transfer_function =
3145 cm->transfer_function;
3146 pool->frame_bufs[cm->new_fb_idx].buf.chroma_sample_position =
3147 cm->chroma_sample_position;
3148#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003149 pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
3150 pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
3151 pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
3152}
3153
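// Selects the superblock size: read from the bitstream when
// CONFIG_EXT_PARTITION is enabled (128x128 or 64x64), otherwise fixed at
// 64x64.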
Debargha Mukherjeed2630fa2017-09-22 10:32:51 -07003154static void setup_sb_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
3155 (void)rb;
3156#if CONFIG_EXT_PARTITION
3157 set_sb_size(cm, aom_rb_read_bit(rb) ? BLOCK_128X128 : BLOCK_64X64);
3158#else
3159 set_sb_size(cm, BLOCK_64X64);
3160#endif // CONFIG_EXT_PARTITION
3161}
3162
Yaowu Xuf883b422016-08-30 14:01:10 -07003163static INLINE int valid_ref_frame_img_fmt(aom_bit_depth_t ref_bit_depth,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003164 int ref_xss, int ref_yss,
Yaowu Xuf883b422016-08-30 14:01:10 -07003165 aom_bit_depth_t this_bit_depth,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003166 int this_xss, int this_yss) {
3167 return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
3168 ref_yss == this_yss;
3169}
3170
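// For inter frames: either inherits the frame size from one of the reference
// frames or reads it explicitly, validates the references against the
// resulting size and chroma format, then reallocates the frame buffer.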
Yaowu Xuf883b422016-08-30 14:01:10 -07003171static void setup_frame_size_with_refs(AV1_COMMON *cm,
3172 struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003173 int width, height;
3174 int found = 0, i;
3175 int has_valid_ref_frame = 0;
3176 BufferPool *const pool = cm->buffer_pool;
3177 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003178 if (aom_rb_read_bit(rb)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003179 YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf;
3180 width = buf->y_crop_width;
3181 height = buf->y_crop_height;
3182 cm->render_width = buf->render_width;
3183 cm->render_height = buf->render_height;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003184#if CONFIG_FRAME_SUPERRES
3185 setup_superres(cm, rb, &width, &height);
3186#endif // CONFIG_FRAME_SUPERRES
Yaowu Xuc27fc142016-08-22 16:08:15 -07003187 found = 1;
3188 break;
3189 }
3190 }
3191
3192 if (!found) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003193 av1_read_frame_size(rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003194#if CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003195 setup_superres(cm, rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07003196#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003197 setup_render_size(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003198 }
3199
3200 if (width <= 0 || height <= 0)
Yaowu Xuf883b422016-08-30 14:01:10 -07003201 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003202 "Invalid frame size");
3203
3204 // Check to make sure at least one of frames that this frame references
3205 // has valid dimensions.
3206 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
3207 RefBuffer *const ref_frame = &cm->frame_refs[i];
3208 has_valid_ref_frame |=
3209 valid_ref_frame_size(ref_frame->buf->y_crop_width,
3210 ref_frame->buf->y_crop_height, width, height);
3211 }
3212 if (!has_valid_ref_frame)
Yaowu Xuf883b422016-08-30 14:01:10 -07003213 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003214 "Referenced frame has invalid size");
3215 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
3216 RefBuffer *const ref_frame = &cm->frame_refs[i];
3217 if (!valid_ref_frame_img_fmt(ref_frame->buf->bit_depth,
3218 ref_frame->buf->subsampling_x,
3219 ref_frame->buf->subsampling_y, cm->bit_depth,
3220 cm->subsampling_x, cm->subsampling_y))
Yaowu Xuf883b422016-08-30 14:01:10 -07003221 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003222 "Referenced frame has incompatible color format");
3223 }
3224
3225 resize_context_buffers(cm, width, height);
3226
3227 lock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07003228 if (aom_realloc_frame_buffer(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003229 get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
3230 cm->subsampling_y,
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02003231#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07003232 cm->use_highbitdepth,
3233#endif
Yaowu Xu671f2bd2016-09-30 15:07:57 -07003234 AOM_BORDER_IN_PIXELS, cm->byte_alignment,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003235 &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
3236 pool->cb_priv)) {
3237 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07003238 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003239 "Failed to allocate frame buffer");
3240 }
3241 unlock_buffer_pool(pool);
3242
3243 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
3244 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
3245 pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
3246 pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07003247#if CONFIG_COLORSPACE_HEADERS
3248 pool->frame_bufs[cm->new_fb_idx].buf.transfer_function =
3249 cm->transfer_function;
3250 pool->frame_bufs[cm->new_fb_idx].buf.chroma_sample_position =
3251 cm->chroma_sample_position;
3252#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003253 pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
3254 pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
3255 pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
3256}
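// A sketch of the syntax consumed by setup_frame_size_with_refs() above,
// written as illustrative pseudo-code only (the function itself is
// authoritative):
//
//   for (i = 0; i < INTER_REFS_PER_FRAME; ++i)
//     if (read_bit())   // reuse this reference's dimensions
//       { copy width/height/render size from ref i; [superres]; break; }
//   if (!found)
//     { read explicit width/height; [superres]; read render size; }
//
// followed by checks that at least one reference has a usable size and a
// compatible color format before the new frame buffer is (re)allocated.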
3257
David Barker1a191122017-09-06 15:24:16 +01003258static void read_tile_group_range(AV1Decoder *pbi,
3259 struct aom_read_bit_buffer *const rb) {
3260 AV1_COMMON *const cm = &pbi->common;
3261 const int num_bits = cm->log2_tile_rows + cm->log2_tile_cols;
3262 const int num_tiles =
3263 cm->tile_rows * cm->tile_cols; // Note: May be < (1<<num_bits)
3264 pbi->tg_start = aom_rb_read_literal(rb, num_bits);
3265 pbi->tg_size = 1 + aom_rb_read_literal(rb, num_bits);
3266 if (pbi->tg_start + pbi->tg_size > num_tiles)
3267 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3268 "Tile group extends past last tile in frame");
3269}
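// Worked example (illustrative only): with 2x2 uniformly spaced tiles,
// log2_tile_rows = log2_tile_cols = 1, so num_bits = 2. A tile group
// covering tiles 2..3 in raster order is coded as
//   tg_start     = 2 -> literal "10"
//   tg_size - 1  = 1 -> literal "01"
// and the check above rejects any group extending past num_tiles = 4.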
3270
Dominic Symesdb5d66f2017-08-18 18:11:34 +02003271#if CONFIG_MAX_TILE
3272
3273// Same function as av1_read_uniform but reading from the uncompressed header rb
3274static int rb_read_uniform(struct aom_read_bit_buffer *const rb, int n) {
3275 const int l = get_unsigned_bits(n);
3276 const int m = (1 << l) - n;
3277 const int v = aom_rb_read_literal(rb, l - 1);
3278 assert(l != 0);
3279 if (v < m)
3280 return v;
3281 else
3282 return (v << 1) - m + aom_rb_read_literal(rb, 1);
3283}
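// Worked example (illustrative, assuming get_unsigned_bits(5) == 3): for
// n = 5, l = 3 and m = (1 << 3) - 5 = 3, giving a quasi-uniform code:
//   values 0..2 -> 2 bits ("00", "01", "10")
//   value  3    -> 3 bits ("110"): v = 3, extra bit 0, (3 << 1) - 3 + 0 = 3
//   value  4    -> 3 bits ("111"): v = 3, extra bit 1, (3 << 1) - 3 + 1 = 4
// i.e. the m smallest values save one bit relative to a fixed l-bit code.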
3284
3285static void read_tile_info_max_tile(AV1_COMMON *const cm,
3286 struct aom_read_bit_buffer *const rb) {
3287 int width_mi = ALIGN_POWER_OF_TWO(cm->mi_cols, MAX_MIB_SIZE_LOG2);
3288 int height_mi = ALIGN_POWER_OF_TWO(cm->mi_rows, MAX_MIB_SIZE_LOG2);
3289 int width_sb = width_mi >> MAX_MIB_SIZE_LOG2;
3290 int height_sb = height_mi >> MAX_MIB_SIZE_LOG2;
3291 int start_sb, size_sb, i;
3292
3293 av1_get_tile_limits(cm);
3294 cm->uniform_tile_spacing_flag = aom_rb_read_bit(rb);
3295
3296 // Read tile columns
3297 if (cm->uniform_tile_spacing_flag) {
3298 cm->log2_tile_cols = cm->min_log2_tile_cols;
3299 while (cm->log2_tile_cols < cm->max_log2_tile_cols) {
3300 if (!aom_rb_read_bit(rb)) {
3301 break;
3302 }
3303 cm->log2_tile_cols++;
3304 }
3305 } else {
Dominic Symesf58f1112017-09-25 12:47:40 +02003306 for (i = 0, start_sb = 0; width_sb > 0 && i < MAX_TILE_COLS; i++) {
Dominic Symesdb5d66f2017-08-18 18:11:34 +02003307 size_sb = 1 + rb_read_uniform(rb, AOMMIN(width_sb, MAX_TILE_WIDTH_SB));
3308 cm->tile_col_start_sb[i] = start_sb;
3309 start_sb += size_sb;
3310 width_sb -= size_sb;
3311 }
3312 cm->tile_cols = i;
Dominic Symesf58f1112017-09-25 12:47:40 +02003313 cm->tile_col_start_sb[i] = start_sb + width_sb;
Dominic Symesdb5d66f2017-08-18 18:11:34 +02003314 }
3315 av1_calculate_tile_cols(cm);
3316
3317 // Read tile rows
3318 if (cm->uniform_tile_spacing_flag) {
3319 cm->log2_tile_rows = cm->min_log2_tile_rows;
3320 while (cm->log2_tile_rows < cm->max_log2_tile_rows) {
3321 if (!aom_rb_read_bit(rb)) {
3322 break;
3323 }
3324 cm->log2_tile_rows++;
3325 }
3326 } else {
Dominic Symesf58f1112017-09-25 12:47:40 +02003327 for (i = 0, start_sb = 0; height_sb > 0 && i < MAX_TILE_ROWS; i++) {
Dominic Symesdb5d66f2017-08-18 18:11:34 +02003328 size_sb =
3329 1 + rb_read_uniform(rb, AOMMIN(height_sb, cm->max_tile_height_sb));
3330 cm->tile_row_start_sb[i] = start_sb;
3331 start_sb += size_sb;
3332 height_sb -= size_sb;
3333 }
3334 cm->tile_rows = i;
Dominic Symesf58f1112017-09-25 12:47:40 +02003335 cm->tile_row_start_sb[i] = start_sb + height_sb;
Dominic Symesdb5d66f2017-08-18 18:11:34 +02003336 }
3337 av1_calculate_tile_rows(cm);
3338}
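// Worked example for the explicit (non-uniform) column path above,
// illustrative only: with width_sb = 10 and coded column widths of 4, 4 and
// 2 superblocks, the loop produces
//   tile_col_start_sb[] = { 0, 4, 8, 10 }   and   tile_cols = 3
// where the final entry (start_sb + width_sb) marks the right frame edge.
// Rows are read the same way, capped by cm->max_tile_height_sb instead of
// MAX_TILE_WIDTH_SB.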
3339#endif
3340
Yaowu Xuf883b422016-08-30 14:01:10 -07003341static void read_tile_info(AV1Decoder *const pbi,
3342 struct aom_read_bit_buffer *const rb) {
3343 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003344#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003345 cm->single_tile_decoding = 0;
3346 if (cm->large_scale_tile) {
3347 struct loopfilter *lf = &cm->lf;
3348
3349 // Derive single_tile_decoding from the loop filter level.
3350 cm->single_tile_decoding = (!lf->filter_level) ? 1 : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003351// Read the tile width/height
3352#if CONFIG_EXT_PARTITION
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003353 if (cm->sb_size == BLOCK_128X128) {
3354 cm->tile_width = aom_rb_read_literal(rb, 5) + 1;
3355 cm->tile_height = aom_rb_read_literal(rb, 5) + 1;
3356 } else {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003357#endif // CONFIG_EXT_PARTITION
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003358 cm->tile_width = aom_rb_read_literal(rb, 6) + 1;
3359 cm->tile_height = aom_rb_read_literal(rb, 6) + 1;
3360#if CONFIG_EXT_PARTITION
3361 }
3362#endif // CONFIG_EXT_PARTITION
Yaowu Xuc27fc142016-08-22 16:08:15 -07003363
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003364#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003365 cm->loop_filter_across_tiles_enabled = aom_rb_read_bit(rb);
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003366#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Ryan Lei7386eda2016-12-08 21:08:31 -08003367
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003368 cm->tile_width <<= cm->mib_size_log2;
3369 cm->tile_height <<= cm->mib_size_log2;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003370
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003371 cm->tile_width = AOMMIN(cm->tile_width, cm->mi_cols);
3372 cm->tile_height = AOMMIN(cm->tile_height, cm->mi_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003373
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003374 // Get the number of tiles
3375 cm->tile_cols = 1;
3376 while (cm->tile_cols * cm->tile_width < cm->mi_cols) ++cm->tile_cols;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003377
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003378 cm->tile_rows = 1;
3379 while (cm->tile_rows * cm->tile_height < cm->mi_rows) ++cm->tile_rows;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003380
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003381 if (cm->tile_cols * cm->tile_rows > 1) {
3382 // Read the number of bytes used to store tile size
3383 pbi->tile_col_size_bytes = aom_rb_read_literal(rb, 2) + 1;
3384 pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
3385 }
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003386
3387#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003388 cm->dependent_horz_tiles = 0;
3389#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003390 } else {
3391#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003392
Dominic Symesdb5d66f2017-08-18 18:11:34 +02003393#if CONFIG_MAX_TILE
3394 read_tile_info_max_tile(cm, rb);
3395#else
3396 int min_log2_tile_cols, max_log2_tile_cols, max_ones;
3397 av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003398
Dominic Symesdb5d66f2017-08-18 18:11:34 +02003399 // columns
3400 max_ones = max_log2_tile_cols - min_log2_tile_cols;
3401 cm->log2_tile_cols = min_log2_tile_cols;
3402 while (max_ones-- && aom_rb_read_bit(rb)) cm->log2_tile_cols++;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003403
Dominic Symesdb5d66f2017-08-18 18:11:34 +02003404 if (cm->log2_tile_cols > 6)
3405 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3406 "Invalid number of tile columns");
3407
3408 // rows
3409 cm->log2_tile_rows = aom_rb_read_bit(rb);
3410 if (cm->log2_tile_rows) cm->log2_tile_rows += aom_rb_read_bit(rb);
3411
Rupert Swarbrick5a010aa2017-09-26 16:16:48 +01003412 cm->tile_width =
3413 get_tile_size(cm->mi_cols, cm->log2_tile_cols, &cm->tile_cols);
3414 cm->tile_height =
3415 get_tile_size(cm->mi_rows, cm->log2_tile_rows, &cm->tile_rows);
Dominic Symesdb5d66f2017-08-18 18:11:34 +02003416
Dominic Symesdb5d66f2017-08-18 18:11:34 +02003417#endif // CONFIG_MAX_TILE
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003418#if CONFIG_DEPENDENT_HORZTILES
Dominic Symesdb5d66f2017-08-18 18:11:34 +02003419 if (cm->tile_rows > 1)
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003420 cm->dependent_horz_tiles = aom_rb_read_bit(rb);
3421 else
3422 cm->dependent_horz_tiles = 0;
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003423#endif
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003424#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003425 cm->loop_filter_across_tiles_enabled = aom_rb_read_bit(rb);
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003426#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Ryan Lei7386eda2016-12-08 21:08:31 -08003427
Thomas Daviesb25ba502017-07-18 10:18:24 +01003428 // Number of bytes used to signal each tile size.
3429 pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003430#if CONFIG_EXT_TILE
3431 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003432#endif // CONFIG_EXT_TILE
Thomas Davies4974e522016-11-07 17:44:05 +00003433
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003434// Each tile group header is in its own tile group OBU.
3435#if !CONFIG_OBU
Thomas Davies80188d12016-10-26 16:08:35 -07003436 // Store an index to the location of the tile group information
3437 pbi->tg_size_bit_offset = rb->bit_offset;
David Barker1a191122017-09-06 15:24:16 +01003438 read_tile_group_range(pbi, rb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003439#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003440}
3441
Yaowu Xu4ff59b52017-04-24 12:41:56 -07003442static int mem_get_varsize(const uint8_t *src, int sz) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003443 switch (sz) {
3444 case 1: return src[0];
3445 case 2: return mem_get_le16(src);
3446 case 3: return mem_get_le24(src);
3447 case 4: return mem_get_le32(src);
James Zern88896732017-06-23 15:55:09 -07003448 default: assert(0 && "Invalid size"); return -1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003449 }
3450}
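// Example (illustrative): with sz = 3 and the bytes at 'src' being
// 0x01 0x02 0x03, mem_get_le24() yields
//   0x01 | (0x02 << 8) | (0x03 << 16) = 0x030201 = 197121
// i.e. tile sizes are stored little-endian in exactly 'sz' bytes.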
3451
3452#if CONFIG_EXT_TILE
3453// Reads the next tile returning its size and adjusting '*data' accordingly,
3454// resolving tile-copy references to previously read tiles when enabled.
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003455static void get_ls_tile_buffer(
3456 const uint8_t *const data_end, struct aom_internal_error_info *error_info,
3457 const uint8_t **data, aom_decrypt_cb decrypt_cb, void *decrypt_state,
3458 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS], int tile_size_bytes,
3459 int col, int row, int tile_copy_mode) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003460 size_t size;
3461
3462 size_t copy_size = 0;
3463 const uint8_t *copy_data = NULL;
3464
3465 if (!read_is_valid(*data, tile_size_bytes, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07003466 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003467 "Truncated packet or corrupt tile length");
3468 if (decrypt_cb) {
3469 uint8_t be_data[4];
3470 decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
3471
3472 // Only read number of bytes in cm->tile_size_bytes.
3473 size = mem_get_varsize(be_data, tile_size_bytes);
3474 } else {
3475 size = mem_get_varsize(*data, tile_size_bytes);
3476 }
3477
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003478 // If tile_copy_mode = 1, then the top bit of the tile header indicates copy
3479 // mode.
3480 if (tile_copy_mode && (size >> (tile_size_bytes * 8 - 1)) == 1) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003481 // The remaining bits in the top byte signal the row offset
3482 int offset = (size >> (tile_size_bytes - 1) * 8) & 0x7f;
3483 // Currently, only tiles in the same column may be used as reference tiles.
3484 // Currently, only use tiles in same column as reference tiles.
3485 copy_data = tile_buffers[row - offset][col].data;
3486 copy_size = tile_buffers[row - offset][col].size;
3487 size = 0;
3488 }
3489
3490 *data += tile_size_bytes;
3491
3492 if (size > (size_t)(data_end - *data))
Yaowu Xuf883b422016-08-30 14:01:10 -07003493 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003494 "Truncated packet or corrupt tile size");
3495
3496 if (size > 0) {
3497 tile_buffers[row][col].data = *data;
3498 tile_buffers[row][col].size = size;
3499 } else {
3500 tile_buffers[row][col].data = copy_data;
3501 tile_buffers[row][col].size = copy_size;
3502 }
3503
3504 *data += size;
3505
3506 tile_buffers[row][col].raw_data_end = *data;
3507}
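// Worked example of the copy mode above (illustrative): with
// tile_size_bytes = 2 and tile_copy_mode = 1, a size field whose bytes are
// 0x00 0x83 is read as 0x8300. Its top bit is set, so it is not a size:
//   offset = (0x8300 >> 8) & 0x7f = 3
// and this tile reuses the data of the tile three rows above in the same
// column; only the two header bytes are consumed from '*data'.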
3508
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003509static void get_ls_tile_buffers(
Yaowu Xuf883b422016-08-30 14:01:10 -07003510 AV1Decoder *pbi, const uint8_t *data, const uint8_t *data_end,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003511 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS]) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003512 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003513 const int tile_cols = cm->tile_cols;
3514 const int tile_rows = cm->tile_rows;
3515 const int have_tiles = tile_cols * tile_rows > 1;
3516
3517 if (!have_tiles) {
Jingning Han99ffce62017-04-25 15:48:41 -07003518 const size_t tile_size = data_end - data;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003519 tile_buffers[0][0].data = data;
3520 tile_buffers[0][0].size = tile_size;
3521 tile_buffers[0][0].raw_data_end = NULL;
3522 } else {
3523 // We locate only the tile buffers that are required, which are the ones
3524 // specified by pbi->dec_tile_col and pbi->dec_tile_row. Also, we always
3525 // need the last (bottom right) tile buffer, as we need to know where the
3526 // end of the compressed frame buffer is for proper superframe decoding.
3527
3528 const uint8_t *tile_col_data_end[MAX_TILE_COLS];
3529 const uint8_t *const data_start = data;
3530
Yaowu Xuf883b422016-08-30 14:01:10 -07003531 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003532 const int single_row = pbi->dec_tile_row >= 0;
3533 const int tile_rows_start = single_row ? dec_tile_row : 0;
3534 const int tile_rows_end = single_row ? tile_rows_start + 1 : tile_rows;
Yaowu Xuf883b422016-08-30 14:01:10 -07003535 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003536 const int single_col = pbi->dec_tile_col >= 0;
3537 const int tile_cols_start = single_col ? dec_tile_col : 0;
3538 const int tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
3539
3540 const int tile_col_size_bytes = pbi->tile_col_size_bytes;
3541 const int tile_size_bytes = pbi->tile_size_bytes;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003542 const int tile_copy_mode =
3543 ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256) ? 1
3544 : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003545 size_t tile_col_size;
3546 int r, c;
3547
3548 // Read tile column sizes for all columns (we need the last tile buffer)
3549 for (c = 0; c < tile_cols; ++c) {
3550 const int is_last = c == tile_cols - 1;
3551 if (!is_last) {
3552 tile_col_size = mem_get_varsize(data, tile_col_size_bytes);
3553 data += tile_col_size_bytes;
3554 tile_col_data_end[c] = data + tile_col_size;
3555 } else {
3556 tile_col_size = data_end - data;
3557 tile_col_data_end[c] = data_end;
3558 }
3559 data += tile_col_size;
3560 }
3561
3562 data = data_start;
3563
3564 // Read the required tile sizes.
3565 for (c = tile_cols_start; c < tile_cols_end; ++c) {
3566 const int is_last = c == tile_cols - 1;
3567
3568 if (c > 0) data = tile_col_data_end[c - 1];
3569
3570 if (!is_last) data += tile_col_size_bytes;
3571
3572 // Get the whole of the last column, otherwise stop at the required tile.
3573 for (r = 0; r < (is_last ? tile_rows : tile_rows_end); ++r) {
3574 tile_buffers[r][c].col = c;
3575
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003576 get_ls_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
3577 pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
3578 tile_size_bytes, c, r, tile_copy_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003579 }
3580 }
3581
3582 // If we have not read the last column, then read it to get the last tile.
3583 if (tile_cols_end != tile_cols) {
3584 c = tile_cols - 1;
3585
3586 data = tile_col_data_end[c - 1];
3587
3588 for (r = 0; r < tile_rows; ++r) {
3589 tile_buffers[r][c].col = c;
3590
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003591 get_ls_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
3592 pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
3593 tile_size_bytes, c, r, tile_copy_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003594 }
3595 }
3596 }
3597}
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003598#endif // CONFIG_EXT_TILE
3599
Yaowu Xuc27fc142016-08-22 16:08:15 -07003600// Reads the next tile returning its size and adjusting '*data' accordingly
3601// based on 'is_last'.
3602static void get_tile_buffer(const uint8_t *const data_end,
3603 const int tile_size_bytes, int is_last,
Yaowu Xuf883b422016-08-30 14:01:10 -07003604 struct aom_internal_error_info *error_info,
3605 const uint8_t **data, aom_decrypt_cb decrypt_cb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003606 void *decrypt_state, TileBufferDec *const buf) {
3607 size_t size;
3608
3609 if (!is_last) {
Yaowu Xu0a79a1b2017-02-17 13:04:54 -08003610 if (!read_is_valid(*data, tile_size_bytes, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07003611 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003612 "Truncated packet or corrupt tile length");
3613
3614 if (decrypt_cb) {
3615 uint8_t be_data[4];
3616 decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
3617 size = mem_get_varsize(be_data, tile_size_bytes);
3618 } else {
3619 size = mem_get_varsize(*data, tile_size_bytes);
3620 }
3621 *data += tile_size_bytes;
3622
3623 if (size > (size_t)(data_end - *data))
Yaowu Xuf883b422016-08-30 14:01:10 -07003624 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003625 "Truncated packet or corrupt tile size");
3626 } else {
3627 size = data_end - *data;
3628 }
3629
3630 buf->data = *data;
3631 buf->size = size;
3632
3633 *data += size;
3634}
3635
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003636static void get_tile_buffers(AV1Decoder *pbi, const uint8_t *data,
3637 const uint8_t *data_end,
3638 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS],
3639 int startTile, int endTile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003640 AV1_COMMON *const cm = &pbi->common;
Thomas Davies80188d12016-10-26 16:08:35 -07003641 int r, c;
3642 const int tile_cols = cm->tile_cols;
3643 const int tile_rows = cm->tile_rows;
3644 int tc = 0;
3645 int first_tile_in_tg = 0;
Thomas Davies80188d12016-10-26 16:08:35 -07003646 struct aom_read_bit_buffer rb_tg_hdr;
3647 uint8_t clear_data[MAX_AV1_HEADER_SIZE];
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003648#if !CONFIG_OBU
James Zern6efba482017-04-20 20:53:49 -07003649 const size_t hdr_size = pbi->uncomp_hdr_size + pbi->first_partition_size;
Thomas Davies80188d12016-10-26 16:08:35 -07003650 const int tg_size_bit_offset = pbi->tg_size_bit_offset;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003651#else
3652 const int tg_size_bit_offset = 0;
3653#endif
3654
Fangwen Fu73126c02017-02-08 22:37:47 -08003655#if CONFIG_DEPENDENT_HORZTILES
3656 int tile_group_start_col = 0;
3657 int tile_group_start_row = 0;
3658#endif
Thomas Davies80188d12016-10-26 16:08:35 -07003659
3660 for (r = 0; r < tile_rows; ++r) {
3661 for (c = 0; c < tile_cols; ++c, ++tc) {
Thomas Davies80188d12016-10-26 16:08:35 -07003662 TileBufferDec *const buf = &tile_buffers[r][c];
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003663#if CONFIG_OBU
3664 const int is_last = (tc == endTile);
3665 const size_t hdr_offset = 0;
3666#else
Thomas Daviesa0de6d52017-01-20 14:45:25 +00003667 const int is_last = (r == tile_rows - 1) && (c == tile_cols - 1);
James Zern6efba482017-04-20 20:53:49 -07003668 const size_t hdr_offset = (tc && tc == first_tile_in_tg) ? hdr_size : 0;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003669#endif
3670
3671 if (tc < startTile || tc > endTile) continue;
Thomas Davies80188d12016-10-26 16:08:35 -07003672
Rupert Swarbrickcd757392017-09-01 13:57:53 +01003673 if (data + hdr_offset >= data_end)
3674 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3675 "Data ended before all tiles were read.");
Thomas Davies80188d12016-10-26 16:08:35 -07003676 buf->col = c;
3677 if (hdr_offset) {
3678 init_read_bit_buffer(pbi, &rb_tg_hdr, data, data_end, clear_data);
3679 rb_tg_hdr.bit_offset = tg_size_bit_offset;
David Barker1a191122017-09-06 15:24:16 +01003680 read_tile_group_range(pbi, &rb_tg_hdr);
Fangwen Fu73126c02017-02-08 22:37:47 -08003681#if CONFIG_DEPENDENT_HORZTILES
David Barker1a191122017-09-06 15:24:16 +01003682 tile_group_start_row = r;
3683 tile_group_start_col = c;
Fangwen Fu73126c02017-02-08 22:37:47 -08003684#endif
Thomas Davies80188d12016-10-26 16:08:35 -07003685 }
3686 first_tile_in_tg += tc == first_tile_in_tg ? pbi->tg_size : 0;
3687 data += hdr_offset;
Thomas Daviesa0de6d52017-01-20 14:45:25 +00003688 get_tile_buffer(data_end, pbi->tile_size_bytes, is_last,
3689 &pbi->common.error, &data, pbi->decrypt_cb,
3690 pbi->decrypt_state, buf);
Fangwen Fu73126c02017-02-08 22:37:47 -08003691#if CONFIG_DEPENDENT_HORZTILES
3692 cm->tile_group_start_row[r][c] = tile_group_start_row;
3693 cm->tile_group_start_col[r][c] = tile_group_start_col;
3694#endif
Thomas Davies80188d12016-10-26 16:08:35 -07003695 }
3696 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003697}
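// Illustrative sketch of the packet layout assumed by the non-OBU path
// above (not a normative description): the first tile group follows the
// frame header that has already been parsed, and every later tile group
// repeats a header of hdr_size bytes whose tile-group range is re-read via
// read_tile_group_range():
//
//   [frame hdr][tile 0]...[tile k-1][hdr][tile k]...[hdr][tile m]...
//
// With CONFIG_OBU each tile group is carried in its own OBU, so hdr_offset
// stays 0 and only the tiles in [startTile, endTile] are read here.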
Yaowu Xuc27fc142016-08-22 16:08:15 -07003698
Yushin Cho77bba8d2016-11-04 16:36:56 -07003699#if CONFIG_PVQ
Yushin Cho70669122016-12-08 09:53:14 -10003700static void daala_dec_init(AV1_COMMON *const cm, daala_dec_ctx *daala_dec,
Nathan E. Eggeab083972016-12-28 15:31:46 -05003701 aom_reader *r) {
3702 daala_dec->r = r;
Yushin Cho77bba8d2016-11-04 16:36:56 -07003703
Yushin Cho70669122016-12-08 09:53:14 -10003704 // TODO(yushin) : activity masking info needs be signaled by a bitstream
3705 daala_dec->use_activity_masking = AV1_PVQ_ENABLE_ACTIVITY_MASKING;
3706
3707 if (daala_dec->use_activity_masking)
3708 daala_dec->qm = OD_HVS_QM;
3709 else
3710 daala_dec->qm = OD_FLAT_QM;
Yushin Cho77bba8d2016-11-04 16:36:56 -07003711
3712 od_init_qm(daala_dec->state.qm, daala_dec->state.qm_inv,
3713 daala_dec->qm == OD_HVS_QM ? OD_QM8_Q4_HVS : OD_QM8_Q4_FLAT);
Yushin Cho70669122016-12-08 09:53:14 -10003714
3715 if (daala_dec->use_activity_masking) {
3716 int pli;
3717 int use_masking = daala_dec->use_activity_masking;
3718 int segment_id = 0;
3719 int qindex = av1_get_qindex(&cm->seg, segment_id, cm->base_qindex);
3720
3721 for (pli = 0; pli < MAX_MB_PLANE; pli++) {
3722 int i;
3723 int q;
3724
3725 q = qindex;
3726 if (q <= OD_DEFAULT_QMS[use_masking][0][pli].interp_q << OD_COEFF_SHIFT) {
3727 od_interp_qm(&daala_dec->state.pvq_qm_q4[pli][0], q,
3728 &OD_DEFAULT_QMS[use_masking][0][pli], NULL);
3729 } else {
3730 i = 0;
3731 while (OD_DEFAULT_QMS[use_masking][i + 1][pli].qm_q4 != NULL &&
3732 q > OD_DEFAULT_QMS[use_masking][i + 1][pli].interp_q
3733 << OD_COEFF_SHIFT) {
3734 i++;
3735 }
3736 od_interp_qm(&daala_dec->state.pvq_qm_q4[pli][0], q,
3737 &OD_DEFAULT_QMS[use_masking][i][pli],
3738 &OD_DEFAULT_QMS[use_masking][i + 1][pli]);
3739 }
3740 }
3741 }
Yushin Cho77bba8d2016-11-04 16:36:56 -07003742}
Yushin Cho70669122016-12-08 09:53:14 -10003743#endif // CONFIG_PVQ
Yushin Cho77bba8d2016-11-04 16:36:56 -07003744
David Barker5c06a642017-08-18 13:18:16 +01003745#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07003746static void dec_setup_across_tile_boundary_info(
3747 const AV1_COMMON *const cm, const TileInfo *const tile_info) {
Frederic Barbier94e38562017-08-16 14:38:48 +02003748 if (tile_info->mi_row_start >= tile_info->mi_row_end ||
3749 tile_info->mi_col_start >= tile_info->mi_col_end)
3750 return;
3751
David Barker5c06a642017-08-18 13:18:16 +01003752 if (!cm->loop_filter_across_tiles_enabled) {
Yi Luo10e23002017-07-31 11:54:43 -07003753 av1_setup_across_tile_boundary_info(cm, tile_info);
3754 }
3755}
David Barker5c06a642017-08-18 13:18:16 +01003756#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07003757
Yaowu Xuf883b422016-08-30 14:01:10 -07003758static const uint8_t *decode_tiles(AV1Decoder *pbi, const uint8_t *data,
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003759 const uint8_t *data_end, int startTile,
3760 int endTile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003761 AV1_COMMON *const cm = &pbi->common;
3762 const AVxWorkerInterface *const winterface = aom_get_worker_interface();
Yaowu Xuc27fc142016-08-22 16:08:15 -07003763 const int tile_cols = cm->tile_cols;
3764 const int tile_rows = cm->tile_rows;
3765 const int n_tiles = tile_cols * tile_rows;
clang-format67948d32016-09-07 22:40:40 -07003766 TileBufferDec(*const tile_buffers)[MAX_TILE_COLS] = pbi->tile_buffers;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003767#if CONFIG_EXT_TILE
Yaowu Xuf883b422016-08-30 14:01:10 -07003768 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003769 const int single_row = pbi->dec_tile_row >= 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07003770 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003771 const int single_col = pbi->dec_tile_col >= 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003772#endif // CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003773 int tile_rows_start;
3774 int tile_rows_end;
3775 int tile_cols_start;
3776 int tile_cols_end;
3777 int inv_col_order;
3778 int inv_row_order;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003779 int tile_row, tile_col;
3780
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003781#if CONFIG_EXT_TILE
3782 if (cm->large_scale_tile) {
3783 tile_rows_start = single_row ? dec_tile_row : 0;
3784 tile_rows_end = single_row ? dec_tile_row + 1 : tile_rows;
3785 tile_cols_start = single_col ? dec_tile_col : 0;
3786 tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
3787 inv_col_order = pbi->inv_tile_order && !single_col;
3788 inv_row_order = pbi->inv_tile_order && !single_row;
3789 } else {
3790#endif // CONFIG_EXT_TILE
3791 tile_rows_start = 0;
3792 tile_rows_end = tile_rows;
3793 tile_cols_start = 0;
3794 tile_cols_end = tile_cols;
3795 inv_col_order = pbi->inv_tile_order;
3796 inv_row_order = pbi->inv_tile_order;
3797#if CONFIG_EXT_TILE
3798 }
3799#endif // CONFIG_EXT_TILE
3800
Yaowu Xuc27fc142016-08-22 16:08:15 -07003801 if (cm->lf.filter_level && !cm->skip_loop_filter &&
3802 pbi->lf_worker.data1 == NULL) {
3803 CHECK_MEM_ERROR(cm, pbi->lf_worker.data1,
Yaowu Xuf883b422016-08-30 14:01:10 -07003804 aom_memalign(32, sizeof(LFWorkerData)));
3805 pbi->lf_worker.hook = (AVxWorkerHook)av1_loop_filter_worker;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003806 if (pbi->max_threads > 1 && !winterface->reset(&pbi->lf_worker)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003807 aom_internal_error(&cm->error, AOM_CODEC_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003808 "Loop filter thread creation failed");
3809 }
3810 }
3811
3812 if (cm->lf.filter_level && !cm->skip_loop_filter) {
3813 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
3814 // Be sure to sync as we might be resuming after a failed frame decode.
3815 winterface->sync(&pbi->lf_worker);
Yaowu Xuf883b422016-08-30 14:01:10 -07003816 av1_loop_filter_data_reset(lf_data, get_frame_new_buffer(cm), cm,
3817 pbi->mb.plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003818 }
3819
3820 assert(tile_rows <= MAX_TILE_ROWS);
3821 assert(tile_cols <= MAX_TILE_COLS);
3822
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003823#if CONFIG_EXT_TILE
3824 if (cm->large_scale_tile)
3825 get_ls_tile_buffers(pbi, data, data_end, tile_buffers);
3826 else
3827#endif // CONFIG_EXT_TILE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003828 get_tile_buffers(pbi, data, data_end, tile_buffers, startTile, endTile);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003829
3830 if (pbi->tile_data == NULL || n_tiles != pbi->allocated_tiles) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003831 aom_free(pbi->tile_data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003832 CHECK_MEM_ERROR(cm, pbi->tile_data,
Yaowu Xuf883b422016-08-30 14:01:10 -07003833 aom_memalign(32, n_tiles * (sizeof(*pbi->tile_data))));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003834 pbi->allocated_tiles = n_tiles;
3835 }
Michael Bebenita6048d052016-08-25 14:40:54 -07003836#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003837 if (pbi->acct_enabled) {
3838 aom_accounting_reset(&pbi->accounting);
3839 }
Michael Bebenita6048d052016-08-25 14:40:54 -07003840#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003841 // Load all tile information into tile_data.
3842 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
3843 for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
3844 const TileBufferDec *const buf = &tile_buffers[tile_row][tile_col];
3845 TileData *const td = pbi->tile_data + tile_cols * tile_row + tile_col;
3846
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003847 if (tile_row * cm->tile_cols + tile_col < startTile ||
3848 tile_row * cm->tile_cols + tile_col > endTile)
3849 continue;
3850
Yaowu Xuc27fc142016-08-22 16:08:15 -07003851 td->cm = cm;
3852 td->xd = pbi->mb;
3853 td->xd.corrupted = 0;
3854 td->xd.counts =
3855 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD
3856 ? &cm->counts
3857 : NULL;
Yaowu Xuf883b422016-08-30 14:01:10 -07003858 av1_zero(td->dqcoeff);
Yushin Cho77bba8d2016-11-04 16:36:56 -07003859#if CONFIG_PVQ
Yaowu Xud6ea71c2016-11-07 10:24:14 -08003860 av1_zero(td->pvq_ref_coeff);
Yushin Cho77bba8d2016-11-04 16:36:56 -07003861#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07003862 av1_tile_init(&td->xd.tile, td->cm, tile_row, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003863 setup_bool_decoder(buf->data, data_end, buf->size, &cm->error,
Alex Converseeb780e72016-12-13 12:46:41 -08003864 &td->bit_reader,
3865#if CONFIG_ANS && ANS_MAX_SYMBOLS
3866 1 << cm->ans_window_size_log2,
3867#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
3868 pbi->decrypt_cb, pbi->decrypt_state);
Michael Bebenita6048d052016-08-25 14:40:54 -07003869#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003870 if (pbi->acct_enabled) {
David Barkerd971f402016-10-25 13:52:07 +01003871 td->bit_reader.accounting = &pbi->accounting;
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003872 } else {
David Barkerd971f402016-10-25 13:52:07 +01003873 td->bit_reader.accounting = NULL;
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003874 }
Michael Bebenita6048d052016-08-25 14:40:54 -07003875#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07003876 av1_init_macroblockd(cm, &td->xd,
3877#if CONFIG_PVQ
3878 td->pvq_ref_coeff,
3879#endif
Luc Trudeauf8164152017-04-11 16:20:51 -04003880#if CONFIG_CFL
3881 &td->cfl,
3882#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07003883 td->dqcoeff);
Yushin Choc49ef3a2017-03-13 17:27:25 -07003884
Thomas Daviesf77d4ad2017-01-10 18:55:42 +00003885 // Initialise the tile context from the frame context
3886 td->tctx = *cm->fc;
3887 td->xd.tile_ctx = &td->tctx;
Yushin Choc49ef3a2017-03-13 17:27:25 -07003888
3889#if CONFIG_PVQ
3890 daala_dec_init(cm, &td->xd.daala_dec, &td->bit_reader);
3891 td->xd.daala_dec.state.adapt = &td->tctx.pvq_context;
3892#endif
3893
Yaowu Xuc27fc142016-08-22 16:08:15 -07003894 td->xd.plane[0].color_index_map = td->color_index_map[0];
3895 td->xd.plane[1].color_index_map = td->color_index_map[1];
Sarah Parker5c6744b2017-08-25 17:27:45 -07003896#if CONFIG_MRC_TX
3897 td->xd.mrc_mask = td->mrc_mask;
3898#endif // CONFIG_MRC_TX
Yaowu Xuc27fc142016-08-22 16:08:15 -07003899 }
3900 }
3901
3902 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
3903 const int row = inv_row_order ? tile_rows - 1 - tile_row : tile_row;
3904 int mi_row = 0;
3905 TileInfo tile_info;
3906
Yaowu Xuf883b422016-08-30 14:01:10 -07003907 av1_tile_set_row(&tile_info, cm, row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003908
3909 for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
3910 const int col = inv_col_order ? tile_cols - 1 - tile_col : tile_col;
3911 TileData *const td = pbi->tile_data + tile_cols * row + col;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003912
3913 if (tile_row * cm->tile_cols + tile_col < startTile ||
3914 tile_row * cm->tile_cols + tile_col > endTile)
3915 continue;
3916
Michael Bebenita6048d052016-08-25 14:40:54 -07003917#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003918 if (pbi->acct_enabled) {
David Barkerd971f402016-10-25 13:52:07 +01003919 td->bit_reader.accounting->last_tell_frac =
3920 aom_reader_tell_frac(&td->bit_reader);
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04003921 }
Michael Bebenita6048d052016-08-25 14:40:54 -07003922#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003923
Yaowu Xuf883b422016-08-30 14:01:10 -07003924 av1_tile_set_col(&tile_info, cm, col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003925
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003926#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu73126c02017-02-08 22:37:47 -08003927 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
3928 if (!cm->dependent_horz_tiles || tile_row == 0 ||
3929 tile_info.tg_horz_boundary) {
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003930 av1_zero_above_context(cm, tile_info.mi_col_start,
3931 tile_info.mi_col_end);
3932 }
3933#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003934 av1_zero_above_context(cm, tile_info.mi_col_start, tile_info.mi_col_end);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003935#endif
Rupert Swarbrick6c545212017-09-01 17:17:25 +01003936#if CONFIG_LOOP_RESTORATION
3937 for (int p = 0; p < MAX_MB_PLANE; ++p) {
3938 set_default_wiener(td->xd.wiener_info + p);
3939 set_default_sgrproj(td->xd.sgrproj_info + p);
3940 }
3941#endif // CONFIG_LOOP_RESTORATION
Yaowu Xuc27fc142016-08-22 16:08:15 -07003942
David Barker5c06a642017-08-18 13:18:16 +01003943#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07003944 dec_setup_across_tile_boundary_info(cm, &tile_info);
David Barker5c06a642017-08-18 13:18:16 +01003945#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luof190a162017-07-13 16:16:56 -07003946
Yaowu Xuc27fc142016-08-22 16:08:15 -07003947 for (mi_row = tile_info.mi_row_start; mi_row < tile_info.mi_row_end;
3948 mi_row += cm->mib_size) {
3949 int mi_col;
3950
Yaowu Xuf883b422016-08-30 14:01:10 -07003951 av1_zero_left_context(&td->xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003952
3953 for (mi_col = tile_info.mi_col_start; mi_col < tile_info.mi_col_end;
3954 mi_col += cm->mib_size) {
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07003955#if CONFIG_NCOBMC_ADAPT_WEIGHT
3956 alloc_ncobmc_pred_buffer(&td->xd);
3957 set_sb_mi_boundaries(cm, &td->xd, mi_row, mi_col);
3958#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003959 decode_partition(pbi, &td->xd,
3960#if CONFIG_SUPERTX
3961 0,
3962#endif // CONFIG_SUPERTX
Jingning Hanea10ad42017-07-20 11:19:08 -07003963 mi_row, mi_col, &td->bit_reader, cm->sb_size);
Wei-Ting Lin3122b7d2017-08-30 17:26:58 -07003964#if NC_MODE_INFO && CONFIG_MOTION_VAR
Yue Chen9ab6d712017-01-12 15:50:46 -08003965 detoken_and_recon_sb(pbi, &td->xd, mi_row, mi_col, &td->bit_reader,
3966 cm->sb_size);
3967#endif
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07003968#if CONFIG_NCOBMC_ADAPT_WEIGHT
3969 free_ncobmc_pred_buffer(&td->xd);
3970#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003971 }
Angie Chiangd0916d92017-03-10 17:54:18 -08003972 aom_merge_corrupted_flag(&pbi->mb.corrupted, td->xd.corrupted);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003973 if (pbi->mb.corrupted)
Yaowu Xuf883b422016-08-30 14:01:10 -07003974 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003975 "Failed to decode tile data");
Yaowu Xuc27fc142016-08-22 16:08:15 -07003976 }
3977 }
3978
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003979#if !CONFIG_OBU
Yaowu Xuc27fc142016-08-22 16:08:15 -07003980 assert(mi_row > 0);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003981#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003982
Ryan Lei6f8c1a72016-10-26 10:52:12 -07003983// When parallel deblocking is enabled, deblocking should not
3984// be interleaved with decoding. Instead, deblocking should be done
3985// after the entire frame is decoded.
Jingning Han52ece882017-04-07 14:58:25 -07003986#if !CONFIG_VAR_TX && !CONFIG_PARALLEL_DEBLOCKING && !CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07003987 // Loopfilter one tile row.
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08003988 // Note: If out-of-order tile decoding is used (for example, inv_row_order
3989 // = 1), the loopfiltering has to be done after all tile rows are decoded.
3990 if (!inv_row_order && cm->lf.filter_level && !cm->skip_loop_filter) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003991 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
Yaowu Xuf883b422016-08-30 14:01:10 -07003992 const int lf_start = AOMMAX(0, tile_info.mi_row_start - cm->mib_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003993 const int lf_end = tile_info.mi_row_end - cm->mib_size;
3994
3995 // Delay the loopfilter if the first tile row is only
3996 // a single superblock high.
3997 if (lf_end <= 0) continue;
3998
3999 // Decoding has completed. Finish up the loop filter in this thread.
4000 if (tile_info.mi_row_end >= cm->mi_rows) continue;
4001
4002 winterface->sync(&pbi->lf_worker);
4003 lf_data->start = lf_start;
4004 lf_data->stop = lf_end;
4005 if (pbi->max_threads > 1) {
4006 winterface->launch(&pbi->lf_worker);
4007 } else {
4008 winterface->execute(&pbi->lf_worker);
4009 }
4010 }
Ryan Lei6f8c1a72016-10-26 10:52:12 -07004011#endif // !CONFIG_VAR_TX && !CONFIG_PARALLEL_DEBLOCKING && !CONFIG_CB4X4
Yaowu Xuc27fc142016-08-22 16:08:15 -07004012
4013 // After loopfiltering, the last 7 rows of pixels in each superblock row may
4014 // still be changed by the longest loopfilter of the next superblock row.
4015 if (cm->frame_parallel_decode)
Yaowu Xuf883b422016-08-30 14:01:10 -07004016 av1_frameworker_broadcast(pbi->cur_buf, mi_row << cm->mib_size_log2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004017 }
4018
Jingning Han52ece882017-04-07 14:58:25 -07004019#if CONFIG_VAR_TX || CONFIG_CB4X4
Cheng Chene94df5c2017-07-19 17:25:33 -07004020// Loopfilter the whole frame.
Cheng Chenf572cd32017-08-25 18:34:51 -07004021#if CONFIG_LPF_SB
4022 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
4023 cm->lf.filter_level, 0, 0, 0, 0);
4024#else
Cheng Chen13fc8192017-08-19 11:49:28 -07004025#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07004026 if (cm->lf.filter_level[0] || cm->lf.filter_level[1]) {
Cheng Chene94df5c2017-07-19 17:25:33 -07004027 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
Cheng Chen179479f2017-08-04 10:56:39 -07004028 cm->lf.filter_level[0], cm->lf.filter_level[1], 0, 0);
Cheng Chene94df5c2017-07-19 17:25:33 -07004029 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
Cheng Chen179479f2017-08-04 10:56:39 -07004030 cm->lf.filter_level_u, cm->lf.filter_level_u, 1, 0);
Cheng Chene94df5c2017-07-19 17:25:33 -07004031 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
Cheng Chen179479f2017-08-04 10:56:39 -07004032 cm->lf.filter_level_v, cm->lf.filter_level_v, 2, 0);
Cheng Chene94df5c2017-07-19 17:25:33 -07004033 }
4034#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004035#if CONFIG_OBU
4036 if (endTile == cm->tile_rows * cm->tile_cols - 1)
4037#endif
4038 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
4039 cm->lf.filter_level, 0, 0);
Cheng Chen13fc8192017-08-19 11:49:28 -07004040#endif // CONFIG_LOOPFILTER_LEVEL
Cheng Chenf572cd32017-08-25 18:34:51 -07004041#endif // CONFIG_LPF_SB
Yaowu Xuc27fc142016-08-22 16:08:15 -07004042#else
Ryan Lei6f8c1a72016-10-26 10:52:12 -07004043#if CONFIG_PARALLEL_DEBLOCKING
4044 // Loopfilter all rows in the frame.
4045 if (cm->lf.filter_level && !cm->skip_loop_filter) {
4046 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
4047 winterface->sync(&pbi->lf_worker);
4048 lf_data->start = 0;
4049 lf_data->stop = cm->mi_rows;
4050 winterface->execute(&pbi->lf_worker);
4051 }
4052#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07004053 // Loopfilter remaining rows in the frame.
4054 if (cm->lf.filter_level && !cm->skip_loop_filter) {
4055 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
4056 winterface->sync(&pbi->lf_worker);
4057 lf_data->start = lf_data->stop;
4058 lf_data->stop = cm->mi_rows;
4059 winterface->execute(&pbi->lf_worker);
4060 }
Ryan Lei6f8c1a72016-10-26 10:52:12 -07004061#endif // CONFIG_PARALLEL_DEBLOCKING
Yaowu Xuc27fc142016-08-22 16:08:15 -07004062#endif // CONFIG_VAR_TX
Yaowu Xuc27fc142016-08-22 16:08:15 -07004063 if (cm->frame_parallel_decode)
Yaowu Xuf883b422016-08-30 14:01:10 -07004064 av1_frameworker_broadcast(pbi->cur_buf, INT_MAX);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004065
4066#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004067 if (cm->large_scale_tile) {
4068 if (n_tiles == 1) {
4069#if CONFIG_ANS
4070 return data_end;
4071#else
4072 // Find the end of the single tile buffer
4073 return aom_reader_find_end(&pbi->tile_data->bit_reader);
4074#endif // CONFIG_ANS
4075 } else {
4076 // Return the end of the last tile buffer
4077 return tile_buffers[tile_rows - 1][tile_cols - 1].raw_data_end;
4078 }
4079 } else {
4080#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004081#if CONFIG_ANS
4082 return data_end;
4083#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004084#if !CONFIG_OBU
Yaowu Xuc27fc142016-08-22 16:08:15 -07004085 {
4086 // Get last tile data.
4087 TileData *const td = pbi->tile_data + tile_cols * tile_rows - 1;
Yaowu Xuf883b422016-08-30 14:01:10 -07004088 return aom_reader_find_end(&td->bit_reader);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004089 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004090#else
4091 TileData *const td = pbi->tile_data + endTile;
4092 return aom_reader_find_end(&td->bit_reader);
4093#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004094#endif // CONFIG_ANS
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004095#if CONFIG_EXT_TILE
4096 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004097#endif // CONFIG_EXT_TILE
4098}
4099
4100static int tile_worker_hook(TileWorkerData *const tile_data,
4101 const TileInfo *const tile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004102 AV1Decoder *const pbi = tile_data->pbi;
4103 const AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004104 int mi_row, mi_col;
4105
4106 if (setjmp(tile_data->error_info.jmp)) {
4107 tile_data->error_info.setjmp = 0;
Angie Chiangd0916d92017-03-10 17:54:18 -08004108 aom_merge_corrupted_flag(&tile_data->xd.corrupted, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004109 return 0;
4110 }
4111
4112 tile_data->error_info.setjmp = 1;
4113 tile_data->xd.error_info = &tile_data->error_info;
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08004114#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu73126c02017-02-08 22:37:47 -08004115 if (!cm->dependent_horz_tiles || tile->tg_horz_boundary) {
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08004116 av1_zero_above_context(&pbi->common, tile->mi_col_start, tile->mi_col_end);
4117 }
4118#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004119 av1_zero_above_context(&pbi->common, tile->mi_col_start, tile->mi_col_end);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08004120#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004121
4122 for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
4123 mi_row += cm->mib_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004124 av1_zero_left_context(&tile_data->xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004125
4126 for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
4127 mi_col += cm->mib_size) {
4128 decode_partition(pbi, &tile_data->xd,
4129#if CONFIG_SUPERTX
4130 0,
4131#endif
Jingning Hanea10ad42017-07-20 11:19:08 -07004132 mi_row, mi_col, &tile_data->bit_reader, cm->sb_size);
Wei-Ting Lin3122b7d2017-08-30 17:26:58 -07004133#if NC_MODE_INFO && CONFIG_MOTION_VAR
Yue Chen9ab6d712017-01-12 15:50:46 -08004134 detoken_and_recon_sb(pbi, &tile_data->xd, mi_row, mi_col,
4135 &tile_data->bit_reader, cm->sb_size);
4136#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004137 }
4138 }
4139 return !tile_data->xd.corrupted;
4140}
4141
4142// sorts in descending order
4143static int compare_tile_buffers(const void *a, const void *b) {
4144 const TileBufferDec *const buf1 = (const TileBufferDec *)a;
4145 const TileBufferDec *const buf2 = (const TileBufferDec *)b;
4146 return (int)(buf2->size - buf1->size);
4147}
4148
Yaowu Xuf883b422016-08-30 14:01:10 -07004149static const uint8_t *decode_tiles_mt(AV1Decoder *pbi, const uint8_t *data,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004150 const uint8_t *data_end) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004151 AV1_COMMON *const cm = &pbi->common;
4152 const AVxWorkerInterface *const winterface = aom_get_worker_interface();
Yaowu Xuc27fc142016-08-22 16:08:15 -07004153 const int tile_cols = cm->tile_cols;
4154 const int tile_rows = cm->tile_rows;
Yaowu Xuf883b422016-08-30 14:01:10 -07004155 const int num_workers = AOMMIN(pbi->max_threads & ~1, tile_cols);
clang-format67948d32016-09-07 22:40:40 -07004156 TileBufferDec(*const tile_buffers)[MAX_TILE_COLS] = pbi->tile_buffers;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004157#if CONFIG_EXT_TILE
Yaowu Xuf883b422016-08-30 14:01:10 -07004158 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004159 const int single_row = pbi->dec_tile_row >= 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07004160 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004161 const int single_col = pbi->dec_tile_col >= 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004162#endif // CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004163 int tile_rows_start;
4164 int tile_rows_end;
4165 int tile_cols_start;
4166 int tile_cols_end;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004167 int tile_row, tile_col;
4168 int i;
4169
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004170#if CONFIG_EXT_TILE
4171 if (cm->large_scale_tile) {
4172 tile_rows_start = single_row ? dec_tile_row : 0;
4173 tile_rows_end = single_row ? dec_tile_row + 1 : tile_rows;
4174 tile_cols_start = single_col ? dec_tile_col : 0;
4175 tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
4176 } else {
4177#endif // CONFIG_EXT_TILE
4178 tile_rows_start = 0;
4179 tile_rows_end = tile_rows;
4180 tile_cols_start = 0;
4181 tile_cols_end = tile_cols;
4182#if CONFIG_EXT_TILE
4183 }
4184#endif // CONFIG_EXT_TILE
4185
4186#if !CONFIG_ANS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004187 int final_worker = -1;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004188#endif // !CONFIG_ANS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004189
4190 assert(tile_rows <= MAX_TILE_ROWS);
4191 assert(tile_cols <= MAX_TILE_COLS);
4192
4193 assert(tile_cols * tile_rows > 1);
4194
Yaowu Xuc27fc142016-08-22 16:08:15 -07004195 // TODO(jzern): See if we can remove the restriction of passing in max
4196 // threads to the decoder.
4197 if (pbi->num_tile_workers == 0) {
4198 const int num_threads = pbi->max_threads & ~1;
4199 CHECK_MEM_ERROR(cm, pbi->tile_workers,
Yaowu Xuf883b422016-08-30 14:01:10 -07004200 aom_malloc(num_threads * sizeof(*pbi->tile_workers)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07004201 // Ensure tile data offsets will be properly aligned. This may fail on
4202 // platforms without DECLARE_ALIGNED().
4203 assert((sizeof(*pbi->tile_worker_data) % 16) == 0);
4204 CHECK_MEM_ERROR(
4205 cm, pbi->tile_worker_data,
Yaowu Xuf883b422016-08-30 14:01:10 -07004206 aom_memalign(32, num_threads * sizeof(*pbi->tile_worker_data)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07004207 CHECK_MEM_ERROR(cm, pbi->tile_worker_info,
Yaowu Xuf883b422016-08-30 14:01:10 -07004208 aom_malloc(num_threads * sizeof(*pbi->tile_worker_info)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07004209 for (i = 0; i < num_threads; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004210 AVxWorker *const worker = &pbi->tile_workers[i];
Yaowu Xuc27fc142016-08-22 16:08:15 -07004211 ++pbi->num_tile_workers;
4212
4213 winterface->init(worker);
4214 if (i < num_threads - 1 && !winterface->reset(worker)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004215 aom_internal_error(&cm->error, AOM_CODEC_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004216 "Tile decoder thread creation failed");
4217 }
4218 }
4219 }
4220
4221 // Reset tile decoding hook
4222 for (i = 0; i < num_workers; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004223 AVxWorker *const worker = &pbi->tile_workers[i];
Yaowu Xuc27fc142016-08-22 16:08:15 -07004224 winterface->sync(worker);
Yaowu Xuf883b422016-08-30 14:01:10 -07004225 worker->hook = (AVxWorkerHook)tile_worker_hook;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004226 worker->data1 = &pbi->tile_worker_data[i];
4227 worker->data2 = &pbi->tile_worker_info[i];
4228 }
4229
4230 // Initialize thread frame counts.
4231 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
4232 for (i = 0; i < num_workers; ++i) {
4233 TileWorkerData *const twd = (TileWorkerData *)pbi->tile_workers[i].data1;
Yaowu Xuf883b422016-08-30 14:01:10 -07004234 av1_zero(twd->counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004235 }
4236 }
4237
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004238// Load tile data into tile_buffers
4239#if CONFIG_EXT_TILE
4240 if (cm->large_scale_tile)
4241 get_ls_tile_buffers(pbi, data, data_end, tile_buffers);
4242 else
4243#endif // CONFIG_EXT_TILE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004244 get_tile_buffers(pbi, data, data_end, tile_buffers, 0,
4245 cm->tile_rows * cm->tile_cols - 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004246
4247 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
4248 // Sort the buffers in this tile row based on size in descending order.
4249 qsort(&tile_buffers[tile_row][tile_cols_start],
4250 tile_cols_end - tile_cols_start, sizeof(tile_buffers[0][0]),
4251 compare_tile_buffers);
4252
4253 // Rearrange the tile buffers in this tile row so that, within each group
4254 // of num_workers tiles, the largest (and presumably most difficult) tile
4255 // is decoded by the main thread. This should help minimize the number of
4256 // instances where the main thread is waiting for a worker to complete.
4257 {
4258 int group_start;
4259 for (group_start = tile_cols_start; group_start < tile_cols_end;
4260 group_start += num_workers) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004261 const int group_end = AOMMIN(group_start + num_workers, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004262 const TileBufferDec largest = tile_buffers[tile_row][group_start];
4263 memmove(&tile_buffers[tile_row][group_start],
4264 &tile_buffers[tile_row][group_start + 1],
4265 (group_end - group_start - 1) * sizeof(tile_buffers[0][0]));
4266 tile_buffers[tile_row][group_end - 1] = largest;
4267 }
4268 }
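    // Worked example of the rearrangement above (illustrative): with
    // num_workers = 2 and descending sizes { 9, 7, 5, 3 } after the qsort,
    // each group of two becomes { 7, 9 } and { 3, 5 }, so the largest tile
    // of every group lands in the slot that is run synchronously on the
    // main thread via winterface->execute() while the smaller tiles are
    // launched on workers.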
4269
4270 for (tile_col = tile_cols_start; tile_col < tile_cols_end;) {
4271 // Launch workers for individual columns
4272 for (i = 0; i < num_workers && tile_col < tile_cols_end;
4273 ++i, ++tile_col) {
4274 TileBufferDec *const buf = &tile_buffers[tile_row][tile_col];
Yaowu Xuf883b422016-08-30 14:01:10 -07004275 AVxWorker *const worker = &pbi->tile_workers[i];
Yaowu Xuc27fc142016-08-22 16:08:15 -07004276 TileWorkerData *const twd = (TileWorkerData *)worker->data1;
4277 TileInfo *const tile_info = (TileInfo *)worker->data2;
4278
4279 twd->pbi = pbi;
4280 twd->xd = pbi->mb;
4281 twd->xd.corrupted = 0;
4282 twd->xd.counts =
4283 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD
4284 ? &twd->counts
4285 : NULL;
Yaowu Xuf883b422016-08-30 14:01:10 -07004286 av1_zero(twd->dqcoeff);
4287 av1_tile_init(tile_info, cm, tile_row, buf->col);
4288 av1_tile_init(&twd->xd.tile, cm, tile_row, buf->col);
Yi Luof190a162017-07-13 16:16:56 -07004289
David Barker5c06a642017-08-18 13:18:16 +01004290#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07004291 dec_setup_across_tile_boundary_info(cm, tile_info);
David Barker5c06a642017-08-18 13:18:16 +01004292#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luof190a162017-07-13 16:16:56 -07004293
Yaowu Xuc27fc142016-08-22 16:08:15 -07004294 setup_bool_decoder(buf->data, data_end, buf->size, &cm->error,
Alex Converseeb780e72016-12-13 12:46:41 -08004295 &twd->bit_reader,
4296#if CONFIG_ANS && ANS_MAX_SYMBOLS
4297 1 << cm->ans_window_size_log2,
4298#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
4299 pbi->decrypt_cb, pbi->decrypt_state);
Yushin Cho77bba8d2016-11-04 16:36:56 -07004300 av1_init_macroblockd(cm, &twd->xd,
4301#if CONFIG_PVQ
4302 twd->pvq_ref_coeff,
4303#endif
Luc Trudeauf8164152017-04-11 16:20:51 -04004304#if CONFIG_CFL
4305 &twd->cfl,
4306#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07004307 twd->dqcoeff);
4308#if CONFIG_PVQ
Nathan E. Eggeab083972016-12-28 15:31:46 -05004309 daala_dec_init(cm, &twd->xd.daala_dec, &twd->bit_reader);
Yushin Choc49ef3a2017-03-13 17:27:25 -07004310 twd->xd.daala_dec.state.adapt = &twd->tctx.pvq_context;
Yushin Cho77bba8d2016-11-04 16:36:56 -07004311#endif
Yushin Chod767beb2017-03-24 10:15:47 -07004312 // Initialise the tile context from the frame context
4313 twd->tctx = *cm->fc;
4314 twd->xd.tile_ctx = &twd->tctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004315 twd->xd.plane[0].color_index_map = twd->color_index_map[0];
4316 twd->xd.plane[1].color_index_map = twd->color_index_map[1];
4317
4318 worker->had_error = 0;
4319 if (i == num_workers - 1 || tile_col == tile_cols_end - 1) {
4320 winterface->execute(worker);
4321 } else {
4322 winterface->launch(worker);
4323 }
4324
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004325#if !CONFIG_ANS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004326 if (tile_row == tile_rows - 1 && buf->col == tile_cols - 1) {
4327 final_worker = i;
4328 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004329#endif // !CONFIG_ANS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004330 }
4331
4332 // Sync all workers
4333 for (; i > 0; --i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004334 AVxWorker *const worker = &pbi->tile_workers[i - 1];
Yaowu Xuc27fc142016-08-22 16:08:15 -07004335 // TODO(jzern): The tile may have specific error data associated with
Yaowu Xuf883b422016-08-30 14:01:10 -07004336 // its aom_internal_error_info which could be propagated to the main
Yaowu Xuc27fc142016-08-22 16:08:15 -07004337 // info in cm. Additionally once the threads have been synced and an
4338 // error is detected, there's no point in continuing to decode tiles.
4339 pbi->mb.corrupted |= !winterface->sync(worker);
4340 }
4341 }
4342 }
4343
4344 // Accumulate thread frame counts.
4345 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
4346 for (i = 0; i < num_workers; ++i) {
4347 TileWorkerData *const twd = (TileWorkerData *)pbi->tile_workers[i].data1;
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08004348 av1_accumulate_frame_counts(&cm->counts, &twd->counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004349 }
4350 }
4351
4352#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004353 if (cm->large_scale_tile) {
4354 // Return the end of the last tile buffer
4355 return tile_buffers[tile_rows - 1][tile_cols - 1].raw_data_end;
4356 } else {
4357#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004358#if CONFIG_ANS
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004359 return data_end;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004360#else
4361 assert(final_worker != -1);
4362 {
4363 TileWorkerData *const twd =
4364 (TileWorkerData *)pbi->tile_workers[final_worker].data1;
Yaowu Xuf883b422016-08-30 14:01:10 -07004365 return aom_reader_find_end(&twd->bit_reader);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004366 }
4367#endif // CONFIG_ANS
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004368#if CONFIG_EXT_TILE
4369 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004370#endif // CONFIG_EXT_TILE
4371}
4372
4373static void error_handler(void *data) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004374 AV1_COMMON *const cm = (AV1_COMMON *)data;
4375 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME, "Truncated packet");
Yaowu Xuc27fc142016-08-22 16:08:15 -07004376}
4377
Yaowu Xuf883b422016-08-30 14:01:10 -07004378static void read_bitdepth_colorspace_sampling(AV1_COMMON *cm,
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02004379 struct aom_read_bit_buffer *rb,
4380 int allow_lowbitdepth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004381 if (cm->profile >= PROFILE_2) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004382 cm->bit_depth = aom_rb_read_bit(rb) ? AOM_BITS_12 : AOM_BITS_10;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004383 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07004384 cm->bit_depth = AOM_BITS_8;
Sebastien Alaiwan98378132017-01-04 11:23:09 +01004385 }
4386
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02004387#if CONFIG_HIGHBITDEPTH
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02004388 cm->use_highbitdepth = cm->bit_depth > AOM_BITS_8 || !allow_lowbitdepth;
James Zern91adea52017-06-15 23:27:26 -07004389#else
4390 (void)allow_lowbitdepth;
Sebastien Alaiwan98378132017-01-04 11:23:09 +01004391#endif
anorkin76fb1262017-03-22 15:12:12 -07004392#if CONFIG_COLORSPACE_HEADERS
4393 cm->color_space = aom_rb_read_literal(rb, 5);
4394 cm->transfer_function = aom_rb_read_literal(rb, 5);
4395#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004396 cm->color_space = aom_rb_read_literal(rb, 3);
anorkin76fb1262017-03-22 15:12:12 -07004397#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07004398 if (cm->color_space != AOM_CS_SRGB) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004399 // [16,235] (including xvycc) vs [0,255] range
Yaowu Xuf883b422016-08-30 14:01:10 -07004400 cm->color_range = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004401 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004402 cm->subsampling_x = aom_rb_read_bit(rb);
4403 cm->subsampling_y = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004404 if (cm->subsampling_x == 1 && cm->subsampling_y == 1)
Yaowu Xuf883b422016-08-30 14:01:10 -07004405 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004406 "4:2:0 color not supported in profile 1 or 3");
Yaowu Xuf883b422016-08-30 14:01:10 -07004407 if (aom_rb_read_bit(rb))
4408 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004409 "Reserved bit set");
4410 } else {
4411 cm->subsampling_y = cm->subsampling_x = 1;
4412 }
anorkin76fb1262017-03-22 15:12:12 -07004413#if CONFIG_COLORSPACE_HEADERS
4414 if (cm->subsampling_x == 1 && cm->subsampling_y == 1) {
4415 cm->chroma_sample_position = aom_rb_read_literal(rb, 2);
4416 }
4417#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004418 } else {
4419 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
4420 // Note if colorspace is SRGB then 4:4:4 chroma sampling is assumed.
4421 // 4:2:2 or 4:4:0 chroma sampling is not allowed.
4422 cm->subsampling_y = cm->subsampling_x = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07004423 if (aom_rb_read_bit(rb))
4424 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004425 "Reserved bit set");
4426 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07004427 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004428 "4:4:4 color not supported in profile 0 or 2");
4429 }
4430 }
4431}
4432
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004433#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004434void read_sequence_header(SequenceHeader *seq_params,
4435 struct aom_read_bit_buffer *rb) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004436 /* Placeholder for actually reading from the bitstream */
David Barker5e70a112017-10-03 14:28:17 +01004437 seq_params->frame_id_numbers_present_flag = aom_rb_read_bit(rb);
4438 if (seq_params->frame_id_numbers_present_flag) {
4439 seq_params->frame_id_length_minus7 = aom_rb_read_literal(rb, 4);
4440 seq_params->delta_frame_id_length_minus2 = aom_rb_read_literal(rb, 4);
4441 }
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004442}
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004443#endif // CONFIG_REFERENCE_BUFFER
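
/* A worked example for the fields read in read_sequence_header() above
 * (illustrative numbers only): both syntax elements are 4-bit literals, and
 * the derived sizes used later in read_uncompressed_header() are
 *
 *   frame_id_length = frame_id_length_minus7 + 7;        // 7 .. 22 bits
 *   diff_len        = delta_frame_id_length_minus2 + 2;  // 2 .. 17 bits
 *
 * For example, frame_id_length_minus7 = 8 gives 15-bit frame IDs, i.e. a
 * frame-ID space of 1 << 15 = 32768 values. */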
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004444
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004445static void read_compound_tools(AV1_COMMON *cm,
4446 struct aom_read_bit_buffer *rb) {
4447 (void)cm;
4448 (void)rb;
4449#if CONFIG_INTERINTRA
4450 if (!frame_is_intra_only(cm) && cm->reference_mode != COMPOUND_REFERENCE) {
4451 cm->allow_interintra_compound = aom_rb_read_bit(rb);
4452 } else {
4453 cm->allow_interintra_compound = 0;
4454 }
4455#endif // CONFIG_INTERINTRA
4456#if CONFIG_WEDGE || CONFIG_COMPOUND_SEGMENT
Zoe Liu85b66462017-04-20 14:28:19 -07004457#if CONFIG_COMPOUND_SINGLEREF
4458 if (!frame_is_intra_only(cm)) {
4459#else // !CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004460 if (!frame_is_intra_only(cm) && cm->reference_mode != SINGLE_REFERENCE) {
Zoe Liu85b66462017-04-20 14:28:19 -07004461#endif // CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004462 cm->allow_masked_compound = aom_rb_read_bit(rb);
4463 } else {
4464 cm->allow_masked_compound = 0;
4465 }
4466#endif // CONFIG_WEDGE || CONFIG_COMPOUND_SEGMENT
4467}
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004468
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07004469#if CONFIG_VAR_REFS
4470static void check_valid_ref_frames(AV1_COMMON *cm) {
4471 MV_REFERENCE_FRAME ref_frame;
4472 // TODO(zoeliu): To handle ALTREF_FRAME the same way as the other
4473 // reference frames: the current encoder invalidates ALTREF when ALTREF
4474 // is the same as LAST, but invalidates all the other references
4475 // when they are the same as ALTREF.
4476 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
4477 RefBuffer *const ref_buf = &cm->frame_refs[ref_frame - LAST_FRAME];
4478
4479 if (ref_buf->idx != INVALID_IDX) {
4480 ref_buf->is_valid = 1;
4481
4482 MV_REFERENCE_FRAME ref;
4483 for (ref = LAST_FRAME; ref < ref_frame; ++ref) {
4484 RefBuffer *const buf = &cm->frame_refs[ref - LAST_FRAME];
4485 if (buf->is_valid && buf->idx == ref_buf->idx) {
4486 if (ref_frame != ALTREF_FRAME || ref == LAST_FRAME) {
4487 ref_buf->is_valid = 0;
4488 break;
4489 } else {
4490 buf->is_valid = 0;
4491 }
4492 }
4493 }
4494 } else {
4495 ref_buf->is_valid = 0;
4496 }
4497 }
4498}
4499#endif // CONFIG_VAR_REFS
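
/* Illustrative walk-through of check_valid_ref_frames() above (descriptive
 * only): when two reference slots point at the same buffer index, the later
 * one is normally marked invalid. ALTREF is special-cased per the TODO at
 * the top of the function: if ALTREF clashes with LAST, ALTREF itself is
 * invalidated, but if ALTREF clashes with any other earlier reference, that
 * earlier reference is the one invalidated. */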
4500
Sarah Parker3e579a62017-08-23 16:53:20 -07004501#if CONFIG_GLOBAL_MOTION
4502static int read_global_motion_params(WarpedMotionParams *params,
David Barkerd7c8bd52017-09-25 14:47:29 +01004503 const WarpedMotionParams *ref_params,
Sarah Parker3e579a62017-08-23 16:53:20 -07004504 struct aom_read_bit_buffer *rb,
4505 int allow_hp) {
4506 TransformationType type = aom_rb_read_bit(rb);
4507 if (type != IDENTITY) {
4508#if GLOBAL_TRANS_TYPES > 4
4509 type += aom_rb_read_literal(rb, GLOBAL_TYPE_BITS);
4510#else
4511 if (aom_rb_read_bit(rb))
4512 type = ROTZOOM;
4513 else
4514 type = aom_rb_read_bit(rb) ? TRANSLATION : AFFINE;
4515#endif // GLOBAL_TRANS_TYPES > 4
4516 }
4517
4518 int trans_bits;
4519 int trans_dec_factor;
4520 int trans_prec_diff;
David Barkerd7c8bd52017-09-25 14:47:29 +01004521 *params = default_warp_params;
Sarah Parker3e579a62017-08-23 16:53:20 -07004522 params->wmtype = type;
4523 switch (type) {
4524 case HOMOGRAPHY:
4525 case HORTRAPEZOID:
4526 case VERTRAPEZOID:
4527 if (type != HORTRAPEZOID)
4528 params->wmmat[6] =
4529 aom_rb_read_signed_primitive_refsubexpfin(
4530 rb, GM_ROW3HOMO_MAX + 1, SUBEXPFIN_K,
4531 (ref_params->wmmat[6] >> GM_ROW3HOMO_PREC_DIFF)) *
4532 GM_ROW3HOMO_DECODE_FACTOR;
4533 if (type != VERTRAPEZOID)
4534 params->wmmat[7] =
4535 aom_rb_read_signed_primitive_refsubexpfin(
4536 rb, GM_ROW3HOMO_MAX + 1, SUBEXPFIN_K,
4537 (ref_params->wmmat[7] >> GM_ROW3HOMO_PREC_DIFF)) *
4538 GM_ROW3HOMO_DECODE_FACTOR;
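      // fallthrough intended: the homography / trapezoid types also carry
      // the affine and rotzoom parameters read below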
4539 case AFFINE:
4540 case ROTZOOM:
4541 params->wmmat[2] = aom_rb_read_signed_primitive_refsubexpfin(
4542 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4543 (ref_params->wmmat[2] >> GM_ALPHA_PREC_DIFF) -
4544 (1 << GM_ALPHA_PREC_BITS)) *
4545 GM_ALPHA_DECODE_FACTOR +
4546 (1 << WARPEDMODEL_PREC_BITS);
4547 if (type != VERTRAPEZOID)
4548 params->wmmat[3] = aom_rb_read_signed_primitive_refsubexpfin(
4549 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4550 (ref_params->wmmat[3] >> GM_ALPHA_PREC_DIFF)) *
4551 GM_ALPHA_DECODE_FACTOR;
4552 if (type >= AFFINE) {
4553 if (type != HORTRAPEZOID)
4554 params->wmmat[4] = aom_rb_read_signed_primitive_refsubexpfin(
4555 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4556 (ref_params->wmmat[4] >> GM_ALPHA_PREC_DIFF)) *
4557 GM_ALPHA_DECODE_FACTOR;
4558 params->wmmat[5] = aom_rb_read_signed_primitive_refsubexpfin(
4559 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4560 (ref_params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
4561 (1 << GM_ALPHA_PREC_BITS)) *
4562 GM_ALPHA_DECODE_FACTOR +
4563 (1 << WARPEDMODEL_PREC_BITS);
4564 } else {
4565 params->wmmat[4] = -params->wmmat[3];
4566 params->wmmat[5] = params->wmmat[2];
4567 }
4568 // fallthrough intended
4569 case TRANSLATION:
4570 trans_bits = (type == TRANSLATION) ? GM_ABS_TRANS_ONLY_BITS - !allow_hp
4571 : GM_ABS_TRANS_BITS;
4572 trans_dec_factor = (type == TRANSLATION)
4573 ? GM_TRANS_ONLY_DECODE_FACTOR * (1 << !allow_hp)
4574 : GM_TRANS_DECODE_FACTOR;
4575 trans_prec_diff = (type == TRANSLATION)
4576 ? GM_TRANS_ONLY_PREC_DIFF + !allow_hp
4577 : GM_TRANS_PREC_DIFF;
4578 params->wmmat[0] = aom_rb_read_signed_primitive_refsubexpfin(
4579 rb, (1 << trans_bits) + 1, SUBEXPFIN_K,
4580 (ref_params->wmmat[0] >> trans_prec_diff)) *
4581 trans_dec_factor;
4582 params->wmmat[1] = aom_rb_read_signed_primitive_refsubexpfin(
4583 rb, (1 << trans_bits) + 1, SUBEXPFIN_K,
4584 (ref_params->wmmat[1] >> trans_prec_diff)) *
4585 trans_dec_factor;
4586 case IDENTITY: break;
4587 default: assert(0);
4588 }
4589 if (params->wmtype <= AFFINE) {
4590 int good_shear_params = get_shear_params(params);
4591 if (!good_shear_params) return 0;
4592 }
4593
4594 return 1;
4595}
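
/* The parameter coding above follows one pattern for every wmmat[] entry (a
 * descriptive sketch; PREC_DIFF, RANGE and DECODE_FACTOR stand for the
 * per-parameter GM_* constants used above, whose values are not assumed
 * here):
 *
 *   reference = ref_params->wmmat[k] >> PREC_DIFF;       // drop extra precision
 *   coded     = aom_rb_read_signed_primitive_refsubexpfin(
 *                   rb, RANGE, SUBEXPFIN_K, reference);  // coded around reference
 *   wmmat[k]  = coded * DECODE_FACTOR;                   // restore full precision
 *
 * The diagonal terms wmmat[2] and wmmat[5] additionally subtract
 * (1 << GM_ALPHA_PREC_BITS) from the reference before coding and add back
 * (1 << WARPEDMODEL_PREC_BITS) afterwards, so they are coded as offsets from
 * an identity scale of 1.0. */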
4596
4597static void read_global_motion(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
4598 int frame;
4599 for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
David Barkerd7c8bd52017-09-25 14:47:29 +01004600 const WarpedMotionParams *ref_params =
4601 cm->error_resilient_mode ? &default_warp_params
4602 : &cm->prev_frame->global_motion[frame];
Sarah Parker3e579a62017-08-23 16:53:20 -07004603 int good_params = read_global_motion_params(
David Barkerd7c8bd52017-09-25 14:47:29 +01004604 &cm->global_motion[frame], ref_params, rb, cm->allow_high_precision_mv);
Sarah Parker3e579a62017-08-23 16:53:20 -07004605 if (!good_params)
4606 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
4607 "Invalid shear parameters for global motion.");
4608
4609 // TODO(sarahparker, debargha): The logic in the commented out code below
4610 // does not work currently and causes mismatches when resize is on. Fix it
4611 // before turning the optimization back on.
4612 /*
4613 YV12_BUFFER_CONFIG *ref_buf = get_ref_frame(cm, frame);
4614 if (cm->width == ref_buf->y_crop_width &&
4615 cm->height == ref_buf->y_crop_height) {
4616 read_global_motion_params(&cm->global_motion[frame],
4617 &cm->prev_frame->global_motion[frame], rb,
4618 cm->allow_high_precision_mv);
4619 } else {
David Barkerd7c8bd52017-09-25 14:47:29 +01004620 cm->global_motion[frame] = default_warp_params;
Sarah Parker3e579a62017-08-23 16:53:20 -07004621 }
4622 */
4623 /*
4624 printf("Dec Ref %d [%d/%d]: %d %d %d %d\n",
4625 frame, cm->current_video_frame, cm->show_frame,
4626 cm->global_motion[frame].wmmat[0],
4627 cm->global_motion[frame].wmmat[1],
4628 cm->global_motion[frame].wmmat[2],
4629 cm->global_motion[frame].wmmat[3]);
4630 */
4631 }
David Barkercba7da72017-09-14 11:24:27 +01004632 memcpy(cm->cur_frame->global_motion, cm->global_motion,
4633 TOTAL_REFS_PER_FRAME * sizeof(WarpedMotionParams));
Sarah Parker3e579a62017-08-23 16:53:20 -07004634}
4635#endif // CONFIG_GLOBAL_MOTION
4636
Yaowu Xuf883b422016-08-30 14:01:10 -07004637static size_t read_uncompressed_header(AV1Decoder *pbi,
4638 struct aom_read_bit_buffer *rb) {
4639 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004640 MACROBLOCKD *const xd = &pbi->mb;
4641 BufferPool *const pool = cm->buffer_pool;
4642 RefCntBuffer *const frame_bufs = pool->frame_bufs;
4643 int i, mask, ref_index = 0;
4644 size_t sz;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004645
Yaowu Xuc27fc142016-08-22 16:08:15 -07004646 cm->last_frame_type = cm->frame_type;
4647 cm->last_intra_only = cm->intra_only;
4648
4649#if CONFIG_EXT_REFS
4650 // NOTE: By default, every coded frame is to be used as a reference.
4651 cm->is_reference_frame = 1;
4652#endif // CONFIG_EXT_REFS
4653
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004654#if !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07004655 if (aom_rb_read_literal(rb, 2) != AOM_FRAME_MARKER)
4656 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004657 "Invalid frame marker");
4658
Yaowu Xuf883b422016-08-30 14:01:10 -07004659 cm->profile = av1_read_profile(rb);
Sebastien Alaiwanb9c652a2017-05-03 15:44:28 +02004660
4661 const BITSTREAM_PROFILE MAX_SUPPORTED_PROFILE =
4662 CONFIG_HIGHBITDEPTH ? MAX_PROFILES : PROFILE_2;
4663
4664 if (cm->profile >= MAX_SUPPORTED_PROFILE)
Yaowu Xuf883b422016-08-30 14:01:10 -07004665 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004666 "Unsupported bitstream profile");
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004667#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004668
Yunqing Wangc2502b52017-07-19 17:44:18 -07004669#if CONFIG_EXT_TILE
4670 cm->large_scale_tile = aom_rb_read_literal(rb, 1);
4671#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004672 if (cm->large_scale_tile) cm->seq_params.frame_id_numbers_present_flag = 0;
Yunqing Wangc2502b52017-07-19 17:44:18 -07004673#endif // CONFIG_REFERENCE_BUFFER
4674#endif // CONFIG_EXT_TILE
4675
Yaowu Xuf883b422016-08-30 14:01:10 -07004676 cm->show_existing_frame = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004677
4678 if (cm->show_existing_frame) {
Yaowu Xu415ba932016-12-27 11:17:32 -08004679 // Show an existing frame directly.
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004680 const int existing_frame_idx = aom_rb_read_literal(rb, 3);
4681 const int frame_to_show = cm->ref_frame_map[existing_frame_idx];
Yaowu Xu415ba932016-12-27 11:17:32 -08004682#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004683 if (cm->seq_params.frame_id_numbers_present_flag) {
4684 int frame_id_length = cm->seq_params.frame_id_length_minus7 + 7;
4685 int display_frame_id = aom_rb_read_literal(rb, frame_id_length);
4686 /* Compare display_frame_id with ref_frame_id and check that the frame
4687 * is valid for referencing */
4688 if (display_frame_id != cm->ref_frame_id[existing_frame_idx] ||
4689 cm->valid_for_referencing[existing_frame_idx] == 0)
4690 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
4691 "Reference buffer frame ID mismatch");
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004692 }
4693#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004694 lock_buffer_pool(pool);
4695 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
4696 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07004697 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004698 "Buffer %d does not contain a decoded frame",
4699 frame_to_show);
4700 }
4701 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
4702 unlock_buffer_pool(pool);
4703
Cheng Chen13fc8192017-08-19 11:49:28 -07004704#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07004705 cm->lf.filter_level[0] = 0;
4706 cm->lf.filter_level[1] = 0;
4707#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07004708 cm->lf.filter_level = 0;
Cheng Chen179479f2017-08-04 10:56:39 -07004709#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004710 cm->show_frame = 1;
4711 pbi->refresh_frame_flags = 0;
4712
4713 if (cm->frame_parallel_decode) {
4714 for (i = 0; i < REF_FRAMES; ++i)
4715 cm->next_ref_frame_map[i] = cm->ref_frame_map[i];
4716 }
4717
4718 return 0;
4719 }
4720
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004721#if !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07004722 cm->frame_type = (FRAME_TYPE)aom_rb_read_bit(rb);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004723 cm->show_frame = aom_rb_read_bit(rb);
4724 if (cm->frame_type != KEY_FRAME)
4725 cm->intra_only = cm->show_frame ? 0 : aom_rb_read_bit(rb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004726#else
4727 cm->frame_type = (FRAME_TYPE)aom_rb_read_literal(rb, 2); // 2 bits
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004728 cm->show_frame = aom_rb_read_bit(rb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004729 cm->intra_only = cm->frame_type == INTRA_ONLY_FRAME;
4730#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07004731 cm->error_resilient_mode = aom_rb_read_bit(rb);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004732#if CONFIG_REFERENCE_BUFFER
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004733#if !CONFIG_OBU
David Barker5e70a112017-10-03 14:28:17 +01004734 if (frame_is_intra_only(cm)) read_sequence_header(&cm->seq_params, rb);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004735#endif // !CONFIG_OBU
David Barker5e70a112017-10-03 14:28:17 +01004736 if (cm->seq_params.frame_id_numbers_present_flag) {
4737 int frame_id_length = cm->seq_params.frame_id_length_minus7 + 7;
4738 int diff_len = cm->seq_params.delta_frame_id_length_minus2 + 2;
4739 int prev_frame_id = 0;
4740 if (cm->frame_type != KEY_FRAME) {
4741 prev_frame_id = cm->current_frame_id;
4742 }
4743 cm->current_frame_id = aom_rb_read_literal(rb, frame_id_length);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004744
David Barker5e70a112017-10-03 14:28:17 +01004745 if (cm->frame_type != KEY_FRAME) {
4746 int diff_frame_id;
4747 if (cm->current_frame_id > prev_frame_id) {
4748 diff_frame_id = cm->current_frame_id - prev_frame_id;
4749 } else {
4750 diff_frame_id =
4751 (1 << frame_id_length) + cm->current_frame_id - prev_frame_id;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004752 }
David Barker5e70a112017-10-03 14:28:17 +01004753 /* Check current_frame_id for conformance */
4754 if (prev_frame_id == cm->current_frame_id ||
4755 diff_frame_id >= (1 << (frame_id_length - 1))) {
4756 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
4757 "Invalid value of current_frame_id");
4758 }
4759 }
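    /* Worked example of the wrap-around check above (illustrative numbers
     * only): with frame_id_length = 8 the IDs live in [0, 255]. If
     * prev_frame_id = 250 and current_frame_id = 3, then
     *   diff_frame_id = (1 << 8) + 3 - 250 = 9,
     * which is below (1 << 7) = 128, so the new ID is accepted. A decoded ID
     * equal to prev_frame_id, or whose forward distance is at least half the
     * ID space, is rejected as a conformance error. */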
4760 /* Check if some frames need to be marked as not valid for referencing */
4761 for (i = 0; i < REF_FRAMES; i++) {
4762 if (cm->frame_type == KEY_FRAME) {
4763 cm->valid_for_referencing[i] = 0;
4764 } else if (cm->current_frame_id - (1 << diff_len) > 0) {
4765 if (cm->ref_frame_id[i] > cm->current_frame_id ||
4766 cm->ref_frame_id[i] < cm->current_frame_id - (1 << diff_len))
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004767 cm->valid_for_referencing[i] = 0;
David Barker5e70a112017-10-03 14:28:17 +01004768 } else {
4769 if (cm->ref_frame_id[i] > cm->current_frame_id &&
4770 cm->ref_frame_id[i] <
4771 (1 << frame_id_length) + cm->current_frame_id - (1 << diff_len))
4772 cm->valid_for_referencing[i] = 0;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004773 }
4774 }
4775 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004776#endif // CONFIG_REFERENCE_BUFFER
Yaowu Xuc27fc142016-08-22 16:08:15 -07004777 if (cm->frame_type == KEY_FRAME) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004778#if !CONFIG_OBU
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02004779 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004780#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004781 pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1;
4782
4783 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
4784 cm->frame_refs[i].idx = INVALID_IDX;
4785 cm->frame_refs[i].buf = NULL;
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07004786#if CONFIG_VAR_REFS
4787 cm->frame_refs[i].is_valid = 0;
4788#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004789 }
4790
4791 setup_frame_size(cm, rb);
Debargha Mukherjeed2630fa2017-09-22 10:32:51 -07004792 setup_sb_size(cm, rb);
4793
Yaowu Xuc27fc142016-08-22 16:08:15 -07004794 if (pbi->need_resync) {
4795 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
4796 pbi->need_resync = 0;
4797 }
Alex Converseeb780e72016-12-13 12:46:41 -08004798#if CONFIG_ANS && ANS_MAX_SYMBOLS
4799 cm->ans_window_size_log2 = aom_rb_read_literal(rb, 4) + 8;
4800#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
hui su24f7b072016-10-12 11:36:24 -07004801 cm->allow_screen_content_tools = aom_rb_read_bit(rb);
RogerZhou3b635242017-09-19 10:06:46 -07004802#if CONFIG_AMVR
4803 if (cm->allow_screen_content_tools) {
4804 if (aom_rb_read_bit(rb)) {
4805 cm->seq_mv_precision_level = 2;
4806 } else {
4807 cm->seq_mv_precision_level = aom_rb_read_bit(rb) ? 0 : 1;
4808 }
4809 } else {
4810 cm->seq_mv_precision_level = 0;
4811 }
4812#endif
Fangwen Fu930c51c2017-05-07 20:39:17 -07004813#if CONFIG_TEMPMV_SIGNALING
4814 cm->use_prev_frame_mvs = 0;
4815#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004816 } else {
hui su24f7b072016-10-12 11:36:24 -07004817 if (cm->intra_only) cm->allow_screen_content_tools = aom_rb_read_bit(rb);
Thomas Daedea6a854b2017-06-22 17:49:11 -07004818#if CONFIG_TEMPMV_SIGNALING
4819 if (cm->intra_only || cm->error_resilient_mode) cm->use_prev_frame_mvs = 0;
4820#endif
4821#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
4822// The only way to reset all frame contexts to their default values is with a
4823// keyframe.
4824#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07004825 if (cm->error_resilient_mode) {
4826 cm->reset_frame_context = RESET_FRAME_CONTEXT_ALL;
4827 } else {
4828 if (cm->intra_only) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004829 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07004830 ? RESET_FRAME_CONTEXT_ALL
4831 : RESET_FRAME_CONTEXT_CURRENT;
4832 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07004833 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07004834 ? RESET_FRAME_CONTEXT_CURRENT
4835 : RESET_FRAME_CONTEXT_NONE;
4836 if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT)
Yaowu Xuf883b422016-08-30 14:01:10 -07004837 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07004838 ? RESET_FRAME_CONTEXT_ALL
4839 : RESET_FRAME_CONTEXT_CURRENT;
4840 }
4841 }
Thomas Daedea6a854b2017-06-22 17:49:11 -07004842#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004843
4844 if (cm->intra_only) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004845#if !CONFIG_OBU
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02004846 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004847#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004848
Yaowu Xuf883b422016-08-30 14:01:10 -07004849 pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004850 setup_frame_size(cm, rb);
Pavel Frolovea3dd3a2017-09-25 16:06:19 +03004851 setup_sb_size(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004852 if (pbi->need_resync) {
4853 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
4854 pbi->need_resync = 0;
4855 }
Alex Converseeb780e72016-12-13 12:46:41 -08004856#if CONFIG_ANS && ANS_MAX_SYMBOLS
4857 cm->ans_window_size_log2 = aom_rb_read_literal(rb, 4) + 8;
4858#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004859 } else if (pbi->need_resync != 1) { /* Skip if need resync */
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004860#if CONFIG_OBU
4861 pbi->refresh_frame_flags = (cm->frame_type == S_FRAME)
4862 ? ~(1 << REF_FRAMES)
4863 : aom_rb_read_literal(rb, REF_FRAMES);
4864#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004865 pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004866#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004867
4868#if CONFIG_EXT_REFS
4869 if (!pbi->refresh_frame_flags) {
4870 // NOTE: "pbi->refresh_frame_flags == 0" indicates that the coded frame
4871 // will not be used as a reference
4872 cm->is_reference_frame = 0;
4873 }
4874#endif // CONFIG_EXT_REFS
4875
4876 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004877 const int ref = aom_rb_read_literal(rb, REF_FRAMES_LOG2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004878 const int idx = cm->ref_frame_map[ref];
Rupert Swarbrick5eb471c2017-10-02 16:06:54 +01004879
4880 // Most of the time, streams start with a keyframe. In that case,
4881 // ref_frame_map will have been filled in at that point and will not
4882 // contain any -1's. However, streams are explicitly allowed to start
4883 // with an intra-only frame, so long as they don't then signal a
4884 // reference to a slot that hasn't been set yet. That's what we are
4885 // checking here.
4886 if (idx == -1)
4887 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
4888 "Inter frame requests nonexistent reference");
4889
Yaowu Xuc27fc142016-08-22 16:08:15 -07004890 RefBuffer *const ref_frame = &cm->frame_refs[i];
4891 ref_frame->idx = idx;
4892 ref_frame->buf = &frame_bufs[idx].buf;
Zoe Liu17af2742017-10-06 10:36:42 -07004893#if CONFIG_FRAME_SIGN_BIAS
4894#if CONFIG_OBU
4895 // NOTE: For the scenario of (cm->frame_type != S_FRAME),
4896 // ref_frame_sign_bias will be reset based on frame offsets.
4897 cm->ref_frame_sign_bias[LAST_FRAME + i] = 0;
4898#endif // CONFIG_OBU
4899#else // !CONFIG_FRAME_SIGN_BIAS
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004900#if CONFIG_OBU
4901 cm->ref_frame_sign_bias[LAST_FRAME + i] =
4902 (cm->frame_type == S_FRAME) ? 0 : aom_rb_read_bit(rb);
Zoe Liu17af2742017-10-06 10:36:42 -07004903#else // !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07004904 cm->ref_frame_sign_bias[LAST_FRAME + i] = aom_rb_read_bit(rb);
Zoe Liu17af2742017-10-06 10:36:42 -07004905#endif // CONFIG_OBU
4906#endif // CONFIG_FRAME_SIGN_BIAS
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004907#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004908 if (cm->seq_params.frame_id_numbers_present_flag) {
4909 int frame_id_length = cm->seq_params.frame_id_length_minus7 + 7;
4910 int diff_len = cm->seq_params.delta_frame_id_length_minus2 + 2;
4911 int delta_frame_id_minus1 = aom_rb_read_literal(rb, diff_len);
4912 int ref_frame_id =
4913 ((cm->current_frame_id - (delta_frame_id_minus1 + 1) +
4914 (1 << frame_id_length)) %
4915 (1 << frame_id_length));
4916 /* Compare the ID derived from delta_frame_id_minus1 with the ID stored
4917 * via refresh_frame_flags, and check that the slot is valid for referencing */
4918 if (ref_frame_id != cm->ref_frame_id[ref] ||
4919 cm->valid_for_referencing[ref] == 0)
4920 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
4921 "Reference buffer frame ID mismatch");
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004922 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004923#endif // CONFIG_REFERENCE_BUFFER
Yaowu Xuc27fc142016-08-22 16:08:15 -07004924 }
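      /* Worked example of the reference frame ID derivation above
       * (illustrative numbers only): with frame_id_length = 8 and
       * delta_frame_id_minus1 = 10 (a diff_len-bit literal), a
       * current_frame_id of 3 gives
       *   ref_frame_id = (3 - 11 + 256) % 256 = 248,
       * i.e. the delta is subtracted modulo the frame-ID space, and the
       * result must both match the stored cm->ref_frame_id[ref] and refer to
       * a slot still marked valid for referencing. */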
4925
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07004926#if CONFIG_VAR_REFS
4927 check_valid_ref_frames(cm);
4928#endif // CONFIG_VAR_REFS
4929
Arild Fuldseth842e9b02016-09-02 13:00:05 +02004930#if CONFIG_FRAME_SIZE
4931 if (cm->error_resilient_mode == 0) {
4932 setup_frame_size_with_refs(cm, rb);
4933 } else {
4934 setup_frame_size(cm, rb);
4935 }
4936#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07004937 setup_frame_size_with_refs(cm, rb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02004938#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004939
RogerZhou3b635242017-09-19 10:06:46 -07004940#if CONFIG_AMVR
4941 if (cm->seq_mv_precision_level == 2) {
4942 cm->cur_frame_mv_precision_level = aom_rb_read_bit(rb) ? 0 : 1;
4943 } else {
4944 cm->cur_frame_mv_precision_level = cm->seq_mv_precision_level;
4945 }
4946#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07004947 cm->allow_high_precision_mv = aom_rb_read_bit(rb);
Angie Chiang5678ad92016-11-21 09:38:40 -08004948 cm->interp_filter = read_frame_interp_filter(rb);
Fangwen Fu8d164de2016-12-14 13:40:54 -08004949#if CONFIG_TEMPMV_SIGNALING
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01004950 if (frame_might_use_prev_frame_mvs(cm))
Fangwen Fu8d164de2016-12-14 13:40:54 -08004951 cm->use_prev_frame_mvs = aom_rb_read_bit(rb);
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01004952 else
4953 cm->use_prev_frame_mvs = 0;
Fangwen Fu8d164de2016-12-14 13:40:54 -08004954#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004955 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
4956 RefBuffer *const ref_buf = &cm->frame_refs[i];
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02004957#if CONFIG_HIGHBITDEPTH
Yaowu Xuf883b422016-08-30 14:01:10 -07004958 av1_setup_scale_factors_for_frame(
Yaowu Xuc27fc142016-08-22 16:08:15 -07004959 &ref_buf->sf, ref_buf->buf->y_crop_width,
4960 ref_buf->buf->y_crop_height, cm->width, cm->height,
4961 cm->use_highbitdepth);
4962#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004963 av1_setup_scale_factors_for_frame(
Yaowu Xuc27fc142016-08-22 16:08:15 -07004964 &ref_buf->sf, ref_buf->buf->y_crop_width,
4965 ref_buf->buf->y_crop_height, cm->width, cm->height);
4966#endif
4967 }
4968 }
4969 }
Jingning Hanc723b342017-08-24 11:19:46 -07004970
Jingning Hanea255c92017-09-29 08:12:09 -07004971#if CONFIG_FRAME_MARKER
Jingning Hanc723b342017-08-24 11:19:46 -07004972 if (cm->show_frame == 0) {
4973 cm->frame_offset = cm->current_video_frame + aom_rb_read_literal(rb, 4);
4974 } else {
4975 cm->frame_offset = cm->current_video_frame;
4976 }
Zoe Liu17af2742017-10-06 10:36:42 -07004977 av1_setup_frame_buf_refs(cm);
4978
4979#if CONFIG_FRAME_SIGN_BIAS
4980#if CONFIG_OBU
4981 if (cm->frame_type != S_FRAME)
4982#endif // CONFIG_OBU
4983 av1_setup_frame_sign_bias(cm);
4984#define FRAME_SIGN_BIAS_DEBUG 0
4985#if FRAME_SIGN_BIAS_DEBUG
4986 {
4987 printf("\n\nDECODER: Frame=%d, show_frame=%d:", cm->current_video_frame,
4988 cm->show_frame);
4989 MV_REFERENCE_FRAME ref_frame;
4990 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
4991 printf(" sign_bias[%d]=%d", ref_frame,
4992 cm->ref_frame_sign_bias[ref_frame]);
4993 }
4994 printf("\n");
4995 }
4996#endif // FRAME_SIGN_BIAS_DEBUG
4997#undef FRAME_SIGN_BIAS_DEBUG
4998#endif // CONFIG_FRAME_SIGN_BIAS
4999#endif // CONFIG_FRAME_MARKER
Jingning Hanc723b342017-08-24 11:19:46 -07005000
Fangwen Fu8d164de2016-12-14 13:40:54 -08005001#if CONFIG_TEMPMV_SIGNALING
5002 cm->cur_frame->intra_only = cm->frame_type == KEY_FRAME || cm->intra_only;
5003#endif
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005004
5005#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01005006 if (cm->seq_params.frame_id_numbers_present_flag) {
5007 /* If bitmask is set, update reference frame id values and
5008 mark frames as valid for reference */
5009 int refresh_frame_flags =
5010 cm->frame_type == KEY_FRAME ? 0xFF : pbi->refresh_frame_flags;
5011 for (i = 0; i < REF_FRAMES; i++) {
5012 if ((refresh_frame_flags >> i) & 1) {
5013 cm->ref_frame_id[i] = cm->current_frame_id;
5014 cm->valid_for_referencing[i] = 1;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005015 }
5016 }
5017 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07005018#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005019
Yaowu Xuc27fc142016-08-22 16:08:15 -07005020 get_frame_new_buffer(cm)->bit_depth = cm->bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005021 get_frame_new_buffer(cm)->color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07005022#if CONFIG_COLORSPACE_HEADERS
5023 get_frame_new_buffer(cm)->transfer_function = cm->transfer_function;
5024 get_frame_new_buffer(cm)->chroma_sample_position = cm->chroma_sample_position;
5025#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005026 get_frame_new_buffer(cm)->color_range = cm->color_range;
5027 get_frame_new_buffer(cm)->render_width = cm->render_width;
5028 get_frame_new_buffer(cm)->render_height = cm->render_height;
5029
5030 if (pbi->need_resync) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005031 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005032 "Keyframe / intra-only frame required to reset decoder"
5033 " state");
5034 }
5035
5036 if (!cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005037 cm->refresh_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07005038 ? REFRESH_FRAME_CONTEXT_FORWARD
5039 : REFRESH_FRAME_CONTEXT_BACKWARD;
5040 } else {
5041 cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_FORWARD;
5042 }
Thomas Daededa4d8b92017-06-05 15:44:14 -07005043#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuf883b422016-08-30 14:01:10 -07005044 // This flag will be overridden by the call to av1_setup_past_independence
Yaowu Xuc27fc142016-08-22 16:08:15 -07005045 // below, forcing the use of context 0 for those frame types.
Yaowu Xuf883b422016-08-30 14:01:10 -07005046 cm->frame_context_idx = aom_rb_read_literal(rb, FRAME_CONTEXTS_LOG2);
Thomas Daededa4d8b92017-06-05 15:44:14 -07005047#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005048
5049 // Generate next_ref_frame_map.
5050 lock_buffer_pool(pool);
5051 for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
5052 if (mask & 1) {
5053 cm->next_ref_frame_map[ref_index] = cm->new_fb_idx;
5054 ++frame_bufs[cm->new_fb_idx].ref_count;
5055 } else {
5056 cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
5057 }
5058 // Current thread holds the reference frame.
5059 if (cm->ref_frame_map[ref_index] >= 0)
5060 ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
5061 ++ref_index;
5062 }
5063
5064 for (; ref_index < REF_FRAMES; ++ref_index) {
5065 cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
5066
5067 // Current thread holds the reference frame.
5068 if (cm->ref_frame_map[ref_index] >= 0)
5069 ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
5070 }
5071 unlock_buffer_pool(pool);
5072 pbi->hold_ref_buf = 1;
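  /* Worked example of the bitmask loop above (illustrative): with
   * refresh_frame_flags = 0x05 (binary 00000101), slots 0 and 2 of
   * next_ref_frame_map are pointed at the newly decoded buffer and
   * frame_bufs[cm->new_fb_idx].ref_count is incremented twice; every other
   * slot keeps its previous mapping. In addition, whatever buffer each slot
   * previously held gets one extra reference here, which this thread keeps
   * (pbi->hold_ref_buf) until the frame is finished. */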
5073
5074 if (frame_is_intra_only(cm) || cm->error_resilient_mode)
Yaowu Xuf883b422016-08-30 14:01:10 -07005075 av1_setup_past_independence(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005076
Yaowu Xuc27fc142016-08-22 16:08:15 -07005077 setup_loopfilter(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005078 setup_quantization(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005079 xd->bd = (int)cm->bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005080
hui su0d103572017-03-01 17:58:01 -08005081#if CONFIG_Q_ADAPT_PROBS
Yaowu Xuf883b422016-08-30 14:01:10 -07005082 av1_default_coef_probs(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005083 if (cm->frame_type == KEY_FRAME || cm->error_resilient_mode ||
5084 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL) {
5085 for (i = 0; i < FRAME_CONTEXTS; ++i) cm->frame_contexts[i] = *cm->fc;
5086 } else if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT) {
Thomas Daededa4d8b92017-06-05 15:44:14 -07005087#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
5088 if (cm->frame_refs[0].idx <= 0) {
5089 cm->frame_contexts[cm->frame_refs[0].idx] = *cm->fc;
5090 }
5091#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07005092 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
Thomas Daededa4d8b92017-06-05 15:44:14 -07005093#endif // CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuc27fc142016-08-22 16:08:15 -07005094 }
hui su0d103572017-03-01 17:58:01 -08005095#endif // CONFIG_Q_ADAPT_PROBS
Yaowu Xuc27fc142016-08-22 16:08:15 -07005096
5097 setup_segmentation(cm, rb);
5098
Arild Fuldseth07441162016-08-15 15:07:52 +02005099 {
5100 struct segmentation *const seg = &cm->seg;
5101 int segment_quantizer_active = 0;
5102 for (i = 0; i < MAX_SEGMENTS; i++) {
5103 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
5104 segment_quantizer_active = 1;
5105 }
5106 }
5107
Thomas Daviesf6936102016-09-05 16:51:31 +01005108 cm->delta_q_res = 1;
Fangwen Fu231fe422017-04-24 17:52:29 -07005109#if CONFIG_EXT_DELTA_Q
5110 cm->delta_lf_res = 1;
Jonathan Matthewsa48b1e62017-09-01 14:58:47 +01005111 cm->delta_lf_present_flag = 0;
Cheng Chen880166a2017-10-02 17:48:48 -07005112#if CONFIG_LOOPFILTER_LEVEL
5113 cm->delta_lf_multi = 0;
5114#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07005115#endif
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01005116 if (segment_quantizer_active == 0 && cm->base_qindex > 0) {
Arild Fuldseth07441162016-08-15 15:07:52 +02005117 cm->delta_q_present_flag = aom_rb_read_bit(rb);
5118 } else {
5119 cm->delta_q_present_flag = 0;
5120 }
5121 if (cm->delta_q_present_flag) {
5122 xd->prev_qindex = cm->base_qindex;
Thomas Daviesf6936102016-09-05 16:51:31 +01005123 cm->delta_q_res = 1 << aom_rb_read_literal(rb, 2);
Fangwen Fu231fe422017-04-24 17:52:29 -07005124#if CONFIG_EXT_DELTA_Q
Frederic Barbier237c53e2017-06-20 16:57:27 +02005125 assert(!segment_quantizer_active);
Fangwen Fu231fe422017-04-24 17:52:29 -07005126 cm->delta_lf_present_flag = aom_rb_read_bit(rb);
5127 if (cm->delta_lf_present_flag) {
Cheng Chen880166a2017-10-02 17:48:48 -07005128 xd->prev_delta_lf_from_base = 0;
5129 cm->delta_lf_res = 1 << aom_rb_read_literal(rb, 2);
Cheng Chena97394f2017-09-27 15:05:14 -07005130#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen880166a2017-10-02 17:48:48 -07005131 cm->delta_lf_multi = aom_rb_read_bit(rb);
Cheng Chena97394f2017-09-27 15:05:14 -07005132 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id)
5133 xd->prev_delta_lf[lf_id] = 0;
5134#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07005135 }
5136#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02005137 }
5138 }
RogerZhou3b635242017-09-19 10:06:46 -07005139#if CONFIG_AMVR
5140 xd->cur_frame_mv_precision_level = cm->cur_frame_mv_precision_level;
5141#endif
Thomas Davies3ab20b42017-09-19 10:30:53 +01005142
Urvang Joshi454280d2016-10-14 16:51:44 -07005143 for (i = 0; i < MAX_SEGMENTS; ++i) {
5144 const int qindex = cm->seg.enabled
5145 ? av1_get_qindex(&cm->seg, i, cm->base_qindex)
5146 : cm->base_qindex;
5147 xd->lossless[i] = qindex == 0 && cm->y_dc_delta_q == 0 &&
5148 cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
5149 xd->qindex[i] = qindex;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005150 }
Thomas Daedef636d5c2017-06-29 13:48:27 -07005151 cm->all_lossless = all_lossless(cm, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005152 setup_segmentation_dequant(cm);
Thomas Daedef636d5c2017-06-29 13:48:27 -07005153#if CONFIG_CDEF
5154 if (!cm->all_lossless) {
5155 setup_cdef(cm, rb);
5156 }
5157#endif
5158#if CONFIG_LOOP_RESTORATION
5159 decode_restoration_mode(cm, rb);
5160#endif // CONFIG_LOOP_RESTORATION
5161 cm->tx_mode = read_tx_mode(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005162 cm->reference_mode = read_frame_reference_mode(cm, rb);
Debargha Mukherjee6f3c8982017-09-22 21:14:01 -07005163 if (cm->reference_mode != SINGLE_REFERENCE) setup_compound_reference_mode(cm);
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07005164 read_compound_tools(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005165
Sarah Parkere68a3e42017-02-16 14:03:24 -08005166#if CONFIG_EXT_TX
5167 cm->reduced_tx_set_used = aom_rb_read_bit(rb);
5168#endif // CONFIG_EXT_TX
5169
Angie Chiang6dbffbf2017-10-06 16:59:54 -07005170#if CONFIG_ADAPT_SCAN
5171 cm->use_adapt_scan = aom_rb_read_bit(rb);
5172 // TODO(angiebird): call av1_init_scan_order only when use_adapt_scan
5173 // switches from 1 to 0
5174 if (cm->use_adapt_scan == 0) av1_init_scan_order(cm);
5175#endif // CONFIG_ADAPT_SCAN
5176
Pavel Frolov57c36e12017-09-12 15:00:40 +03005177#if CONFIG_EXT_REFS || CONFIG_TEMPMV_SIGNALING
5178 // NOTE(zoeliu): As cm->prev_frame can neither be a frame with
5179 // show_existing_frame=1 nor a frame that is not used as
5180 // a reference, it is probable that by the time it is being
5181 // referred to, the frame buffer it originally points to may
5182 // already have expired and been reassigned to the current
5183 // newly coded frame. Hence, we need to check whether this is
5184 // the case, and if yes, we have 2 choices:
5185 // (1) Simply disable the use of previous frame mvs; or
5186 // (2) Have cm->prev_frame point to one reference frame buffer,
5187 // e.g. LAST_FRAME.
5188 if (!dec_is_ref_frame_buf(pbi, cm->prev_frame)) {
5189 // Reassign the LAST_FRAME buffer to cm->prev_frame.
5190 cm->prev_frame =
5191 cm->frame_refs[LAST_FRAME - LAST_FRAME].idx != INVALID_IDX
5192 ? &cm->buffer_pool
5193 ->frame_bufs[cm->frame_refs[LAST_FRAME - LAST_FRAME].idx]
5194 : NULL;
5195 }
5196#endif // CONFIG_EXT_REFS || CONFIG_TEMPMV_SIGNALING
5197
5198#if CONFIG_TEMPMV_SIGNALING
5199 if (cm->use_prev_frame_mvs && !frame_can_use_prev_frame_mvs(cm)) {
5200 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
5201 "Frame wrongly requests previous frame MVs");
5202 }
5203#else
5204 cm->use_prev_frame_mvs = !cm->error_resilient_mode && cm->prev_frame &&
5205#if CONFIG_FRAME_SUPERRES
5206 cm->width == cm->last_width &&
5207 cm->height == cm->last_height &&
5208#else
5209 cm->width == cm->prev_frame->buf.y_crop_width &&
5210 cm->height == cm->prev_frame->buf.y_crop_height &&
5211#endif // CONFIG_FRAME_SUPERRES
5212 !cm->last_intra_only && cm->last_show_frame &&
5213 (cm->last_frame_type != KEY_FRAME);
5214#endif // CONFIG_TEMPMV_SIGNALING
5215
Sarah Parker3e579a62017-08-23 16:53:20 -07005216#if CONFIG_GLOBAL_MOTION
Sarah Parkerf289f9f2017-09-12 18:50:02 -07005217 if (!frame_is_intra_only(cm)) read_global_motion(cm, rb);
Sarah Parker3e579a62017-08-23 16:53:20 -07005218#endif
5219
Yaowu Xuc27fc142016-08-22 16:08:15 -07005220 read_tile_info(pbi, rb);
Debargha Mukherjee2eada612017-09-22 15:37:39 -07005221 if (use_compressed_header(cm)) {
5222 sz = aom_rb_read_literal(rb, 16);
5223 if (sz == 0)
5224 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
5225 "Invalid header size");
5226 } else {
5227 sz = 0;
5228 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005229 return sz;
5230}
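
/* Usage note (see av1_decode_frame_headers_and_setup() later in this file):
 * the value returned here is the size in bytes of the compressed header that
 * follows the uncompressed header, or 0 when no compressed header is
 * signalled. The caller stores it in pbi->first_partition_size and, when it
 * is non-zero, verifies that many bytes are actually present in the buffer
 * before decoding continues. */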
5231
Yaowu Xuc27fc142016-08-22 16:08:15 -07005232#if CONFIG_SUPERTX
Yaowu Xuf883b422016-08-30 14:01:10 -07005233static void read_supertx_probs(FRAME_CONTEXT *fc, aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005234 int i, j;
Michael Bebenita6048d052016-08-25 14:40:54 -07005235 if (aom_read(r, GROUP_DIFF_UPDATE_PROB, ACCT_STR)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005236 for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
Jingning Hanfeb517c2016-12-21 16:02:07 -08005237 for (j = TX_8X8; j < TX_SIZES; ++j) {
Michael Bebenita6048d052016-08-25 14:40:54 -07005238 av1_diff_update_prob(r, &fc->supertx_prob[i][j], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005239 }
5240 }
5241 }
5242}
5243#endif // CONFIG_SUPERTX
5244
Yaowu Xuf883b422016-08-30 14:01:10 -07005245static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005246 size_t partition_size) {
Thomas Daviese7154832017-10-03 10:12:17 +01005247#if CONFIG_RESTRICT_COMPRESSED_HDR
5248 (void)pbi;
5249 (void)data;
5250 (void)partition_size;
5251 return 0;
5252#else
Yaowu Xuf883b422016-08-30 14:01:10 -07005253 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005254#if CONFIG_SUPERTX
5255 MACROBLOCKD *const xd = &pbi->mb;
5256#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07005257 aom_reader r;
Thomas Davies599395e2017-07-21 18:02:48 +01005258#if !CONFIG_NEW_MULTISYMBOL
5259 FRAME_CONTEXT *const fc = cm->fc;
Thomas Davies61e3e372017-04-04 16:10:23 +01005260 int i;
Thomas Davies599395e2017-07-21 18:02:48 +01005261#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005262
Alex Converse2cdf0d82016-12-13 13:53:09 -08005263#if CONFIG_ANS && ANS_MAX_SYMBOLS
Alex Converseeb780e72016-12-13 12:46:41 -08005264 r.window_size = 1 << cm->ans_window_size_log2;
Alex Converse2cdf0d82016-12-13 13:53:09 -08005265#endif
Alex Converse346440b2017-01-03 13:47:37 -08005266 if (aom_reader_init(&r, data, partition_size, pbi->decrypt_cb,
5267 pbi->decrypt_state))
Yaowu Xuf883b422016-08-30 14:01:10 -07005268 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005269 "Failed to allocate bool decoder 0");
Yaowu Xuc27fc142016-08-22 16:08:15 -07005270
Yue Chend6bdd462017-07-19 16:05:43 -07005271#if CONFIG_RECT_TX_EXT && (CONFIG_EXT_TX || CONFIG_VAR_TX)
Yue Chen56e226e2017-05-02 16:21:40 -07005272 if (cm->tx_mode == TX_MODE_SELECT)
5273 av1_diff_update_prob(&r, &fc->quarter_tx_size_prob, ACCT_STR);
Yue Chend6bdd462017-07-19 16:05:43 -07005274#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005275
Jonathan Matthews12d127e2017-09-29 15:10:26 +01005276#if CONFIG_LV_MAP && !LV_MAP_PROB
Angie Chiang85e3b962017-10-01 16:04:43 -07005277 av1_read_txb_probs(fc, cm->tx_mode, &r, &cm->counts);
Jonathan Matthews12d127e2017-09-29 15:10:26 +01005278#endif // CONFIG_LV_MAP && !LV_MAP_PROB
Angie Chiang800df032017-03-22 11:14:12 -07005279
Thomas Davies985bfc32017-06-27 16:51:26 +01005280#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07005281#if CONFIG_VAR_TX
David Barker16c64e32017-08-23 16:54:59 +01005282 if (cm->tx_mode == TX_MODE_SELECT)
5283 for (i = 0; i < TXFM_PARTITION_CONTEXTS; ++i)
5284 av1_diff_update_prob(&r, &fc->txfm_partition_prob[i], ACCT_STR);
Yushin Cho77bba8d2016-11-04 16:36:56 -07005285#endif // CONFIG_VAR_TX
Thomas Davies61e3e372017-04-04 16:10:23 +01005286 for (i = 0; i < SKIP_CONTEXTS; ++i)
5287 av1_diff_update_prob(&r, &fc->skip_probs[i], ACCT_STR);
5288#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005289
Debargha Mukherjee801cc922017-09-22 17:22:50 -07005290 if (!frame_is_intra_only(cm)) {
Thomas Davies149eda52017-06-12 18:11:55 +01005291#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07005292 read_inter_mode_probs(fc, &r);
Thomas Davies149eda52017-06-12 18:11:55 +01005293#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005294
Yue Chen4d26acb2017-05-01 12:28:34 -07005295#if CONFIG_INTERINTRA
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07005296 if (cm->reference_mode != COMPOUND_REFERENCE &&
5297 cm->allow_interintra_compound) {
Thomas Daviescff91712017-07-07 11:49:55 +01005298#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07005299 for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
5300 if (is_interintra_allowed_bsize_group(i)) {
Michael Bebenita6048d052016-08-25 14:40:54 -07005301 av1_diff_update_prob(&r, &fc->interintra_prob[i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005302 }
5303 }
Thomas Daviescff91712017-07-07 11:49:55 +01005304#endif
Thomas Daviescff91712017-07-07 11:49:55 +01005305#if CONFIG_WEDGE && !CONFIG_NEW_MULTISYMBOL
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01005306#if CONFIG_EXT_PARTITION_TYPES
5307 int block_sizes_to_update = BLOCK_SIZES_ALL;
5308#else
5309 int block_sizes_to_update = BLOCK_SIZES;
5310#endif
5311 for (i = 0; i < block_sizes_to_update; i++) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005312 if (is_interintra_allowed_bsize(i) && is_interintra_wedge_used(i)) {
Michael Bebenita6048d052016-08-25 14:40:54 -07005313 av1_diff_update_prob(&r, &fc->wedge_interintra_prob[i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005314 }
5315 }
Debargha Mukherjeeed057992017-05-07 05:15:06 -07005316#endif // CONFIG_WEDGE
Yaowu Xuc27fc142016-08-22 16:08:15 -07005317 }
Yue Chen4d26acb2017-05-01 12:28:34 -07005318#endif // CONFIG_INTERINTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07005319
Thomas Daviesf6ad9352017-04-19 11:38:06 +01005320#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07005321 for (i = 0; i < INTRA_INTER_CONTEXTS; i++)
Michael Bebenita6048d052016-08-25 14:40:54 -07005322 av1_diff_update_prob(&r, &fc->intra_inter_prob[i], ACCT_STR);
Thomas Daviesf6ad9352017-04-19 11:38:06 +01005323#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005324
David Barker037ee412017-09-19 12:43:46 +01005325#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07005326 read_frame_reference_mode_probs(cm, &r);
David Barker037ee412017-09-19 12:43:46 +01005327#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005328
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02005329#if CONFIG_COMPOUND_SINGLEREF
Zoe Liu85b66462017-04-20 14:28:19 -07005330 for (i = 0; i < COMP_INTER_MODE_CONTEXTS; i++)
5331 av1_diff_update_prob(&r, &fc->comp_inter_mode_prob[i], ACCT_STR);
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02005332#endif // CONFIG_COMPOUND_SINGLEREF
Zoe Liu85b66462017-04-20 14:28:19 -07005333
Thomas Davies599395e2017-07-21 18:02:48 +01005334#if !CONFIG_NEW_MULTISYMBOL
RogerZhou3b635242017-09-19 10:06:46 -07005335#if CONFIG_AMVR
5336 if (cm->cur_frame_mv_precision_level == 0) {
5337#endif
5338 for (i = 0; i < NMV_CONTEXTS; ++i)
5339 read_mv_probs(&fc->nmvc[i], cm->allow_high_precision_mv, &r);
5340#if CONFIG_AMVR
5341 }
5342#endif
Thomas Davies599395e2017-07-21 18:02:48 +01005343#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005344#if CONFIG_SUPERTX
5345 if (!xd->lossless[0]) read_supertx_probs(fc, &r);
5346#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005347 }
5348
Yaowu Xuf883b422016-08-30 14:01:10 -07005349 return aom_reader_has_error(&r);
Thomas Daviese7154832017-10-03 10:12:17 +01005350#endif // CONFIG_RESTRICT_COMPRESSED_HDR
Yaowu Xuc27fc142016-08-22 16:08:15 -07005351}
Debargha Mukherjee2eada612017-09-22 15:37:39 -07005352
Yaowu Xuc27fc142016-08-22 16:08:15 -07005353#ifdef NDEBUG
5354#define debug_check_frame_counts(cm) (void)0
5355#else // !NDEBUG
5356// Counts should only be incremented when frame_parallel_decoding_mode and
5357// error_resilient_mode are disabled.
Yaowu Xuf883b422016-08-30 14:01:10 -07005358static void debug_check_frame_counts(const AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005359 FRAME_COUNTS zero_counts;
Yaowu Xuf883b422016-08-30 14:01:10 -07005360 av1_zero(zero_counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005361 assert(cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD ||
5362 cm->error_resilient_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005363 assert(!memcmp(cm->counts.partition, zero_counts.partition,
5364 sizeof(cm->counts.partition)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005365 assert(!memcmp(cm->counts.switchable_interp, zero_counts.switchable_interp,
5366 sizeof(cm->counts.switchable_interp)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005367 assert(!memcmp(cm->counts.inter_compound_mode,
5368 zero_counts.inter_compound_mode,
5369 sizeof(cm->counts.inter_compound_mode)));
Debargha Mukherjeebcfb0e12017-05-11 20:09:16 -07005370#if CONFIG_INTERINTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07005371 assert(!memcmp(cm->counts.interintra, zero_counts.interintra,
5372 sizeof(cm->counts.interintra)));
Debargha Mukherjeebcfb0e12017-05-11 20:09:16 -07005373#if CONFIG_WEDGE
Yaowu Xuc27fc142016-08-22 16:08:15 -07005374 assert(!memcmp(cm->counts.wedge_interintra, zero_counts.wedge_interintra,
5375 sizeof(cm->counts.wedge_interintra)));
Debargha Mukherjeebcfb0e12017-05-11 20:09:16 -07005376#endif // CONFIG_WEDGE
5377#endif // CONFIG_INTERINTRA
Sarah Parker6fddd182016-11-10 20:57:20 -08005378 assert(!memcmp(cm->counts.compound_interinter,
5379 zero_counts.compound_interinter,
5380 sizeof(cm->counts.compound_interinter)));
Yue Chencb60b182016-10-13 15:18:22 -07005381#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
5382 assert(!memcmp(cm->counts.motion_mode, zero_counts.motion_mode,
5383 sizeof(cm->counts.motion_mode)));
5384#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Wei-Ting Lin85a8f702017-06-22 13:55:15 -07005385#if CONFIG_NCOBMC_ADAPT_WEIGHT && CONFIG_MOTION_VAR
5386 assert(!memcmp(cm->counts.ncobmc_mode, zero_counts.ncobmc_mode,
5387 sizeof(cm->counts.ncobmc_mode)));
5388#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005389 assert(!memcmp(cm->counts.intra_inter, zero_counts.intra_inter,
5390 sizeof(cm->counts.intra_inter)));
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02005391#if CONFIG_COMPOUND_SINGLEREF
Zoe Liu85b66462017-04-20 14:28:19 -07005392 assert(!memcmp(cm->counts.comp_inter_mode, zero_counts.comp_inter_mode,
5393 sizeof(cm->counts.comp_inter_mode)));
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02005394#endif // CONFIG_COMPOUND_SINGLEREF
Yaowu Xuc27fc142016-08-22 16:08:15 -07005395 assert(!memcmp(cm->counts.comp_inter, zero_counts.comp_inter,
5396 sizeof(cm->counts.comp_inter)));
Zoe Liuc082bbc2017-05-17 13:31:37 -07005397#if CONFIG_EXT_COMP_REFS
5398 assert(!memcmp(cm->counts.comp_ref_type, zero_counts.comp_ref_type,
5399 sizeof(cm->counts.comp_ref_type)));
5400 assert(!memcmp(cm->counts.uni_comp_ref, zero_counts.uni_comp_ref,
5401 sizeof(cm->counts.uni_comp_ref)));
5402#endif // CONFIG_EXT_COMP_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07005403 assert(!memcmp(cm->counts.single_ref, zero_counts.single_ref,
5404 sizeof(cm->counts.single_ref)));
5405 assert(!memcmp(cm->counts.comp_ref, zero_counts.comp_ref,
5406 sizeof(cm->counts.comp_ref)));
5407#if CONFIG_EXT_REFS
5408 assert(!memcmp(cm->counts.comp_bwdref, zero_counts.comp_bwdref,
5409 sizeof(cm->counts.comp_bwdref)));
5410#endif // CONFIG_EXT_REFS
5411 assert(!memcmp(&cm->counts.tx_size, &zero_counts.tx_size,
5412 sizeof(cm->counts.tx_size)));
5413 assert(!memcmp(cm->counts.skip, zero_counts.skip, sizeof(cm->counts.skip)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005414 assert(
5415 !memcmp(&cm->counts.mv[0], &zero_counts.mv[0], sizeof(cm->counts.mv[0])));
5416 assert(
5417 !memcmp(&cm->counts.mv[1], &zero_counts.mv[1], sizeof(cm->counts.mv[0])));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005418}
5419#endif // NDEBUG
5420
Yaowu Xuf883b422016-08-30 14:01:10 -07005421static struct aom_read_bit_buffer *init_read_bit_buffer(
5422 AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
5423 const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005424 rb->bit_offset = 0;
5425 rb->error_handler = error_handler;
5426 rb->error_handler_data = &pbi->common;
5427 if (pbi->decrypt_cb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005428 const int n = (int)AOMMIN(MAX_AV1_HEADER_SIZE, data_end - data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005429 pbi->decrypt_cb(pbi->decrypt_state, data, clear_data, n);
5430 rb->bit_buffer = clear_data;
5431 rb->bit_buffer_end = clear_data + n;
5432 } else {
5433 rb->bit_buffer = data;
5434 rb->bit_buffer_end = data_end;
5435 }
5436 return rb;
5437}
5438
5439//------------------------------------------------------------------------------
5440
Yaowu Xuf883b422016-08-30 14:01:10 -07005441void av1_read_frame_size(struct aom_read_bit_buffer *rb, int *width,
5442 int *height) {
5443 *width = aom_rb_read_literal(rb, 16) + 1;
5444 *height = aom_rb_read_literal(rb, 16) + 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005445}
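
/* Example of the minus-one coding above (illustrative): a 1920x1080 frame is
 * written as the 16-bit literals 1919 and 1079, so each dimension can range
 * from 1 to 65536 and a zero-sized frame cannot be signalled. */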
5446
Yaowu Xuf883b422016-08-30 14:01:10 -07005447BITSTREAM_PROFILE av1_read_profile(struct aom_read_bit_buffer *rb) {
5448 int profile = aom_rb_read_bit(rb);
5449 profile |= aom_rb_read_bit(rb) << 1;
5450 if (profile > 2) profile += aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005451 return (BITSTREAM_PROFILE)profile;
5452}
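
/* Worked example of the profile coding above (illustrative): the first two
 * bits are read least-significant first, so the bit pair (0, 1) yields
 * profile 2. Only when both bits are set (profile 3) is a third bit read and
 * added, leaving room for additional profile values. */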
5453
Yaowu Xu4ff59b52017-04-24 12:41:56 -07005454static void make_update_tile_list_dec(AV1Decoder *pbi, int tile_rows,
5455 int tile_cols, FRAME_CONTEXT *ec_ctxs[]) {
Thomas Davies028b57f2017-02-22 16:42:11 +00005456 int i;
5457 for (i = 0; i < tile_rows * tile_cols; ++i)
5458 ec_ctxs[i] = &pbi->tile_data[i].tctx;
5459}
Thomas Davies028b57f2017-02-22 16:42:11 +00005460
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07005461#if CONFIG_FRAME_SUPERRES
5462void superres_post_decode(AV1Decoder *pbi) {
5463 AV1_COMMON *const cm = &pbi->common;
5464 BufferPool *const pool = cm->buffer_pool;
5465
5466 if (av1_superres_unscaled(cm)) return;
5467
5468 lock_buffer_pool(pool);
5469 av1_superres_upscale(cm, pool);
5470 unlock_buffer_pool(pool);
5471}
5472#endif // CONFIG_FRAME_SUPERRES
5473
Yi Luo10e23002017-07-31 11:54:43 -07005474static void dec_setup_frame_boundary_info(AV1_COMMON *const cm) {
David Barker5c06a642017-08-18 13:18:16 +01005475// Note: When LOOPFILTERING_ACROSS_TILES is enabled, we need to clear the
5476// boundary information every frame, since the tile boundaries may
5477// change every frame (particularly when dependent-horztiles is also
5478// enabled); when it is disabled, the only information stored is the frame
5479// boundaries, which only depend on the frame size.
5480#if !CONFIG_LOOPFILTERING_ACROSS_TILES
5481 if (cm->width != cm->last_width || cm->height != cm->last_height)
5482#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
5483 {
Yi Luo10e23002017-07-31 11:54:43 -07005484 int row, col;
5485 for (row = 0; row < cm->mi_rows; ++row) {
5486 MODE_INFO *mi = cm->mi + row * cm->mi_stride;
5487 for (col = 0; col < cm->mi_cols; ++col) {
5488 mi->mbmi.boundary_info = 0;
5489 mi++;
5490 }
5491 }
5492 av1_setup_frame_boundary_info(cm);
5493 }
5494}
5495
size_t av1_decode_frame_headers_and_setup(AV1Decoder *pbi, const uint8_t *data,
                                          const uint8_t *data_end,
                                          const uint8_t **p_data_end) {
  AV1_COMMON *const cm = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;
  struct aom_read_bit_buffer rb;
  uint8_t clear_data[MAX_AV1_HEADER_SIZE];
  size_t first_partition_size;
  YV12_BUFFER_CONFIG *new_fb;
#if CONFIG_EXT_REFS || CONFIG_TEMPMV_SIGNALING
  RefBuffer *last_fb_ref_buf = &cm->frame_refs[LAST_FRAME - LAST_FRAME];
#endif  // CONFIG_EXT_REFS || CONFIG_TEMPMV_SIGNALING

#if CONFIG_ADAPT_SCAN
  av1_deliver_eob_threshold(cm, xd);
#endif
#if CONFIG_BITSTREAM_DEBUG
  bitstream_queue_set_frame_read(cm->current_video_frame * 2 + cm->show_frame);
#endif

#if CONFIG_GLOBAL_MOTION
  int i;
  for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
    cm->global_motion[i] = default_warp_params;
    cm->cur_frame->global_motion[i] = default_warp_params;
  }
  xd->global_motion = cm->global_motion;
#endif  // CONFIG_GLOBAL_MOTION

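  // The uncompressed header is parsed first; it returns the size of the first
  // partition (the compressed header), which may be zero when no compressed
  // header follows (e.g. show_existing_frame).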
  first_partition_size = read_uncompressed_header(
      pbi, init_read_bit_buffer(pbi, &rb, data, data_end, clear_data));

#if CONFIG_EXT_TILE
  // If cm->single_tile_decoding = 0, the independent decoding of a single tile
  // or a section of a frame is not allowed.
  if (!cm->single_tile_decoding &&
      (pbi->dec_tile_row >= 0 || pbi->dec_tile_col >= 0)) {
    pbi->dec_tile_row = -1;
    pbi->dec_tile_col = -1;
  }
#endif  // CONFIG_EXT_TILE

  pbi->first_partition_size = first_partition_size;
  pbi->uncomp_hdr_size = aom_rb_bytes_read(&rb);
  new_fb = get_frame_new_buffer(cm);
  xd->cur_buf = new_fb;
#if CONFIG_INTRABC
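  // For intra block copy the current frame serves as its own reference, so an
  // identity (1:1) scale factor is set up for it here.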
#if CONFIG_HIGHBITDEPTH
  av1_setup_scale_factors_for_frame(
      &xd->sf_identity, xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
      xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
      cm->use_highbitdepth);
#else
  av1_setup_scale_factors_for_frame(
      &xd->sf_identity, xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
      xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height);
#endif  // CONFIG_HIGHBITDEPTH
#endif  // CONFIG_INTRABC

  if (cm->show_existing_frame) {
    // Showing an existing frame directly; no tile data follows the header.
    *p_data_end = data + aom_rb_bytes_read(&rb);
    return 0;
  }

  data += aom_rb_bytes_read(&rb);
  if (first_partition_size)
    if (!read_is_valid(data, first_partition_size, data_end))
      aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                         "Truncated packet or corrupt header length");

  cm->setup_mi(cm);

#if CONFIG_EXT_REFS || CONFIG_TEMPMV_SIGNALING
  // NOTE(zoeliu): Because cm->prev_frame can be neither a frame coded with
  //               show_existing_frame=1 nor a frame that is not used as a
  //               reference, it is possible that, by the time it is referred
  //               to here, the frame buffer it originally pointed to has
  //               already expired and been reassigned to the current, newly
  //               coded frame. Hence, we need to check whether this is the
  //               case, and if so, we have 2 choices:
  //               (1) Simply disable the use of previous frame mvs; or
  //               (2) Have cm->prev_frame point to one reference frame
  //                   buffer, e.g. LAST_FRAME.
  if (!dec_is_ref_frame_buf(pbi, cm->prev_frame)) {
    // Reassign the LAST_FRAME buffer to cm->prev_frame.
    cm->prev_frame = last_fb_ref_buf->idx != INVALID_IDX
                         ? &cm->buffer_pool->frame_bufs[last_fb_ref_buf->idx]
                         : NULL;
  }
#endif  // CONFIG_EXT_REFS || CONFIG_TEMPMV_SIGNALING

#if CONFIG_TEMPMV_SIGNALING
  if (cm->use_prev_frame_mvs && !frame_can_use_prev_frame_mvs(cm)) {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Frame wrongly requests previous frame MVs");
  }
#else
  cm->use_prev_frame_mvs = !cm->error_resilient_mode && cm->prev_frame &&
#if CONFIG_FRAME_SUPERRES
                           cm->width == cm->last_width &&
                           cm->height == cm->last_height &&
#else
                           cm->width == cm->prev_frame->buf.y_crop_width &&
                           cm->height == cm->prev_frame->buf.y_crop_height &&
#endif  // CONFIG_FRAME_SUPERRES
                           !cm->last_intra_only && cm->last_show_frame &&
                           (cm->last_frame_type != KEY_FRAME);
#endif  // CONFIG_TEMPMV_SIGNALING

#if CONFIG_MFMV
  av1_setup_motion_field(cm);
#endif  // CONFIG_MFMV

  av1_setup_block_planes(xd, cm->subsampling_x, cm->subsampling_y);
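  // Choose the entropy context this frame starts from. Without explicit
  // frame-context signaling, intra-only and error-resilient frames start from
  // the defaults and other frames inherit the context of their first
  // reference; otherwise the coded frame_context_idx selects the context.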
#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
  if (cm->error_resilient_mode || frame_is_intra_only(cm)) {
    // use the default frame context values
    *cm->fc = cm->frame_contexts[FRAME_CONTEXT_DEFAULTS];
    cm->pre_fc = &cm->frame_contexts[FRAME_CONTEXT_DEFAULTS];
  } else {
    *cm->fc = cm->frame_contexts[cm->frame_refs[0].idx];
    cm->pre_fc = &cm->frame_contexts[cm->frame_refs[0].idx];
  }
#else
  *cm->fc = cm->frame_contexts[cm->frame_context_idx];
  cm->pre_fc = &cm->frame_contexts[cm->frame_context_idx];
#endif  // CONFIG_NO_FRAME_CONTEXT_SIGNALING
  if (!cm->fc->initialized)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Uninitialized entropy context.");

  av1_zero(cm->counts);

  xd->corrupted = 0;
  if (first_partition_size) {
    new_fb->corrupted = read_compressed_header(pbi, data, first_partition_size);
    if (new_fb->corrupted)
      aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                         "Decode failed. Frame data header is corrupted.");
  }
  return first_partition_size;
}

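// Decodes the tile data for the tile group covering [startTile, endTile].
// Once the last tile of the frame has been decoded, this also runs the
// post-decode stages (loop filtering, CDEF, superres upscaling and loop
// restoration, as enabled) and the backward adaptation of the frame's
// entropy context.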
void av1_decode_tg_tiles_and_wrapup(AV1Decoder *pbi, const uint8_t *data,
                                    const uint8_t *data_end,
                                    const uint8_t **p_data_end, int startTile,
                                    int endTile, int initialize_flag) {
  AV1_COMMON *const cm = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;
  int context_updated = 0;

#if CONFIG_LOOP_RESTORATION
  if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
    av1_alloc_restoration_buffers(cm);
  }
#endif

#if !CONFIG_LOOPFILTER_LEVEL
  if (cm->lf.filter_level && !cm->skip_loop_filter) {
    av1_loop_filter_frame_init(cm, cm->lf.filter_level, cm->lf.filter_level);
  }
#endif

  // If encoded in frame parallel mode, frame context is ready after decoding
  // the frame header.
  if (cm->frame_parallel_decode && initialize_flag &&
      cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD) {
    AVxWorker *const worker = pbi->frame_worker_owner;
    FrameWorkerData *const frame_worker_data = worker->data1;
    if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD) {
      context_updated = 1;
#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
      cm->frame_contexts[cm->new_fb_idx] = *cm->fc;
#else
      cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
#endif  // CONFIG_NO_FRAME_CONTEXT_SIGNALING
    }
    av1_frameworker_lock_stats(worker);
    pbi->cur_buf->row = -1;
    pbi->cur_buf->col = -1;
    frame_worker_data->frame_context_ready = 1;
    // Signal the main thread that context is ready.
    av1_frameworker_signal_stats(worker);
    av1_frameworker_unlock_stats(worker);
  }

  dec_setup_frame_boundary_info(cm);

  if (pbi->max_threads > 1 && !CONFIG_CB4X4 &&
#if CONFIG_EXT_TILE
      pbi->dec_tile_col < 0 &&  // Decoding all columns
#endif                          // CONFIG_EXT_TILE
      cm->tile_cols > 1) {
    // Multi-threaded tile decoder
    *p_data_end =
        decode_tiles_mt(pbi, data + pbi->first_partition_size, data_end);
    if (!xd->corrupted) {
      if (!cm->skip_loop_filter) {
// If multiple threads are used to decode tiles, then we use those
// threads to do parallel loopfiltering.
#if CONFIG_LOOPFILTER_LEVEL
        av1_loop_filter_frame_mt(
            (YV12_BUFFER_CONFIG *)xd->cur_buf, cm, pbi->mb.plane,
            cm->lf.filter_level[0], cm->lf.filter_level[1], 0, 0,
            pbi->tile_workers, pbi->num_tile_workers, &pbi->lf_row_sync);
#else
        av1_loop_filter_frame_mt((YV12_BUFFER_CONFIG *)xd->cur_buf, cm,
                                 pbi->mb.plane, cm->lf.filter_level, 0, 0,
                                 pbi->tile_workers, pbi->num_tile_workers,
                                 &pbi->lf_row_sync);
#endif  // CONFIG_LOOPFILTER_LEVEL
      }
    } else {
      aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                         "Decode failed. Frame data is corrupted.");
    }
  } else {
#if CONFIG_OBU
    *p_data_end = decode_tiles(pbi, data, data_end, startTile, endTile);
#else
    *p_data_end = decode_tiles(
        pbi, data + pbi->uncomp_hdr_size + pbi->first_partition_size, data_end,
        startTile, endTile);
#endif
  }

  if (endTile != cm->tile_rows * cm->tile_cols - 1) {
    return;
  }

#if CONFIG_STRIPED_LOOP_RESTORATION
  if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
    av1_loop_restoration_save_boundary_lines(&pbi->cur_buf->buf, cm);
  }
#endif

#if CONFIG_CDEF
  if (!cm->skip_loop_filter && !cm->all_lossless) {
    av1_cdef_frame(&pbi->cur_buf->buf, cm, &pbi->mb);
  }
#endif  // CONFIG_CDEF

#if CONFIG_FRAME_SUPERRES
  superres_post_decode(pbi);
#endif  // CONFIG_FRAME_SUPERRES

#if CONFIG_LOOP_RESTORATION
  if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
    aom_extend_frame_borders((YV12_BUFFER_CONFIG *)xd->cur_buf);
    av1_loop_restoration_frame((YV12_BUFFER_CONFIG *)xd->cur_buf, cm,
                               cm->rst_info, 7, 0, NULL);
  }
#endif  // CONFIG_LOOP_RESTORATION

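  // Backward adaptation: when refresh_frame_context is BACKWARD, average the
  // per-tile CDFs into the frame context and adapt the probabilities from the
  // counts gathered during decoding, so that later frames can inherit the
  // updated context.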
  if (!xd->corrupted) {
    if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
      FRAME_CONTEXT **tile_ctxs = aom_malloc(cm->tile_rows * cm->tile_cols *
                                             sizeof(&pbi->tile_data[0].tctx));
      aom_cdf_prob **cdf_ptrs =
          aom_malloc(cm->tile_rows * cm->tile_cols *
                     sizeof(&pbi->tile_data[0].tctx.partition_cdf[0][0]));
      make_update_tile_list_dec(pbi, cm->tile_rows, cm->tile_cols, tile_ctxs);
#if CONFIG_LV_MAP
      av1_adapt_coef_probs(cm);
#endif  // CONFIG_LV_MAP
#if CONFIG_SYMBOLRATE
      av1_dump_symbol_rate(cm);
#endif
      av1_adapt_intra_frame_probs(cm);
      av1_average_tile_coef_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
                                 cm->tile_rows * cm->tile_cols);
      av1_average_tile_intra_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
                                  cm->tile_rows * cm->tile_cols);
#if CONFIG_PVQ
      av1_average_tile_pvq_cdfs(pbi->common.fc, tile_ctxs,
                                cm->tile_rows * cm->tile_cols);
#endif  // CONFIG_PVQ
#if CONFIG_ADAPT_SCAN
      av1_adapt_scan_order(cm);
#endif  // CONFIG_ADAPT_SCAN

      if (!frame_is_intra_only(cm)) {
        av1_adapt_inter_frame_probs(cm);
#if !CONFIG_NEW_MULTISYMBOL
        av1_adapt_mv_probs(cm, cm->allow_high_precision_mv);
#endif
        av1_average_tile_inter_cdfs(&pbi->common, pbi->common.fc, tile_ctxs,
                                    cdf_ptrs, cm->tile_rows * cm->tile_cols);
        av1_average_tile_mv_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
                                 cm->tile_rows * cm->tile_cols);
      }
      aom_free(tile_ctxs);
      aom_free(cdf_ptrs);
    } else {
      debug_check_frame_counts(cm);
    }
  } else {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Decode failed. Frame data is corrupted.");
  }

#if CONFIG_INSPECTION
  if (pbi->inspect_cb != NULL) {
    (*pbi->inspect_cb)(pbi, pbi->inspect_ctx);
  }
#endif

// Non-frame-parallel path: update the frame context here.
#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
  if (!context_updated) cm->frame_contexts[cm->new_fb_idx] = *cm->fc;
#else
  if (!cm->error_resilient_mode && !context_updated)
    cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
#endif
}

#if CONFIG_OBU

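// Reads an OBU header: a 5-bit OBU type, 2 reserved bits and an extension
// flag; when the extension flag is set, one extra header byte carrying the
// temporal/enhancement-layer ids is consumed. *header_size is set to the
// number of header bytes read (1 or 2). Note this reflects the draft OBU
// syntax implemented here, not the final AV1 specification.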
static OBU_TYPE read_obu_header(struct aom_read_bit_buffer *rb,
                                uint32_t *header_size) {
  OBU_TYPE obu_type;
  int obu_extension_flag;

  *header_size = 1;

  obu_type = (OBU_TYPE)aom_rb_read_literal(rb, 5);
  aom_rb_read_literal(rb, 2);  // reserved
  obu_extension_flag = aom_rb_read_bit(rb);
  if (obu_extension_flag) {
    *header_size += 1;
    aom_rb_read_literal(rb, 3);  // temporal_id
    aom_rb_read_literal(rb, 2);
    aom_rb_read_literal(rb, 2);
    aom_rb_read_literal(rb, 1);  // reserved
  }

  return obu_type;
}

static uint32_t read_temporal_delimiter_obu() { return 0; }

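// Parses a sequence header OBU: profile, a 4-bit level, the optional frame-id
// signaling parameters, and the bit depth / color space / subsampling fields.
// Returns the number of bytes consumed from the OBU payload.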
static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
                                         struct aom_read_bit_buffer *rb) {
  AV1_COMMON *const cm = &pbi->common;
  SequenceHeader *const seq_params = &cm->seq_params;
  uint32_t saved_bit_offset = rb->bit_offset;

  cm->profile = av1_read_profile(rb);
  aom_rb_read_literal(rb, 4);  // level

  seq_params->frame_id_numbers_present_flag = aom_rb_read_bit(rb);
  if (seq_params->frame_id_numbers_present_flag) {
    seq_params->frame_id_length_minus7 = aom_rb_read_literal(rb, 4);
    seq_params->delta_frame_id_length_minus2 = aom_rb_read_literal(rb, 4);
  }

  read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);

  return ((rb->bit_offset - saved_bit_offset + 7) >> 3);
}

static uint32_t read_frame_header_obu(AV1Decoder *pbi, const uint8_t *data,
                                      const uint8_t *data_end,
                                      const uint8_t **p_data_end) {
  size_t header_size;

  header_size =
      av1_decode_frame_headers_and_setup(pbi, data, data_end, p_data_end);
  return (uint32_t)(pbi->uncomp_hdr_size + header_size);
}

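// Reads a tile group header: the indices of the first and last tile in the
// group, each coded with log2_tile_rows + log2_tile_cols bits. Returns the
// number of bytes consumed.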
static uint32_t read_tile_group_header(AV1Decoder *pbi,
                                       struct aom_read_bit_buffer *rb,
                                       int *startTile, int *endTile) {
  AV1_COMMON *const cm = &pbi->common;
  uint32_t saved_bit_offset = rb->bit_offset;

  *startTile = aom_rb_read_literal(rb, cm->log2_tile_rows + cm->log2_tile_cols);
  *endTile = aom_rb_read_literal(rb, cm->log2_tile_rows + cm->log2_tile_cols);

  return ((rb->bit_offset - saved_bit_offset + 7) >> 3);
}

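// Decodes a single tile group OBU: parses its header, decodes the tiles it
// covers via av1_decode_tg_tiles_and_wrapup(), and returns the number of bytes
// consumed (header plus tile payload). *is_last_tg is set when the group
// contains the last tile of the frame.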
static uint32_t read_one_tile_group_obu(AV1Decoder *pbi,
                                        struct aom_read_bit_buffer *rb,
                                        int is_first_tg, const uint8_t *data,
                                        const uint8_t *data_end,
                                        const uint8_t **p_data_end,
                                        int *is_last_tg) {
  AV1_COMMON *const cm = &pbi->common;
  int startTile, endTile;
  uint32_t header_size, tg_payload_size;

  header_size = read_tile_group_header(pbi, rb, &startTile, &endTile);
  data += header_size;
  av1_decode_tg_tiles_and_wrapup(pbi, data, data_end, p_data_end, startTile,
                                 endTile, is_first_tg);
  tg_payload_size = (uint32_t)(*p_data_end - data);

  // TODO(shan): For now, assume all tile groups received in order
  *is_last_tg = endTile == cm->tile_rows * cm->tile_cols - 1;

  return header_size + tg_payload_size;
}

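// Decodes one frame presented as a series of OBUs, each preceded by a 4-byte
// little-endian size field (OBU header + payload). Sequence and frame header
// OBUs update decoder state; tile group OBUs carry the tile data, and decoding
// finishes once the group containing the last tile has been processed (or
// immediately for show_existing_frame).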
void av1_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
                                const uint8_t *data_end,
                                const uint8_t **p_data_end) {
  AV1_COMMON *const cm = &pbi->common;
  int frame_decoding_finished = 0;
  int is_first_tg_obu_received = 1;
  int frame_header_received = 0;
  int frame_header_size = 0;

  // Decode the frame as a series of OBUs.
  while (!frame_decoding_finished && !cm->error.error_code) {
    struct aom_read_bit_buffer rb;
    uint8_t clear_data[80];
    uint32_t obu_size, obu_header_size, obu_payload_size = 0;
    OBU_TYPE obu_type;

    init_read_bit_buffer(pbi, &rb, data + 4, data_end, clear_data);

    // Every OBU is preceded by a 4-byte size field (OBU header + payload
    // size). The OBU size is only needed for tile group OBUs.
    obu_size = mem_get_le32(data);
    obu_type = read_obu_header(&rb, &obu_header_size);
    data += (4 + obu_header_size);

    switch (obu_type) {
      case OBU_TD: obu_payload_size = read_temporal_delimiter_obu(); break;
      case OBU_SEQUENCE_HEADER:
        obu_payload_size = read_sequence_header_obu(pbi, &rb);
        break;
      case OBU_FRAME_HEADER:
        // Only decode the first frame header received.
        if (!frame_header_received) {
          frame_header_size = obu_payload_size =
              read_frame_header_obu(pbi, data, data_end, p_data_end);
          frame_header_received = 1;
        } else {
          obu_payload_size = frame_header_size;
        }
        if (cm->show_existing_frame) frame_decoding_finished = 1;
        break;
      case OBU_TILE_GROUP:
        obu_payload_size = read_one_tile_group_obu(
            pbi, &rb, is_first_tg_obu_received, data, data + obu_size - 1,
            p_data_end, &frame_decoding_finished);
        is_first_tg_obu_received = 0;
        break;
      default: break;
    }
    data += obu_payload_size;
  }
}
#endif