/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <stdlib.h>  // qsort()

#include "./aom_config.h"
#include "./aom_dsp_rtcd.h"
#include "./aom_scale_rtcd.h"
#include "./av1_rtcd.h"

#include "aom/aom_codec.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_dsp/binary_codes_reader.h"
#include "aom_dsp/bitreader.h"
#include "aom_dsp/bitreader_buffer.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem.h"
#include "aom_ports/mem_ops.h"
#include "aom_scale/aom_scale.h"
#include "aom_util/aom_thread.h"

#if CONFIG_BITSTREAM_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_BITSTREAM_DEBUG

#include "av1/common/alloccommon.h"
#if CONFIG_CDEF
#include "av1/common/cdef.h"
#endif
#if CONFIG_INSPECTION
#include "av1/decoder/inspection.h"
#endif
#include "av1/common/common.h"
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/idct.h"
#include "av1/common/mvref_common.h"
#include "av1/common/pred_common.h"
#include "av1/common/quant_common.h"
#include "av1/common/reconinter.h"
#include "av1/common/reconintra.h"
#if CONFIG_FRAME_SUPERRES
#include "av1/common/resize.h"
#endif  // CONFIG_FRAME_SUPERRES
#include "av1/common/seg_common.h"
#include "av1/common/thread_common.h"
#include "av1/common/tile_common.h"

#include "av1/decoder/decodeframe.h"
#include "av1/decoder/decodemv.h"
#include "av1/decoder/decoder.h"
#if CONFIG_LV_MAP
#include "av1/decoder/decodetxb.h"
#endif
#include "av1/decoder/detokenize.h"
#include "av1/decoder/dsubexp.h"
#include "av1/decoder/symbolrate.h"

#include "av1/common/warped_motion.h"

#define MAX_AV1_HEADER_SIZE 80
#define ACCT_STR __func__

#if CONFIG_CFL
#include "av1/common/cfl.h"
#endif

#if CONFIG_STRIPED_LOOP_RESTORATION && !CONFIG_LOOP_RESTORATION
#error "striped_loop_restoration requires loop_restoration"
#endif

#if CONFIG_LOOP_RESTORATION
static void loop_restoration_read_sb_coeffs(const AV1_COMMON *const cm,
                                            MACROBLOCKD *xd,
                                            aom_reader *const r, int plane,
                                            int rtile_idx);
#endif

static struct aom_read_bit_buffer *init_read_bit_buffer(
    AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
    const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]);
static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
                                  size_t partition_size);
static size_t read_uncompressed_header(AV1Decoder *pbi,
                                       struct aom_read_bit_buffer *rb);

static int is_compound_reference_allowed(const AV1_COMMON *cm) {
#if CONFIG_ONE_SIDED_COMPOUND  // Normative in decoder
  return !frame_is_intra_only(cm);
#else
  int i;
  if (frame_is_intra_only(cm)) return 0;
  for (i = 1; i < INTER_REFS_PER_FRAME; ++i)
    if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1]) return 1;

  return 0;
#endif  // CONFIG_ONE_SIDED_COMPOUND
}

static void setup_compound_reference_mode(AV1_COMMON *cm) {
  cm->comp_fwd_ref[0] = LAST_FRAME;
  cm->comp_fwd_ref[1] = LAST2_FRAME;
  cm->comp_fwd_ref[2] = LAST3_FRAME;
  cm->comp_fwd_ref[3] = GOLDEN_FRAME;

  cm->comp_bwd_ref[0] = BWDREF_FRAME;
  cm->comp_bwd_ref[1] = ALTREF2_FRAME;
  cm->comp_bwd_ref[2] = ALTREF_FRAME;
}

static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return len != 0 && len <= (size_t)(end - start);
}

static int decode_unsigned_max(struct aom_read_bit_buffer *rb, int max) {
  const int data = aom_rb_read_literal(rb, get_unsigned_bits(max));
  return data > max ? max : data;
}

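// Reads the frame-level transform mode from the bit buffer. When every
// segment is lossless, only 4x4 transforms are permitted, so ONLY_4X4 is
// returned without consuming any bits.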
static TX_MODE read_tx_mode(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
#if CONFIG_TX64X64
  TX_MODE tx_mode;
#endif
  if (cm->all_lossless) return ONLY_4X4;
#if CONFIG_VAR_TX_NO_TX_MODE
  (void)rb;
  return TX_MODE_SELECT;
#else
#if CONFIG_TX64X64
  tx_mode = aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
  if (tx_mode == ALLOW_32X32) tx_mode += aom_rb_read_bit(rb);
  return tx_mode;
#else
  return aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
#endif  // CONFIG_TX64X64
#endif  // CONFIG_VAR_TX_NO_TX_MODE
}

#if !CONFIG_NEW_MULTISYMBOL
static void read_inter_mode_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i;
  for (i = 0; i < NEWMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->newmv_prob[i], ACCT_STR);
  for (i = 0; i < ZEROMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->zeromv_prob[i], ACCT_STR);
  for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->refmv_prob[i], ACCT_STR);
  for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->drl_prob[i], ACCT_STR);
}
#endif

static REFERENCE_MODE read_frame_reference_mode(
    const AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  if (is_compound_reference_allowed(cm)) {
#if CONFIG_REF_ADAPT
    return aom_rb_read_bit(rb) ? REFERENCE_MODE_SELECT : SINGLE_REFERENCE;
#else
    return aom_rb_read_bit(rb)
               ? REFERENCE_MODE_SELECT
               : (aom_rb_read_bit(rb) ? COMPOUND_REFERENCE : SINGLE_REFERENCE);
#endif  // CONFIG_REF_ADAPT
  } else {
    return SINGLE_REFERENCE;
  }
}

#if !CONFIG_NEW_MULTISYMBOL
static void read_frame_reference_mode_probs(AV1_COMMON *cm, aom_reader *r) {
  FRAME_CONTEXT *const fc = cm->fc;
  int i;

  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    for (i = 0; i < COMP_INTER_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_inter_prob[i], ACCT_STR);

  if (cm->reference_mode != COMPOUND_REFERENCE) {
    for (i = 0; i < REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (SINGLE_REFS - 1); ++j) {
        av1_diff_update_prob(r, &fc->single_ref_prob[i][j], ACCT_STR);
      }
    }
  }

  if (cm->reference_mode != SINGLE_REFERENCE) {
#if CONFIG_EXT_COMP_REFS
    for (i = 0; i < COMP_REF_TYPE_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_ref_type_prob[i], ACCT_STR);

    for (i = 0; i < UNI_COMP_REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (UNIDIR_COMP_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->uni_comp_ref_prob[i][j], ACCT_STR);
    }
#endif  // CONFIG_EXT_COMP_REFS

    for (i = 0; i < REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (FWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_ref_prob[i][j], ACCT_STR);
      for (j = 0; j < (BWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_bwdref_prob[i][j], ACCT_STR);
    }
  }
}

static void update_mv_probs(aom_prob *p, int n, aom_reader *r) {
  int i;
  for (i = 0; i < n; ++i) av1_diff_update_prob(r, &p[i], ACCT_STR);
}

static void read_mv_probs(nmv_context *ctx, int allow_hp, aom_reader *r) {
  int i;
  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      nmv_component *const comp_ctx = &ctx->comps[i];
      update_mv_probs(&comp_ctx->class0_hp, 1, r);
      update_mv_probs(&comp_ctx->hp, 1, r);
    }
  }
}
#endif

235static void inverse_transform_block(MACROBLOCKD *xd, int plane,
Lester Lu432012f2017-08-17 14:39:29 -0700236#if CONFIG_LGT_FROM_PRED
Lester Lu708c1ec2017-06-14 14:54:49 -0700237 PREDICTION_MODE mode,
238#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700239 const TX_TYPE tx_type,
240 const TX_SIZE tx_size, uint8_t *dst,
Jingning Han1be18782016-10-21 11:48:15 -0700241 int stride, int16_t scan_line, int eob) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700242 struct macroblockd_plane *const pd = &xd->plane[plane];
Jingning Han1be18782016-10-21 11:48:15 -0700243 tran_low_t *const dqcoeff = pd->dqcoeff;
Lester Lu708c1ec2017-06-14 14:54:49 -0700244 av1_inverse_transform_block(xd, dqcoeff,
Lester Lu432012f2017-08-17 14:39:29 -0700245#if CONFIG_LGT_FROM_PRED
Lester Lu708c1ec2017-06-14 14:54:49 -0700246 mode,
247#endif
Sarah Parker99e7daa2017-08-29 10:30:13 -0700248#if CONFIG_MRC_TX && SIGNAL_ANY_MRC_MASK
249 xd->mrc_mask,
250#endif // CONFIG_MRC_TX && SIGNAL_ANY_MRC_MASK
Sarah Parker90024e42017-10-06 16:50:47 -0700251#if CONFIG_EXT_TX
252 plane,
253#endif // CONFIG_EXT_TX
Lester Lu708c1ec2017-06-14 14:54:49 -0700254 tx_type, tx_size, dst, stride, eob);
Jingning Han1be18782016-10-21 11:48:15 -0700255 memset(dqcoeff, 0, (scan_line + 1) * sizeof(dqcoeff[0]));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700256}
257
Lester Lu9c521922017-07-11 11:16:45 -0700258static int get_block_idx(const MACROBLOCKD *xd, int plane, int row, int col) {
259 const int bsize = xd->mi[0]->mbmi.sb_type;
260 const struct macroblockd_plane *pd = &xd->plane[plane];
Lester Lu9c521922017-07-11 11:16:45 -0700261 const BLOCK_SIZE plane_bsize =
262 AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
Lester Lu9c521922017-07-11 11:16:45 -0700263 const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
hui su0c6244b2017-07-12 17:11:43 -0700264 const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
Lester Lu9c521922017-07-11 11:16:45 -0700265 const uint8_t txh_unit = tx_size_high_unit[tx_size];
266 return row * max_blocks_wide + col * txh_unit;
267}
268
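// Decodes one intra transform block: performs the intra prediction, reads the
// coefficients (unless the block is skipped) and adds the inverse transform
// on top of the prediction. With CONFIG_CFL the reconstructed luma samples
// are also stored for later chroma-from-luma prediction.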
static void predict_and_reconstruct_intra_block(
    AV1_COMMON *cm, MACROBLOCKD *const xd, aom_reader *const r,
    MB_MODE_INFO *const mbmi, int plane, int row, int col, TX_SIZE tx_size) {
  PLANE_TYPE plane_type = get_plane_type(plane);
  const int block_idx = get_block_idx(xd, plane, row, col);
  av1_predict_intra_block_facade(cm, xd, plane, block_idx, col, row, tx_size);

  if (!mbmi->skip) {
    struct macroblockd_plane *const pd = &xd->plane[plane];
#if CONFIG_LV_MAP
    int16_t max_scan_line = 0;
    int eob;
    av1_read_coeffs_txb_facade(cm, xd, r, row, col, block_idx, plane,
                               pd->dqcoeff, tx_size, &max_scan_line, &eob);
    // tx_type will be read out in av1_read_coeffs_txb_facade
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
#else   // CONFIG_LV_MAP
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
    const SCAN_ORDER *scan_order = get_scan(cm, tx_size, tx_type, mbmi);
    int16_t max_scan_line = 0;
    const int eob =
        av1_decode_block_tokens(cm, xd, plane, scan_order, col, row, tx_size,
                                tx_type, &max_scan_line, r, mbmi->segment_id);
#endif  // CONFIG_LV_MAP
    if (eob) {
      uint8_t *dst =
          &pd->dst.buf[(row * pd->dst.stride + col) << tx_size_wide_log2[0]];
      inverse_transform_block(xd, plane,
#if CONFIG_LGT_FROM_PRED
                              mbmi->mode,
#endif
                              tx_type, tx_size, dst, pd->dst.stride,
                              max_scan_line, eob);
    }
  }
#if CONFIG_CFL
  if (plane == AOM_PLANE_Y && xd->cfl->store_y) {
    cfl_store_tx(xd, row, col, tx_size, mbmi->sb_type);
  }
#endif  // CONFIG_CFL
}

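// Recursively decodes an inter transform partition. If the current unit
// matches the coded transform size, its coefficients are read and the block
// is reconstructed in place; otherwise the unit is split into four smaller
// transform blocks and each is processed recursively.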
static void decode_reconstruct_tx(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                  aom_reader *r, MB_MODE_INFO *const mbmi,
                                  int plane, BLOCK_SIZE plane_bsize,
                                  int blk_row, int blk_col, int block,
                                  TX_SIZE tx_size, int *eob_total) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
  const int tx_row = blk_row >> (1 - pd->subsampling_y);
  const int tx_col = blk_col >> (1 - pd->subsampling_x);
  const TX_SIZE plane_tx_size =
      plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
            : mbmi->inter_tx_size[tx_row][tx_col];
  // Scale to match transform block unit.
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (tx_size == plane_tx_size) {
    PLANE_TYPE plane_type = get_plane_type(plane);
#if CONFIG_LV_MAP
    int16_t max_scan_line = 0;
    int eob;
    av1_read_coeffs_txb_facade(cm, xd, r, blk_row, blk_col, block, plane,
                               pd->dqcoeff, tx_size, &max_scan_line, &eob);
    // tx_type will be read out in av1_read_coeffs_txb_facade
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, blk_row, blk_col, block, plane_tx_size);
#else   // CONFIG_LV_MAP
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, blk_row, blk_col, block, plane_tx_size);
    const SCAN_ORDER *sc = get_scan(cm, plane_tx_size, tx_type, mbmi);
    int16_t max_scan_line = 0;
    const int eob = av1_decode_block_tokens(
        cm, xd, plane, sc, blk_col, blk_row, plane_tx_size, tx_type,
        &max_scan_line, r, mbmi->segment_id);
#endif  // CONFIG_LV_MAP
    inverse_transform_block(xd, plane,
#if CONFIG_LGT_FROM_PRED
                            mbmi->mode,
#endif
                            tx_type, plane_tx_size,
                            &pd->dst.buf[(blk_row * pd->dst.stride + blk_col)
                                         << tx_size_wide_log2[0]],
                            pd->dst.stride, max_scan_line, eob);
    *eob_total += eob;
  } else {
#if CONFIG_RECT_TX_EXT
    int is_qttx = plane_tx_size == quarter_txsize_lookup[plane_bsize];
    const TX_SIZE sub_txs = is_qttx ? plane_tx_size : sub_tx_size_map[tx_size];
    if (is_qttx) assert(blk_row == 0 && blk_col == 0 && block == 0);
#else
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    assert(IMPLIES(tx_size <= TX_4X4, sub_txs == tx_size));
    assert(IMPLIES(tx_size > TX_4X4, sub_txs < tx_size));
#endif
    const int bsl = tx_size_wide_unit[sub_txs];
    int sub_step = tx_size_wide_unit[sub_txs] * tx_size_high_unit[sub_txs];
    int i;

    assert(bsl > 0);

    for (i = 0; i < 4; ++i) {
#if CONFIG_RECT_TX_EXT
      int is_wide_tx = tx_size_wide_unit[sub_txs] > tx_size_high_unit[sub_txs];
      const int offsetr =
          is_qttx ? (is_wide_tx ? i * tx_size_high_unit[sub_txs] : 0)
                  : blk_row + ((i >> 1) * bsl);
      const int offsetc =
          is_qttx ? (is_wide_tx ? 0 : i * tx_size_wide_unit[sub_txs])
                  : blk_col + (i & 0x01) * bsl;
#else
      const int offsetr = blk_row + (i >> 1) * bsl;
      const int offsetc = blk_col + (i & 0x01) * bsl;
#endif

      if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

      decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize, offsetr,
                            offsetc, block, sub_txs, eob_total);
      block += sub_step;
    }
  }
}

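// Points xd->mi at the mode-info entries covered by the current block, sets
// the block size, propagates the first mode-info pointer across the covered
// grid positions, and configures plane, skip-context and boundary state for
// the block at (mi_row, mi_col).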
static void set_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                        BLOCK_SIZE bsize, int mi_row, int mi_col, int bw,
                        int bh, int x_mis, int y_mis) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;
  const TileInfo *const tile = &xd->tile;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
  // TODO(slavarnway): Generate sb_type based on bwl and bhl, instead of
  // passing bsize from decode_partition().
  xd->mi[0]->mbmi.sb_type = bsize;
#if CONFIG_RD_DEBUG
  xd->mi[0]->mbmi.mi_row = mi_row;
  xd->mi[0]->mbmi.mi_col = mi_col;
#endif
#if CONFIG_CFL
  xd->cfl->mi_row = mi_row;
  xd->cfl->mi_col = mi_col;
#endif

  assert(x_mis && y_mis);
  for (x = 1; x < x_mis; ++x) xd->mi[x] = xd->mi[0];
  int idx = cm->mi_stride;
  for (y = 1; y < y_mis; ++y) {
    memcpy(&xd->mi[idx], &xd->mi[0], x_mis * sizeof(xd->mi[0]));
    idx += cm->mi_stride;
  }

  set_plane_n4(xd, bw, bh);
  set_skip_context(xd, mi_row, mi_col);

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);

  av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
                       mi_col);
}

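// Reads the mode information (prediction modes, reference frames, motion
// vectors, etc.) for one block and validates that the block size is legal
// under the current chroma subsampling.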
static void decode_mbmi_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                              int mi_row, int mi_col, aom_reader *r,
#if CONFIG_EXT_PARTITION_TYPES
                              PARTITION_TYPE partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                              BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);

#if CONFIG_ACCOUNTING
  aom_accounting_set_context(&pbi->accounting, mi_col, mi_row);
#endif
  set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
#if CONFIG_EXT_PARTITION_TYPES
  xd->mi[0]->mbmi.partition = partition;
#endif
  av1_read_mode_info(pbi, xd, mi_row, mi_col, r, x_mis, y_mis);
  if (bsize >= BLOCK_8X8 && (cm->subsampling_x || cm->subsampling_y)) {
    const BLOCK_SIZE uv_subsize =
        ss_size_lookup[bsize][cm->subsampling_x][cm->subsampling_y];
    if (uv_subsize == BLOCK_INVALID)
      aom_internal_error(xd->error_info, AOM_CODEC_CORRUPT_FRAME,
                         "Invalid block size.");
  }

  int reader_corrupted_flag = aom_reader_has_error(r);
  aom_merge_corrupted_flag(&xd->corrupted, reader_corrupted_flag);
}

#if CONFIG_NCOBMC_ADAPT_WEIGHT
static void set_mode_info_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                  int mi_row, int mi_col) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
}

static void get_ncobmc_recon(AV1_COMMON *const cm, MACROBLOCKD *xd, int mi_row,
                             int mi_col, int bsize, int mode) {
  uint8_t *pred_buf[4][MAX_MB_PLANE];
  int pred_stride[MAX_MB_PLANE] = { MAX_SB_SIZE, MAX_SB_SIZE, MAX_SB_SIZE };
  // target block in pxl
  int pxl_row = mi_row << MI_SIZE_LOG2;
  int pxl_col = mi_col << MI_SIZE_LOG2;

  int plane;
#if CONFIG_HIGHBITDEPTH
  if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
    int len = sizeof(uint16_t);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[0], cm->ncobmcaw_buf[0], MAX_SB_SQUARE,
                            len);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[1], cm->ncobmcaw_buf[1], MAX_SB_SQUARE,
                            len);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[2], cm->ncobmcaw_buf[2], MAX_SB_SQUARE,
                            len);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[3], cm->ncobmcaw_buf[3], MAX_SB_SQUARE,
                            len);
  } else {
#endif  // CONFIG_HIGHBITDEPTH
    ASSIGN_ALIGNED_PTRS(pred_buf[0], cm->ncobmcaw_buf[0], MAX_SB_SQUARE);
    ASSIGN_ALIGNED_PTRS(pred_buf[1], cm->ncobmcaw_buf[1], MAX_SB_SQUARE);
    ASSIGN_ALIGNED_PTRS(pred_buf[2], cm->ncobmcaw_buf[2], MAX_SB_SQUARE);
    ASSIGN_ALIGNED_PTRS(pred_buf[3], cm->ncobmcaw_buf[3], MAX_SB_SQUARE);
#if CONFIG_HIGHBITDEPTH
  }
#endif
  av1_get_ext_blk_preds(cm, xd, bsize, mi_row, mi_col, pred_buf, pred_stride);
  av1_get_ori_blk_pred(cm, xd, bsize, mi_row, mi_col, pred_buf[3], pred_stride);
  for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
    build_ncobmc_intrpl_pred(cm, xd, plane, pxl_row, pxl_col, bsize, pred_buf,
                             pred_stride, mode);
  }
}

static void av1_get_ncobmc_recon(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                 int bsize, const int mi_row, const int mi_col,
                                 const NCOBMC_MODE modes) {
  const int mi_width = mi_size_wide[bsize];
  const int mi_height = mi_size_high[bsize];

  assert(bsize >= BLOCK_8X8);

  reset_xd_boundary(xd, mi_row, mi_height, mi_col, mi_width, cm->mi_rows,
                    cm->mi_cols);
  get_ncobmc_recon(cm, xd, mi_row, mi_col, bsize, modes);
}

static void recon_ncobmc_intrpl_pred(AV1_COMMON *const cm,
                                     MACROBLOCKD *const xd, int mi_row,
                                     int mi_col, BLOCK_SIZE bsize) {
  MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const int mi_width = mi_size_wide[bsize];
  const int mi_height = mi_size_high[bsize];
  const int hbs = AOMMAX(mi_size_wide[bsize] / 2, mi_size_high[bsize] / 2);
  const BLOCK_SIZE sqr_blk = bsize_2_sqr_bsize[bsize];
  if (mi_width > mi_height) {
    // horizontal partition
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
    xd->mi += hbs;
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col + hbs,
                         mbmi->ncobmc_mode[1]);
  } else if (mi_height > mi_width) {
    // vertical partition
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
    xd->mi += hbs * xd->mi_stride;
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row + hbs, mi_col,
                         mbmi->ncobmc_mode[1]);
  } else {
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
  }
  set_mode_info_offsets(cm, xd, mi_row, mi_col);
  // restore dst buffer and mode info
  av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
                       mi_col);
}
#endif  // CONFIG_NCOBMC_ADAPT_WEIGHT

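// Reads the residual tokens for one block and reconstructs it: intra blocks
// are predicted and reconstructed transform block by transform block, while
// inter blocks first build the (possibly OBMC-refined) inter prediction for
// the whole block and then add the decoded residual.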
static void decode_token_and_recon_block(AV1Decoder *const pbi,
                                         MACROBLOCKD *const xd, int mi_row,
                                         int mi_col, aom_reader *r,
                                         BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);

  set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
  MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
#if CONFIG_CFL
  CFL_CTX *const cfl = xd->cfl;
  cfl->is_chroma_reference = is_chroma_reference(
      mi_row, mi_col, bsize, cfl->subsampling_x, cfl->subsampling_y);
#endif  // CONFIG_CFL

  if (cm->delta_q_present_flag) {
    int i;
    for (i = 0; i < MAX_SEGMENTS; i++) {
#if CONFIG_EXT_DELTA_Q
      const int current_qindex =
          av1_get_qindex(&cm->seg, i, xd->current_qindex);
#else
      const int current_qindex = xd->current_qindex;
#endif  // CONFIG_EXT_DELTA_Q
      int j;
      for (j = 0; j < MAX_MB_PLANE; ++j) {
        const int dc_delta_q = j == 0 ? cm->y_dc_delta_q : cm->uv_dc_delta_q;
        const int ac_delta_q = j == 0 ? 0 : cm->uv_ac_delta_q;

        xd->plane[j].seg_dequant[i][0] =
            av1_dc_quant(current_qindex, dc_delta_q, cm->bit_depth);
        xd->plane[j].seg_dequant[i][1] =
            av1_ac_quant(current_qindex, ac_delta_q, cm->bit_depth);
      }
    }
  }
  if (mbmi->skip) av1_reset_skip_context(xd, mi_row, mi_col, bsize);

  if (!is_inter_block(mbmi)) {
    int plane;

    for (plane = 0; plane <= 1; ++plane) {
      if (mbmi->palette_mode_info.palette_size[plane])
        av1_decode_palette_tokens(xd, plane, r);
    }

    for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
      const struct macroblockd_plane *const pd = &xd->plane[plane];
      const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
      const int stepr = tx_size_high_unit[tx_size];
      const int stepc = tx_size_wide_unit[tx_size];
      const BLOCK_SIZE plane_bsize =
          AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
      int row, col;
      const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
      const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
      if (!is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
                               pd->subsampling_y))
        continue;
      int blk_row, blk_col;
      const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
      int mu_blocks_wide =
          block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
      int mu_blocks_high =
          block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
      mu_blocks_wide = AOMMIN(max_blocks_wide, mu_blocks_wide);
      mu_blocks_high = AOMMIN(max_blocks_high, mu_blocks_high);

      for (row = 0; row < max_blocks_high; row += mu_blocks_high) {
        const int unit_height = AOMMIN(mu_blocks_high + row, max_blocks_high);
        for (col = 0; col < max_blocks_wide; col += mu_blocks_wide) {
          const int unit_width = AOMMIN(mu_blocks_wide + col, max_blocks_wide);

          for (blk_row = row; blk_row < unit_height; blk_row += stepr)
            for (blk_col = col; blk_col < unit_width; blk_col += stepc)
              predict_and_reconstruct_intra_block(cm, xd, r, mbmi, plane,
                                                  blk_row, blk_col, tx_size);
        }
      }
    }
  } else {
    int ref;

#if CONFIG_COMPOUND_SINGLEREF
    for (ref = 0; ref < 1 + is_inter_anyref_comp_mode(mbmi->mode); ++ref)
#else
    for (ref = 0; ref < 1 + has_second_ref(mbmi); ++ref)
#endif  // CONFIG_COMPOUND_SINGLEREF
    {
      const MV_REFERENCE_FRAME frame =
#if CONFIG_COMPOUND_SINGLEREF
          has_second_ref(mbmi) ? mbmi->ref_frame[ref] : mbmi->ref_frame[0];
#else
          mbmi->ref_frame[ref];
#endif  // CONFIG_COMPOUND_SINGLEREF
      if (frame < LAST_FRAME) {
#if CONFIG_INTRABC
        assert(is_intrabc_block(mbmi));
        assert(frame == INTRA_FRAME);
        assert(ref == 0);
#else
        assert(0);
#endif  // CONFIG_INTRABC
      } else {
        RefBuffer *ref_buf = &cm->frame_refs[frame - LAST_FRAME];

        xd->block_refs[ref] = ref_buf;
        if ((!av1_is_valid_scale(&ref_buf->sf)))
          aom_internal_error(xd->error_info, AOM_CODEC_UNSUP_BITSTREAM,
                             "Reference frame has invalid dimensions");
        av1_setup_pre_planes(xd, ref, ref_buf->buf, mi_row, mi_col,
                             &ref_buf->sf);
      }
    }

    av1_build_inter_predictors_sb(cm, xd, mi_row, mi_col, NULL, bsize);

    if (mbmi->motion_mode == OBMC_CAUSAL) {
#if CONFIG_NCOBMC
      av1_build_ncobmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
#else
      av1_build_obmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
#endif
    }
#if CONFIG_NCOBMC_ADAPT_WEIGHT
    if (mbmi->motion_mode == NCOBMC_ADAPT_WEIGHT) {
      int plane;
      recon_ncobmc_intrpl_pred(cm, xd, mi_row, mi_col, bsize);
      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        get_pred_from_intrpl_buf(xd, mi_row, mi_col, bsize, plane);
      }
    }
#endif
    // Reconstruction
    if (!mbmi->skip) {
      int eobtotal = 0;
      int plane;

      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        const struct macroblockd_plane *const pd = &xd->plane[plane];
        const BLOCK_SIZE plane_bsize =
            AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
        const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
        const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
        int row, col;

        if (!is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
                                 pd->subsampling_y))
          continue;

        const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
        int mu_blocks_wide =
            block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
        int mu_blocks_high =
            block_size_high[max_unit_bsize] >> tx_size_high_log2[0];

        mu_blocks_wide = AOMMIN(max_blocks_wide, mu_blocks_wide);
        mu_blocks_high = AOMMIN(max_blocks_high, mu_blocks_high);

        const TX_SIZE max_tx_size = get_vartx_max_txsize(
            mbmi, plane_bsize, pd->subsampling_x || pd->subsampling_y);
        const int bh_var_tx = tx_size_high_unit[max_tx_size];
        const int bw_var_tx = tx_size_wide_unit[max_tx_size];
        int block = 0;
        int step =
            tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];

        for (row = 0; row < max_blocks_high; row += mu_blocks_high) {
          for (col = 0; col < max_blocks_wide; col += mu_blocks_wide) {
            int blk_row, blk_col;
            const int unit_height =
                AOMMIN(mu_blocks_high + row, max_blocks_high);
            const int unit_width =
                AOMMIN(mu_blocks_wide + col, max_blocks_wide);
            for (blk_row = row; blk_row < unit_height; blk_row += bh_var_tx) {
              for (blk_col = col; blk_col < unit_width; blk_col += bw_var_tx) {
                decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize,
                                      blk_row, blk_col, block, max_tx_size,
                                      &eobtotal);
                block += step;
              }
            }
          }
        }
      }
    }
  }
#if CONFIG_CFL
  if (mbmi->uv_mode != UV_CFL_PRED) {
    if (!cfl->is_chroma_reference && is_inter_block(mbmi)) {
      cfl_store_block(xd, mbmi->sb_type, mbmi->tx_size);
    }
  }
#endif  // CONFIG_CFL

  int reader_corrupted_flag = aom_reader_has_error(r);
  aom_merge_corrupted_flag(&xd->corrupted, reader_corrupted_flag);
}

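// When mode info for a whole superblock is decoded before any reconstruction
// (NC_MODE_INFO), this walks the coded partition tree a second time and
// performs the token-decoding and reconstruction pass for every leaf block.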
#if NC_MODE_INFO
static void detoken_and_recon_sb(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                                 int mi_row, int mi_col, aom_reader *r,
                                 BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int hbs = mi_size_wide[bsize] >> 1;
#if CONFIG_EXT_PARTITION_TYPES
  BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
#endif
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  partition = get_partition(cm, mi_row, mi_col, bsize);
  subsize = subsize_lookup[partition][bsize];

  switch (partition) {
    case PARTITION_NONE:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize);
      break;
    case PARTITION_HORZ:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      if (has_rows)
        decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, subsize);
      break;
    case PARTITION_VERT:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      if (has_cols)
        decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, subsize);
      break;
    case PARTITION_SPLIT:
      detoken_and_recon_sb(pbi, xd, mi_row, mi_col, r, subsize);
      detoken_and_recon_sb(pbi, xd, mi_row, mi_col + hbs, r, subsize);
      detoken_and_recon_sb(pbi, xd, mi_row + hbs, mi_col, r, subsize);
      detoken_and_recon_sb(pbi, xd, mi_row + hbs, mi_col + hbs, r, subsize);
      break;
#if CONFIG_EXT_PARTITION_TYPES
#if CONFIG_EXT_PARTITION_TYPES_AB
#error NC_MODE_INFO+MOTION_VAR not yet supported for new HORZ/VERT_AB partitions
#endif
    case PARTITION_HORZ_A:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, subsize);
      break;
    case PARTITION_HORZ_B:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col + hbs, r,
                                   bsize2);
      break;
    case PARTITION_VERT_A:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, subsize);
      break;
    case PARTITION_VERT_B:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col + hbs, r,
                                   bsize2);
      break;
#endif
    default: assert(0 && "Invalid partition type");
  }
}
#endif

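// Decodes a single leaf block of the partition tree: mode info first, then
// (unless reconstruction is deferred to a separate pass via NC_MODE_INFO)
// the residual tokens and reconstruction.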
static void decode_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                         int mi_row, int mi_col, aom_reader *r,
#if CONFIG_EXT_PARTITION_TYPES
                         PARTITION_TYPE partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                         BLOCK_SIZE bsize) {
  decode_mbmi_block(pbi, xd, mi_row, mi_col, r,
#if CONFIG_EXT_PARTITION_TYPES
                    partition,
#endif
                    bsize);

#if !(NC_MODE_INFO)
  decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize);
#endif
}

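// Reads the partition type for a block. When the block extends past the right
// or bottom frame edge only the vertically / horizontally "alike" subset of
// partitions is possible, so a reduced binary CDF is used; blocks past both
// edges are forced to PARTITION_SPLIT.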
static PARTITION_TYPE read_partition(AV1_COMMON *cm, MACROBLOCKD *xd,
                                     int mi_row, int mi_col, aom_reader *r,
                                     int has_rows, int has_cols,
                                     BLOCK_SIZE bsize) {
#if CONFIG_UNPOISON_PARTITION_CTX
  const int ctx =
      partition_plane_context(xd, mi_row, mi_col, has_rows, has_cols, bsize);
#else
  const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
#endif
  PARTITION_TYPE p;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;

  aom_cdf_prob *partition_cdf = (ctx >= 0) ? ec_ctx->partition_cdf[ctx] : NULL;

  if (has_rows && has_cols) {
#if CONFIG_EXT_PARTITION_TYPES
    const int num_partition_types =
        (mi_width_log2_lookup[bsize] > mi_width_log2_lookup[BLOCK_8X8])
            ? EXT_PARTITION_TYPES
            : PARTITION_TYPES;
#else
    const int num_partition_types = PARTITION_TYPES;
#endif  // CONFIG_EXT_PARTITION_TYPES
    p = (PARTITION_TYPE)aom_read_symbol(r, partition_cdf, num_partition_types,
                                        ACCT_STR);
  } else if (!has_rows && has_cols) {
    assert(bsize > BLOCK_8X8);
    aom_cdf_prob cdf[2];
    partition_gather_vert_alike(cdf, partition_cdf);
    assert(cdf[1] == AOM_ICDF(CDF_PROB_TOP));
    p = aom_read_cdf(r, cdf, 2, ACCT_STR) ? PARTITION_SPLIT : PARTITION_HORZ;
    // gather cols
  } else if (has_rows && !has_cols) {
    assert(bsize > BLOCK_8X8);
    aom_cdf_prob cdf[2];
    partition_gather_horz_alike(cdf, partition_cdf);
    assert(cdf[1] == AOM_ICDF(CDF_PROB_TOP));
    p = aom_read_cdf(r, cdf, 2, ACCT_STR) ? PARTITION_SPLIT : PARTITION_VERT;
  } else {
    p = PARTITION_SPLIT;
  }

  return p;
}

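// Recursively decodes the partition tree rooted at (mi_row, mi_col), decoding
// each leaf block and, at superblock level, any per-superblock filter
// signalling (loop-filter level, CDEF strengths, loop-restoration
// coefficients) that follows the partition in the bitstream.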
Yaowu Xuc27fc142016-08-22 16:08:15 -0700899// TODO(slavarnway): eliminate bsize and subsize in future commits
Yaowu Xuf883b422016-08-30 14:01:10 -0700900static void decode_partition(AV1Decoder *const pbi, MACROBLOCKD *const xd,
Yaowu Xuf883b422016-08-30 14:01:10 -0700901 int mi_row, int mi_col, aom_reader *r,
Jingning Hanea10ad42017-07-20 11:19:08 -0700902 BLOCK_SIZE bsize) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700903 AV1_COMMON *const cm = &pbi->common;
Jingning Hanff17e162016-12-07 17:58:18 -0800904 const int num_8x8_wh = mi_size_wide[bsize];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700905 const int hbs = num_8x8_wh >> 1;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +0100906#if CONFIG_EXT_PARTITION_TYPES && CONFIG_EXT_PARTITION_TYPES_AB
907 const int qbs = num_8x8_wh >> 2;
908#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700909 PARTITION_TYPE partition;
910 BLOCK_SIZE subsize;
911#if CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrick93c39e92017-07-12 11:11:02 +0100912 const int quarter_step = num_8x8_wh / 4;
913 int i;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +0100914#if !CONFIG_EXT_PARTITION_TYPES_AB
915 BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
916#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700917#endif
918 const int has_rows = (mi_row + hbs) < cm->mi_rows;
919 const int has_cols = (mi_col + hbs) < cm->mi_cols;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700920
921 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
922
Jingning Hancd959762017-03-27 14:49:59 -0700923 partition = (bsize < BLOCK_8X8) ? PARTITION_NONE
924 : read_partition(cm, xd, mi_row, mi_col, r,
925 has_rows, has_cols, bsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700926 subsize = subsize_lookup[partition][bsize]; // get_subsize(bsize, partition);
Yushin Cho77bba8d2016-11-04 16:36:56 -0700927
Rupert Swarbrick415c8f12017-10-09 16:26:23 +0100928 // Check the bitstream is conformant: if there is subsampling on the
929 // chroma planes, subsize must subsample to a valid block size.
930 const struct macroblockd_plane *const pd_u = &xd->plane[1];
931 if (get_plane_block_size(subsize, pd_u) == BLOCK_INVALID) {
932 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
933 "Block size %dx%d invalid with this subsampling mode",
934 block_size_wide[subsize], block_size_high[subsize]);
935 }
936
Rupert Swarbrick668d3d92017-09-06 16:09:51 +0100937#define DEC_BLOCK_STX_ARG
Rupert Swarbrick668d3d92017-09-06 16:09:51 +0100938#if CONFIG_EXT_PARTITION_TYPES
939#define DEC_BLOCK_EPT_ARG partition,
940#else
941#define DEC_BLOCK_EPT_ARG
942#endif
943#define DEC_BLOCK(db_r, db_c, db_subsize) \
944 decode_block(pbi, xd, DEC_BLOCK_STX_ARG(db_r), (db_c), r, \
945 DEC_BLOCK_EPT_ARG(db_subsize))
946#define DEC_PARTITION(db_r, db_c, db_subsize) \
947 decode_partition(pbi, xd, DEC_BLOCK_STX_ARG(db_r), (db_c), r, (db_subsize))
948
Debargha Mukherjeeedced252017-10-20 00:02:00 -0700949 switch (partition) {
950 case PARTITION_NONE: DEC_BLOCK(mi_row, mi_col, subsize); break;
951 case PARTITION_HORZ:
952 DEC_BLOCK(mi_row, mi_col, subsize);
953 if (has_rows) DEC_BLOCK(mi_row + hbs, mi_col, subsize);
954 break;
955 case PARTITION_VERT:
956 DEC_BLOCK(mi_row, mi_col, subsize);
957 if (has_cols) DEC_BLOCK(mi_row, mi_col + hbs, subsize);
958 break;
959 case PARTITION_SPLIT:
960 DEC_PARTITION(mi_row, mi_col, subsize);
961 DEC_PARTITION(mi_row, mi_col + hbs, subsize);
962 DEC_PARTITION(mi_row + hbs, mi_col, subsize);
963 DEC_PARTITION(mi_row + hbs, mi_col + hbs, subsize);
964 break;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700965#if CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrick3dd33912017-09-12 14:24:11 +0100966#if CONFIG_EXT_PARTITION_TYPES_AB
Debargha Mukherjeeedced252017-10-20 00:02:00 -0700967 case PARTITION_HORZ_A:
968 DEC_BLOCK(mi_row, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
969 DEC_BLOCK(mi_row + qbs, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
970 DEC_BLOCK(mi_row + hbs, mi_col, subsize);
971 break;
972 case PARTITION_HORZ_B:
973 DEC_BLOCK(mi_row, mi_col, subsize);
974 DEC_BLOCK(mi_row + hbs, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
975 if (mi_row + 3 * qbs < cm->mi_rows)
976 DEC_BLOCK(mi_row + 3 * qbs, mi_col,
977 get_subsize(bsize, PARTITION_HORZ_4));
978 break;
979 case PARTITION_VERT_A:
980 DEC_BLOCK(mi_row, mi_col, get_subsize(bsize, PARTITION_VERT_4));
981 DEC_BLOCK(mi_row, mi_col + qbs, get_subsize(bsize, PARTITION_VERT_4));
982 DEC_BLOCK(mi_row, mi_col + hbs, subsize);
983 break;
984 case PARTITION_VERT_B:
985 DEC_BLOCK(mi_row, mi_col, subsize);
986 DEC_BLOCK(mi_row, mi_col + hbs, get_subsize(bsize, PARTITION_VERT_4));
987 if (mi_col + 3 * qbs < cm->mi_cols)
988 DEC_BLOCK(mi_row, mi_col + 3 * qbs,
989 get_subsize(bsize, PARTITION_VERT_4));
990 break;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +0100991#else
Debargha Mukherjeeedced252017-10-20 00:02:00 -0700992 case PARTITION_HORZ_A:
993 DEC_BLOCK(mi_row, mi_col, bsize2);
994 DEC_BLOCK(mi_row, mi_col + hbs, bsize2);
995 DEC_BLOCK(mi_row + hbs, mi_col, subsize);
996 break;
997 case PARTITION_HORZ_B:
998 DEC_BLOCK(mi_row, mi_col, subsize);
999 DEC_BLOCK(mi_row + hbs, mi_col, bsize2);
1000 DEC_BLOCK(mi_row + hbs, mi_col + hbs, bsize2);
1001 break;
1002 case PARTITION_VERT_A:
1003 DEC_BLOCK(mi_row, mi_col, bsize2);
1004 DEC_BLOCK(mi_row + hbs, mi_col, bsize2);
1005 DEC_BLOCK(mi_row, mi_col + hbs, subsize);
1006 break;
1007 case PARTITION_VERT_B:
1008 DEC_BLOCK(mi_row, mi_col, subsize);
1009 DEC_BLOCK(mi_row, mi_col + hbs, bsize2);
1010 DEC_BLOCK(mi_row + hbs, mi_col + hbs, bsize2);
1011 break;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01001012#endif
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001013 case PARTITION_HORZ_4:
1014 for (i = 0; i < 4; ++i) {
1015 int this_mi_row = mi_row + i * quarter_step;
1016 if (i > 0 && this_mi_row >= cm->mi_rows) break;
1017 DEC_BLOCK(this_mi_row, mi_col, subsize);
1018 }
1019 break;
1020 case PARTITION_VERT_4:
1021 for (i = 0; i < 4; ++i) {
1022 int this_mi_col = mi_col + i * quarter_step;
1023 if (i > 0 && this_mi_col >= cm->mi_cols) break;
1024 DEC_BLOCK(mi_row, this_mi_col, subsize);
1025 }
1026 break;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001027#endif // CONFIG_EXT_PARTITION_TYPES
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001028 default: assert(0 && "Invalid partition type");
Yaowu Xuc27fc142016-08-22 16:08:15 -07001029 }
1030
Rupert Swarbrick668d3d92017-09-06 16:09:51 +01001031#undef DEC_PARTITION
1032#undef DEC_BLOCK
1033#undef DEC_BLOCK_EPT_ARG
1034#undef DEC_BLOCK_STX_ARG
1035
Yaowu Xuc27fc142016-08-22 16:08:15 -07001036#if CONFIG_EXT_PARTITION_TYPES
Alex Converseffabff32017-03-27 09:52:19 -07001037 update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001038#else
1039 // update partition context
1040 if (bsize >= BLOCK_8X8 &&
1041 (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
Jingning Han1beb0102016-12-07 11:08:30 -08001042 update_partition_context(xd, mi_row, mi_col, subsize, bsize);
David Barkerf8935c92016-10-26 14:54:06 +01001043#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xud71be782016-10-14 08:47:03 -07001044
Cheng Chenf572cd32017-08-25 18:34:51 -07001045#if CONFIG_LPF_SB
1046 if (bsize == cm->sb_size) {
Cheng Chena4b27de2017-08-31 16:05:19 -07001047 int filt_lvl;
1048 if (mi_row == 0 && mi_col == 0) {
1049 filt_lvl = aom_read_literal(r, 6, ACCT_STR);
Cheng Chen41d37c22017-09-08 19:00:21 -07001050 cm->mi_grid_visible[0]->mbmi.reuse_sb_lvl = 0;
1051 cm->mi_grid_visible[0]->mbmi.delta = 0;
1052 cm->mi_grid_visible[0]->mbmi.sign = 0;
Cheng Chena4b27de2017-08-31 16:05:19 -07001053 } else {
1054 int prev_mi_row, prev_mi_col;
1055 if (mi_col - MAX_MIB_SIZE < 0) {
1056 prev_mi_row = mi_row - MAX_MIB_SIZE;
1057 prev_mi_col = mi_col;
1058 } else {
1059 prev_mi_row = mi_row;
1060 prev_mi_col = mi_col - MAX_MIB_SIZE;
1061 }
Cheng Chenc7855b12017-09-05 10:49:08 -07001062
Cheng Chen41d37c22017-09-08 19:00:21 -07001063 MB_MODE_INFO *curr_mbmi =
1064 &cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi;
1065 MB_MODE_INFO *prev_mbmi =
1066 &cm->mi_grid_visible[prev_mi_row * cm->mi_stride + prev_mi_col]->mbmi;
1067 const uint8_t prev_lvl = prev_mbmi->filt_lvl;
Cheng Chena4b27de2017-08-31 16:05:19 -07001068
Cheng Chen41d37c22017-09-08 19:00:21 -07001069 const int reuse_ctx = prev_mbmi->reuse_sb_lvl;
1070 const int reuse_prev_lvl = aom_read_symbol(
1071 r, xd->tile_ctx->lpf_reuse_cdf[reuse_ctx], 2, ACCT_STR);
1072 curr_mbmi->reuse_sb_lvl = reuse_prev_lvl;
1073
Cheng Chenc7855b12017-09-05 10:49:08 -07001074 if (reuse_prev_lvl) {
Cheng Chena4b27de2017-08-31 16:05:19 -07001075 filt_lvl = prev_lvl;
Cheng Chen41d37c22017-09-08 19:00:21 -07001076 curr_mbmi->delta = 0;
1077 curr_mbmi->sign = 0;
Cheng Chenc7855b12017-09-05 10:49:08 -07001078 } else {
Cheng Chen41d37c22017-09-08 19:00:21 -07001079 const int delta_ctx = prev_mbmi->delta;
1080 unsigned int delta = aom_read_symbol(
1081 r, xd->tile_ctx->lpf_delta_cdf[delta_ctx], DELTA_RANGE, ACCT_STR);
1082 curr_mbmi->delta = delta;
Cheng Chenf89ca3e2017-09-07 14:47:47 -07001083 delta *= LPF_STEP;
Cheng Chenc7855b12017-09-05 10:49:08 -07001084
1085 if (delta) {
Cheng Chen41d37c22017-09-08 19:00:21 -07001086 const int sign_ctx = prev_mbmi->sign;
1087 const int sign = aom_read_symbol(
1088 r, xd->tile_ctx->lpf_sign_cdf[reuse_ctx][sign_ctx], 2, ACCT_STR);
1089 curr_mbmi->sign = sign;
Cheng Chenc7855b12017-09-05 10:49:08 -07001090 filt_lvl = sign ? prev_lvl + delta : prev_lvl - delta;
1091 } else {
1092 filt_lvl = prev_lvl;
Cheng Chen41d37c22017-09-08 19:00:21 -07001093 curr_mbmi->sign = 0;
Cheng Chenc7855b12017-09-05 10:49:08 -07001094 }
Cheng Chena4b27de2017-08-31 16:05:19 -07001095 }
1096 }
Cheng Chen5589d712017-09-05 12:03:25 -07001097
1098 av1_loop_filter_sb_level_init(cm, mi_row, mi_col, filt_lvl);
Cheng Chenf572cd32017-08-25 18:34:51 -07001099 }
1100#endif
1101
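// With CONFIG_CDEF, a cdef strength index of cm->cdef_bits bits is read for
// every 64x64 unit of this superblock that lies inside the frame and is not
// entirely skipped (and the frame is not all-lossless); units that carry no
// syntax are marked with -1.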
Jean-Marc Valin01435132017-02-18 14:12:53 -05001102#if CONFIG_CDEF
Jingning Handf068332017-05-09 09:03:17 -07001103 if (bsize == cm->sb_size) {
Cheng Chenf5bdeac2017-07-24 14:31:30 -07001104 int width_step = mi_size_wide[BLOCK_64X64];
1105    int height_step = mi_size_high[BLOCK_64X64];
1106 int w, h;
1107 for (h = 0; (h < mi_size_high[cm->sb_size]) && (mi_row + h < cm->mi_rows);
1108 h += height_step) {
1109 for (w = 0; (w < mi_size_wide[cm->sb_size]) && (mi_col + w < cm->mi_cols);
1110 w += width_step) {
1111 if (!cm->all_lossless && !sb_all_skip(cm, mi_row + h, mi_col + w))
1112 cm->mi_grid_visible[(mi_row + h) * cm->mi_stride + (mi_col + w)]
1113 ->mbmi.cdef_strength =
1114 aom_read_literal(r, cm->cdef_bits, ACCT_STR);
1115 else
1116 cm->mi_grid_visible[(mi_row + h) * cm->mi_stride + (mi_col + w)]
1117 ->mbmi.cdef_strength = -1;
1118 }
Yaowu Xud71be782016-10-14 08:47:03 -07001119 }
1120 }
Jean-Marc Valin01435132017-02-18 14:12:53 -05001121#endif // CONFIG_CDEF
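// With CONFIG_LOOP_RESTORATION, for each plane read the coefficients of every
// restoration unit whose top-left corner falls inside this superblock.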
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001122#if CONFIG_LOOP_RESTORATION
1123 for (int plane = 0; plane < MAX_MB_PLANE; ++plane) {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001124 int rcol0, rcol1, rrow0, rrow1, tile_tl_idx;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001125 if (av1_loop_restoration_corners_in_sb(cm, plane, mi_row, mi_col, bsize,
1126 &rcol0, &rcol1, &rrow0, &rrow1,
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001127 &tile_tl_idx)) {
1128 const int rstride = cm->rst_info[plane].horz_units_per_tile;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001129 for (int rrow = rrow0; rrow < rrow1; ++rrow) {
1130 for (int rcol = rcol0; rcol < rcol1; ++rcol) {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001131 const int rtile_idx = tile_tl_idx + rcol + rrow * rstride;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001132 loop_restoration_read_sb_coeffs(cm, xd, r, plane, rtile_idx);
1133 }
1134 }
1135 }
1136 }
1137#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001138}
1139
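// Initializes the entropy decoder for one tile partition: verifies that
// 'read_size' bytes are actually available between 'data' and 'data_end',
// then points the reader at that range (optionally via the decrypt callback).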
Yaowu Xuc27fc142016-08-22 16:08:15 -07001140static void setup_bool_decoder(const uint8_t *data, const uint8_t *data_end,
1141 const size_t read_size,
Yaowu Xuf883b422016-08-30 14:01:10 -07001142 struct aom_internal_error_info *error_info,
Alex Converseeb780e72016-12-13 12:46:41 -08001143 aom_reader *r,
1144#if CONFIG_ANS && ANS_MAX_SYMBOLS
1145 int window_size,
1146#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
1147 aom_decrypt_cb decrypt_cb, void *decrypt_state) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001148  // Validate the calculated partition length. If the buffer
1149  // described by the partition can't be fully read, throw an error
1150  // rather than attempting a partial read.
1151 if (!read_is_valid(data, read_size, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07001152 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001153 "Truncated packet or corrupt tile length");
1154
Alex Converse2cdf0d82016-12-13 13:53:09 -08001155#if CONFIG_ANS && ANS_MAX_SYMBOLS
Alex Converseeb780e72016-12-13 12:46:41 -08001156 r->window_size = window_size;
Alex Converse2cdf0d82016-12-13 13:53:09 -08001157#endif
Alex Converse346440b2017-01-03 13:47:37 -08001158 if (aom_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
Yaowu Xuf883b422016-08-30 14:01:10 -07001159 aom_internal_error(error_info, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001160 "Failed to allocate bool decoder %d", 1);
1161}
Yaowu Xuc27fc142016-08-22 16:08:15 -07001162
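// Reads the segmentation header: the enabled flag, whether the segmentation
// map is updated (and, for inter frames, whether that update is temporally
// predicted), and the optional per-segment feature data with sign.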
Yaowu Xuf883b422016-08-30 14:01:10 -07001163static void setup_segmentation(AV1_COMMON *const cm,
1164 struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001165 struct segmentation *const seg = &cm->seg;
1166 int i, j;
1167
1168 seg->update_map = 0;
1169 seg->update_data = 0;
Ryandd8df162017-09-27 15:40:13 -07001170 seg->temporal_update = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001171
Yaowu Xuf883b422016-08-30 14:01:10 -07001172 seg->enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001173 if (!seg->enabled) return;
1174
1175 // Segmentation map update
1176 if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
1177 seg->update_map = 1;
1178 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001179 seg->update_map = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001180 }
1181 if (seg->update_map) {
1182 if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
1183 seg->temporal_update = 0;
1184 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001185 seg->temporal_update = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001186 }
1187 }
1188
1189 // Segmentation data update
Yaowu Xuf883b422016-08-30 14:01:10 -07001190 seg->update_data = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001191 if (seg->update_data) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001192 seg->abs_delta = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001193
Yaowu Xuf883b422016-08-30 14:01:10 -07001194 av1_clearall_segfeatures(seg);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001195
1196 for (i = 0; i < MAX_SEGMENTS; i++) {
1197 for (j = 0; j < SEG_LVL_MAX; j++) {
1198 int data = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07001199 const int feature_enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001200 if (feature_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001201 av1_enable_segfeature(seg, i, j);
1202 data = decode_unsigned_max(rb, av1_seg_feature_data_max(j));
1203 if (av1_is_segfeature_signed(j))
1204 data = aom_rb_read_bit(rb) ? -data : data;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001205 }
Yaowu Xuf883b422016-08-30 14:01:10 -07001206 av1_set_segdata(seg, i, j, data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001207 }
1208 }
1209 }
1210}
1211
1212#if CONFIG_LOOP_RESTORATION
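// Reads the frame-level loop-restoration header: two bits per plane select
// NONE, WIENER, SGRPROJ or SWITCHABLE, and if any plane uses restoration the
// luma and chroma restoration unit sizes are coded as shifts of the defaults.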
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07001213static void decode_restoration_mode(AV1_COMMON *cm,
1214 struct aom_read_bit_buffer *rb) {
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001215 int p;
Debargha Mukherjeea3d4fe52017-05-19 16:22:54 -07001216 RestorationInfo *rsi;
1217 for (p = 0; p < MAX_MB_PLANE; ++p) {
Debargha Mukherjeed23ceea2017-05-18 20:33:52 -07001218 rsi = &cm->rst_info[p];
1219 if (aom_rb_read_bit(rb)) {
1220 rsi->frame_restoration_type =
1221 aom_rb_read_bit(rb) ? RESTORE_SGRPROJ : RESTORE_WIENER;
1222 } else {
Debargha Mukherjeea3d4fe52017-05-19 16:22:54 -07001223 rsi->frame_restoration_type =
1224 aom_rb_read_bit(rb) ? RESTORE_SWITCHABLE : RESTORE_NONE;
Debargha Mukherjeed23ceea2017-05-18 20:33:52 -07001225 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001226 }
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001227 cm->rst_info[0].restoration_unit_size = RESTORATION_TILESIZE_MAX;
1228 cm->rst_info[1].restoration_unit_size = RESTORATION_TILESIZE_MAX;
1229 cm->rst_info[2].restoration_unit_size = RESTORATION_TILESIZE_MAX;
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08001230 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
1231 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
1232 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001233 cm->rst_info[0].restoration_unit_size = RESTORATION_TILESIZE_MAX >> 2;
1234 cm->rst_info[1].restoration_unit_size = RESTORATION_TILESIZE_MAX >> 2;
1235 cm->rst_info[2].restoration_unit_size = RESTORATION_TILESIZE_MAX >> 2;
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08001236 rsi = &cm->rst_info[0];
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001237 rsi->restoration_unit_size <<= aom_rb_read_bit(rb);
1238 if (rsi->restoration_unit_size != (RESTORATION_TILESIZE_MAX >> 2)) {
1239 rsi->restoration_unit_size <<= aom_rb_read_bit(rb);
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08001240 }
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08001241 }
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07001242 int s = AOMMIN(cm->subsampling_x, cm->subsampling_y);
1243 if (s && (cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
1244 cm->rst_info[2].frame_restoration_type != RESTORE_NONE)) {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001245 cm->rst_info[1].restoration_unit_size =
1246 cm->rst_info[0].restoration_unit_size >> (aom_rb_read_bit(rb) * s);
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07001247 } else {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001248 cm->rst_info[1].restoration_unit_size =
1249 cm->rst_info[0].restoration_unit_size;
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07001250 }
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001251 cm->rst_info[2].restoration_unit_size = cm->rst_info[1].restoration_unit_size;
Debargha Mukherjee7a5587a2017-08-31 07:41:30 -07001252
1253 cm->rst_info[0].procunit_width = cm->rst_info[0].procunit_height =
1254 RESTORATION_PROC_UNIT_SIZE;
1255 cm->rst_info[1].procunit_width = cm->rst_info[2].procunit_width =
1256 RESTORATION_PROC_UNIT_SIZE >> cm->subsampling_x;
1257 cm->rst_info[1].procunit_height = cm->rst_info[2].procunit_height =
1258 RESTORATION_PROC_UNIT_SIZE >> cm->subsampling_y;
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07001259}
1260
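// Reads one separable, symmetric Wiener filter. Only the outer taps of each
// half are coded, each relative to the corresponding tap of the previously
// decoded filter (ref_wiener_info); the mirrored taps are copied, and the
// central tap is stored as -2 * (sum of the coded taps). Together with the
// implicit +WIENER_FILT_STEP noted below, the taps then sum to
// WIENER_FILT_STEP, i.e. unit DC gain in the filter's fixed-point scale.
// Illustrative arithmetic: if the coded taps sum to 10, the stored central
// tap is -20 and the effective tap is WIENER_FILT_STEP - 20. For the chroma
// window the outermost tap pair is forced to zero.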
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07001261static void read_wiener_filter(int wiener_win, WienerInfo *wiener_info,
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001262 WienerInfo *ref_wiener_info, aom_reader *rb) {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001263 memset(wiener_info->vfilter, 0, sizeof(wiener_info->vfilter));
1264 memset(wiener_info->hfilter, 0, sizeof(wiener_info->hfilter));
1265
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07001266 if (wiener_win == WIENER_WIN)
1267 wiener_info->vfilter[0] = wiener_info->vfilter[WIENER_WIN - 1] =
1268 aom_read_primitive_refsubexpfin(
1269 rb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
1270 WIENER_FILT_TAP0_SUBEXP_K,
1271 ref_wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV, ACCT_STR) +
1272 WIENER_FILT_TAP0_MINV;
1273 else
1274 wiener_info->vfilter[0] = wiener_info->vfilter[WIENER_WIN - 1] = 0;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001275 wiener_info->vfilter[1] = wiener_info->vfilter[WIENER_WIN - 2] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001276 aom_read_primitive_refsubexpfin(
1277 rb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
1278 WIENER_FILT_TAP1_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07001279 ref_wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001280 WIENER_FILT_TAP1_MINV;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001281 wiener_info->vfilter[2] = wiener_info->vfilter[WIENER_WIN - 3] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001282 aom_read_primitive_refsubexpfin(
1283 rb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
1284 WIENER_FILT_TAP2_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07001285 ref_wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001286 WIENER_FILT_TAP2_MINV;
David Barker1e8e6b92017-01-13 13:45:51 +00001287 // The central element has an implicit +WIENER_FILT_STEP
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001288 wiener_info->vfilter[WIENER_HALFWIN] =
David Barker1e8e6b92017-01-13 13:45:51 +00001289 -2 * (wiener_info->vfilter[0] + wiener_info->vfilter[1] +
1290 wiener_info->vfilter[2]);
1291
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07001292 if (wiener_win == WIENER_WIN)
1293 wiener_info->hfilter[0] = wiener_info->hfilter[WIENER_WIN - 1] =
1294 aom_read_primitive_refsubexpfin(
1295 rb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
1296 WIENER_FILT_TAP0_SUBEXP_K,
1297 ref_wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV, ACCT_STR) +
1298 WIENER_FILT_TAP0_MINV;
1299 else
1300 wiener_info->hfilter[0] = wiener_info->hfilter[WIENER_WIN - 1] = 0;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001301 wiener_info->hfilter[1] = wiener_info->hfilter[WIENER_WIN - 2] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001302 aom_read_primitive_refsubexpfin(
1303 rb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
1304 WIENER_FILT_TAP1_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07001305 ref_wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001306 WIENER_FILT_TAP1_MINV;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001307 wiener_info->hfilter[2] = wiener_info->hfilter[WIENER_WIN - 3] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001308 aom_read_primitive_refsubexpfin(
1309 rb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
1310 WIENER_FILT_TAP2_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07001311 ref_wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001312 WIENER_FILT_TAP2_MINV;
David Barker1e8e6b92017-01-13 13:45:51 +00001313 // The central element has an implicit +WIENER_FILT_STEP
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001314 wiener_info->hfilter[WIENER_HALFWIN] =
David Barker1e8e6b92017-01-13 13:45:51 +00001315 -2 * (wiener_info->hfilter[0] + wiener_info->hfilter[1] +
1316 wiener_info->hfilter[2]);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001317 memcpy(ref_wiener_info, wiener_info, sizeof(*wiener_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001318}
1319
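// Reads one self-guided (sgrproj) filter: the 'ep' parameter-set index plus
// the two projection coefficients xqd[0]/xqd[1], each coded relative to the
// previously decoded values, which are then updated for the next unit.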
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001320static void read_sgrproj_filter(SgrprojInfo *sgrproj_info,
1321 SgrprojInfo *ref_sgrproj_info, aom_reader *rb) {
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001322 sgrproj_info->ep = aom_read_literal(rb, SGRPROJ_PARAMS_BITS, ACCT_STR);
1323 sgrproj_info->xqd[0] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001324 aom_read_primitive_refsubexpfin(
1325 rb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1, SGRPROJ_PRJ_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07001326 ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0, ACCT_STR) +
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001327 SGRPROJ_PRJ_MIN0;
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001328 sgrproj_info->xqd[1] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001329 aom_read_primitive_refsubexpfin(
1330 rb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1, SGRPROJ_PRJ_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07001331 ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1, ACCT_STR) +
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001332 SGRPROJ_PRJ_MIN1;
1333 memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001334}
1335
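// Reads the restoration type and filter for a single restoration unit. With a
// SWITCHABLE frame restoration type the per-unit type is coded as a symbol;
// with a fixed WIENER or SGRPROJ frame type only an on/off decision plus the
// corresponding filter is coded.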
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001336static void loop_restoration_read_sb_coeffs(const AV1_COMMON *const cm,
1337 MACROBLOCKD *xd,
1338 aom_reader *const r, int plane,
1339 int rtile_idx) {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001340 const RestorationInfo *rsi = &cm->rst_info[plane];
1341 RestorationUnitInfo *rui = &rsi->unit_info[rtile_idx];
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001342 if (rsi->frame_restoration_type == RESTORE_NONE) return;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01001343
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001344 const int wiener_win = (plane > 0) ? WIENER_WIN_CHROMA : WIENER_WIN;
1345 WienerInfo *wiener_info = xd->wiener_info + plane;
1346 SgrprojInfo *sgrproj_info = xd->sgrproj_info + plane;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01001347
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001348 if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001349 rui->restoration_type =
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07001350 aom_read_symbol(r, xd->tile_ctx->switchable_restore_cdf,
1351 RESTORE_SWITCHABLE_TYPES, ACCT_STR);
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001352 switch (rui->restoration_type) {
1353 case RESTORE_WIENER:
1354 read_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, r);
1355 break;
1356 case RESTORE_SGRPROJ:
1357 read_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, r);
1358 break;
1359 default: assert(rui->restoration_type == RESTORE_NONE); break;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01001360 }
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001361 } else if (rsi->frame_restoration_type == RESTORE_WIENER) {
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07001362#if CONFIG_NEW_MULTISYMBOL
1363 if (aom_read_symbol(r, xd->tile_ctx->wiener_restore_cdf, 2, ACCT_STR)) {
1364#else
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001365 if (aom_read(r, RESTORE_NONE_WIENER_PROB, ACCT_STR)) {
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07001366#endif // CONFIG_NEW_MULTISYMBOL
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001367 rui->restoration_type = RESTORE_WIENER;
1368 read_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, r);
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001369 } else {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001370 rui->restoration_type = RESTORE_NONE;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001371 }
1372 } else if (rsi->frame_restoration_type == RESTORE_SGRPROJ) {
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07001373#if CONFIG_NEW_MULTISYMBOL
1374 if (aom_read_symbol(r, xd->tile_ctx->sgrproj_restore_cdf, 2, ACCT_STR)) {
1375#else
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001376 if (aom_read(r, RESTORE_NONE_SGRPROJ_PROB, ACCT_STR)) {
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07001377#endif // CONFIG_NEW_MULTISYMBOL
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001378 rui->restoration_type = RESTORE_SGRPROJ;
1379 read_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, r);
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001380 } else {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001381 rui->restoration_type = RESTORE_NONE;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001382 }
1383 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001384}
1385#endif // CONFIG_LOOP_RESTORATION
1386
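// Reads the frame loop filter parameters: the filter level(s) (omitted with
// CONFIG_LPF_SB, where levels are signalled per superblock instead), the
// sharpness level, and the optional mode/reference loop filter delta updates.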
Yaowu Xuf883b422016-08-30 14:01:10 -07001387static void setup_loopfilter(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001388 struct loopfilter *lf = &cm->lf;
Cheng Chenf572cd32017-08-25 18:34:51 -07001389#if !CONFIG_LPF_SB
Cheng Chen13fc8192017-08-19 11:49:28 -07001390#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07001391 lf->filter_level[0] = aom_rb_read_literal(rb, 6);
1392 lf->filter_level[1] = aom_rb_read_literal(rb, 6);
1393 if (lf->filter_level[0] || lf->filter_level[1]) {
Cheng Chene94df5c2017-07-19 17:25:33 -07001394 lf->filter_level_u = aom_rb_read_literal(rb, 6);
1395 lf->filter_level_v = aom_rb_read_literal(rb, 6);
1396 }
Cheng Chen179479f2017-08-04 10:56:39 -07001397#else
1398 lf->filter_level = aom_rb_read_literal(rb, 6);
Cheng Chene94df5c2017-07-19 17:25:33 -07001399#endif
Cheng Chenf572cd32017-08-25 18:34:51 -07001400#endif // CONFIG_LPF_SB
Yaowu Xuf883b422016-08-30 14:01:10 -07001401 lf->sharpness_level = aom_rb_read_literal(rb, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001402
1403 // Read in loop filter deltas applied at the MB level based on mode or ref
1404 // frame.
1405 lf->mode_ref_delta_update = 0;
1406
Yaowu Xuf883b422016-08-30 14:01:10 -07001407 lf->mode_ref_delta_enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001408 if (lf->mode_ref_delta_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001409 lf->mode_ref_delta_update = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001410 if (lf->mode_ref_delta_update) {
1411 int i;
1412
1413 for (i = 0; i < TOTAL_REFS_PER_FRAME; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07001414 if (aom_rb_read_bit(rb))
1415 lf->ref_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001416
1417 for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07001418 if (aom_rb_read_bit(rb))
1419 lf->mode_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001420 }
1421 }
1422}
1423
Jean-Marc Valin01435132017-02-18 14:12:53 -05001424#if CONFIG_CDEF
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01001425static void setup_cdef(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04001426 int i;
Steinar Midtskogen59782122017-07-20 08:49:43 +02001427#if CONFIG_CDEF_SINGLEPASS
1428 cm->cdef_pri_damping = cm->cdef_sec_damping = aom_rb_read_literal(rb, 2) + 3;
1429#else
Steinar Midtskogen94de0aa2017-08-02 10:30:12 +02001430 cm->cdef_pri_damping = aom_rb_read_literal(rb, 1) + 5;
1431 cm->cdef_sec_damping = aom_rb_read_literal(rb, 2) + 3;
Steinar Midtskogen59782122017-07-20 08:49:43 +02001432#endif
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04001433 cm->cdef_bits = aom_rb_read_literal(rb, 2);
1434 cm->nb_cdef_strengths = 1 << cm->cdef_bits;
1435 for (i = 0; i < cm->nb_cdef_strengths; i++) {
1436 cm->cdef_strengths[i] = aom_rb_read_literal(rb, CDEF_STRENGTH_BITS);
Steinar Midtskogen1c1161f2017-09-08 15:03:51 +02001437 cm->cdef_uv_strengths[i] = cm->subsampling_x == cm->subsampling_y
1438 ? aom_rb_read_literal(rb, CDEF_STRENGTH_BITS)
1439 : 0;
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04001440 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001441}
Jean-Marc Valin01435132017-02-18 14:12:53 -05001442#endif // CONFIG_CDEF
Yaowu Xuc27fc142016-08-22 16:08:15 -07001443
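// A delta-q field is a presence bit followed by a signed 6-bit literal.
// setup_quantization() reads the base q index, the luma DC and chroma DC/AC
// deltas and, with CONFIG_AOM_QM, the quantization matrix level range.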
Yaowu Xuf883b422016-08-30 14:01:10 -07001444static INLINE int read_delta_q(struct aom_read_bit_buffer *rb) {
1445 return aom_rb_read_bit(rb) ? aom_rb_read_inv_signed_literal(rb, 6) : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001446}
1447
Yaowu Xuf883b422016-08-30 14:01:10 -07001448static void setup_quantization(AV1_COMMON *const cm,
1449 struct aom_read_bit_buffer *rb) {
1450 cm->base_qindex = aom_rb_read_literal(rb, QINDEX_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001451 cm->y_dc_delta_q = read_delta_q(rb);
1452 cm->uv_dc_delta_q = read_delta_q(rb);
1453 cm->uv_ac_delta_q = read_delta_q(rb);
1454 cm->dequant_bit_depth = cm->bit_depth;
1455#if CONFIG_AOM_QM
Yaowu Xuf883b422016-08-30 14:01:10 -07001456 cm->using_qmatrix = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001457 if (cm->using_qmatrix) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001458 cm->min_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
1459 cm->max_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001460 } else {
1461 cm->min_qmlevel = 0;
1462 cm->max_qmlevel = 0;
1463 }
1464#endif
1465}
1466
Alex Converse05a3e7d2017-05-16 12:20:07 -07001467// Build y/uv dequant values based on segmentation.
Yaowu Xuf883b422016-08-30 14:01:10 -07001468static void setup_segmentation_dequant(AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001469#if CONFIG_AOM_QM
Alex Converse05a3e7d2017-05-16 12:20:07 -07001470 const int using_qm = cm->using_qmatrix;
1471 const int minqm = cm->min_qmlevel;
1472 const int maxqm = cm->max_qmlevel;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001473#endif
Alex Converse05a3e7d2017-05-16 12:20:07 -07001474 // When segmentation is disabled, only the first value is used. The
1475 // remaining are don't cares.
1476 const int max_segments = cm->seg.enabled ? MAX_SEGMENTS : 1;
1477 for (int i = 0; i < max_segments; ++i) {
1478 const int qindex = av1_get_qindex(&cm->seg, i, cm->base_qindex);
1479 cm->y_dequant[i][0] = av1_dc_quant(qindex, cm->y_dc_delta_q, cm->bit_depth);
1480 cm->y_dequant[i][1] = av1_ac_quant(qindex, 0, cm->bit_depth);
1481 cm->uv_dequant[i][0] =
Yaowu Xuf883b422016-08-30 14:01:10 -07001482 av1_dc_quant(qindex, cm->uv_dc_delta_q, cm->bit_depth);
Alex Converse05a3e7d2017-05-16 12:20:07 -07001483 cm->uv_dequant[i][1] =
Yaowu Xuf883b422016-08-30 14:01:10 -07001484 av1_ac_quant(qindex, cm->uv_ac_delta_q, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001485#if CONFIG_AOM_QM
Alex Converse05a3e7d2017-05-16 12:20:07 -07001486 const int lossless = qindex == 0 && cm->y_dc_delta_q == 0 &&
1487 cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
1488 // NB: depends on base index so there is only 1 set per frame
Yaowu Xuc27fc142016-08-22 16:08:15 -07001489 // No quant weighting when lossless or signalled not using QM
Alex Converse05a3e7d2017-05-16 12:20:07 -07001490 const int qmlevel = (lossless || using_qm == 0)
1491 ? NUM_QM_LEVELS - 1
1492 : aom_get_qmlevel(cm->base_qindex, minqm, maxqm);
Thomas Davies6675adf2017-05-04 17:39:21 +01001493 for (int j = 0; j < TX_SIZES_ALL; ++j) {
Thomas Daviesdd3cf832017-10-20 15:49:57 +01001494 cm->y_iqmatrix[i][j] = aom_iqmatrix(cm, qmlevel, 0, j);
1495 cm->uv_iqmatrix[i][j] = aom_iqmatrix(cm, qmlevel, 1, j);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001496 }
Alex Converse05a3e7d2017-05-16 12:20:07 -07001497#endif // CONFIG_AOM_QM
Yaowu Xuc27fc142016-08-22 16:08:15 -07001498#if CONFIG_NEW_QUANT
Alex Converse05a3e7d2017-05-16 12:20:07 -07001499 for (int dq = 0; dq < QUANT_PROFILES; dq++) {
1500 for (int b = 0; b < COEF_BANDS; ++b) {
1501 av1_get_dequant_val_nuq(cm->y_dequant[i][b != 0], b,
1502 cm->y_dequant_nuq[i][dq][b], NULL, dq);
1503 av1_get_dequant_val_nuq(cm->uv_dequant[i][b != 0], b,
1504 cm->uv_dequant_nuq[i][dq][b], NULL, dq);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001505 }
1506 }
1507#endif // CONFIG_NEW_QUANT
1508 }
1509}
1510
Angie Chiang5678ad92016-11-21 09:38:40 -08001511static InterpFilter read_frame_interp_filter(struct aom_read_bit_buffer *rb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001512 return aom_rb_read_bit(rb) ? SWITCHABLE
Angie Chiang6305abe2016-10-24 12:24:44 -07001513 : aom_rb_read_literal(rb, LOG_SWITCHABLE_FILTERS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001514}
1515
Yaowu Xuf883b422016-08-30 14:01:10 -07001516static void setup_render_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001517#if CONFIG_FRAME_SUPERRES
1518 cm->render_width = cm->superres_upscaled_width;
1519 cm->render_height = cm->superres_upscaled_height;
1520#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001521 cm->render_width = cm->width;
1522 cm->render_height = cm->height;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001523#endif // CONFIG_FRAME_SUPERRES
Yaowu Xuf883b422016-08-30 14:01:10 -07001524 if (aom_rb_read_bit(rb))
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01001525#if CONFIG_FRAME_SIZE
1526 av1_read_frame_size(rb, 16, 16, &cm->render_width, &cm->render_height);
1527#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001528 av1_read_frame_size(rb, &cm->render_width, &cm->render_height);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01001529#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001530}
1531
Fergus Simpsond91c8c92017-04-07 12:12:00 -07001532#if CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07001533// TODO(afergs): make "struct aom_read_bit_buffer *const rb"?
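// Reads the frame-superres scale. When present, the denominator is a
// SUPERRES_SCALE_BITS literal plus SUPERRES_SCALE_DENOMINATOR_MIN, and the
// coded size is scaled down from the upscaled size (roughly
// width * SCALE_NUMERATOR / denominator; see
// av1_calculate_scaled_superres_size()).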
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001534static void setup_superres(AV1_COMMON *const cm, struct aom_read_bit_buffer *rb,
1535 int *width, int *height) {
1536 cm->superres_upscaled_width = *width;
1537 cm->superres_upscaled_height = *height;
Fergus Simpsone7508412017-03-14 18:14:09 -07001538 if (aom_rb_read_bit(rb)) {
Urvang Joshide71d142017-10-05 12:12:15 -07001539 cm->superres_scale_denominator =
Fergus Simpsone7508412017-03-14 18:14:09 -07001540 (uint8_t)aom_rb_read_literal(rb, SUPERRES_SCALE_BITS);
Urvang Joshide71d142017-10-05 12:12:15 -07001541 cm->superres_scale_denominator += SUPERRES_SCALE_DENOMINATOR_MIN;
Fergus Simpson7b2d1442017-05-22 17:18:33 -07001542 // Don't edit cm->width or cm->height directly, or the buffers won't get
1543 // resized correctly
Urvang Joshi69fde2e2017-10-09 15:34:18 -07001544 av1_calculate_scaled_superres_size(width, height,
1545 cm->superres_scale_denominator);
Fergus Simpsone7508412017-03-14 18:14:09 -07001546 } else {
1547 // 1:1 scaling - ie. no scaling, scale not provided
Urvang Joshide71d142017-10-05 12:12:15 -07001548 cm->superres_scale_denominator = SCALE_NUMERATOR;
Fergus Simpsone7508412017-03-14 18:14:09 -07001549 }
1550}
Fergus Simpsond91c8c92017-04-07 12:12:00 -07001551#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07001552
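// Validates the new frame dimensions against the build-time decode limits
// and, when the size changes, (re)allocates the mode-info/context buffers
// before recording the size in cm and in the current frame buffer.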
Yaowu Xuf883b422016-08-30 14:01:10 -07001553static void resize_context_buffers(AV1_COMMON *cm, int width, int height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001554#if CONFIG_SIZE_LIMIT
1555 if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
Yaowu Xuf883b422016-08-30 14:01:10 -07001556 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001557 "Dimensions of %dx%d beyond allowed size of %dx%d.",
1558 width, height, DECODE_WIDTH_LIMIT, DECODE_HEIGHT_LIMIT);
1559#endif
1560 if (cm->width != width || cm->height != height) {
1561 const int new_mi_rows =
1562 ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
1563 const int new_mi_cols =
1564 ALIGN_POWER_OF_TWO(width, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
1565
Yaowu Xuf883b422016-08-30 14:01:10 -07001566 // Allocations in av1_alloc_context_buffers() depend on individual
Yaowu Xuc27fc142016-08-22 16:08:15 -07001567 // dimensions as well as the overall size.
1568 if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001569 if (av1_alloc_context_buffers(cm, width, height))
1570 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001571 "Failed to allocate context buffers");
1572 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001573 av1_set_mb_mi(cm, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001574 }
Yaowu Xuf883b422016-08-30 14:01:10 -07001575 av1_init_context_buffers(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001576 cm->width = width;
1577 cm->height = height;
1578 }
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01001579
1580 ensure_mv_buffer(cm->cur_frame, cm);
1581 cm->cur_frame->width = cm->width;
1582 cm->cur_frame->height = cm->height;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001583}
1584
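// Reads the frame size: an explicit width/height (or, with CONFIG_FRAME_SIZE
// and no size override, the sequence-level maximum), then the superres scale
// and render size, and finally reallocates the output frame buffer with the
// current subsampling and bit depth.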
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01001585#if CONFIG_FRAME_SIZE
1586static void setup_frame_size(AV1_COMMON *cm, int frame_size_override_flag,
1587 struct aom_read_bit_buffer *rb) {
1588#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001589static void setup_frame_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01001590#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001591 int width, height;
1592 BufferPool *const pool = cm->buffer_pool;
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01001593#if CONFIG_FRAME_SIZE
1594 if (frame_size_override_flag) {
1595 int num_bits_width = cm->seq_params.num_bits_width;
1596 int num_bits_height = cm->seq_params.num_bits_height;
1597 av1_read_frame_size(rb, num_bits_width, num_bits_height, &width, &height);
1598 } else {
1599 width = cm->seq_params.max_frame_width;
1600 height = cm->seq_params.max_frame_height;
1601 }
1602#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001603 av1_read_frame_size(rb, &width, &height);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01001604#endif
Fergus Simpson7b2d1442017-05-22 17:18:33 -07001605#if CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001606 setup_superres(cm, rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07001607#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001608 setup_render_size(cm, rb);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07001609 resize_context_buffers(cm, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001610
1611 lock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07001612 if (aom_realloc_frame_buffer(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001613 get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
1614 cm->subsampling_y,
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02001615#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07001616 cm->use_highbitdepth,
1617#endif
Yaowu Xu671f2bd2016-09-30 15:07:57 -07001618 AOM_BORDER_IN_PIXELS, cm->byte_alignment,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001619 &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
1620 pool->cb_priv)) {
1621 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07001622 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001623 "Failed to allocate frame buffer");
1624 }
1625 unlock_buffer_pool(pool);
1626
1627 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
1628 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
1629 pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
1630 pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07001631#if CONFIG_COLORSPACE_HEADERS
1632 pool->frame_bufs[cm->new_fb_idx].buf.transfer_function =
1633 cm->transfer_function;
1634 pool->frame_bufs[cm->new_fb_idx].buf.chroma_sample_position =
1635 cm->chroma_sample_position;
1636#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001637 pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
1638 pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
1639 pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
1640}
1641
Debargha Mukherjeed2630fa2017-09-22 10:32:51 -07001642static void setup_sb_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
1643 (void)rb;
1644#if CONFIG_EXT_PARTITION
1645 set_sb_size(cm, aom_rb_read_bit(rb) ? BLOCK_128X128 : BLOCK_64X64);
1646#else
1647 set_sb_size(cm, BLOCK_64X64);
1648#endif // CONFIG_EXT_PARTITION
1649}
1650
Yaowu Xuf883b422016-08-30 14:01:10 -07001651static INLINE int valid_ref_frame_img_fmt(aom_bit_depth_t ref_bit_depth,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001652 int ref_xss, int ref_yss,
Yaowu Xuf883b422016-08-30 14:01:10 -07001653 aom_bit_depth_t this_bit_depth,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001654 int this_xss, int this_yss) {
1655 return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
1656 ref_yss == this_yss;
1657}
1658
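// For inter frames the size may instead be copied from a reference: one bit
// per reference selects "use this reference's size"; otherwise an explicit
// size is read. At least one reference must have dimensions valid for
// scaling, and every reference must match the frame's bit depth and
// subsampling.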
Yaowu Xuf883b422016-08-30 14:01:10 -07001659static void setup_frame_size_with_refs(AV1_COMMON *cm,
1660 struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001661 int width, height;
1662 int found = 0, i;
1663 int has_valid_ref_frame = 0;
1664 BufferPool *const pool = cm->buffer_pool;
1665 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001666 if (aom_rb_read_bit(rb)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001667 YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf;
1668 width = buf->y_crop_width;
1669 height = buf->y_crop_height;
1670 cm->render_width = buf->render_width;
1671 cm->render_height = buf->render_height;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001672#if CONFIG_FRAME_SUPERRES
1673 setup_superres(cm, rb, &width, &height);
1674#endif // CONFIG_FRAME_SUPERRES
Yaowu Xuc27fc142016-08-22 16:08:15 -07001675 found = 1;
1676 break;
1677 }
1678 }
1679
1680 if (!found) {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01001681#if CONFIG_FRAME_SIZE
1682 int num_bits_width = cm->seq_params.num_bits_width;
1683 int num_bits_height = cm->seq_params.num_bits_height;
1684 av1_read_frame_size(rb, num_bits_width, num_bits_height, &width, &height);
1685#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001686 av1_read_frame_size(rb, &width, &height);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01001687#endif
Fergus Simpson7b2d1442017-05-22 17:18:33 -07001688#if CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001689 setup_superres(cm, rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07001690#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001691 setup_render_size(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001692 }
1693
1694 if (width <= 0 || height <= 0)
Yaowu Xuf883b422016-08-30 14:01:10 -07001695 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001696 "Invalid frame size");
1697
1698 // Check to make sure at least one of frames that this frame references
1699 // has valid dimensions.
1700 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
1701 RefBuffer *const ref_frame = &cm->frame_refs[i];
1702 has_valid_ref_frame |=
1703 valid_ref_frame_size(ref_frame->buf->y_crop_width,
1704 ref_frame->buf->y_crop_height, width, height);
1705 }
1706 if (!has_valid_ref_frame)
Yaowu Xuf883b422016-08-30 14:01:10 -07001707 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001708 "Referenced frame has invalid size");
1709 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
1710 RefBuffer *const ref_frame = &cm->frame_refs[i];
1711 if (!valid_ref_frame_img_fmt(ref_frame->buf->bit_depth,
1712 ref_frame->buf->subsampling_x,
1713 ref_frame->buf->subsampling_y, cm->bit_depth,
1714 cm->subsampling_x, cm->subsampling_y))
Yaowu Xuf883b422016-08-30 14:01:10 -07001715 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001716 "Referenced frame has incompatible color format");
1717 }
1718
1719 resize_context_buffers(cm, width, height);
1720
1721 lock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07001722 if (aom_realloc_frame_buffer(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001723 get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
1724 cm->subsampling_y,
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02001725#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07001726 cm->use_highbitdepth,
1727#endif
Yaowu Xu671f2bd2016-09-30 15:07:57 -07001728 AOM_BORDER_IN_PIXELS, cm->byte_alignment,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001729 &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
1730 pool->cb_priv)) {
1731 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07001732 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001733 "Failed to allocate frame buffer");
1734 }
1735 unlock_buffer_pool(pool);
1736
1737 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
1738 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
1739 pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
1740 pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07001741#if CONFIG_COLORSPACE_HEADERS
1742 pool->frame_bufs[cm->new_fb_idx].buf.transfer_function =
1743 cm->transfer_function;
1744 pool->frame_bufs[cm->new_fb_idx].buf.chroma_sample_position =
1745 cm->chroma_sample_position;
1746#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001747 pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
1748 pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
1749 pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
1750}
1751
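// Reads the current tile group's first tile index and its tile count (coded
// minus one), each as a (log2_tile_rows + log2_tile_cols)-bit literal, and
// checks that the range stays inside the frame's tile grid.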
David Barker1a191122017-09-06 15:24:16 +01001752static void read_tile_group_range(AV1Decoder *pbi,
1753 struct aom_read_bit_buffer *const rb) {
1754 AV1_COMMON *const cm = &pbi->common;
1755 const int num_bits = cm->log2_tile_rows + cm->log2_tile_cols;
1756 const int num_tiles =
1757 cm->tile_rows * cm->tile_cols; // Note: May be < (1<<num_bits)
1758 pbi->tg_start = aom_rb_read_literal(rb, num_bits);
1759 pbi->tg_size = 1 + aom_rb_read_literal(rb, num_bits);
1760 if (pbi->tg_start + pbi->tg_size > num_tiles)
1761 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
1762 "Tile group extends past last tile in frame");
1763}
1764
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001765#if CONFIG_MAX_TILE
1766
1767// Same function as av1_read_uniform but reading from the uncompressed header bit buffer
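// This is the usual truncated binary (quasi-uniform) code: the first
// m = 2^l - n values take l - 1 bits and the rest take l bits. For example,
// if get_unsigned_bits(5) is 3, then m = 3 and values 0..2 cost 2 bits while
// values 3..4 cost 3 bits.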
1768static int rb_read_uniform(struct aom_read_bit_buffer *const rb, int n) {
1769 const int l = get_unsigned_bits(n);
1770 const int m = (1 << l) - n;
1771 const int v = aom_rb_read_literal(rb, l - 1);
1772 assert(l != 0);
1773 if (v < m)
1774 return v;
1775 else
1776 return (v << 1) - m + aom_rb_read_literal(rb, 1);
1777}
1778
1779static void read_tile_info_max_tile(AV1_COMMON *const cm,
1780 struct aom_read_bit_buffer *const rb) {
1781 int width_mi = ALIGN_POWER_OF_TWO(cm->mi_cols, MAX_MIB_SIZE_LOG2);
1782 int height_mi = ALIGN_POWER_OF_TWO(cm->mi_rows, MAX_MIB_SIZE_LOG2);
1783 int width_sb = width_mi >> MAX_MIB_SIZE_LOG2;
1784 int height_sb = height_mi >> MAX_MIB_SIZE_LOG2;
1785 int start_sb, size_sb, i;
1786
1787 av1_get_tile_limits(cm);
1788 cm->uniform_tile_spacing_flag = aom_rb_read_bit(rb);
1789
1790 // Read tile columns
1791 if (cm->uniform_tile_spacing_flag) {
1792 cm->log2_tile_cols = cm->min_log2_tile_cols;
1793 while (cm->log2_tile_cols < cm->max_log2_tile_cols) {
1794 if (!aom_rb_read_bit(rb)) {
1795 break;
1796 }
1797 cm->log2_tile_cols++;
1798 }
1799 } else {
Dominic Symesf58f1112017-09-25 12:47:40 +02001800 for (i = 0, start_sb = 0; width_sb > 0 && i < MAX_TILE_COLS; i++) {
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001801 size_sb = 1 + rb_read_uniform(rb, AOMMIN(width_sb, MAX_TILE_WIDTH_SB));
1802 cm->tile_col_start_sb[i] = start_sb;
1803 start_sb += size_sb;
1804 width_sb -= size_sb;
1805 }
1806 cm->tile_cols = i;
Dominic Symesf58f1112017-09-25 12:47:40 +02001807 cm->tile_col_start_sb[i] = start_sb + width_sb;
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001808 }
1809 av1_calculate_tile_cols(cm);
1810
1811 // Read tile rows
1812 if (cm->uniform_tile_spacing_flag) {
1813 cm->log2_tile_rows = cm->min_log2_tile_rows;
1814 while (cm->log2_tile_rows < cm->max_log2_tile_rows) {
1815 if (!aom_rb_read_bit(rb)) {
1816 break;
1817 }
1818 cm->log2_tile_rows++;
1819 }
1820 } else {
Dominic Symesf58f1112017-09-25 12:47:40 +02001821 for (i = 0, start_sb = 0; height_sb > 0 && i < MAX_TILE_ROWS; i++) {
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001822 size_sb =
1823 1 + rb_read_uniform(rb, AOMMIN(height_sb, cm->max_tile_height_sb));
1824 cm->tile_row_start_sb[i] = start_sb;
1825 start_sb += size_sb;
1826 height_sb -= size_sb;
1827 }
1828 cm->tile_rows = i;
Dominic Symesf58f1112017-09-25 12:47:40 +02001829 cm->tile_row_start_sb[i] = start_sb + height_sb;
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001830 }
1831 av1_calculate_tile_rows(cm);
1832}
1833#endif
1834
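// Reads the tile configuration. Large-scale (EXT_TILE) streams code a fixed
// tile width/height in superblock units plus the byte counts used for tile
// and tile-column sizes; otherwise either the MAX_TILE syntax above or the
// log2 tile column/row syntax is used, followed by the dependent-horizontal-
// tile and loop-filter-across-tiles flags and the tile-size byte count.
// Without CONFIG_OBU the tile group range is also read here.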
Yaowu Xuf883b422016-08-30 14:01:10 -07001835static void read_tile_info(AV1Decoder *const pbi,
1836 struct aom_read_bit_buffer *const rb) {
1837 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001838#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001839 cm->single_tile_decoding = 0;
1840 if (cm->large_scale_tile) {
1841 struct loopfilter *lf = &cm->lf;
1842
Rupert Swarbrick566155f2017-10-27 11:59:17 +01001843// Derive single_tile_decoding from the loop filter level: it can only be enabled when loop filtering is disabled.
1844#if CONFIG_LOOPFILTER_LEVEL
1845 const int no_loopfilter = !(lf->filter_level[0] || lf->filter_level[1]);
1846#else
1847 const int no_loopfilter = !lf->filter_level;
1848#endif
1849 cm->single_tile_decoding = no_loopfilter ? 1 : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001850// Read the tile width/height
1851#if CONFIG_EXT_PARTITION
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001852 if (cm->sb_size == BLOCK_128X128) {
1853 cm->tile_width = aom_rb_read_literal(rb, 5) + 1;
1854 cm->tile_height = aom_rb_read_literal(rb, 5) + 1;
1855 } else {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001856#endif // CONFIG_EXT_PARTITION
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001857 cm->tile_width = aom_rb_read_literal(rb, 6) + 1;
1858 cm->tile_height = aom_rb_read_literal(rb, 6) + 1;
1859#if CONFIG_EXT_PARTITION
1860 }
1861#endif // CONFIG_EXT_PARTITION
Yaowu Xuc27fc142016-08-22 16:08:15 -07001862
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001863 cm->tile_width <<= cm->mib_size_log2;
1864 cm->tile_height <<= cm->mib_size_log2;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001865
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001866 cm->tile_width = AOMMIN(cm->tile_width, cm->mi_cols);
1867 cm->tile_height = AOMMIN(cm->tile_height, cm->mi_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001868
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001869 // Get the number of tiles
1870 cm->tile_cols = 1;
1871 while (cm->tile_cols * cm->tile_width < cm->mi_cols) ++cm->tile_cols;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001872
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001873 cm->tile_rows = 1;
1874 while (cm->tile_rows * cm->tile_height < cm->mi_rows) ++cm->tile_rows;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001875
Yunqing Wang42015d12017-10-17 15:43:49 -07001876#if CONFIG_DEPENDENT_HORZTILES
1877 cm->dependent_horz_tiles = 0;
1878#endif
1879#if CONFIG_LOOPFILTERING_ACROSS_TILES
1880 if (cm->tile_cols * cm->tile_rows > 1)
1881 cm->loop_filter_across_tiles_enabled = aom_rb_read_bit(rb);
1882 else
1883 cm->loop_filter_across_tiles_enabled = 1;
1884#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
1885
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001886 if (cm->tile_cols * cm->tile_rows > 1) {
1887 // Read the number of bytes used to store tile size
1888 pbi->tile_col_size_bytes = aom_rb_read_literal(rb, 2) + 1;
1889 pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
1890 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001891 } else {
1892#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001893
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001894#if CONFIG_MAX_TILE
1895 read_tile_info_max_tile(cm, rb);
1896#else
1897 int min_log2_tile_cols, max_log2_tile_cols, max_ones;
1898 av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001899
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001900 // columns
1901 max_ones = max_log2_tile_cols - min_log2_tile_cols;
1902 cm->log2_tile_cols = min_log2_tile_cols;
1903 while (max_ones-- && aom_rb_read_bit(rb)) cm->log2_tile_cols++;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001904
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001905 if (cm->log2_tile_cols > 6)
1906 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
1907 "Invalid number of tile columns");
1908
1909 // rows
1910 cm->log2_tile_rows = aom_rb_read_bit(rb);
1911 if (cm->log2_tile_rows) cm->log2_tile_rows += aom_rb_read_bit(rb);
1912
Rupert Swarbrick5a010aa2017-09-26 16:16:48 +01001913 cm->tile_width =
1914 get_tile_size(cm->mi_cols, cm->log2_tile_cols, &cm->tile_cols);
1915 cm->tile_height =
1916 get_tile_size(cm->mi_rows, cm->log2_tile_rows, &cm->tile_rows);
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001917
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001918#endif // CONFIG_MAX_TILE
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08001919#if CONFIG_DEPENDENT_HORZTILES
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001920 if (cm->tile_rows > 1)
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001921 cm->dependent_horz_tiles = aom_rb_read_bit(rb);
1922 else
1923 cm->dependent_horz_tiles = 0;
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08001924#endif
Ryan Lei9b02b0e2017-01-30 15:52:20 -08001925#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yunqing Wang42015d12017-10-17 15:43:49 -07001926 if (cm->tile_cols * cm->tile_rows > 1)
1927 cm->loop_filter_across_tiles_enabled = aom_rb_read_bit(rb);
1928 else
1929 cm->loop_filter_across_tiles_enabled = 1;
Ryan Lei9b02b0e2017-01-30 15:52:20 -08001930#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Ryan Lei7386eda2016-12-08 21:08:31 -08001931
Thomas Daviesb25ba502017-07-18 10:18:24 +01001932  // Number of bytes used to code each tile size (1 to 4)
1933 pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001934#if CONFIG_EXT_TILE
1935 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001936#endif // CONFIG_EXT_TILE
Thomas Davies4974e522016-11-07 17:44:05 +00001937
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04001938// each tile group header is in its own tile group OBU
1939#if !CONFIG_OBU
Thomas Davies80188d12016-10-26 16:08:35 -07001940 // Store an index to the location of the tile group information
1941 pbi->tg_size_bit_offset = rb->bit_offset;
David Barker1a191122017-09-06 15:24:16 +01001942 read_tile_group_range(pbi, rb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04001943#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001944}
1945
Yaowu Xu4ff59b52017-04-24 12:41:56 -07001946static int mem_get_varsize(const uint8_t *src, int sz) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001947 switch (sz) {
1948 case 1: return src[0];
1949 case 2: return mem_get_le16(src);
1950 case 3: return mem_get_le24(src);
1951 case 4: return mem_get_le32(src);
James Zern88896732017-06-23 15:55:09 -07001952 default: assert(0 && "Invalid size"); return -1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001953 }
1954}
1955
1956#if CONFIG_EXT_TILE
1957// Reads the next large-scale tile into 'tile_buffers[row][col]' and advances
1958// '*data'; with tile_copy_mode, a tile may instead copy an earlier tile in its column.
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001959static void get_ls_tile_buffer(
1960 const uint8_t *const data_end, struct aom_internal_error_info *error_info,
1961 const uint8_t **data, aom_decrypt_cb decrypt_cb, void *decrypt_state,
1962 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS], int tile_size_bytes,
1963 int col, int row, int tile_copy_mode) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001964 size_t size;
1965
1966 size_t copy_size = 0;
1967 const uint8_t *copy_data = NULL;
1968
1969 if (!read_is_valid(*data, tile_size_bytes, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07001970 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001971 "Truncated packet or corrupt tile length");
1972 if (decrypt_cb) {
1973 uint8_t be_data[4];
1974 decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
1975
1976 // Only read number of bytes in cm->tile_size_bytes.
1977 size = mem_get_varsize(be_data, tile_size_bytes);
1978 } else {
1979 size = mem_get_varsize(*data, tile_size_bytes);
1980 }
1981
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001982 // If tile_copy_mode = 1, then the top bit of the tile header indicates copy
1983 // mode.
1984 if (tile_copy_mode && (size >> (tile_size_bytes * 8 - 1)) == 1) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001985 // The remaining bits in the top byte signal the row offset
1986 int offset = (size >> (tile_size_bytes - 1) * 8) & 0x7f;
1987
1988 // Currently, only use tiles in same column as reference tiles.
1989 copy_data = tile_buffers[row - offset][col].data;
1990 copy_size = tile_buffers[row - offset][col].size;
1991 size = 0;
1992 }
1993
1994 *data += tile_size_bytes;
1995
1996 if (size > (size_t)(data_end - *data))
Yaowu Xuf883b422016-08-30 14:01:10 -07001997 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001998 "Truncated packet or corrupt tile size");
1999
2000 if (size > 0) {
2001 tile_buffers[row][col].data = *data;
2002 tile_buffers[row][col].size = size;
2003 } else {
2004 tile_buffers[row][col].data = copy_data;
2005 tile_buffers[row][col].size = copy_size;
2006 }
2007
2008 *data += size;
2009
2010 tile_buffers[row][col].raw_data_end = *data;
2011}
2012
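// Locates the tile buffers of a large-scale-tile frame, using the per-column
// sizes stored up front to seek directly to the tiles selected by
// pbi->dec_tile_row / pbi->dec_tile_col.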
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002013static void get_ls_tile_buffers(
Yaowu Xuf883b422016-08-30 14:01:10 -07002014 AV1Decoder *pbi, const uint8_t *data, const uint8_t *data_end,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002015 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS]) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002016 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002017 const int tile_cols = cm->tile_cols;
2018 const int tile_rows = cm->tile_rows;
2019 const int have_tiles = tile_cols * tile_rows > 1;
2020
2021 if (!have_tiles) {
Jingning Han99ffce62017-04-25 15:48:41 -07002022 const size_t tile_size = data_end - data;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002023 tile_buffers[0][0].data = data;
2024 tile_buffers[0][0].size = tile_size;
2025 tile_buffers[0][0].raw_data_end = NULL;
2026 } else {
2027 // We locate only the tile buffers that are required, which are the ones
2028 // specified by pbi->dec_tile_col and pbi->dec_tile_row. Also, we always
2029 // need the last (bottom right) tile buffer, as we need to know where the
2030 // end of the compressed frame buffer is for proper superframe decoding.
2031
2032 const uint8_t *tile_col_data_end[MAX_TILE_COLS];
2033 const uint8_t *const data_start = data;
2034
Yaowu Xuf883b422016-08-30 14:01:10 -07002035 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002036 const int single_row = pbi->dec_tile_row >= 0;
2037 const int tile_rows_start = single_row ? dec_tile_row : 0;
2038 const int tile_rows_end = single_row ? tile_rows_start + 1 : tile_rows;
Yaowu Xuf883b422016-08-30 14:01:10 -07002039 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002040 const int single_col = pbi->dec_tile_col >= 0;
2041 const int tile_cols_start = single_col ? dec_tile_col : 0;
2042 const int tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
2043
2044 const int tile_col_size_bytes = pbi->tile_col_size_bytes;
2045 const int tile_size_bytes = pbi->tile_size_bytes;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002046 const int tile_copy_mode =
2047 ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256) ? 1
2048 : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002049 size_t tile_col_size;
2050 int r, c;
2051
2052 // Read tile column sizes for all columns (we need the last tile buffer)
2053 for (c = 0; c < tile_cols; ++c) {
2054 const int is_last = c == tile_cols - 1;
2055 if (!is_last) {
2056 tile_col_size = mem_get_varsize(data, tile_col_size_bytes);
2057 data += tile_col_size_bytes;
2058 tile_col_data_end[c] = data + tile_col_size;
2059 } else {
2060 tile_col_size = data_end - data;
2061 tile_col_data_end[c] = data_end;
2062 }
2063 data += tile_col_size;
2064 }
2065
2066 data = data_start;
2067
2068 // Read the required tile sizes.
2069 for (c = tile_cols_start; c < tile_cols_end; ++c) {
2070 const int is_last = c == tile_cols - 1;
2071
2072 if (c > 0) data = tile_col_data_end[c - 1];
2073
2074 if (!is_last) data += tile_col_size_bytes;
2075
2076 // Get the whole of the last column, otherwise stop at the required tile.
2077 for (r = 0; r < (is_last ? tile_rows : tile_rows_end); ++r) {
2078 tile_buffers[r][c].col = c;
2079
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002080 get_ls_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
2081 pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
2082 tile_size_bytes, c, r, tile_copy_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002083 }
2084 }
2085
2086 // If we have not read the last column, then read it to get the last tile.
2087 if (tile_cols_end != tile_cols) {
2088 c = tile_cols - 1;
2089
2090 data = tile_col_data_end[c - 1];
2091
2092 for (r = 0; r < tile_rows; ++r) {
2093 tile_buffers[r][c].col = c;
2094
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002095 get_ls_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
2096 pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
2097 tile_size_bytes, c, r, tile_copy_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002098 }
2099 }
2100 }
2101}
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002102#endif // CONFIG_EXT_TILE
2103
Yaowu Xuc27fc142016-08-22 16:08:15 -07002104// Reads the next tile returning its size and adjusting '*data' accordingly
2105// based on 'is_last'.
2106static void get_tile_buffer(const uint8_t *const data_end,
2107 const int tile_size_bytes, int is_last,
Yaowu Xuf883b422016-08-30 14:01:10 -07002108 struct aom_internal_error_info *error_info,
2109 const uint8_t **data, aom_decrypt_cb decrypt_cb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002110 void *decrypt_state, TileBufferDec *const buf) {
2111 size_t size;
2112
2113 if (!is_last) {
Yaowu Xu0a79a1b2017-02-17 13:04:54 -08002114 if (!read_is_valid(*data, tile_size_bytes, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07002115 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002116 "Truncated packet or corrupt tile length");
2117
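    // If a decrypt callback is provided, the size bytes are first decrypted
    // into a small scratch buffer and parsed from there; otherwise they are
    // read directly from the bitstream.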
2118 if (decrypt_cb) {
2119 uint8_t be_data[4];
2120 decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
2121 size = mem_get_varsize(be_data, tile_size_bytes);
2122 } else {
2123 size = mem_get_varsize(*data, tile_size_bytes);
2124 }
2125 *data += tile_size_bytes;
2126
2127 if (size > (size_t)(data_end - *data))
Yaowu Xuf883b422016-08-30 14:01:10 -07002128 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002129 "Truncated packet or corrupt tile size");
2130 } else {
Soo-Chul Han38427e82017-09-27 15:06:13 -04002131#if !CONFIG_OBU || CONFIG_ADD_4BYTES_OBUSIZE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002132 size = data_end - *data;
Soo-Chul Han38427e82017-09-27 15:06:13 -04002133#else
2134 size = mem_get_varsize(*data, tile_size_bytes);
2135 *data += tile_size_bytes;
2136#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002137 }
2138
2139 buf->data = *data;
2140 buf->size = size;
2141
2142 *data += size;
2143}
2144
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002145static void get_tile_buffers(AV1Decoder *pbi, const uint8_t *data,
2146 const uint8_t *data_end,
2147 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS],
2148 int startTile, int endTile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002149 AV1_COMMON *const cm = &pbi->common;
Thomas Davies80188d12016-10-26 16:08:35 -07002150 int r, c;
2151 const int tile_cols = cm->tile_cols;
2152 const int tile_rows = cm->tile_rows;
2153 int tc = 0;
2154 int first_tile_in_tg = 0;
Thomas Davies80188d12016-10-26 16:08:35 -07002155 struct aom_read_bit_buffer rb_tg_hdr;
2156 uint8_t clear_data[MAX_AV1_HEADER_SIZE];
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002157#if !CONFIG_OBU
James Zern6efba482017-04-20 20:53:49 -07002158 const size_t hdr_size = pbi->uncomp_hdr_size + pbi->first_partition_size;
Thomas Davies80188d12016-10-26 16:08:35 -07002159 const int tg_size_bit_offset = pbi->tg_size_bit_offset;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002160#else
2161 const int tg_size_bit_offset = 0;
2162#endif
2163
Fangwen Fu73126c02017-02-08 22:37:47 -08002164#if CONFIG_DEPENDENT_HORZTILES
2165 int tile_group_start_col = 0;
2166 int tile_group_start_row = 0;
2167#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002168
Thomas Davies4822e142017-10-10 11:30:36 +01002169#if CONFIG_SIMPLE_BWD_ADAPT
2170 size_t max_tile_size = 0;
2171 cm->largest_tile_id = 0;
2172#endif
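  // Walk all tiles in raster order, skipping those outside the requested
  // [startTile, endTile] range, and record each tile's data pointer and size
  // via get_tile_buffer(). When a tile begins a new tile group (non-OBU
  // path), its tile group header is parsed first.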
Thomas Davies80188d12016-10-26 16:08:35 -07002173 for (r = 0; r < tile_rows; ++r) {
2174 for (c = 0; c < tile_cols; ++c, ++tc) {
Thomas Davies80188d12016-10-26 16:08:35 -07002175 TileBufferDec *const buf = &tile_buffers[r][c];
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002176#if CONFIG_OBU
2177 const int is_last = (tc == endTile);
2178 const size_t hdr_offset = 0;
2179#else
Thomas Daviesa0de6d52017-01-20 14:45:25 +00002180 const int is_last = (r == tile_rows - 1) && (c == tile_cols - 1);
James Zern6efba482017-04-20 20:53:49 -07002181 const size_t hdr_offset = (tc && tc == first_tile_in_tg) ? hdr_size : 0;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002182#endif
2183
2184 if (tc < startTile || tc > endTile) continue;
Thomas Davies80188d12016-10-26 16:08:35 -07002185
Rupert Swarbrickcd757392017-09-01 13:57:53 +01002186 if (data + hdr_offset >= data_end)
2187 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2188 "Data ended before all tiles were read.");
Thomas Davies80188d12016-10-26 16:08:35 -07002189 buf->col = c;
2190 if (hdr_offset) {
2191 init_read_bit_buffer(pbi, &rb_tg_hdr, data, data_end, clear_data);
2192 rb_tg_hdr.bit_offset = tg_size_bit_offset;
David Barker1a191122017-09-06 15:24:16 +01002193 read_tile_group_range(pbi, &rb_tg_hdr);
Fangwen Fu73126c02017-02-08 22:37:47 -08002194#if CONFIG_DEPENDENT_HORZTILES
David Barker1a191122017-09-06 15:24:16 +01002195 tile_group_start_row = r;
2196 tile_group_start_col = c;
Fangwen Fu73126c02017-02-08 22:37:47 -08002197#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002198 }
2199 first_tile_in_tg += tc == first_tile_in_tg ? pbi->tg_size : 0;
2200 data += hdr_offset;
Thomas Daviesa0de6d52017-01-20 14:45:25 +00002201 get_tile_buffer(data_end, pbi->tile_size_bytes, is_last,
2202 &pbi->common.error, &data, pbi->decrypt_cb,
2203 pbi->decrypt_state, buf);
Fangwen Fu73126c02017-02-08 22:37:47 -08002204#if CONFIG_DEPENDENT_HORZTILES
2205 cm->tile_group_start_row[r][c] = tile_group_start_row;
2206 cm->tile_group_start_col[r][c] = tile_group_start_col;
2207#endif
Thomas Davies4822e142017-10-10 11:30:36 +01002208#if CONFIG_SIMPLE_BWD_ADAPT
2209 if (buf->size > max_tile_size) {
2210 max_tile_size = buf->size;
2211 cm->largest_tile_id = r * tile_cols + c;
2212 }
2213#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002214 }
2215 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002216}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002217
David Barker5c06a642017-08-18 13:18:16 +01002218#if CONFIG_LOOPFILTERING_ACROSS_TILES
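// Decoder-side wrapper: tile boundary information is set up only when loop
// filtering across tile boundaries is disabled for this frame; degenerate
// (empty) tiles are skipped.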
Yi Luo10e23002017-07-31 11:54:43 -07002219static void dec_setup_across_tile_boundary_info(
2220 const AV1_COMMON *const cm, const TileInfo *const tile_info) {
Frederic Barbier94e38562017-08-16 14:38:48 +02002221 if (tile_info->mi_row_start >= tile_info->mi_row_end ||
2222 tile_info->mi_col_start >= tile_info->mi_col_end)
2223 return;
2224
David Barker5c06a642017-08-18 13:18:16 +01002225 if (!cm->loop_filter_across_tiles_enabled) {
Yi Luo10e23002017-07-31 11:54:43 -07002226 av1_setup_across_tile_boundary_info(cm, tile_info);
2227 }
2228}
David Barker5c06a642017-08-18 13:18:16 +01002229#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07002230
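// Decodes the tiles in the range [startTile, endTile]: reads the tile
// buffers, decodes each tile's superblocks, applies frame-level loop
// filtering where configured, and returns a pointer just past the decoded
// tile data.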
Yaowu Xuf883b422016-08-30 14:01:10 -07002231static const uint8_t *decode_tiles(AV1Decoder *pbi, const uint8_t *data,
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002232 const uint8_t *data_end, int startTile,
2233 int endTile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002234 AV1_COMMON *const cm = &pbi->common;
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002235#if !CONFIG_LOOPFILTER_LEVEL
Yaowu Xuf883b422016-08-30 14:01:10 -07002236 const AVxWorkerInterface *const winterface = aom_get_worker_interface();
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002237#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002238 const int tile_cols = cm->tile_cols;
2239 const int tile_rows = cm->tile_rows;
2240 const int n_tiles = tile_cols * tile_rows;
clang-format67948d32016-09-07 22:40:40 -07002241 TileBufferDec(*const tile_buffers)[MAX_TILE_COLS] = pbi->tile_buffers;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002242#if CONFIG_EXT_TILE
Yaowu Xuf883b422016-08-30 14:01:10 -07002243 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002244 const int single_row = pbi->dec_tile_row >= 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002245 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002246 const int single_col = pbi->dec_tile_col >= 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002247#endif // CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002248 int tile_rows_start;
2249 int tile_rows_end;
2250 int tile_cols_start;
2251 int tile_cols_end;
2252 int inv_col_order;
2253 int inv_row_order;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002254 int tile_row, tile_col;
2255
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002256#if CONFIG_EXT_TILE
2257 if (cm->large_scale_tile) {
2258 tile_rows_start = single_row ? dec_tile_row : 0;
2259 tile_rows_end = single_row ? dec_tile_row + 1 : tile_rows;
2260 tile_cols_start = single_col ? dec_tile_col : 0;
2261 tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
2262 inv_col_order = pbi->inv_tile_order && !single_col;
2263 inv_row_order = pbi->inv_tile_order && !single_row;
2264 } else {
2265#endif // CONFIG_EXT_TILE
2266 tile_rows_start = 0;
2267 tile_rows_end = tile_rows;
2268 tile_cols_start = 0;
2269 tile_cols_end = tile_cols;
2270 inv_col_order = pbi->inv_tile_order;
2271 inv_row_order = pbi->inv_tile_order;
2272#if CONFIG_EXT_TILE
2273 }
2274#endif // CONFIG_EXT_TILE
2275
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002276#if !CONFIG_LOOPFILTER_LEVEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07002277 if (cm->lf.filter_level && !cm->skip_loop_filter &&
2278 pbi->lf_worker.data1 == NULL) {
2279 CHECK_MEM_ERROR(cm, pbi->lf_worker.data1,
Yaowu Xuf883b422016-08-30 14:01:10 -07002280 aom_memalign(32, sizeof(LFWorkerData)));
2281 pbi->lf_worker.hook = (AVxWorkerHook)av1_loop_filter_worker;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002282 if (pbi->max_threads > 1 && !winterface->reset(&pbi->lf_worker)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002283 aom_internal_error(&cm->error, AOM_CODEC_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002284 "Loop filter thread creation failed");
2285 }
2286 }
2287
2288 if (cm->lf.filter_level && !cm->skip_loop_filter) {
2289 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
2290 // Be sure to sync as we might be resuming after a failed frame decode.
2291 winterface->sync(&pbi->lf_worker);
Yaowu Xuf883b422016-08-30 14:01:10 -07002292 av1_loop_filter_data_reset(lf_data, get_frame_new_buffer(cm), cm,
2293 pbi->mb.plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002294 }
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002295#endif // CONFIG_LOOPFILTER_LEVEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07002296
2297 assert(tile_rows <= MAX_TILE_ROWS);
2298 assert(tile_cols <= MAX_TILE_COLS);
2299
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002300#if CONFIG_EXT_TILE
2301 if (cm->large_scale_tile)
2302 get_ls_tile_buffers(pbi, data, data_end, tile_buffers);
2303 else
2304#endif // CONFIG_EXT_TILE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002305 get_tile_buffers(pbi, data, data_end, tile_buffers, startTile, endTile);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002306
2307 if (pbi->tile_data == NULL || n_tiles != pbi->allocated_tiles) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002308 aom_free(pbi->tile_data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002309 CHECK_MEM_ERROR(cm, pbi->tile_data,
Yaowu Xuf883b422016-08-30 14:01:10 -07002310 aom_memalign(32, n_tiles * (sizeof(*pbi->tile_data))));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002311 pbi->allocated_tiles = n_tiles;
2312 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002313#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002314 if (pbi->acct_enabled) {
2315 aom_accounting_reset(&pbi->accounting);
2316 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002317#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002318 // Load all tile information into tile_data.
2319 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
2320 for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
2321 const TileBufferDec *const buf = &tile_buffers[tile_row][tile_col];
2322 TileData *const td = pbi->tile_data + tile_cols * tile_row + tile_col;
2323
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002324 if (tile_row * cm->tile_cols + tile_col < startTile ||
2325 tile_row * cm->tile_cols + tile_col > endTile)
2326 continue;
2327
Yaowu Xuc27fc142016-08-22 16:08:15 -07002328 td->cm = cm;
2329 td->xd = pbi->mb;
2330 td->xd.corrupted = 0;
2331 td->xd.counts =
2332 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD
2333 ? &cm->counts
2334 : NULL;
Yaowu Xuf883b422016-08-30 14:01:10 -07002335 av1_zero(td->dqcoeff);
2336 av1_tile_init(&td->xd.tile, td->cm, tile_row, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002337 setup_bool_decoder(buf->data, data_end, buf->size, &cm->error,
Alex Converseeb780e72016-12-13 12:46:41 -08002338 &td->bit_reader,
2339#if CONFIG_ANS && ANS_MAX_SYMBOLS
2340 1 << cm->ans_window_size_log2,
2341#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
2342 pbi->decrypt_cb, pbi->decrypt_state);
Michael Bebenita6048d052016-08-25 14:40:54 -07002343#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002344 if (pbi->acct_enabled) {
David Barkerd971f402016-10-25 13:52:07 +01002345 td->bit_reader.accounting = &pbi->accounting;
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002346 } else {
David Barkerd971f402016-10-25 13:52:07 +01002347 td->bit_reader.accounting = NULL;
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002348 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002349#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07002350 av1_init_macroblockd(cm, &td->xd,
Luc Trudeauf8164152017-04-11 16:20:51 -04002351#if CONFIG_CFL
2352 &td->cfl,
2353#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07002354 td->dqcoeff);
Yushin Choc49ef3a2017-03-13 17:27:25 -07002355
Thomas Daviesf77d4ad2017-01-10 18:55:42 +00002356 // Initialise the tile context from the frame context
2357 td->tctx = *cm->fc;
2358 td->xd.tile_ctx = &td->tctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002359 td->xd.plane[0].color_index_map = td->color_index_map[0];
2360 td->xd.plane[1].color_index_map = td->color_index_map[1];
Sarah Parker5c6744b2017-08-25 17:27:45 -07002361#if CONFIG_MRC_TX
2362 td->xd.mrc_mask = td->mrc_mask;
2363#endif // CONFIG_MRC_TX
Yaowu Xuc27fc142016-08-22 16:08:15 -07002364 }
2365 }
2366
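  // Decode pass: for each required tile, set up the tile info and contexts,
  // then decode its superblocks row by row.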
2367 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
2368 const int row = inv_row_order ? tile_rows - 1 - tile_row : tile_row;
2369 int mi_row = 0;
2370 TileInfo tile_info;
2371
Yaowu Xuf883b422016-08-30 14:01:10 -07002372 av1_tile_set_row(&tile_info, cm, row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002373
2374 for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
2375 const int col = inv_col_order ? tile_cols - 1 - tile_col : tile_col;
2376 TileData *const td = pbi->tile_data + tile_cols * row + col;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002377
2378 if (tile_row * cm->tile_cols + tile_col < startTile ||
2379 tile_row * cm->tile_cols + tile_col > endTile)
2380 continue;
2381
Michael Bebenita6048d052016-08-25 14:40:54 -07002382#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002383 if (pbi->acct_enabled) {
David Barkerd971f402016-10-25 13:52:07 +01002384 td->bit_reader.accounting->last_tell_frac =
2385 aom_reader_tell_frac(&td->bit_reader);
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002386 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002387#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002388
Yaowu Xuf883b422016-08-30 14:01:10 -07002389 av1_tile_set_col(&tile_info, cm, col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002390
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002391#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu73126c02017-02-08 22:37:47 -08002392 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
2393 if (!cm->dependent_horz_tiles || tile_row == 0 ||
2394 tile_info.tg_horz_boundary) {
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002395 av1_zero_above_context(cm, tile_info.mi_col_start,
2396 tile_info.mi_col_end);
2397 }
2398#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002399 av1_zero_above_context(cm, tile_info.mi_col_start, tile_info.mi_col_end);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002400#endif
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002401#if CONFIG_LOOP_RESTORATION
2402 for (int p = 0; p < MAX_MB_PLANE; ++p) {
2403 set_default_wiener(td->xd.wiener_info + p);
2404 set_default_sgrproj(td->xd.sgrproj_info + p);
2405 }
2406#endif // CONFIG_LOOP_RESTORATION
Yaowu Xuc27fc142016-08-22 16:08:15 -07002407
David Barker5c06a642017-08-18 13:18:16 +01002408#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07002409 dec_setup_across_tile_boundary_info(cm, &tile_info);
David Barker5c06a642017-08-18 13:18:16 +01002410#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luof190a162017-07-13 16:16:56 -07002411
Yaowu Xuc27fc142016-08-22 16:08:15 -07002412 for (mi_row = tile_info.mi_row_start; mi_row < tile_info.mi_row_end;
2413 mi_row += cm->mib_size) {
2414 int mi_col;
2415
Yaowu Xuf883b422016-08-30 14:01:10 -07002416 av1_zero_left_context(&td->xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002417
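        // Decode the superblocks of this row from left to right, one
        // partition tree per step of cm->mib_size.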
2418 for (mi_col = tile_info.mi_col_start; mi_col < tile_info.mi_col_end;
2419 mi_col += cm->mib_size) {
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07002420#if CONFIG_NCOBMC_ADAPT_WEIGHT
2421 alloc_ncobmc_pred_buffer(&td->xd);
2422 set_sb_mi_boundaries(cm, &td->xd, mi_row, mi_col);
2423#endif
Angie Chiangd9af8ac2017-10-25 10:48:53 -07002424#if CONFIG_SYMBOLRATE
2425 av1_record_superblock(td->xd.counts);
2426#endif
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02002427 decode_partition(pbi, &td->xd, mi_row, mi_col, &td->bit_reader,
2428 cm->sb_size);
Sebastien Alaiwan1bc94fc2017-10-31 10:25:17 +01002429#if NC_MODE_INFO
Yue Chen9ab6d712017-01-12 15:50:46 -08002430 detoken_and_recon_sb(pbi, &td->xd, mi_row, mi_col, &td->bit_reader,
2431 cm->sb_size);
2432#endif
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07002433#if CONFIG_NCOBMC_ADAPT_WEIGHT
2434 free_ncobmc_pred_buffer(&td->xd);
2435#endif
Cheng Chen5ad5b282017-10-05 16:36:06 -07002436#if CONFIG_LPF_SB
2437 if (USE_LOOP_FILTER_SUPERBLOCK) {
2438 // apply deblocking filtering right after each superblock is decoded
2439 const int guess_filter_lvl = FAKE_FILTER_LEVEL;
2440 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2441 guess_filter_lvl, 0, 1, mi_row, mi_col);
2442 }
2443#endif // CONFIG_LPF_SB
Yaowu Xuc27fc142016-08-22 16:08:15 -07002444 }
Angie Chiangd0916d92017-03-10 17:54:18 -08002445 aom_merge_corrupted_flag(&pbi->mb.corrupted, td->xd.corrupted);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002446 if (pbi->mb.corrupted)
Yaowu Xuf883b422016-08-30 14:01:10 -07002447 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002448 "Failed to decode tile data");
Yaowu Xuc27fc142016-08-22 16:08:15 -07002449 }
2450 }
2451
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002452#if !CONFIG_OBU
Yaowu Xuc27fc142016-08-22 16:08:15 -07002453 assert(mi_row > 0);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002454#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002455
Yaowu Xuc27fc142016-08-22 16:08:15 -07002456 // After loopfiltering, the last 7 rows of pixels in each superblock row may
2457 // still be changed by the longest loopfilter of the next superblock row.
2458 if (cm->frame_parallel_decode)
Yaowu Xuf883b422016-08-30 14:01:10 -07002459 av1_frameworker_broadcast(pbi->cur_buf, mi_row << cm->mib_size_log2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002460 }
2461
Cheng Chen5ad5b282017-10-05 16:36:06 -07002462#if CONFIG_INTRABC
2463// When intraBC is on, do loop filtering per superblock,
2464// instead of doing it after the whole frame has been decoded,
2465// as in the else branch below.
2466#else
Cheng Chene94df5c2017-07-19 17:25:33 -07002467// Loopfilter the whole frame.
Cheng Chenf572cd32017-08-25 18:34:51 -07002468#if CONFIG_LPF_SB
2469 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2470 cm->lf.filter_level, 0, 0, 0, 0);
2471#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002472#if CONFIG_OBU
2473 if (endTile == cm->tile_rows * cm->tile_cols - 1)
2474#endif
David Barker3dffa272017-10-18 17:07:26 +01002475#if CONFIG_LOOPFILTER_LEVEL
2476 if (cm->lf.filter_level[0] || cm->lf.filter_level[1]) {
2477 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2478 cm->lf.filter_level[0], cm->lf.filter_level[1], 0,
2479 0);
2480 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2481 cm->lf.filter_level_u, cm->lf.filter_level_u, 1, 0);
2482 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2483 cm->lf.filter_level_v, cm->lf.filter_level_v, 2, 0);
2484 }
2485#else
2486 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2487 cm->lf.filter_level, 0, 0);
Cheng Chen13fc8192017-08-19 11:49:28 -07002488#endif // CONFIG_LOOPFILTER_LEVEL
Cheng Chenf572cd32017-08-25 18:34:51 -07002489#endif // CONFIG_LPF_SB
Cheng Chen5ad5b282017-10-05 16:36:06 -07002490#endif // CONFIG_INTRABC
Yaowu Xuc27fc142016-08-22 16:08:15 -07002491 if (cm->frame_parallel_decode)
Yaowu Xuf883b422016-08-30 14:01:10 -07002492 av1_frameworker_broadcast(pbi->cur_buf, INT_MAX);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002493
2494#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002495 if (cm->large_scale_tile) {
2496 if (n_tiles == 1) {
2497#if CONFIG_ANS
2498 return data_end;
2499#else
2500 // Find the end of the single tile buffer
2501 return aom_reader_find_end(&pbi->tile_data->bit_reader);
2502#endif // CONFIG_ANS
2503 } else {
2504 // Return the end of the last tile buffer
2505 return tile_buffers[tile_rows - 1][tile_cols - 1].raw_data_end;
2506 }
2507 } else {
2508#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002509#if CONFIG_ANS
2510 return data_end;
2511#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002512#if !CONFIG_OBU
Yaowu Xuc27fc142016-08-22 16:08:15 -07002513 {
2514 // Get last tile data.
2515 TileData *const td = pbi->tile_data + tile_cols * tile_rows - 1;
Yaowu Xuf883b422016-08-30 14:01:10 -07002516 return aom_reader_find_end(&td->bit_reader);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002517 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002518#else
2519 TileData *const td = pbi->tile_data + endTile;
2520 return aom_reader_find_end(&td->bit_reader);
2521#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002522#endif // CONFIG_ANS
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002523#if CONFIG_EXT_TILE
2524 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002525#endif // CONFIG_EXT_TILE
2526}
2527
Yaowu Xuc27fc142016-08-22 16:08:15 -07002528static void error_handler(void *data) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002529 AV1_COMMON *const cm = (AV1_COMMON *)data;
2530 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME, "Truncated packet");
Yaowu Xuc27fc142016-08-22 16:08:15 -07002531}
2532
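// Reads the bit depth, color space, color range and chroma subsampling from
// the uncompressed header and stores them in 'cm'.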
Yaowu Xuf883b422016-08-30 14:01:10 -07002533static void read_bitdepth_colorspace_sampling(AV1_COMMON *cm,
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02002534 struct aom_read_bit_buffer *rb,
2535 int allow_lowbitdepth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002536 if (cm->profile >= PROFILE_2) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002537 cm->bit_depth = aom_rb_read_bit(rb) ? AOM_BITS_12 : AOM_BITS_10;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002538 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002539 cm->bit_depth = AOM_BITS_8;
Sebastien Alaiwan98378132017-01-04 11:23:09 +01002540 }
2541
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02002542#if CONFIG_HIGHBITDEPTH
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02002543 cm->use_highbitdepth = cm->bit_depth > AOM_BITS_8 || !allow_lowbitdepth;
James Zern91adea52017-06-15 23:27:26 -07002544#else
2545 (void)allow_lowbitdepth;
Sebastien Alaiwan98378132017-01-04 11:23:09 +01002546#endif
anorkin76fb1262017-03-22 15:12:12 -07002547#if CONFIG_COLORSPACE_HEADERS
2548 cm->color_space = aom_rb_read_literal(rb, 5);
2549 cm->transfer_function = aom_rb_read_literal(rb, 5);
2550#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002551 cm->color_space = aom_rb_read_literal(rb, 3);
anorkin76fb1262017-03-22 15:12:12 -07002552#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07002553 if (cm->color_space != AOM_CS_SRGB) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002554 // [16,235] (including xvycc) vs [0,255] range
Yaowu Xuf883b422016-08-30 14:01:10 -07002555 cm->color_range = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002556 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002557 cm->subsampling_x = aom_rb_read_bit(rb);
2558 cm->subsampling_y = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002559 if (cm->subsampling_x == 1 && cm->subsampling_y == 1)
Yaowu Xuf883b422016-08-30 14:01:10 -07002560 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002561 "4:2:0 color not supported in profile 1 or 3");
Yaowu Xuf883b422016-08-30 14:01:10 -07002562 if (aom_rb_read_bit(rb))
2563 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002564 "Reserved bit set");
2565 } else {
2566 cm->subsampling_y = cm->subsampling_x = 1;
2567 }
anorkin76fb1262017-03-22 15:12:12 -07002568#if CONFIG_COLORSPACE_HEADERS
2569 if (cm->subsampling_x == 1 && cm->subsampling_y == 1) {
2570 cm->chroma_sample_position = aom_rb_read_literal(rb, 2);
2571 }
2572#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002573 } else {
2574 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
2575 // Note that if the colorspace is SRGB, 4:4:4 chroma sampling is assumed.
2576 // 4:2:2 or 4:4:0 chroma sampling is not allowed.
2577 cm->subsampling_y = cm->subsampling_x = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002578 if (aom_rb_read_bit(rb))
2579 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002580 "Reserved bit set");
2581 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002582 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002583 "4:4:4 color not supported in profile 0 or 2");
2584 }
2585 }
2586}
2587
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002588#if CONFIG_REFERENCE_BUFFER
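// Reads the sequence-level parameters: the maximum frame dimensions (under
// CONFIG_FRAME_SIZE) and, when frame ID numbers are present, the lengths used
// to code frame IDs and frame ID deltas.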
David Barker5e70a112017-10-03 14:28:17 +01002589void read_sequence_header(SequenceHeader *seq_params,
2590 struct aom_read_bit_buffer *rb) {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002591#if CONFIG_FRAME_SIZE
2592 int num_bits_width = aom_rb_read_literal(rb, 4) + 1;
2593 int num_bits_height = aom_rb_read_literal(rb, 4) + 1;
2594 int max_frame_width = aom_rb_read_literal(rb, num_bits_width) + 1;
2595 int max_frame_height = aom_rb_read_literal(rb, num_bits_height) + 1;
2596
2597 seq_params->num_bits_width = num_bits_width;
2598 seq_params->num_bits_height = num_bits_height;
2599 seq_params->max_frame_width = max_frame_width;
2600 seq_params->max_frame_height = max_frame_height;
2601#endif
2602
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002603 /* Placeholder for actually reading from the bitstream */
David Barker5e70a112017-10-03 14:28:17 +01002604 seq_params->frame_id_numbers_present_flag = aom_rb_read_bit(rb);
2605 if (seq_params->frame_id_numbers_present_flag) {
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02002606 // We must always have delta_frame_id_length < frame_id_length,
2607 // in order for a frame to be referenced with a unique delta.
2608 // Avoid wasting bits by using a coding that enforces this restriction.
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02002609 seq_params->delta_frame_id_length = aom_rb_read_literal(rb, 4) + 2;
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02002610 seq_params->frame_id_length =
2611 aom_rb_read_literal(rb, 3) + seq_params->delta_frame_id_length + 1;
David Barker5e70a112017-10-03 14:28:17 +01002612 }
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002613}
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002614#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002615
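// Reads the frame-level flags that enable interintra and masked compound
// prediction; each flag is read only when the frame type and reference mode
// allow the tool, and is forced to 0 otherwise.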
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002616static void read_compound_tools(AV1_COMMON *cm,
2617 struct aom_read_bit_buffer *rb) {
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002618 if (!frame_is_intra_only(cm) && cm->reference_mode != COMPOUND_REFERENCE) {
2619 cm->allow_interintra_compound = aom_rb_read_bit(rb);
2620 } else {
2621 cm->allow_interintra_compound = 0;
2622 }
Zoe Liu85b66462017-04-20 14:28:19 -07002623#if CONFIG_COMPOUND_SINGLEREF
2624 if (!frame_is_intra_only(cm)) {
2625#else // !CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002626 if (!frame_is_intra_only(cm) && cm->reference_mode != SINGLE_REFERENCE) {
Zoe Liu85b66462017-04-20 14:28:19 -07002627#endif // CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002628 cm->allow_masked_compound = aom_rb_read_bit(rb);
2629 } else {
2630 cm->allow_masked_compound = 0;
2631 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002632}
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002633
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07002634#if CONFIG_VAR_REFS
2635static void check_valid_ref_frames(AV1_COMMON *cm) {
2636 MV_REFERENCE_FRAME ref_frame;
2637 // TODO(zoeliu): Handle ALTREF_FRAME the same way as the other reference
2638 // frames: the current encoder invalidates ALTREF when ALTREF is the same
2639 // as LAST, but invalidates all the other references when they are the
2640 // same as ALTREF.
2641 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
2642 RefBuffer *const ref_buf = &cm->frame_refs[ref_frame - LAST_FRAME];
2643
2644 if (ref_buf->idx != INVALID_IDX) {
2645 ref_buf->is_valid = 1;
2646
2647 MV_REFERENCE_FRAME ref;
2648 for (ref = LAST_FRAME; ref < ref_frame; ++ref) {
2649 RefBuffer *const buf = &cm->frame_refs[ref - LAST_FRAME];
2650 if (buf->is_valid && buf->idx == ref_buf->idx) {
2651 if (ref_frame != ALTREF_FRAME || ref == LAST_FRAME) {
2652 ref_buf->is_valid = 0;
2653 break;
2654 } else {
2655 buf->is_valid = 0;
2656 }
2657 }
2658 }
2659 } else {
2660 ref_buf->is_valid = 0;
2661 }
2662 }
2663}
2664#endif // CONFIG_VAR_REFS
2665
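// Reads the global motion model for a single reference frame. Non-identity
// parameters are coded as sub-exponential differences from 'ref_params'.
// Returns 0 if the resulting model has invalid shear parameters.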
Sarah Parker3e579a62017-08-23 16:53:20 -07002666static int read_global_motion_params(WarpedMotionParams *params,
David Barkerd7c8bd52017-09-25 14:47:29 +01002667 const WarpedMotionParams *ref_params,
Sarah Parker3e579a62017-08-23 16:53:20 -07002668 struct aom_read_bit_buffer *rb,
2669 int allow_hp) {
2670 TransformationType type = aom_rb_read_bit(rb);
2671 if (type != IDENTITY) {
2672#if GLOBAL_TRANS_TYPES > 4
2673 type += aom_rb_read_literal(rb, GLOBAL_TYPE_BITS);
2674#else
2675 if (aom_rb_read_bit(rb))
2676 type = ROTZOOM;
2677 else
2678 type = aom_rb_read_bit(rb) ? TRANSLATION : AFFINE;
2679#endif // GLOBAL_TRANS_TYPES > 4
2680 }
2681
2682 int trans_bits;
2683 int trans_dec_factor;
2684 int trans_prec_diff;
David Barkerd7c8bd52017-09-25 14:47:29 +01002685 *params = default_warp_params;
Sarah Parker3e579a62017-08-23 16:53:20 -07002686 params->wmtype = type;
2687 switch (type) {
Sarah Parker3e579a62017-08-23 16:53:20 -07002688 case AFFINE:
2689 case ROTZOOM:
2690 params->wmmat[2] = aom_rb_read_signed_primitive_refsubexpfin(
2691 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2692 (ref_params->wmmat[2] >> GM_ALPHA_PREC_DIFF) -
2693 (1 << GM_ALPHA_PREC_BITS)) *
2694 GM_ALPHA_DECODE_FACTOR +
2695 (1 << WARPEDMODEL_PREC_BITS);
Debargha Mukherjee1a2b35f2017-10-21 10:41:46 -07002696 params->wmmat[3] = aom_rb_read_signed_primitive_refsubexpfin(
2697 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2698 (ref_params->wmmat[3] >> GM_ALPHA_PREC_DIFF)) *
2699 GM_ALPHA_DECODE_FACTOR;
Sarah Parker3e579a62017-08-23 16:53:20 -07002700 if (type >= AFFINE) {
Debargha Mukherjee1a2b35f2017-10-21 10:41:46 -07002701 params->wmmat[4] = aom_rb_read_signed_primitive_refsubexpfin(
2702 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2703 (ref_params->wmmat[4] >> GM_ALPHA_PREC_DIFF)) *
2704 GM_ALPHA_DECODE_FACTOR;
Sarah Parker3e579a62017-08-23 16:53:20 -07002705 params->wmmat[5] = aom_rb_read_signed_primitive_refsubexpfin(
2706 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2707 (ref_params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
2708 (1 << GM_ALPHA_PREC_BITS)) *
2709 GM_ALPHA_DECODE_FACTOR +
2710 (1 << WARPEDMODEL_PREC_BITS);
2711 } else {
2712 params->wmmat[4] = -params->wmmat[3];
2713 params->wmmat[5] = params->wmmat[2];
2714 }
2715 // fallthrough intended
2716 case TRANSLATION:
2717 trans_bits = (type == TRANSLATION) ? GM_ABS_TRANS_ONLY_BITS - !allow_hp
2718 : GM_ABS_TRANS_BITS;
2719 trans_dec_factor = (type == TRANSLATION)
2720 ? GM_TRANS_ONLY_DECODE_FACTOR * (1 << !allow_hp)
2721 : GM_TRANS_DECODE_FACTOR;
2722 trans_prec_diff = (type == TRANSLATION)
2723 ? GM_TRANS_ONLY_PREC_DIFF + !allow_hp
2724 : GM_TRANS_PREC_DIFF;
2725 params->wmmat[0] = aom_rb_read_signed_primitive_refsubexpfin(
2726 rb, (1 << trans_bits) + 1, SUBEXPFIN_K,
2727 (ref_params->wmmat[0] >> trans_prec_diff)) *
2728 trans_dec_factor;
2729 params->wmmat[1] = aom_rb_read_signed_primitive_refsubexpfin(
2730 rb, (1 << trans_bits) + 1, SUBEXPFIN_K,
2731 (ref_params->wmmat[1] >> trans_prec_diff)) *
2732 trans_dec_factor;
2733 case IDENTITY: break;
2734 default: assert(0);
2735 }
2736 if (params->wmtype <= AFFINE) {
2737 int good_shear_params = get_shear_params(params);
2738 if (!good_shear_params) return 0;
2739 }
2740
2741 return 1;
2742}
2743
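// Reads the global motion parameters for every inter reference frame, using
// the previous frame's parameters (or defaults in error-resilient mode) as
// the prediction context, and copies the result into the current frame
// buffer for use by later frames.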
2744static void read_global_motion(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
2745 int frame;
2746 for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
David Barkerd7c8bd52017-09-25 14:47:29 +01002747 const WarpedMotionParams *ref_params =
2748 cm->error_resilient_mode ? &default_warp_params
2749 : &cm->prev_frame->global_motion[frame];
Sarah Parker3e579a62017-08-23 16:53:20 -07002750 int good_params = read_global_motion_params(
David Barkerd7c8bd52017-09-25 14:47:29 +01002751 &cm->global_motion[frame], ref_params, rb, cm->allow_high_precision_mv);
Sarah Parker3e579a62017-08-23 16:53:20 -07002752 if (!good_params)
2753 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2754 "Invalid shear parameters for global motion.");
2755
2756 // TODO(sarahparker, debargha): The logic in the commented out code below
2757 // does not work currently and causes mismatches when resize is on. Fix it
2758 // before turning the optimization back on.
2759 /*
2760 YV12_BUFFER_CONFIG *ref_buf = get_ref_frame(cm, frame);
2761 if (cm->width == ref_buf->y_crop_width &&
2762 cm->height == ref_buf->y_crop_height) {
2763 read_global_motion_params(&cm->global_motion[frame],
2764 &cm->prev_frame->global_motion[frame], rb,
2765 cm->allow_high_precision_mv);
2766 } else {
David Barkerd7c8bd52017-09-25 14:47:29 +01002767 cm->global_motion[frame] = default_warp_params;
Sarah Parker3e579a62017-08-23 16:53:20 -07002768 }
2769 */
2770 /*
2771 printf("Dec Ref %d [%d/%d]: %d %d %d %d\n",
2772 frame, cm->current_video_frame, cm->show_frame,
2773 cm->global_motion[frame].wmmat[0],
2774 cm->global_motion[frame].wmmat[1],
2775 cm->global_motion[frame].wmmat[2],
2776 cm->global_motion[frame].wmmat[3]);
2777 */
2778 }
David Barkercba7da72017-09-14 11:24:27 +01002779 memcpy(cm->cur_frame->global_motion, cm->global_motion,
2780 TOTAL_REFS_PER_FRAME * sizeof(WarpedMotionParams));
Sarah Parker3e579a62017-08-23 16:53:20 -07002781}
Sarah Parker3e579a62017-08-23 16:53:20 -07002782
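// Parses the uncompressed frame header: frame type and show flags, frame ID,
// frame size, reference frame mapping, loop filter, quantization and
// segmentation parameters, updating 'cm' and 'pbi' state accordingly.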
Yaowu Xuf883b422016-08-30 14:01:10 -07002783static size_t read_uncompressed_header(AV1Decoder *pbi,
2784 struct aom_read_bit_buffer *rb) {
2785 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002786 MACROBLOCKD *const xd = &pbi->mb;
2787 BufferPool *const pool = cm->buffer_pool;
2788 RefCntBuffer *const frame_bufs = pool->frame_bufs;
2789 int i, mask, ref_index = 0;
2790 size_t sz;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002791
Yaowu Xuc27fc142016-08-22 16:08:15 -07002792 cm->last_frame_type = cm->frame_type;
2793 cm->last_intra_only = cm->intra_only;
2794
Yaowu Xuc27fc142016-08-22 16:08:15 -07002795 // NOTE: By default, all coded frames are to be used as reference frames.
2796 cm->is_reference_frame = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002797
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002798#if !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07002799 if (aom_rb_read_literal(rb, 2) != AOM_FRAME_MARKER)
2800 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002801 "Invalid frame marker");
2802
Yaowu Xuf883b422016-08-30 14:01:10 -07002803 cm->profile = av1_read_profile(rb);
Sebastien Alaiwanb9c652a2017-05-03 15:44:28 +02002804
2805 const BITSTREAM_PROFILE MAX_SUPPORTED_PROFILE =
2806 CONFIG_HIGHBITDEPTH ? MAX_PROFILES : PROFILE_2;
2807
2808 if (cm->profile >= MAX_SUPPORTED_PROFILE)
Yaowu Xuf883b422016-08-30 14:01:10 -07002809 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002810 "Unsupported bitstream profile");
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002811#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002812
Yunqing Wangc2502b52017-07-19 17:44:18 -07002813#if CONFIG_EXT_TILE
2814 cm->large_scale_tile = aom_rb_read_literal(rb, 1);
2815#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01002816 if (cm->large_scale_tile) cm->seq_params.frame_id_numbers_present_flag = 0;
Yunqing Wangc2502b52017-07-19 17:44:18 -07002817#endif // CONFIG_REFERENCE_BUFFER
2818#endif // CONFIG_EXT_TILE
2819
Yaowu Xuf883b422016-08-30 14:01:10 -07002820 cm->show_existing_frame = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002821
2822 if (cm->show_existing_frame) {
Yaowu Xu415ba932016-12-27 11:17:32 -08002823 // Show an existing frame directly.
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01002824 const int existing_frame_idx = aom_rb_read_literal(rb, 3);
2825 const int frame_to_show = cm->ref_frame_map[existing_frame_idx];
Yaowu Xu415ba932016-12-27 11:17:32 -08002826#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01002827 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02002828 int frame_id_length = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01002829 int display_frame_id = aom_rb_read_literal(rb, frame_id_length);
2830 /* Compare display_frame_id with ref_frame_id and check that the frame
2831 * is valid for referencing */
2832 if (display_frame_id != cm->ref_frame_id[existing_frame_idx] ||
2833 cm->valid_for_referencing[existing_frame_idx] == 0)
2834 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2835 "Reference buffer frame ID mismatch");
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002836 }
2837#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002838 lock_buffer_pool(pool);
2839 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
2840 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07002841 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002842 "Buffer %d does not contain a decoded frame",
2843 frame_to_show);
2844 }
2845 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
2846 unlock_buffer_pool(pool);
2847
Cheng Chen13fc8192017-08-19 11:49:28 -07002848#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07002849 cm->lf.filter_level[0] = 0;
2850 cm->lf.filter_level[1] = 0;
2851#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07002852 cm->lf.filter_level = 0;
Cheng Chen179479f2017-08-04 10:56:39 -07002853#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002854 cm->show_frame = 1;
2855 pbi->refresh_frame_flags = 0;
2856
2857 if (cm->frame_parallel_decode) {
2858 for (i = 0; i < REF_FRAMES; ++i)
2859 cm->next_ref_frame_map[i] = cm->ref_frame_map[i];
2860 }
2861
2862 return 0;
2863 }
2864
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002865#if !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07002866 cm->frame_type = (FRAME_TYPE)aom_rb_read_bit(rb);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002867 cm->show_frame = aom_rb_read_bit(rb);
2868 if (cm->frame_type != KEY_FRAME)
2869 cm->intra_only = cm->show_frame ? 0 : aom_rb_read_bit(rb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002870#else
2871 cm->frame_type = (FRAME_TYPE)aom_rb_read_literal(rb, 2); // 2 bits
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002872 cm->show_frame = aom_rb_read_bit(rb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002873 cm->intra_only = cm->frame_type == INTRA_ONLY_FRAME;
2874#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07002875 cm->error_resilient_mode = aom_rb_read_bit(rb);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002876#if CONFIG_REFERENCE_BUFFER
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002877#if !CONFIG_OBU
David Barker5e70a112017-10-03 14:28:17 +01002878 if (frame_is_intra_only(cm)) read_sequence_header(&cm->seq_params, rb);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002879#endif // !CONFIG_OBU
David Barker5e70a112017-10-03 14:28:17 +01002880 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02002881 int frame_id_length = cm->seq_params.frame_id_length;
2882 int diff_len = cm->seq_params.delta_frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01002883 int prev_frame_id = 0;
2884 if (cm->frame_type != KEY_FRAME) {
2885 prev_frame_id = cm->current_frame_id;
2886 }
2887 cm->current_frame_id = aom_rb_read_literal(rb, frame_id_length);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002888
David Barker5e70a112017-10-03 14:28:17 +01002889 if (cm->frame_type != KEY_FRAME) {
2890 int diff_frame_id;
2891 if (cm->current_frame_id > prev_frame_id) {
2892 diff_frame_id = cm->current_frame_id - prev_frame_id;
2893 } else {
2894 diff_frame_id =
2895 (1 << frame_id_length) + cm->current_frame_id - prev_frame_id;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002896 }
David Barker5e70a112017-10-03 14:28:17 +01002897 /* Check current_frame_id for conformance */
2898 if (prev_frame_id == cm->current_frame_id ||
2899 diff_frame_id >= (1 << (frame_id_length - 1))) {
2900 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2901 "Invalid value of current_frame_id");
2902 }
2903 }
2904 /* Check if some frames need to be marked as not valid for referencing */
2905 for (i = 0; i < REF_FRAMES; i++) {
2906 if (cm->frame_type == KEY_FRAME) {
2907 cm->valid_for_referencing[i] = 0;
2908 } else if (cm->current_frame_id - (1 << diff_len) > 0) {
2909 if (cm->ref_frame_id[i] > cm->current_frame_id ||
2910 cm->ref_frame_id[i] < cm->current_frame_id - (1 << diff_len))
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002911 cm->valid_for_referencing[i] = 0;
David Barker5e70a112017-10-03 14:28:17 +01002912 } else {
2913 if (cm->ref_frame_id[i] > cm->current_frame_id &&
2914 cm->ref_frame_id[i] <
2915 (1 << frame_id_length) + cm->current_frame_id - (1 << diff_len))
2916 cm->valid_for_referencing[i] = 0;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002917 }
2918 }
2919 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002920#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002921
2922#if CONFIG_FRAME_SIZE
2923 int frame_size_override_flag = aom_rb_read_literal(rb, 1);
2924#endif
2925
Yaowu Xuc27fc142016-08-22 16:08:15 -07002926 if (cm->frame_type == KEY_FRAME) {
Jingning Hand8a15a62017-10-30 10:53:42 -07002927 cm->current_video_frame = 0;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002928#if !CONFIG_OBU
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02002929 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002930#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002931 pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1;
2932
2933 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
2934 cm->frame_refs[i].idx = INVALID_IDX;
2935 cm->frame_refs[i].buf = NULL;
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07002936#if CONFIG_VAR_REFS
2937 cm->frame_refs[i].is_valid = 0;
2938#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07002939 }
2940
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002941#if CONFIG_FRAME_SIZE
2942 setup_frame_size(cm, frame_size_override_flag, rb);
2943#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07002944 setup_frame_size(cm, rb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002945#endif
Debargha Mukherjeed2630fa2017-09-22 10:32:51 -07002946 setup_sb_size(cm, rb);
2947
Yaowu Xuc27fc142016-08-22 16:08:15 -07002948 if (pbi->need_resync) {
2949 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
2950 pbi->need_resync = 0;
2951 }
Alex Converseeb780e72016-12-13 12:46:41 -08002952#if CONFIG_ANS && ANS_MAX_SYMBOLS
2953 cm->ans_window_size_log2 = aom_rb_read_literal(rb, 4) + 8;
2954#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
hui su24f7b072016-10-12 11:36:24 -07002955 cm->allow_screen_content_tools = aom_rb_read_bit(rb);
RogerZhou3b635242017-09-19 10:06:46 -07002956#if CONFIG_AMVR
2957 if (cm->allow_screen_content_tools) {
2958 if (aom_rb_read_bit(rb)) {
RogerZhou10a03802017-10-26 11:49:48 -07002959 cm->seq_force_integer_mv = 2;
RogerZhou3b635242017-09-19 10:06:46 -07002960 } else {
RogerZhou10a03802017-10-26 11:49:48 -07002961 cm->seq_force_integer_mv = aom_rb_read_bit(rb);
RogerZhou3b635242017-09-19 10:06:46 -07002962 }
2963 } else {
RogerZhou10a03802017-10-26 11:49:48 -07002964 cm->seq_force_integer_mv = 0;
RogerZhou3b635242017-09-19 10:06:46 -07002965 }
2966#endif
Fangwen Fu930c51c2017-05-07 20:39:17 -07002967#if CONFIG_TEMPMV_SIGNALING
2968 cm->use_prev_frame_mvs = 0;
2969#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002970 } else {
hui su24f7b072016-10-12 11:36:24 -07002971 if (cm->intra_only) cm->allow_screen_content_tools = aom_rb_read_bit(rb);
Thomas Daedea6a854b2017-06-22 17:49:11 -07002972#if CONFIG_TEMPMV_SIGNALING
2973 if (cm->intra_only || cm->error_resilient_mode) cm->use_prev_frame_mvs = 0;
2974#endif
2975#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
2976// The only way to reset all frame contexts to their default values is with a
2977// keyframe.
2978#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07002979 if (cm->error_resilient_mode) {
2980 cm->reset_frame_context = RESET_FRAME_CONTEXT_ALL;
2981 } else {
2982 if (cm->intra_only) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002983 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002984 ? RESET_FRAME_CONTEXT_ALL
2985 : RESET_FRAME_CONTEXT_CURRENT;
2986 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002987 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002988 ? RESET_FRAME_CONTEXT_CURRENT
2989 : RESET_FRAME_CONTEXT_NONE;
2990 if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT)
Yaowu Xuf883b422016-08-30 14:01:10 -07002991 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002992 ? RESET_FRAME_CONTEXT_ALL
2993 : RESET_FRAME_CONTEXT_CURRENT;
2994 }
2995 }
Thomas Daedea6a854b2017-06-22 17:49:11 -07002996#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002997
2998 if (cm->intra_only) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002999#if !CONFIG_OBU
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02003000 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003001#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003002
Yaowu Xuf883b422016-08-30 14:01:10 -07003003 pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003004#if CONFIG_FRAME_SIZE
3005 setup_frame_size(cm, frame_size_override_flag, rb);
3006#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003007 setup_frame_size(cm, rb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003008#endif
Pavel Frolovea3dd3a2017-09-25 16:06:19 +03003009 setup_sb_size(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003010 if (pbi->need_resync) {
3011 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
3012 pbi->need_resync = 0;
3013 }
Alex Converseeb780e72016-12-13 12:46:41 -08003014#if CONFIG_ANS && ANS_MAX_SYMBOLS
3015 cm->ans_window_size_log2 = aom_rb_read_literal(rb, 4) + 8;
3016#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003017 } else if (pbi->need_resync != 1) { /* Skip if need resync */
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003018#if CONFIG_OBU
3019 pbi->refresh_frame_flags = (cm->frame_type == S_FRAME)
3020 ? ~(1 << REF_FRAMES)
3021 : aom_rb_read_literal(rb, REF_FRAMES);
3022#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003023 pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003024#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003025
Yaowu Xuc27fc142016-08-22 16:08:15 -07003026 if (!pbi->refresh_frame_flags) {
3027 // NOTE: "pbi->refresh_frame_flags == 0" indicates that the coded frame
3028 // will not be used as a reference
3029 cm->is_reference_frame = 0;
3030 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003031
3032 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003033 const int ref = aom_rb_read_literal(rb, REF_FRAMES_LOG2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003034 const int idx = cm->ref_frame_map[ref];
Rupert Swarbrick5eb471c2017-10-02 16:06:54 +01003035
3036 // Most of the time, streams start with a keyframe. In that case,
3037 // ref_frame_map will have been filled in at that point and will not
3038 // contain any -1's. However, streams are explicitly allowed to start
3039 // with an intra-only frame, so long as they don't then signal a
3040 // reference to a slot that hasn't been set yet. That's what we are
3041 // checking here.
3042 if (idx == -1)
3043 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3044 "Inter frame requests nonexistent reference");
3045
Yaowu Xuc27fc142016-08-22 16:08:15 -07003046 RefBuffer *const ref_frame = &cm->frame_refs[i];
3047 ref_frame->idx = idx;
3048 ref_frame->buf = &frame_bufs[idx].buf;
Zoe Liu17af2742017-10-06 10:36:42 -07003049#if CONFIG_FRAME_SIGN_BIAS
3050#if CONFIG_OBU
3051 // NOTE: For the scenario of (cm->frame_type != S_FRAME),
3052 // ref_frame_sign_bias will be reset based on frame offsets.
3053 cm->ref_frame_sign_bias[LAST_FRAME + i] = 0;
3054#endif // CONFIG_OBU
3055#else // !CONFIG_FRAME_SIGN_BIAS
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003056#if CONFIG_OBU
3057 cm->ref_frame_sign_bias[LAST_FRAME + i] =
3058 (cm->frame_type == S_FRAME) ? 0 : aom_rb_read_bit(rb);
Zoe Liu17af2742017-10-06 10:36:42 -07003059#else // !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07003060 cm->ref_frame_sign_bias[LAST_FRAME + i] = aom_rb_read_bit(rb);
Zoe Liu17af2742017-10-06 10:36:42 -07003061#endif // CONFIG_OBU
3062#endif // CONFIG_FRAME_SIGN_BIAS
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003063#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003064 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003065 int frame_id_length = cm->seq_params.frame_id_length;
3066 int diff_len = cm->seq_params.delta_frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003067 int delta_frame_id_minus1 = aom_rb_read_literal(rb, diff_len);
3068 int ref_frame_id =
3069 ((cm->current_frame_id - (delta_frame_id_minus1 + 1) +
3070 (1 << frame_id_length)) %
3071 (1 << frame_id_length));
3072 /* Compare values derived from delta_frame_id_minus1 and
3073 * refresh_frame_flags. Also, check that the frame is valid for referencing */
3074 if (ref_frame_id != cm->ref_frame_id[ref] ||
3075 cm->valid_for_referencing[ref] == 0)
3076 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3077 "Reference buffer frame ID mismatch");
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003078 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003079#endif // CONFIG_REFERENCE_BUFFER
Yaowu Xuc27fc142016-08-22 16:08:15 -07003080 }
3081
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07003082#if CONFIG_VAR_REFS
3083 check_valid_ref_frames(cm);
3084#endif // CONFIG_VAR_REFS
3085
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003086#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003087 if (cm->error_resilient_mode == 0 && frame_size_override_flag) {
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003088 setup_frame_size_with_refs(cm, rb);
3089 } else {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003090 setup_frame_size(cm, frame_size_override_flag, rb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003091 }
3092#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003093 setup_frame_size_with_refs(cm, rb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003094#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003095
RogerZhou3b635242017-09-19 10:06:46 -07003096#if CONFIG_AMVR
RogerZhou10a03802017-10-26 11:49:48 -07003097 if (cm->seq_force_integer_mv == 2) {
3098 cm->cur_frame_force_integer_mv = aom_rb_read_bit(rb);
RogerZhou3b635242017-09-19 10:06:46 -07003099 } else {
RogerZhou10a03802017-10-26 11:49:48 -07003100 cm->cur_frame_force_integer_mv = cm->seq_force_integer_mv;
RogerZhou3b635242017-09-19 10:06:46 -07003101 }
RogerZhou10a03802017-10-26 11:49:48 -07003102
3103 if (cm->cur_frame_force_integer_mv) {
3104 cm->allow_high_precision_mv = 0;
3105 } else {
3106 cm->allow_high_precision_mv = aom_rb_read_bit(rb);
3107 }
3108#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003109 cm->allow_high_precision_mv = aom_rb_read_bit(rb);
RogerZhou10a03802017-10-26 11:49:48 -07003110#endif
Angie Chiang5678ad92016-11-21 09:38:40 -08003111 cm->interp_filter = read_frame_interp_filter(rb);
Fangwen Fu8d164de2016-12-14 13:40:54 -08003112#if CONFIG_TEMPMV_SIGNALING
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01003113 if (frame_might_use_prev_frame_mvs(cm))
Fangwen Fu8d164de2016-12-14 13:40:54 -08003114 cm->use_prev_frame_mvs = aom_rb_read_bit(rb);
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01003115 else
3116 cm->use_prev_frame_mvs = 0;
Fangwen Fu8d164de2016-12-14 13:40:54 -08003117#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003118 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
3119 RefBuffer *const ref_buf = &cm->frame_refs[i];
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02003120#if CONFIG_HIGHBITDEPTH
Yaowu Xuf883b422016-08-30 14:01:10 -07003121 av1_setup_scale_factors_for_frame(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003122 &ref_buf->sf, ref_buf->buf->y_crop_width,
3123 ref_buf->buf->y_crop_height, cm->width, cm->height,
3124 cm->use_highbitdepth);
3125#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003126 av1_setup_scale_factors_for_frame(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003127 &ref_buf->sf, ref_buf->buf->y_crop_width,
3128 ref_buf->buf->y_crop_height, cm->width, cm->height);
3129#endif
3130 }
3131 }
3132 }
Jingning Hanc723b342017-08-24 11:19:46 -07003133
Jingning Hanea255c92017-09-29 08:12:09 -07003134#if CONFIG_FRAME_MARKER
Jingning Hanc723b342017-08-24 11:19:46 -07003135 if (cm->show_frame == 0) {
3136 cm->frame_offset = cm->current_video_frame + aom_rb_read_literal(rb, 4);
3137 } else {
3138 cm->frame_offset = cm->current_video_frame;
3139 }
Zoe Liu17af2742017-10-06 10:36:42 -07003140 av1_setup_frame_buf_refs(cm);
3141
3142#if CONFIG_FRAME_SIGN_BIAS
3143#if CONFIG_OBU
3144 if (cm->frame_type != S_FRAME)
3145#endif // CONFIG_OBU
3146 av1_setup_frame_sign_bias(cm);
Zoe Liu17af2742017-10-06 10:36:42 -07003147#endif // CONFIG_FRAME_SIGN_BIAS
3148#endif // CONFIG_FRAME_MARKER
Jingning Hanc723b342017-08-24 11:19:46 -07003149
Fangwen Fu8d164de2016-12-14 13:40:54 -08003150#if CONFIG_TEMPMV_SIGNALING
3151 cm->cur_frame->intra_only = cm->frame_type == KEY_FRAME || cm->intra_only;
3152#endif
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003153
3154#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003155 if (cm->seq_params.frame_id_numbers_present_flag) {
3156 /* If the refresh bitmask is set, update the reference frame id values
3157 and mark those frames as valid for referencing */
3158 int refresh_frame_flags =
3159 cm->frame_type == KEY_FRAME ? 0xFF : pbi->refresh_frame_flags;
3160 for (i = 0; i < REF_FRAMES; i++) {
3161 if ((refresh_frame_flags >> i) & 1) {
3162 cm->ref_frame_id[i] = cm->current_frame_id;
3163 cm->valid_for_referencing[i] = 1;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003164 }
3165 }
3166 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003167#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003168
Yaowu Xuc27fc142016-08-22 16:08:15 -07003169 get_frame_new_buffer(cm)->bit_depth = cm->bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003170 get_frame_new_buffer(cm)->color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07003171#if CONFIG_COLORSPACE_HEADERS
3172 get_frame_new_buffer(cm)->transfer_function = cm->transfer_function;
3173 get_frame_new_buffer(cm)->chroma_sample_position = cm->chroma_sample_position;
3174#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003175 get_frame_new_buffer(cm)->color_range = cm->color_range;
3176 get_frame_new_buffer(cm)->render_width = cm->render_width;
3177 get_frame_new_buffer(cm)->render_height = cm->render_height;
3178
3179 if (pbi->need_resync) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003180 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003181 "Keyframe / intra-only frame required to reset decoder"
3182 " state");
3183 }
3184
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01003185#if CONFIG_EXT_TILE
3186 const int might_bwd_adapt =
3187 !(cm->error_resilient_mode || cm->large_scale_tile);
3188#else
3189 const int might_bwd_adapt = !cm->error_resilient_mode;
3190#endif // CONFIG_EXT_TILE
3191 if (might_bwd_adapt) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003192 cm->refresh_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07003193 ? REFRESH_FRAME_CONTEXT_FORWARD
3194 : REFRESH_FRAME_CONTEXT_BACKWARD;
3195 } else {
3196 cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_FORWARD;
3197 }
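  // REFRESH_FRAME_CONTEXT_FORWARD keeps the probabilities signalled in the
  // header for the whole frame, while REFRESH_FRAME_CONTEXT_BACKWARD also
  // adapts them from decoded symbol counts once the frame is complete (see
  // the backward-adaptation path in av1_decode_tg_tiles_and_wrapup()).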
Thomas Daededa4d8b92017-06-05 15:44:14 -07003198#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuf883b422016-08-30 14:01:10 -07003199 // This flag will be overridden by the call to av1_setup_past_independence
Yaowu Xuc27fc142016-08-22 16:08:15 -07003200 // below, forcing the use of context 0 for those frame types.
Yaowu Xuf883b422016-08-30 14:01:10 -07003201 cm->frame_context_idx = aom_rb_read_literal(rb, FRAME_CONTEXTS_LOG2);
Thomas Daededa4d8b92017-06-05 15:44:14 -07003202#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003203
3204 // Generate next_ref_frame_map.
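  // Each set bit in pbi->refresh_frame_flags maps the corresponding reference
  // slot to the newly decoded frame buffer; every buffer referenced from
  // next_ref_frame_map has its ref_count raised under the buffer-pool lock so
  // that another worker cannot release it while this frame still needs it.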
3205 lock_buffer_pool(pool);
3206 for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
3207 if (mask & 1) {
3208 cm->next_ref_frame_map[ref_index] = cm->new_fb_idx;
3209 ++frame_bufs[cm->new_fb_idx].ref_count;
3210 } else {
3211 cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
3212 }
3213 // Current thread holds the reference frame.
3214 if (cm->ref_frame_map[ref_index] >= 0)
3215 ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
3216 ++ref_index;
3217 }
3218
3219 for (; ref_index < REF_FRAMES; ++ref_index) {
3220 cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
3221
3222 // Current thread holds the reference frame.
3223 if (cm->ref_frame_map[ref_index] >= 0)
3224 ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
3225 }
3226 unlock_buffer_pool(pool);
3227 pbi->hold_ref_buf = 1;
3228
3229 if (frame_is_intra_only(cm) || cm->error_resilient_mode)
Yaowu Xuf883b422016-08-30 14:01:10 -07003230 av1_setup_past_independence(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003231
Yaowu Xuc27fc142016-08-22 16:08:15 -07003232 setup_loopfilter(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003233 setup_quantization(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003234 xd->bd = (int)cm->bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003235
hui su0d103572017-03-01 17:58:01 -08003236#if CONFIG_Q_ADAPT_PROBS
Yaowu Xuf883b422016-08-30 14:01:10 -07003237 av1_default_coef_probs(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003238 if (cm->frame_type == KEY_FRAME || cm->error_resilient_mode ||
3239 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL) {
3240 for (i = 0; i < FRAME_CONTEXTS; ++i) cm->frame_contexts[i] = *cm->fc;
3241 } else if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT) {
Thomas Daededa4d8b92017-06-05 15:44:14 -07003242#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
3243    if (cm->frame_refs[0].idx >= 0) {  // guard against INVALID_IDX
3244 cm->frame_contexts[cm->frame_refs[0].idx] = *cm->fc;
3245 }
3246#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003247 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
Thomas Daededa4d8b92017-06-05 15:44:14 -07003248#endif // CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuc27fc142016-08-22 16:08:15 -07003249 }
hui su0d103572017-03-01 17:58:01 -08003250#endif // CONFIG_Q_ADAPT_PROBS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003251
3252 setup_segmentation(cm, rb);
3253
Arild Fuldseth07441162016-08-15 15:07:52 +02003254 {
Thomas Davies28444be2017-10-13 18:12:25 +01003255 int delta_q_allowed = 1;
3256#if !CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02003257 struct segmentation *const seg = &cm->seg;
3258 int segment_quantizer_active = 0;
3259 for (i = 0; i < MAX_SEGMENTS; i++) {
3260 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
3261 segment_quantizer_active = 1;
3262 }
3263 }
Thomas Davies28444be2017-10-13 18:12:25 +01003264 delta_q_allowed = !segment_quantizer_active;
3265#endif
Arild Fuldseth07441162016-08-15 15:07:52 +02003266
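    // Delta-Q: when delta_q_present_flag is set, the quantizer may be
    // adjusted per superblock in steps of delta_q_res, read below as a 2-bit
    // power of two (e.g. a coded value of 3 gives delta_q_res = 1 << 3 = 8).
    // With CONFIG_EXT_DELTA_Q the same scheme optionally covers the
    // loop-filter level through delta_lf_res / delta_lf_present_flag.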
Thomas Daviesf6936102016-09-05 16:51:31 +01003267 cm->delta_q_res = 1;
Fangwen Fu231fe422017-04-24 17:52:29 -07003268#if CONFIG_EXT_DELTA_Q
3269 cm->delta_lf_res = 1;
Jonathan Matthewsa48b1e62017-09-01 14:58:47 +01003270 cm->delta_lf_present_flag = 0;
Cheng Chen880166a2017-10-02 17:48:48 -07003271#if CONFIG_LOOPFILTER_LEVEL
3272 cm->delta_lf_multi = 0;
3273#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07003274#endif
Thomas Davies28444be2017-10-13 18:12:25 +01003275 if (delta_q_allowed == 1 && cm->base_qindex > 0) {
Arild Fuldseth07441162016-08-15 15:07:52 +02003276 cm->delta_q_present_flag = aom_rb_read_bit(rb);
3277 } else {
3278 cm->delta_q_present_flag = 0;
3279 }
3280 if (cm->delta_q_present_flag) {
3281 xd->prev_qindex = cm->base_qindex;
Thomas Daviesf6936102016-09-05 16:51:31 +01003282 cm->delta_q_res = 1 << aom_rb_read_literal(rb, 2);
Fangwen Fu231fe422017-04-24 17:52:29 -07003283#if CONFIG_EXT_DELTA_Q
Fangwen Fu231fe422017-04-24 17:52:29 -07003284 cm->delta_lf_present_flag = aom_rb_read_bit(rb);
3285 if (cm->delta_lf_present_flag) {
Cheng Chen880166a2017-10-02 17:48:48 -07003286 xd->prev_delta_lf_from_base = 0;
3287 cm->delta_lf_res = 1 << aom_rb_read_literal(rb, 2);
Cheng Chena97394f2017-09-27 15:05:14 -07003288#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen880166a2017-10-02 17:48:48 -07003289 cm->delta_lf_multi = aom_rb_read_bit(rb);
Cheng Chena97394f2017-09-27 15:05:14 -07003290 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id)
3291 xd->prev_delta_lf[lf_id] = 0;
3292#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07003293 }
3294#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02003295 }
3296 }
RogerZhou3b635242017-09-19 10:06:46 -07003297#if CONFIG_AMVR
RogerZhou10a03802017-10-26 11:49:48 -07003298 xd->cur_frame_force_integer_mv = cm->cur_frame_force_integer_mv;
RogerZhou3b635242017-09-19 10:06:46 -07003299#endif
Thomas Davies3ab20b42017-09-19 10:30:53 +01003300
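  // A segment is treated as lossless only when its effective qindex is 0 and
  // the DC/AC delta-Q offsets are all 0; cm->all_lossless then reflects
  // whether this holds across every active segment.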
Urvang Joshi454280d2016-10-14 16:51:44 -07003301 for (i = 0; i < MAX_SEGMENTS; ++i) {
3302 const int qindex = cm->seg.enabled
3303 ? av1_get_qindex(&cm->seg, i, cm->base_qindex)
3304 : cm->base_qindex;
3305 xd->lossless[i] = qindex == 0 && cm->y_dc_delta_q == 0 &&
3306 cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
3307 xd->qindex[i] = qindex;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003308 }
Thomas Daedef636d5c2017-06-29 13:48:27 -07003309 cm->all_lossless = all_lossless(cm, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003310 setup_segmentation_dequant(cm);
Thomas Daedef636d5c2017-06-29 13:48:27 -07003311#if CONFIG_CDEF
3312 if (!cm->all_lossless) {
3313 setup_cdef(cm, rb);
3314 }
3315#endif
3316#if CONFIG_LOOP_RESTORATION
3317 decode_restoration_mode(cm, rb);
3318#endif // CONFIG_LOOP_RESTORATION
3319 cm->tx_mode = read_tx_mode(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003320 cm->reference_mode = read_frame_reference_mode(cm, rb);
Debargha Mukherjee6f3c8982017-09-22 21:14:01 -07003321 if (cm->reference_mode != SINGLE_REFERENCE) setup_compound_reference_mode(cm);
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003322 read_compound_tools(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003323
Sarah Parkere68a3e42017-02-16 14:03:24 -08003324#if CONFIG_EXT_TX
3325 cm->reduced_tx_set_used = aom_rb_read_bit(rb);
3326#endif // CONFIG_EXT_TX
3327
Angie Chiang6dbffbf2017-10-06 16:59:54 -07003328#if CONFIG_ADAPT_SCAN
3329 cm->use_adapt_scan = aom_rb_read_bit(rb);
3330 // TODO(angiebird): call av1_init_scan_order only when use_adapt_scan
3331 // switches from 1 to 0
3332 if (cm->use_adapt_scan == 0) av1_init_scan_order(cm);
3333#endif // CONFIG_ADAPT_SCAN
3334
Pavel Frolov57c36e12017-09-12 15:00:40 +03003335  // NOTE(zoeliu): Since cm->prev_frame can be neither a frame with
3336  // show_existing_frame=1 nor a frame that is not used as
3337 // a reference, it is probable that by the time it is being
3338 // referred to, the frame buffer it originally points to may
3339  // already have expired and been reassigned to the current
3340 // newly coded frame. Hence, we need to check whether this is
3341 // the case, and if yes, we have 2 choices:
3342 // (1) Simply disable the use of previous frame mvs; or
3343 // (2) Have cm->prev_frame point to one reference frame buffer,
3344 // e.g. LAST_FRAME.
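  // Option (2) is what the code below implements, falling back to the
  // LAST_FRAME buffer when it is valid and to NULL (effectively option (1))
  // otherwise.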
3345 if (!dec_is_ref_frame_buf(pbi, cm->prev_frame)) {
3346 // Reassign the LAST_FRAME buffer to cm->prev_frame.
3347 cm->prev_frame =
3348 cm->frame_refs[LAST_FRAME - LAST_FRAME].idx != INVALID_IDX
3349 ? &cm->buffer_pool
3350 ->frame_bufs[cm->frame_refs[LAST_FRAME - LAST_FRAME].idx]
3351 : NULL;
3352 }
Pavel Frolov57c36e12017-09-12 15:00:40 +03003353
3354#if CONFIG_TEMPMV_SIGNALING
3355 if (cm->use_prev_frame_mvs && !frame_can_use_prev_frame_mvs(cm)) {
3356 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3357 "Frame wrongly requests previous frame MVs");
3358 }
3359#else
3360 cm->use_prev_frame_mvs = !cm->error_resilient_mode && cm->prev_frame &&
3361#if CONFIG_FRAME_SUPERRES
3362 cm->width == cm->last_width &&
3363 cm->height == cm->last_height &&
3364#else
3365 cm->width == cm->prev_frame->buf.y_crop_width &&
3366 cm->height == cm->prev_frame->buf.y_crop_height &&
3367#endif // CONFIG_FRAME_SUPERRES
3368 !cm->last_intra_only && cm->last_show_frame &&
3369 (cm->last_frame_type != KEY_FRAME);
3370#endif // CONFIG_TEMPMV_SIGNALING
3371
Sarah Parkerf289f9f2017-09-12 18:50:02 -07003372 if (!frame_is_intra_only(cm)) read_global_motion(cm, rb);
Sarah Parker3e579a62017-08-23 16:53:20 -07003373
Yaowu Xuc27fc142016-08-22 16:08:15 -07003374 read_tile_info(pbi, rb);
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003375 if (use_compressed_header(cm)) {
3376 sz = aom_rb_read_literal(rb, 16);
3377 if (sz == 0)
3378 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3379 "Invalid header size");
3380 } else {
3381 sz = 0;
3382 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003383 return sz;
3384}
3385
Yaowu Xuf883b422016-08-30 14:01:10 -07003386static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003387 size_t partition_size) {
Thomas Davies2e868ab2017-10-24 10:42:27 +01003388#if CONFIG_NEW_MULTISYMBOL
Thomas Daviese7154832017-10-03 10:12:17 +01003389 (void)pbi;
3390 (void)data;
3391 (void)partition_size;
3392 return 0;
3393#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003394 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuf883b422016-08-30 14:01:10 -07003395 aom_reader r;
Ryanf0e39192017-10-09 09:45:13 -07003396
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02003397#if ((CONFIG_RECT_TX_EXT) || (!CONFIG_NEW_MULTISYMBOL || CONFIG_LV_MAP) || \
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02003398 (CONFIG_COMPOUND_SINGLEREF))
Thomas Davies599395e2017-07-21 18:02:48 +01003399 FRAME_CONTEXT *const fc = cm->fc;
Thomas Davies599395e2017-07-21 18:02:48 +01003400#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003401
Alex Converse2cdf0d82016-12-13 13:53:09 -08003402#if CONFIG_ANS && ANS_MAX_SYMBOLS
Alex Converseeb780e72016-12-13 12:46:41 -08003403 r.window_size = 1 << cm->ans_window_size_log2;
Alex Converse2cdf0d82016-12-13 13:53:09 -08003404#endif
Alex Converse346440b2017-01-03 13:47:37 -08003405 if (aom_reader_init(&r, data, partition_size, pbi->decrypt_cb,
3406 pbi->decrypt_state))
Yaowu Xuf883b422016-08-30 14:01:10 -07003407 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003408 "Failed to allocate bool decoder 0");
Yaowu Xuc27fc142016-08-22 16:08:15 -07003409
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02003410#if CONFIG_RECT_TX_EXT
Yue Chen56e226e2017-05-02 16:21:40 -07003411 if (cm->tx_mode == TX_MODE_SELECT)
3412 av1_diff_update_prob(&r, &fc->quarter_tx_size_prob, ACCT_STR);
Yue Chend6bdd462017-07-19 16:05:43 -07003413#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003414
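  // Where CONFIG_NEW_MULTISYMBOL is off, per-frame probability updates are
  // read with av1_diff_update_prob(): a bitstream flag says whether a given
  // probability changes and, if so, a coded difference is applied to the
  // corresponding entry of the frame context.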
Thomas Davies985bfc32017-06-27 16:51:26 +01003415#if !CONFIG_NEW_MULTISYMBOL
David Barker16c64e32017-08-23 16:54:59 +01003416 if (cm->tx_mode == TX_MODE_SELECT)
Ryanf0e39192017-10-09 09:45:13 -07003417 for (int i = 0; i < TXFM_PARTITION_CONTEXTS; ++i)
David Barker16c64e32017-08-23 16:54:59 +01003418 av1_diff_update_prob(&r, &fc->txfm_partition_prob[i], ACCT_STR);
Ryanf0e39192017-10-09 09:45:13 -07003419 for (int i = 0; i < SKIP_CONTEXTS; ++i)
Thomas Davies61e3e372017-04-04 16:10:23 +01003420 av1_diff_update_prob(&r, &fc->skip_probs[i], ACCT_STR);
Cheng Chen0a7f2f52017-10-10 15:16:09 -07003421
3422#if CONFIG_JNT_COMP
3423 for (int i = 0; i < COMP_INDEX_CONTEXTS; ++i)
3424 av1_diff_update_prob(&r, &fc->compound_index_probs[i], ACCT_STR);
3425#endif // CONFIG_JNT_COMP
Thomas Davies61e3e372017-04-04 16:10:23 +01003426#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003427
Debargha Mukherjee801cc922017-09-22 17:22:50 -07003428 if (!frame_is_intra_only(cm)) {
Thomas Davies149eda52017-06-12 18:11:55 +01003429#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003430 read_inter_mode_probs(fc, &r);
Thomas Davies149eda52017-06-12 18:11:55 +01003431#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003432
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003433 if (cm->reference_mode != COMPOUND_REFERENCE &&
3434 cm->allow_interintra_compound) {
Thomas Daviescff91712017-07-07 11:49:55 +01003435#if !CONFIG_NEW_MULTISYMBOL
Ryanf0e39192017-10-09 09:45:13 -07003436 for (int i = 0; i < BLOCK_SIZE_GROUPS; i++) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003437 if (is_interintra_allowed_bsize_group(i)) {
Michael Bebenita6048d052016-08-25 14:40:54 -07003438 av1_diff_update_prob(&r, &fc->interintra_prob[i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003439 }
3440 }
Thomas Daviescff91712017-07-07 11:49:55 +01003441#endif
Debargha Mukherjee371968c2017-10-29 12:30:04 -07003442#if !CONFIG_NEW_MULTISYMBOL
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01003443#if CONFIG_EXT_PARTITION_TYPES
3444 int block_sizes_to_update = BLOCK_SIZES_ALL;
3445#else
3446 int block_sizes_to_update = BLOCK_SIZES;
3447#endif
Ryanf0e39192017-10-09 09:45:13 -07003448 for (int i = 0; i < block_sizes_to_update; i++) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003449 if (is_interintra_allowed_bsize(i) && is_interintra_wedge_used(i)) {
Michael Bebenita6048d052016-08-25 14:40:54 -07003450 av1_diff_update_prob(&r, &fc->wedge_interintra_prob[i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003451 }
3452 }
Debargha Mukherjee371968c2017-10-29 12:30:04 -07003453#endif // !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003454 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003455
Thomas Daviesf6ad9352017-04-19 11:38:06 +01003456#if !CONFIG_NEW_MULTISYMBOL
Ryanf0e39192017-10-09 09:45:13 -07003457 for (int i = 0; i < INTRA_INTER_CONTEXTS; i++)
Michael Bebenita6048d052016-08-25 14:40:54 -07003458 av1_diff_update_prob(&r, &fc->intra_inter_prob[i], ACCT_STR);
Thomas Daviesf6ad9352017-04-19 11:38:06 +01003459#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003460
David Barker037ee412017-09-19 12:43:46 +01003461#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003462 read_frame_reference_mode_probs(cm, &r);
David Barker037ee412017-09-19 12:43:46 +01003463#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003464
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02003465#if CONFIG_COMPOUND_SINGLEREF
Ryanf0e39192017-10-09 09:45:13 -07003466 for (int i = 0; i < COMP_INTER_MODE_CONTEXTS; i++)
Zoe Liu85b66462017-04-20 14:28:19 -07003467 av1_diff_update_prob(&r, &fc->comp_inter_mode_prob[i], ACCT_STR);
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02003468#endif // CONFIG_COMPOUND_SINGLEREF
Zoe Liu85b66462017-04-20 14:28:19 -07003469
Thomas Davies599395e2017-07-21 18:02:48 +01003470#if !CONFIG_NEW_MULTISYMBOL
RogerZhou3b635242017-09-19 10:06:46 -07003471#if CONFIG_AMVR
RogerZhou10a03802017-10-26 11:49:48 -07003472 if (cm->cur_frame_force_integer_mv == 0) {
RogerZhou3b635242017-09-19 10:06:46 -07003473#endif
Ryanf0e39192017-10-09 09:45:13 -07003474 for (int i = 0; i < NMV_CONTEXTS; ++i)
RogerZhou3b635242017-09-19 10:06:46 -07003475 read_mv_probs(&fc->nmvc[i], cm->allow_high_precision_mv, &r);
3476#if CONFIG_AMVR
3477 }
3478#endif
Thomas Davies599395e2017-07-21 18:02:48 +01003479#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003480 }
3481
Yaowu Xuf883b422016-08-30 14:01:10 -07003482 return aom_reader_has_error(&r);
Thomas Davies2e868ab2017-10-24 10:42:27 +01003483#endif // CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003484}
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003485
Yaowu Xuc27fc142016-08-22 16:08:15 -07003486#ifdef NDEBUG
3487#define debug_check_frame_counts(cm) (void)0
3488#else // !NDEBUG
3489// Counts should only be incremented when frame_parallel_decoding_mode and
3490// error_resilient_mode are disabled.
Yaowu Xuf883b422016-08-30 14:01:10 -07003491static void debug_check_frame_counts(const AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003492 FRAME_COUNTS zero_counts;
Yaowu Xuf883b422016-08-30 14:01:10 -07003493 av1_zero(zero_counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003494 assert(cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD ||
3495 cm->error_resilient_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003496 assert(!memcmp(cm->counts.partition, zero_counts.partition,
3497 sizeof(cm->counts.partition)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003498 assert(!memcmp(cm->counts.switchable_interp, zero_counts.switchable_interp,
3499 sizeof(cm->counts.switchable_interp)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003500 assert(!memcmp(cm->counts.inter_compound_mode,
3501 zero_counts.inter_compound_mode,
3502 sizeof(cm->counts.inter_compound_mode)));
3503 assert(!memcmp(cm->counts.interintra, zero_counts.interintra,
3504 sizeof(cm->counts.interintra)));
3505 assert(!memcmp(cm->counts.wedge_interintra, zero_counts.wedge_interintra,
3506 sizeof(cm->counts.wedge_interintra)));
Sarah Parker6fddd182016-11-10 20:57:20 -08003507 assert(!memcmp(cm->counts.compound_interinter,
3508 zero_counts.compound_interinter,
3509 sizeof(cm->counts.compound_interinter)));
Yue Chencb60b182016-10-13 15:18:22 -07003510 assert(!memcmp(cm->counts.motion_mode, zero_counts.motion_mode,
3511 sizeof(cm->counts.motion_mode)));
Sebastien Alaiwan1bc94fc2017-10-31 10:25:17 +01003512#if CONFIG_NCOBMC_ADAPT_WEIGHT
Wei-Ting Lin85a8f702017-06-22 13:55:15 -07003513 assert(!memcmp(cm->counts.ncobmc_mode, zero_counts.ncobmc_mode,
3514 sizeof(cm->counts.ncobmc_mode)));
3515#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003516 assert(!memcmp(cm->counts.intra_inter, zero_counts.intra_inter,
3517 sizeof(cm->counts.intra_inter)));
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02003518#if CONFIG_COMPOUND_SINGLEREF
Zoe Liu85b66462017-04-20 14:28:19 -07003519 assert(!memcmp(cm->counts.comp_inter_mode, zero_counts.comp_inter_mode,
3520 sizeof(cm->counts.comp_inter_mode)));
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02003521#endif // CONFIG_COMPOUND_SINGLEREF
Yaowu Xuc27fc142016-08-22 16:08:15 -07003522 assert(!memcmp(cm->counts.comp_inter, zero_counts.comp_inter,
3523 sizeof(cm->counts.comp_inter)));
Zoe Liuc082bbc2017-05-17 13:31:37 -07003524#if CONFIG_EXT_COMP_REFS
3525 assert(!memcmp(cm->counts.comp_ref_type, zero_counts.comp_ref_type,
3526 sizeof(cm->counts.comp_ref_type)));
3527 assert(!memcmp(cm->counts.uni_comp_ref, zero_counts.uni_comp_ref,
3528 sizeof(cm->counts.uni_comp_ref)));
3529#endif // CONFIG_EXT_COMP_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003530 assert(!memcmp(cm->counts.single_ref, zero_counts.single_ref,
3531 sizeof(cm->counts.single_ref)));
3532 assert(!memcmp(cm->counts.comp_ref, zero_counts.comp_ref,
3533 sizeof(cm->counts.comp_ref)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003534 assert(!memcmp(cm->counts.comp_bwdref, zero_counts.comp_bwdref,
3535 sizeof(cm->counts.comp_bwdref)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003536 assert(!memcmp(cm->counts.skip, zero_counts.skip, sizeof(cm->counts.skip)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003537 assert(
3538 !memcmp(&cm->counts.mv[0], &zero_counts.mv[0], sizeof(cm->counts.mv[0])));
3539 assert(
3540 !memcmp(&cm->counts.mv[1], &zero_counts.mv[1], sizeof(cm->counts.mv[0])));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003541}
3542#endif // NDEBUG
3543
Yaowu Xuf883b422016-08-30 14:01:10 -07003544static struct aom_read_bit_buffer *init_read_bit_buffer(
3545 AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
3546 const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003547 rb->bit_offset = 0;
3548 rb->error_handler = error_handler;
3549 rb->error_handler_data = &pbi->common;
3550 if (pbi->decrypt_cb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003551 const int n = (int)AOMMIN(MAX_AV1_HEADER_SIZE, data_end - data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003552 pbi->decrypt_cb(pbi->decrypt_state, data, clear_data, n);
3553 rb->bit_buffer = clear_data;
3554 rb->bit_buffer_end = clear_data + n;
3555 } else {
3556 rb->bit_buffer = data;
3557 rb->bit_buffer_end = data_end;
3558 }
3559 return rb;
3560}
3561
3562//------------------------------------------------------------------------------
3563
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003564#if CONFIG_FRAME_SIZE
3565void av1_read_frame_size(struct aom_read_bit_buffer *rb, int num_bits_width,
3566 int num_bits_height, int *width, int *height) {
3567 *width = aom_rb_read_literal(rb, num_bits_width) + 1;
3568 *height = aom_rb_read_literal(rb, num_bits_height) + 1;
3569#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003570void av1_read_frame_size(struct aom_read_bit_buffer *rb, int *width,
3571 int *height) {
3572 *width = aom_rb_read_literal(rb, 16) + 1;
3573 *height = aom_rb_read_literal(rb, 16) + 1;
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003574#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003575}
3576
Yaowu Xuf883b422016-08-30 14:01:10 -07003577BITSTREAM_PROFILE av1_read_profile(struct aom_read_bit_buffer *rb) {
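  // The profile is assembled from two raw bits (low bit first); a value
  // above 2 reads one further bit, so e.g. bits 1,1 followed by 0 or 1
  // select profile 3 or 4.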
3578 int profile = aom_rb_read_bit(rb);
3579 profile |= aom_rb_read_bit(rb) << 1;
3580 if (profile > 2) profile += aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003581 return (BITSTREAM_PROFILE)profile;
3582}
3583
Thomas Davies4822e142017-10-10 11:30:36 +01003584static void make_update_tile_list_dec(AV1Decoder *pbi, int start_tile,
3585 int num_tile, FRAME_CONTEXT *ec_ctxs[]) {
Thomas Davies028b57f2017-02-22 16:42:11 +00003586 int i;
Thomas Davies4822e142017-10-10 11:30:36 +01003587 for (i = start_tile; i < start_tile + num_tile; ++i)
3588 ec_ctxs[i - start_tile] = &pbi->tile_data[i].tctx;
Thomas Davies028b57f2017-02-22 16:42:11 +00003589}
Thomas Davies028b57f2017-02-22 16:42:11 +00003590
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003591#if CONFIG_FRAME_SUPERRES
3592void superres_post_decode(AV1Decoder *pbi) {
3593 AV1_COMMON *const cm = &pbi->common;
3594 BufferPool *const pool = cm->buffer_pool;
3595
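  // Frame superres: the frame was decoded at a reduced width, so upscale it
  // back to the full render resolution here. The buffer-pool lock is held
  // because upscaling works on the shared frame-buffer pool.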
3596 if (av1_superres_unscaled(cm)) return;
3597
3598 lock_buffer_pool(pool);
3599 av1_superres_upscale(cm, pool);
3600 unlock_buffer_pool(pool);
3601}
3602#endif // CONFIG_FRAME_SUPERRES
3603
Yi Luo10e23002017-07-31 11:54:43 -07003604static void dec_setup_frame_boundary_info(AV1_COMMON *const cm) {
David Barker5c06a642017-08-18 13:18:16 +01003605// Note: When LOOPFILTERING_ACROSS_TILES is enabled, we need to clear the
3606// boundary information every frame, since the tile boundaries may
3607// change every frame (particularly when dependent horizontal tiles are also
3608// enabled); when LOOPFILTERING_ACROSS_TILES is disabled, the only stored
3609// information is the frame boundaries, which depend only on the frame size.
3610#if !CONFIG_LOOPFILTERING_ACROSS_TILES
3611 if (cm->width != cm->last_width || cm->height != cm->last_height)
3612#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
3613 {
Yi Luo10e23002017-07-31 11:54:43 -07003614 int row, col;
3615 for (row = 0; row < cm->mi_rows; ++row) {
3616 MODE_INFO *mi = cm->mi + row * cm->mi_stride;
3617 for (col = 0; col < cm->mi_cols; ++col) {
3618 mi->mbmi.boundary_info = 0;
3619 mi++;
3620 }
3621 }
3622 av1_setup_frame_boundary_info(cm);
3623 }
3624}
3625
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003626size_t av1_decode_frame_headers_and_setup(AV1Decoder *pbi, const uint8_t *data,
3627 const uint8_t *data_end,
3628 const uint8_t **p_data_end) {
3629 AV1_COMMON *const cm = &pbi->common;
3630 MACROBLOCKD *const xd = &pbi->mb;
3631 struct aom_read_bit_buffer rb;
3632 uint8_t clear_data[MAX_AV1_HEADER_SIZE];
3633 size_t first_partition_size;
3634 YV12_BUFFER_CONFIG *new_fb;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003635 RefBuffer *last_fb_ref_buf = &cm->frame_refs[LAST_FRAME - LAST_FRAME];
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003636
3637#if CONFIG_ADAPT_SCAN
3638 av1_deliver_eob_threshold(cm, xd);
3639#endif
3640#if CONFIG_BITSTREAM_DEBUG
3641 bitstream_queue_set_frame_read(cm->current_video_frame * 2 + cm->show_frame);
3642#endif
3643
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003644 int i;
3645 for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
David Barkerd7c8bd52017-09-25 14:47:29 +01003646 cm->global_motion[i] = default_warp_params;
3647 cm->cur_frame->global_motion[i] = default_warp_params;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003648 }
3649 xd->global_motion = cm->global_motion;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003650
3651 first_partition_size = read_uncompressed_header(
3652 pbi, init_read_bit_buffer(pbi, &rb, data, data_end, clear_data));
3653
3654#if CONFIG_EXT_TILE
3655 // If cm->single_tile_decoding = 0, the independent decoding of a single tile
3656 // or a section of a frame is not allowed.
3657 if (!cm->single_tile_decoding &&
3658 (pbi->dec_tile_row >= 0 || pbi->dec_tile_col >= 0)) {
3659 pbi->dec_tile_row = -1;
3660 pbi->dec_tile_col = -1;
3661 }
3662#endif // CONFIG_EXT_TILE
3663
3664 pbi->first_partition_size = first_partition_size;
3665 pbi->uncomp_hdr_size = aom_rb_bytes_read(&rb);
3666 new_fb = get_frame_new_buffer(cm);
3667 xd->cur_buf = new_fb;
3668#if CONFIG_INTRABC
3669#if CONFIG_HIGHBITDEPTH
3670 av1_setup_scale_factors_for_frame(
3671 &xd->sf_identity, xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
3672 xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
3673 cm->use_highbitdepth);
3674#else
3675 av1_setup_scale_factors_for_frame(
3676 &xd->sf_identity, xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
3677 xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height);
3678#endif // CONFIG_HIGHBITDEPTH
3679#endif // CONFIG_INTRABC
3680
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003681 if (cm->show_existing_frame) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003682 // showing a frame directly
3683 *p_data_end = data + aom_rb_bytes_read(&rb);
3684 return 0;
3685 }
3686
3687 data += aom_rb_bytes_read(&rb);
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003688 if (first_partition_size)
3689 if (!read_is_valid(data, first_partition_size, data_end))
3690 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3691 "Truncated packet or corrupt header length");
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003692
3693 cm->setup_mi(cm);
3694
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003695  // NOTE(zoeliu): Since cm->prev_frame can be neither a frame with
3696  // show_existing_frame=1 nor a frame that is not used as
3697 // a reference, it is probable that by the time it is being
3698 // referred to, the frame buffer it originally points to may
3699  // already have expired and been reassigned to the current
3700 // newly coded frame. Hence, we need to check whether this is
3701 // the case, and if yes, we have 2 choices:
3702 // (1) Simply disable the use of previous frame mvs; or
3703 // (2) Have cm->prev_frame point to one reference frame buffer,
3704 // e.g. LAST_FRAME.
3705 if (!dec_is_ref_frame_buf(pbi, cm->prev_frame)) {
3706 // Reassign the LAST_FRAME buffer to cm->prev_frame.
3707 cm->prev_frame = last_fb_ref_buf->idx != INVALID_IDX
3708 ? &cm->buffer_pool->frame_bufs[last_fb_ref_buf->idx]
3709 : NULL;
3710 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003711
3712#if CONFIG_TEMPMV_SIGNALING
3713 if (cm->use_prev_frame_mvs && !frame_can_use_prev_frame_mvs(cm)) {
3714 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3715 "Frame wrongly requests previous frame MVs");
3716 }
3717#else
3718 cm->use_prev_frame_mvs = !cm->error_resilient_mode && cm->prev_frame &&
3719#if CONFIG_FRAME_SUPERRES
3720 cm->width == cm->last_width &&
3721 cm->height == cm->last_height &&
3722#else
3723 cm->width == cm->prev_frame->buf.y_crop_width &&
3724 cm->height == cm->prev_frame->buf.y_crop_height &&
3725#endif // CONFIG_FRAME_SUPERRES
3726 !cm->last_intra_only && cm->last_show_frame &&
3727 (cm->last_frame_type != KEY_FRAME);
3728#endif // CONFIG_TEMPMV_SIGNALING
3729
Zoe Liuf704a1c2017-10-02 16:55:59 -07003730#if CONFIG_EXT_SKIP
3731 av1_setup_skip_mode_allowed(cm);
3732#if 0
3733 printf("\nDECODER: Frame=%d, frame_offset=%d, show_frame=%d, "
3734 "is_skip_mode_allowed=%d, ref_frame_idx=(%d,%d)\n",
3735 cm->current_video_frame, cm->frame_offset, cm->show_frame,
3736 cm->is_skip_mode_allowed, cm->ref_frame_idx_0, cm->ref_frame_idx_1);
3737#endif // 0
3738#endif // CONFIG_EXT_SKIP
3739
Jingning Hanea255c92017-09-29 08:12:09 -07003740#if CONFIG_MFMV
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003741 av1_setup_motion_field(cm);
Jingning Hanea255c92017-09-29 08:12:09 -07003742#endif // CONFIG_MFMV
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003743
3744 av1_setup_block_planes(xd, cm->subsampling_x, cm->subsampling_y);
3745#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
3746 if (cm->error_resilient_mode || frame_is_intra_only(cm)) {
3747 // use the default frame context values
3748 *cm->fc = cm->frame_contexts[FRAME_CONTEXT_DEFAULTS];
3749 cm->pre_fc = &cm->frame_contexts[FRAME_CONTEXT_DEFAULTS];
3750 } else {
3751 *cm->fc = cm->frame_contexts[cm->frame_refs[0].idx];
3752 cm->pre_fc = &cm->frame_contexts[cm->frame_refs[0].idx];
3753 }
3754#else
3755 *cm->fc = cm->frame_contexts[cm->frame_context_idx];
3756 cm->pre_fc = &cm->frame_contexts[cm->frame_context_idx];
3757#endif // CONFIG_NO_FRAME_CONTEXT_SIGNALING
3758 if (!cm->fc->initialized)
3759 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3760 "Uninitialized entropy context.");
3761
3762 av1_zero(cm->counts);
3763
3764 xd->corrupted = 0;
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003765 if (first_partition_size) {
3766 new_fb->corrupted = read_compressed_header(pbi, data, first_partition_size);
3767 if (new_fb->corrupted)
3768 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3769 "Decode failed. Frame data header is corrupted.");
3770 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003771 return first_partition_size;
3772}
3773
3774void av1_decode_tg_tiles_and_wrapup(AV1Decoder *pbi, const uint8_t *data,
3775 const uint8_t *data_end,
3776 const uint8_t **p_data_end, int startTile,
3777 int endTile, int initialize_flag) {
3778 AV1_COMMON *const cm = &pbi->common;
3779 MACROBLOCKD *const xd = &pbi->mb;
3780 int context_updated = 0;
3781
3782#if CONFIG_LOOP_RESTORATION
3783 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
3784 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
3785 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
3786 av1_alloc_restoration_buffers(cm);
3787 }
3788#endif
3789
Cheng Chend8184da2017-09-26 18:15:22 -07003790#if !CONFIG_LOOPFILTER_LEVEL
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003791 if (cm->lf.filter_level && !cm->skip_loop_filter) {
3792 av1_loop_filter_frame_init(cm, cm->lf.filter_level, cm->lf.filter_level);
3793 }
3794#endif
3795
3796 // If encoded in frame parallel mode, frame context is ready after decoding
3797 // the frame header.
3798 if (cm->frame_parallel_decode && initialize_flag &&
3799 cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD) {
3800 AVxWorker *const worker = pbi->frame_worker_owner;
3801 FrameWorkerData *const frame_worker_data = worker->data1;
3802 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD) {
3803 context_updated = 1;
3804#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
3805 cm->frame_contexts[cm->new_fb_idx] = *cm->fc;
3806#else
3807 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
3808#endif // CONFIG_NO_FRAME_CONTEXT_SIGNALING
3809 }
3810 av1_frameworker_lock_stats(worker);
3811 pbi->cur_buf->row = -1;
3812 pbi->cur_buf->col = -1;
3813 frame_worker_data->frame_context_ready = 1;
3814 // Signal the main thread that context is ready.
3815 av1_frameworker_signal_stats(worker);
3816 av1_frameworker_unlock_stats(worker);
3817 }
3818
3819 dec_setup_frame_boundary_info(cm);
3820
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003821#if CONFIG_OBU
Debargha Mukherjee6ea917e2017-10-19 09:31:29 -07003822 *p_data_end = decode_tiles(pbi, data, data_end, startTile, endTile);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003823#else
Debargha Mukherjee6ea917e2017-10-19 09:31:29 -07003824 *p_data_end =
3825 decode_tiles(pbi, data + pbi->uncomp_hdr_size + pbi->first_partition_size,
3826 data_end, startTile, endTile);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003827#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003828
3829 if (endTile != cm->tile_rows * cm->tile_cols - 1) {
3830 return;
3831 }
3832
Ola Hugosson1e7f2d02017-09-22 21:36:26 +02003833#if CONFIG_STRIPED_LOOP_RESTORATION
3834 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
3835 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
3836 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
3837 av1_loop_restoration_save_boundary_lines(&pbi->cur_buf->buf, cm);
3838 }
3839#endif
3840
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003841#if CONFIG_CDEF
3842 if (!cm->skip_loop_filter && !cm->all_lossless) {
3843 av1_cdef_frame(&pbi->cur_buf->buf, cm, &pbi->mb);
3844 }
3845#endif // CONFIG_CDEF
3846
3847#if CONFIG_FRAME_SUPERRES
3848 superres_post_decode(pbi);
3849#endif // CONFIG_FRAME_SUPERRES
3850
3851#if CONFIG_LOOP_RESTORATION
3852 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
3853 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
3854 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
3855 aom_extend_frame_borders((YV12_BUFFER_CONFIG *)xd->cur_buf);
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01003856 av1_loop_restoration_filter_frame((YV12_BUFFER_CONFIG *)xd->cur_buf, cm,
3857 cm->rst_info, 7, NULL);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003858 }
3859#endif // CONFIG_LOOP_RESTORATION
3860
3861 if (!xd->corrupted) {
3862 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
Thomas Davies4822e142017-10-10 11:30:36 +01003863#if CONFIG_SIMPLE_BWD_ADAPT
3864 const int num_bwd_ctxs = 1;
3865#else
3866 const int num_bwd_ctxs = cm->tile_rows * cm->tile_cols;
3867#endif
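      // Backward adaptation: the per-tile entropy contexts gathered below are
      // averaged into cm->fc. With CONFIG_SIMPLE_BWD_ADAPT only the largest
      // tile's context contributes; otherwise every tile's CDFs are averaged.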
3868 FRAME_CONTEXT **tile_ctxs =
3869 aom_malloc(num_bwd_ctxs * sizeof(&pbi->tile_data[0].tctx));
3870 aom_cdf_prob **cdf_ptrs = aom_malloc(
3871 num_bwd_ctxs * sizeof(&pbi->tile_data[0].tctx.partition_cdf[0][0]));
3872#if CONFIG_SIMPLE_BWD_ADAPT
3873 make_update_tile_list_dec(pbi, cm->largest_tile_id, num_bwd_ctxs,
3874 tile_ctxs);
3875#else
3876 make_update_tile_list_dec(pbi, 0, num_bwd_ctxs, tile_ctxs);
3877#endif
Angie Chiang85e3b962017-10-01 16:04:43 -07003878#if CONFIG_SYMBOLRATE
3879 av1_dump_symbol_rate(cm);
3880#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003881 av1_adapt_intra_frame_probs(cm);
3882 av1_average_tile_coef_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
Thomas Davies4822e142017-10-10 11:30:36 +01003883 num_bwd_ctxs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003884 av1_average_tile_intra_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
Thomas Davies4822e142017-10-10 11:30:36 +01003885 num_bwd_ctxs);
Debargha Mukherjee43061b32017-10-13 16:50:17 -07003886 av1_average_tile_loopfilter_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
3887 num_bwd_ctxs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003888#if CONFIG_ADAPT_SCAN
3889 av1_adapt_scan_order(cm);
3890#endif // CONFIG_ADAPT_SCAN
3891
3892 if (!frame_is_intra_only(cm)) {
3893 av1_adapt_inter_frame_probs(cm);
Thomas Davies0e7b1d72017-10-02 10:54:24 +01003894#if !CONFIG_NEW_MULTISYMBOL
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003895 av1_adapt_mv_probs(cm, cm->allow_high_precision_mv);
Thomas Davies0e7b1d72017-10-02 10:54:24 +01003896#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003897 av1_average_tile_inter_cdfs(&pbi->common, pbi->common.fc, tile_ctxs,
Thomas Davies4822e142017-10-10 11:30:36 +01003898 cdf_ptrs, num_bwd_ctxs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003899 av1_average_tile_mv_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
Thomas Davies4822e142017-10-10 11:30:36 +01003900 num_bwd_ctxs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003901 }
3902 aom_free(tile_ctxs);
3903 aom_free(cdf_ptrs);
3904 } else {
3905 debug_check_frame_counts(cm);
3906 }
3907 } else {
3908 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3909 "Decode failed. Frame data is corrupted.");
3910 }
3911
3912#if CONFIG_INSPECTION
3913 if (pbi->inspect_cb != NULL) {
3914 (*pbi->inspect_cb)(pbi, pbi->inspect_ctx);
3915 }
3916#endif
3917
3918// Non frame parallel update frame context here.
3919#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
3920 if (!context_updated) cm->frame_contexts[cm->new_fb_idx] = *cm->fc;
3921#else
3922 if (!cm->error_resilient_mode && !context_updated)
3923 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
3924#endif
3925}
3926
3927#if CONFIG_OBU
3928
3929static OBU_TYPE read_obu_header(struct aom_read_bit_buffer *rb,
3930 uint32_t *header_size) {
3931 OBU_TYPE obu_type;
3932 int obu_extension_flag;
3933
3934 *header_size = 1;
3935
Soo-Chul Han38427e82017-09-27 15:06:13 -04003936 // first bit is obu_forbidden_bit (0) according to R19
3937 aom_rb_read_bit(rb);
3938
3939 obu_type = (OBU_TYPE)aom_rb_read_literal(rb, 4);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003940 aom_rb_read_literal(rb, 2); // reserved
3941 obu_extension_flag = aom_rb_read_bit(rb);
3942 if (obu_extension_flag) {
3943 *header_size += 1;
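    // The remaining extension fields are read and dropped. In this draft of
    // the OBU syntax the extension byte is assumed to carry temporal_id
    // (3 bits), spatial/quality layer ids (2 + 2 bits) and a reserved bit.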
3944 aom_rb_read_literal(rb, 3); // temporal_id
3945 aom_rb_read_literal(rb, 2);
3946 aom_rb_read_literal(rb, 2);
3947 aom_rb_read_literal(rb, 1); // reserved
3948 }
3949
3950 return obu_type;
3951}
3952
3953static uint32_t read_temporal_delimiter_obu() { return 0; }
3954
3955static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
3956 struct aom_read_bit_buffer *rb) {
3957 AV1_COMMON *const cm = &pbi->common;
David Barker5e70a112017-10-03 14:28:17 +01003958 SequenceHeader *const seq_params = &cm->seq_params;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003959 uint32_t saved_bit_offset = rb->bit_offset;
3960
3961 cm->profile = av1_read_profile(rb);
3962 aom_rb_read_literal(rb, 4); // level
3963
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003964#if CONFIG_FRAME_SIZE
3965 int num_bits_width = aom_rb_read_literal(rb, 4) + 1;
3966 int num_bits_height = aom_rb_read_literal(rb, 4) + 1;
3967 int max_frame_width = aom_rb_read_literal(rb, num_bits_width) + 1;
3968 int max_frame_height = aom_rb_read_literal(rb, num_bits_height) + 1;
3969
3970 seq_params->num_bits_width = num_bits_width;
3971 seq_params->num_bits_height = num_bits_height;
3972 seq_params->max_frame_width = max_frame_width;
3973 seq_params->max_frame_height = max_frame_height;
3974#endif
3975
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003976 seq_params->frame_id_numbers_present_flag = aom_rb_read_bit(rb);
3977 if (seq_params->frame_id_numbers_present_flag) {
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02003978 // We must always have delta_frame_id_length < frame_id_length,
3979 // in order for a frame to be referenced with a unique delta.
3980 // Avoid wasting bits by using a coding that enforces this restriction.
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003981 seq_params->delta_frame_id_length = aom_rb_read_literal(rb, 4) + 2;
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02003982 seq_params->frame_id_length =
3983 aom_rb_read_literal(rb, 3) + seq_params->delta_frame_id_length + 1;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003984 }
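  // With the codings above, delta_frame_id_length lies in [2, 17] and
  // frame_id_length in [delta_frame_id_length + 1, delta_frame_id_length + 8],
  // which guarantees delta_frame_id_length < frame_id_length.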
3985
3986 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
3987
3988 return ((rb->bit_offset - saved_bit_offset + 7) >> 3);
3989}
3990
3991static uint32_t read_frame_header_obu(AV1Decoder *pbi, const uint8_t *data,
3992 const uint8_t *data_end,
3993 const uint8_t **p_data_end) {
3994 size_t header_size;
3995
3996 header_size =
3997 av1_decode_frame_headers_and_setup(pbi, data, data_end, p_data_end);
3998 return (uint32_t)(pbi->uncomp_hdr_size + header_size);
3999}
4000
4001static uint32_t read_tile_group_header(AV1Decoder *pbi,
4002 struct aom_read_bit_buffer *rb,
4003 int *startTile, int *endTile) {
4004 AV1_COMMON *const cm = &pbi->common;
4005 uint32_t saved_bit_offset = rb->bit_offset;
4006
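  // Tile indices are coded with log2_tile_rows + log2_tile_cols bits as a
  // single flattened tile index over the frame; the caller compares endTile
  // against tile_rows * tile_cols - 1 to detect the frame's last tile group.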
4007 *startTile = aom_rb_read_literal(rb, cm->log2_tile_rows + cm->log2_tile_cols);
4008 *endTile = aom_rb_read_literal(rb, cm->log2_tile_rows + cm->log2_tile_cols);
4009
4010 return ((rb->bit_offset - saved_bit_offset + 7) >> 3);
4011}
4012
4013static uint32_t read_one_tile_group_obu(AV1Decoder *pbi,
4014 struct aom_read_bit_buffer *rb,
4015 int is_first_tg, const uint8_t *data,
4016 const uint8_t *data_end,
4017 const uint8_t **p_data_end,
4018 int *is_last_tg) {
4019 AV1_COMMON *const cm = &pbi->common;
4020 int startTile, endTile;
4021 uint32_t header_size, tg_payload_size;
4022
4023 header_size = read_tile_group_header(pbi, rb, &startTile, &endTile);
4024 data += header_size;
4025 av1_decode_tg_tiles_and_wrapup(pbi, data, data_end, p_data_end, startTile,
4026 endTile, is_first_tg);
4027 tg_payload_size = (uint32_t)(*p_data_end - data);
4028
4029 // TODO(shan): For now, assume all tile groups received in order
4030 *is_last_tg = endTile == cm->tile_rows * cm->tile_cols - 1;
4031
4032 return header_size + tg_payload_size;
4033}
4034
Soo-Chul Han38427e82017-09-27 15:06:13 -04004035static void read_metadata_private_data(const uint8_t *data, uint32_t sz) {
4036 int i;
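  // Placeholder parsing: the 16-bit values read below are discarded. Note
  // that looping sz times over 2-byte reads walks 2 * sz bytes, so this
  // assumes sz counts 16-bit words rather than payload bytes.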
4037
4038 for (i = 0; i < (int)sz; i++) {
4039 mem_get_le16(data);
4040 data += 2;
4041 }
4042}
4043
4044static void read_metadata_hdr_cll(const uint8_t *data) {
4045 mem_get_le16(data);
4046 mem_get_le16(data + 2);
4047}
4048
4049static void read_metadata_hdr_mdcv(const uint8_t *data) {
4050 int i;
4051
4052 for (i = 0; i < 3; i++) {
4053 mem_get_le16(data);
4054 data += 2;
4055 mem_get_le16(data);
4056 data += 2;
4057 }
4058
4059 mem_get_le16(data);
4060 data += 2;
4061 mem_get_le16(data);
4062 data += 2;
4063 mem_get_le16(data);
4064 data += 2;
4065 mem_get_le16(data);
4066}
4067
4068static uint32_t read_metadata(const uint8_t *data, uint32_t sz) {
4069 METADATA_TYPE metadata_type;
4070
4071 metadata_type = (METADATA_TYPE)mem_get_le16(data);
4072
4073 if (metadata_type == METADATA_TYPE_PRIVATE_DATA) {
4074 read_metadata_private_data(data + 2, sz - 2);
4075 } else if (metadata_type == METADATA_TYPE_HDR_CLL) {
4076 read_metadata_hdr_cll(data + 2);
4077 } else if (metadata_type == METADATA_TYPE_HDR_MDCV) {
4078 read_metadata_hdr_mdcv(data + 2);
4079 }
4080
4081 return sz;
4082}
4083
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004084void av1_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
4085 const uint8_t *data_end,
4086 const uint8_t **p_data_end) {
4087 AV1_COMMON *const cm = &pbi->common;
4088 int frame_decoding_finished = 0;
4089 int is_first_tg_obu_received = 1;
4090 int frame_header_received = 0;
4091 int frame_header_size = 0;
4092
4093 // decode frame as a series of OBUs
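  // Each iteration consumes one OBU: an optional PRE_OBU_SIZE_BYTES length
  // field (when CONFIG_ADD_4BYTES_OBUSIZE is set), a one-byte header with an
  // optional extension byte, and the type-specific payload parsed below.
  // Decoding stops once a tile group completes the frame, or right after the
  // frame header when show_existing_frame is set.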
4094 while (!frame_decoding_finished && !cm->error.error_code) {
4095 struct aom_read_bit_buffer rb;
4096 uint8_t clear_data[80];
4097 uint32_t obu_size, obu_header_size, obu_payload_size = 0;
4098 OBU_TYPE obu_type;
4099
Soo-Chul Han38427e82017-09-27 15:06:13 -04004100 init_read_bit_buffer(pbi, &rb, data + PRE_OBU_SIZE_BYTES, data_end,
4101 clear_data);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004102
Soo-Chul Han38427e82017-09-27 15:06:13 -04004103// Every OBU is preceded by a PRE_OBU_SIZE_BYTES-byte field giving the OBU
4104// size (OBU header + payload size).
4105// The OBU size is only needed for tile-group OBUs.
4106#if CONFIG_ADD_4BYTES_OBUSIZE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004107 obu_size = mem_get_le32(data);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004108#else
4109 obu_size = data_end - data;
4110#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004111 obu_type = read_obu_header(&rb, &obu_header_size);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004112 data += (PRE_OBU_SIZE_BYTES + obu_header_size);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004113
4114 switch (obu_type) {
4115 case OBU_TD: obu_payload_size = read_temporal_delimiter_obu(); break;
4116 case OBU_SEQUENCE_HEADER:
4117 obu_payload_size = read_sequence_header_obu(pbi, &rb);
4118 break;
4119 case OBU_FRAME_HEADER:
4120 // Only decode first frame header received
4121 if (!frame_header_received) {
4122 frame_header_size = obu_payload_size =
4123 read_frame_header_obu(pbi, data, data_end, p_data_end);
4124 frame_header_received = 1;
4125 } else {
4126 obu_payload_size = frame_header_size;
4127 }
4128 if (cm->show_existing_frame) frame_decoding_finished = 1;
4129 break;
4130 case OBU_TILE_GROUP:
4131 obu_payload_size = read_one_tile_group_obu(
4132 pbi, &rb, is_first_tg_obu_received, data, data + obu_size - 1,
4133 p_data_end, &frame_decoding_finished);
4134 is_first_tg_obu_received = 0;
4135 break;
Soo-Chul Han38427e82017-09-27 15:06:13 -04004136 case OBU_METADATA:
4137 obu_payload_size = read_metadata(data, obu_size);
4138 break;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004139 default: break;
4140 }
4141 data += obu_payload_size;
4142 }
4143}
4144#endif