/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <stdlib.h>  // qsort()

#include "./aom_config.h"
#include "./aom_dsp_rtcd.h"
#include "./aom_scale_rtcd.h"
#include "./av1_rtcd.h"

#include "aom/aom_codec.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_dsp/binary_codes_reader.h"
#include "aom_dsp/bitreader.h"
#include "aom_dsp/bitreader_buffer.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem.h"
#include "aom_ports/mem_ops.h"
#include "aom_scale/aom_scale.h"
#include "aom_util/aom_thread.h"

#if CONFIG_BITSTREAM_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_BITSTREAM_DEBUG

#include "av1/common/alloccommon.h"
#if CONFIG_CDEF
#include "av1/common/cdef.h"
#endif
#if CONFIG_INSPECTION
#include "av1/decoder/inspection.h"
#endif
#include "av1/common/common.h"
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/idct.h"
#include "av1/common/mvref_common.h"
#include "av1/common/pred_common.h"
#include "av1/common/quant_common.h"
#include "av1/common/reconinter.h"
#include "av1/common/reconintra.h"
#if CONFIG_FRAME_SUPERRES
#include "av1/common/resize.h"
#endif  // CONFIG_FRAME_SUPERRES
#include "av1/common/seg_common.h"
#include "av1/common/thread_common.h"
#include "av1/common/tile_common.h"

#include "av1/decoder/decodeframe.h"
#include "av1/decoder/decodemv.h"
#include "av1/decoder/decoder.h"
#if CONFIG_LV_MAP
#include "av1/decoder/decodetxb.h"
#endif
#include "av1/decoder/detokenize.h"
#include "av1/decoder/dsubexp.h"
#include "av1/decoder/symbolrate.h"

#include "av1/common/warped_motion.h"

#define MAX_AV1_HEADER_SIZE 80
#define ACCT_STR __func__

#if CONFIG_CFL
#include "av1/common/cfl.h"
#endif

#if CONFIG_STRIPED_LOOP_RESTORATION && !CONFIG_LOOP_RESTORATION
#error "striped_loop_restoration requires loop_restoration"
#endif

#if CONFIG_LOOP_RESTORATION
static void loop_restoration_read_sb_coeffs(const AV1_COMMON *const cm,
                                            MACROBLOCKD *xd,
                                            aom_reader *const r, int plane,
                                            int rtile_idx);
#endif

static struct aom_read_bit_buffer *init_read_bit_buffer(
    AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
    const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]);
static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
                                  size_t partition_size);
static size_t read_uncompressed_header(AV1Decoder *pbi,
                                       struct aom_read_bit_buffer *rb);

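// Returns whether the current frame may use compound (two-reference)
// prediction. With CONFIG_ONE_SIDED_COMPOUND this is simply "not intra-only";
// otherwise at least one reference must have a different sign bias.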
static int is_compound_reference_allowed(const AV1_COMMON *cm) {
#if CONFIG_ONE_SIDED_COMPOUND  // Normative in decoder
  return !frame_is_intra_only(cm);
#else
  int i;
  if (frame_is_intra_only(cm)) return 0;
  for (i = 1; i < INTER_REFS_PER_FRAME; ++i)
    if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1]) return 1;

  return 0;
#endif  // CONFIG_ONE_SIDED_COMPOUND
}

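// Fills in the fixed forward (LAST/LAST2/LAST3/GOLDEN) and backward
// (BWDREF/ALTREF2/ALTREF) reference lists used by compound prediction.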
static void setup_compound_reference_mode(AV1_COMMON *cm) {
  cm->comp_fwd_ref[0] = LAST_FRAME;
  cm->comp_fwd_ref[1] = LAST2_FRAME;
  cm->comp_fwd_ref[2] = LAST3_FRAME;
  cm->comp_fwd_ref[3] = GOLDEN_FRAME;

  cm->comp_bwd_ref[0] = BWDREF_FRAME;
  cm->comp_bwd_ref[1] = ALTREF2_FRAME;
  cm->comp_bwd_ref[2] = ALTREF_FRAME;
}

static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return len != 0 && len <= (size_t)(end - start);
}

static int decode_unsigned_max(struct aom_read_bit_buffer *rb, int max) {
  const int data = aom_rb_read_literal(rb, get_unsigned_bits(max));
  return data > max ? max : data;
}

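// Reads the frame-level transform mode. All-lossless frames force ONLY_4X4;
// otherwise one bit selects TX_MODE_SELECT and the remaining modes are coded
// as a 2-bit literal (plus one extra bit when CONFIG_TX64X64 is enabled).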
static TX_MODE read_tx_mode(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
#if CONFIG_TX64X64
  TX_MODE tx_mode;
#endif
  if (cm->all_lossless) return ONLY_4X4;
#if CONFIG_VAR_TX_NO_TX_MODE
  (void)rb;
  return TX_MODE_SELECT;
#else
#if CONFIG_TX64X64
  tx_mode = aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
  if (tx_mode == ALLOW_32X32) tx_mode += aom_rb_read_bit(rb);
  return tx_mode;
#else
  return aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
#endif  // CONFIG_TX64X64
#endif  // CONFIG_VAR_TX_NO_TX_MODE
}

#if !CONFIG_NEW_MULTISYMBOL
static void read_inter_mode_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i;
  for (i = 0; i < NEWMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->newmv_prob[i], ACCT_STR);
  for (i = 0; i < GLOBALMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->zeromv_prob[i], ACCT_STR);
  for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->refmv_prob[i], ACCT_STR);
  for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->drl_prob[i], ACCT_STR);
}
#endif

static REFERENCE_MODE read_frame_reference_mode(
    const AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  if (is_compound_reference_allowed(cm)) {
#if CONFIG_REF_ADAPT
    return aom_rb_read_bit(rb) ? REFERENCE_MODE_SELECT : SINGLE_REFERENCE;
#else
    return aom_rb_read_bit(rb)
               ? REFERENCE_MODE_SELECT
               : (aom_rb_read_bit(rb) ? COMPOUND_REFERENCE : SINGLE_REFERENCE);
#endif  // CONFIG_REF_ADAPT
  } else {
    return SINGLE_REFERENCE;
  }
}

#if !CONFIG_NEW_MULTISYMBOL
static void read_frame_reference_mode_probs(AV1_COMMON *cm, aom_reader *r) {
  FRAME_CONTEXT *const fc = cm->fc;
  int i;

  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    for (i = 0; i < COMP_INTER_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_inter_prob[i], ACCT_STR);

  if (cm->reference_mode != COMPOUND_REFERENCE) {
    for (i = 0; i < REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (SINGLE_REFS - 1); ++j) {
        av1_diff_update_prob(r, &fc->single_ref_prob[i][j], ACCT_STR);
      }
    }
  }

  if (cm->reference_mode != SINGLE_REFERENCE) {
#if CONFIG_EXT_COMP_REFS
    for (i = 0; i < COMP_REF_TYPE_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_ref_type_prob[i], ACCT_STR);

    for (i = 0; i < UNI_COMP_REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (UNIDIR_COMP_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->uni_comp_ref_prob[i][j], ACCT_STR);
    }
#endif  // CONFIG_EXT_COMP_REFS

    for (i = 0; i < REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (FWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_ref_prob[i][j], ACCT_STR);
      for (j = 0; j < (BWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_bwdref_prob[i][j], ACCT_STR);
    }
  }
}

static void update_mv_probs(aom_prob *p, int n, aom_reader *r) {
  int i;
  for (i = 0; i < n; ++i) av1_diff_update_prob(r, &p[i], ACCT_STR);
}

static void read_mv_probs(nmv_context *ctx, int allow_hp, aom_reader *r) {
  int i;
  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      nmv_component *const comp_ctx = &ctx->comps[i];
      update_mv_probs(&comp_ctx->class0_hp, 1, r);
      update_mv_probs(&comp_ctx->hp, 1, r);
    }
  }
}
#endif

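// Applies the inverse transform of one transform block on top of the
// prediction in dst, then clears the dequantized coefficients that were
// consumed (up to and including scan_line).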
235static void inverse_transform_block(MACROBLOCKD *xd, int plane,
Lester Lu432012f2017-08-17 14:39:29 -0700236#if CONFIG_LGT_FROM_PRED
Lester Lu708c1ec2017-06-14 14:54:49 -0700237 PREDICTION_MODE mode,
238#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700239 const TX_TYPE tx_type,
240 const TX_SIZE tx_size, uint8_t *dst,
Jingning Han1be18782016-10-21 11:48:15 -0700241 int stride, int16_t scan_line, int eob) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700242 struct macroblockd_plane *const pd = &xd->plane[plane];
Jingning Han1be18782016-10-21 11:48:15 -0700243 tran_low_t *const dqcoeff = pd->dqcoeff;
Lester Lu708c1ec2017-06-14 14:54:49 -0700244 av1_inverse_transform_block(xd, dqcoeff,
Lester Lu432012f2017-08-17 14:39:29 -0700245#if CONFIG_LGT_FROM_PRED
Lester Lu708c1ec2017-06-14 14:54:49 -0700246 mode,
247#endif
Sarah Parker99e7daa2017-08-29 10:30:13 -0700248#if CONFIG_MRC_TX && SIGNAL_ANY_MRC_MASK
249 xd->mrc_mask,
250#endif // CONFIG_MRC_TX && SIGNAL_ANY_MRC_MASK
Sebastien Alaiwan3bac9922017-11-02 12:34:41 +0100251 plane, tx_type, tx_size, dst, stride, eob);
Jingning Han1be18782016-10-21 11:48:15 -0700252 memset(dqcoeff, 0, (scan_line + 1) * sizeof(dqcoeff[0]));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700253}
254
Lester Lu9c521922017-07-11 11:16:45 -0700255static int get_block_idx(const MACROBLOCKD *xd, int plane, int row, int col) {
256 const int bsize = xd->mi[0]->mbmi.sb_type;
257 const struct macroblockd_plane *pd = &xd->plane[plane];
Lester Lu9c521922017-07-11 11:16:45 -0700258 const BLOCK_SIZE plane_bsize =
259 AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
Lester Lu9c521922017-07-11 11:16:45 -0700260 const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
hui su0c6244b2017-07-12 17:11:43 -0700261 const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
Lester Lu9c521922017-07-11 11:16:45 -0700262 const uint8_t txh_unit = tx_size_high_unit[tx_size];
263 return row * max_blocks_wide + col * txh_unit;
264}
265
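// Intra path for a single transform block: run the intra predictor, decode
// the coefficients (LV_MAP or token based depending on configuration), and
// add the inverse transform to the prediction when eob is non-zero.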
static void predict_and_reconstruct_intra_block(
    AV1_COMMON *cm, MACROBLOCKD *const xd, aom_reader *const r,
    MB_MODE_INFO *const mbmi, int plane, int row, int col, TX_SIZE tx_size) {
  PLANE_TYPE plane_type = get_plane_type(plane);
  const int block_idx = get_block_idx(xd, plane, row, col);
  av1_predict_intra_block_facade(cm, xd, plane, block_idx, col, row, tx_size);

  if (!mbmi->skip) {
    struct macroblockd_plane *const pd = &xd->plane[plane];
#if CONFIG_LV_MAP
    int16_t max_scan_line = 0;
    int eob;
    av1_read_coeffs_txb_facade(cm, xd, r, row, col, block_idx, plane,
                               pd->dqcoeff, tx_size, &max_scan_line, &eob);
    // tx_type will be read out in av1_read_coeffs_txb_facade
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
#else  // CONFIG_LV_MAP
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
    const SCAN_ORDER *scan_order = get_scan(cm, tx_size, tx_type, mbmi);
    int16_t max_scan_line = 0;
    const int eob =
        av1_decode_block_tokens(cm, xd, plane, scan_order, col, row, tx_size,
                                tx_type, &max_scan_line, r, mbmi->segment_id);
#endif  // CONFIG_LV_MAP
    if (eob) {
      uint8_t *dst =
          &pd->dst.buf[(row * pd->dst.stride + col) << tx_size_wide_log2[0]];
      inverse_transform_block(xd, plane,
#if CONFIG_LGT_FROM_PRED
                              mbmi->mode,
#endif
                              tx_type, tx_size, dst, pd->dst.stride,
                              max_scan_line, eob);
    }
  }
#if CONFIG_CFL
  if (plane == AOM_PLANE_Y && xd->cfl->store_y) {
    cfl_store_tx(xd, row, col, tx_size, mbmi->sb_type);
  }
#endif  // CONFIG_CFL
}

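// Recursively decodes the transform tree of an inter block: when the coded
// transform size matches the current unit it is detokenized and reconstructed
// directly, otherwise the unit is split into four sub-blocks and each is
// handled recursively.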
static void decode_reconstruct_tx(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                  aom_reader *r, MB_MODE_INFO *const mbmi,
                                  int plane, BLOCK_SIZE plane_bsize,
                                  int blk_row, int blk_col, int block,
                                  TX_SIZE tx_size, int *eob_total) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
  const int tx_row = blk_row >> (1 - pd->subsampling_y);
  const int tx_col = blk_col >> (1 - pd->subsampling_x);
  const TX_SIZE plane_tx_size =
      plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
            : mbmi->inter_tx_size[tx_row][tx_col];
  // Scale to match transform block unit.
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (tx_size == plane_tx_size) {
    PLANE_TYPE plane_type = get_plane_type(plane);
#if CONFIG_LV_MAP
    int16_t max_scan_line = 0;
    int eob;
    av1_read_coeffs_txb_facade(cm, xd, r, blk_row, blk_col, block, plane,
                               pd->dqcoeff, tx_size, &max_scan_line, &eob);
    // tx_type will be read out in av1_read_coeffs_txb_facade
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, blk_row, blk_col, block, plane_tx_size);
#else  // CONFIG_LV_MAP
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, blk_row, blk_col, block, plane_tx_size);
    const SCAN_ORDER *sc = get_scan(cm, plane_tx_size, tx_type, mbmi);
    int16_t max_scan_line = 0;
    const int eob = av1_decode_block_tokens(
        cm, xd, plane, sc, blk_col, blk_row, plane_tx_size, tx_type,
        &max_scan_line, r, mbmi->segment_id);
#endif  // CONFIG_LV_MAP
    inverse_transform_block(xd, plane,
#if CONFIG_LGT_FROM_PRED
                            mbmi->mode,
#endif
                            tx_type, plane_tx_size,
                            &pd->dst.buf[(blk_row * pd->dst.stride + blk_col)
                                         << tx_size_wide_log2[0]],
                            pd->dst.stride, max_scan_line, eob);
    *eob_total += eob;
  } else {
#if CONFIG_RECT_TX_EXT
    int is_qttx = plane_tx_size == quarter_txsize_lookup[plane_bsize];
    const TX_SIZE sub_txs = is_qttx ? plane_tx_size : sub_tx_size_map[tx_size];
    if (is_qttx) assert(blk_row == 0 && blk_col == 0 && block == 0);
#else
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    assert(IMPLIES(tx_size <= TX_4X4, sub_txs == tx_size));
    assert(IMPLIES(tx_size > TX_4X4, sub_txs < tx_size));
#endif
    const int bsl = tx_size_wide_unit[sub_txs];
    int sub_step = tx_size_wide_unit[sub_txs] * tx_size_high_unit[sub_txs];
    int i;

    assert(bsl > 0);

    for (i = 0; i < 4; ++i) {
#if CONFIG_RECT_TX_EXT
      int is_wide_tx = tx_size_wide_unit[sub_txs] > tx_size_high_unit[sub_txs];
      const int offsetr =
          is_qttx ? (is_wide_tx ? i * tx_size_high_unit[sub_txs] : 0)
                  : blk_row + ((i >> 1) * bsl);
      const int offsetc =
          is_qttx ? (is_wide_tx ? 0 : i * tx_size_wide_unit[sub_txs])
                  : blk_col + (i & 0x01) * bsl;
#else
      const int offsetr = blk_row + (i >> 1) * bsl;
      const int offsetc = blk_col + (i & 0x01) * bsl;
#endif

      if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

      decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize, offsetr,
                            offsetc, block, sub_txs, eob_total);
      block += sub_step;
    }
  }
}

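// Points xd at the mode info for the block at (mi_row, mi_col), replicates
// the MODE_INFO pointer across the x_mis-by-y_mis grid it covers, and sets up
// the destination planes and frame-edge distances for the block.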
static void set_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                        BLOCK_SIZE bsize, int mi_row, int mi_col, int bw,
                        int bh, int x_mis, int y_mis) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;
  const TileInfo *const tile = &xd->tile;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
  // TODO(slavarnway): Generate sb_type based on bwl and bhl, instead of
  // passing bsize from decode_partition().
  xd->mi[0]->mbmi.sb_type = bsize;
#if CONFIG_RD_DEBUG
  xd->mi[0]->mbmi.mi_row = mi_row;
  xd->mi[0]->mbmi.mi_col = mi_col;
#endif
#if CONFIG_CFL
  xd->cfl->mi_row = mi_row;
  xd->cfl->mi_col = mi_col;
#endif

  assert(x_mis && y_mis);
  for (x = 1; x < x_mis; ++x) xd->mi[x] = xd->mi[0];
  int idx = cm->mi_stride;
  for (y = 1; y < y_mis; ++y) {
    memcpy(&xd->mi[idx], &xd->mi[0], x_mis * sizeof(xd->mi[0]));
    idx += cm->mi_stride;
  }

  set_plane_n4(xd, bw, bh);
  set_skip_context(xd, mi_row, mi_col);

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);

  av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
                       mi_col);
}

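// Parses the mode info (block type, prediction modes, motion data) for one
// block and checks that the implied chroma block size is valid for the
// current subsampling.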
static void decode_mbmi_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                              int mi_row, int mi_col, aom_reader *r,
#if CONFIG_EXT_PARTITION_TYPES
                              PARTITION_TYPE partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                              BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);

#if CONFIG_ACCOUNTING
  aom_accounting_set_context(&pbi->accounting, mi_col, mi_row);
#endif
  set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
#if CONFIG_EXT_PARTITION_TYPES
  xd->mi[0]->mbmi.partition = partition;
#endif
  av1_read_mode_info(pbi, xd, mi_row, mi_col, r, x_mis, y_mis);
  if (bsize >= BLOCK_8X8 && (cm->subsampling_x || cm->subsampling_y)) {
    const BLOCK_SIZE uv_subsize =
        ss_size_lookup[bsize][cm->subsampling_x][cm->subsampling_y];
    if (uv_subsize == BLOCK_INVALID)
      aom_internal_error(xd->error_info, AOM_CODEC_CORRUPT_FRAME,
                         "Invalid block size.");
  }

  int reader_corrupted_flag = aom_reader_has_error(r);
  aom_merge_corrupted_flag(&xd->corrupted, reader_corrupted_flag);
}

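// Helpers for NCOBMC_ADAPT_WEIGHT: rebuild a block's prediction by
// interpolating between its own prediction and the extended predictions of
// its neighbors, according to the signalled ncobmc mode(s).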
#if CONFIG_NCOBMC_ADAPT_WEIGHT
static void set_mode_info_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                  int mi_row, int mi_col) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
}

static void get_ncobmc_recon(AV1_COMMON *const cm, MACROBLOCKD *xd, int mi_row,
                             int mi_col, int bsize, int mode) {
  uint8_t *pred_buf[4][MAX_MB_PLANE];
  int pred_stride[MAX_MB_PLANE] = { MAX_SB_SIZE, MAX_SB_SIZE, MAX_SB_SIZE };
  // target block in pxl
  int pxl_row = mi_row << MI_SIZE_LOG2;
  int pxl_col = mi_col << MI_SIZE_LOG2;

  int plane;
#if CONFIG_HIGHBITDEPTH
  if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
    int len = sizeof(uint16_t);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[0], cm->ncobmcaw_buf[0], MAX_SB_SQUARE,
                            len);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[1], cm->ncobmcaw_buf[1], MAX_SB_SQUARE,
                            len);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[2], cm->ncobmcaw_buf[2], MAX_SB_SQUARE,
                            len);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[3], cm->ncobmcaw_buf[3], MAX_SB_SQUARE,
                            len);
  } else {
#endif  // CONFIG_HIGHBITDEPTH
    ASSIGN_ALIGNED_PTRS(pred_buf[0], cm->ncobmcaw_buf[0], MAX_SB_SQUARE);
    ASSIGN_ALIGNED_PTRS(pred_buf[1], cm->ncobmcaw_buf[1], MAX_SB_SQUARE);
    ASSIGN_ALIGNED_PTRS(pred_buf[2], cm->ncobmcaw_buf[2], MAX_SB_SQUARE);
    ASSIGN_ALIGNED_PTRS(pred_buf[3], cm->ncobmcaw_buf[3], MAX_SB_SQUARE);
#if CONFIG_HIGHBITDEPTH
  }
#endif
  av1_get_ext_blk_preds(cm, xd, bsize, mi_row, mi_col, pred_buf, pred_stride);
  av1_get_ori_blk_pred(cm, xd, bsize, mi_row, mi_col, pred_buf[3], pred_stride);
  for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
    build_ncobmc_intrpl_pred(cm, xd, plane, pxl_row, pxl_col, bsize, pred_buf,
                             pred_stride, mode);
  }
}

static void av1_get_ncobmc_recon(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                 int bsize, const int mi_row, const int mi_col,
                                 const NCOBMC_MODE modes) {
  const int mi_width = mi_size_wide[bsize];
  const int mi_height = mi_size_high[bsize];

  assert(bsize >= BLOCK_8X8);

  reset_xd_boundary(xd, mi_row, mi_height, mi_col, mi_width, cm->mi_rows,
                    cm->mi_cols);
  get_ncobmc_recon(cm, xd, mi_row, mi_col, bsize, modes);
}

static void recon_ncobmc_intrpl_pred(AV1_COMMON *const cm,
                                     MACROBLOCKD *const xd, int mi_row,
                                     int mi_col, BLOCK_SIZE bsize) {
  MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const int mi_width = mi_size_wide[bsize];
  const int mi_height = mi_size_high[bsize];
  const int hbs = AOMMAX(mi_size_wide[bsize] / 2, mi_size_high[bsize] / 2);
  const BLOCK_SIZE sqr_blk = bsize_2_sqr_bsize[bsize];
  if (mi_width > mi_height) {
    // horizontal partition
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
    xd->mi += hbs;
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col + hbs,
                         mbmi->ncobmc_mode[1]);
  } else if (mi_height > mi_width) {
    // vertical partition
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
    xd->mi += hbs * xd->mi_stride;
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row + hbs, mi_col,
                         mbmi->ncobmc_mode[1]);
  } else {
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
  }
  set_mode_info_offsets(cm, xd, mi_row, mi_col);
  // restore dst buffer and mode info
  av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
                       mi_col);
}
#endif  // CONFIG_NCOBMC_ADAPT_WEIGHT

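// Second stage of block decoding: with the mode info already parsed, read the
// residual and reconstruct the pixels. Intra blocks are predicted and
// reconstructed per transform block; inter blocks first build the full inter
// prediction (including OBMC/NCOBMC variants) and then add the residual.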
static void decode_token_and_recon_block(AV1Decoder *const pbi,
                                         MACROBLOCKD *const xd, int mi_row,
                                         int mi_col, aom_reader *r,
                                         BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);

  set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
  MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
#if CONFIG_CFL
  CFL_CTX *const cfl = xd->cfl;
  cfl->is_chroma_reference = is_chroma_reference(
      mi_row, mi_col, bsize, cfl->subsampling_x, cfl->subsampling_y);
#endif  // CONFIG_CFL

  if (cm->delta_q_present_flag) {
    int i;
    for (i = 0; i < MAX_SEGMENTS; i++) {
#if CONFIG_EXT_DELTA_Q
      const int current_qindex =
          av1_get_qindex(&cm->seg, i, xd->current_qindex);
#else
      const int current_qindex = xd->current_qindex;
#endif  // CONFIG_EXT_DELTA_Q
      int j;
      for (j = 0; j < MAX_MB_PLANE; ++j) {
        const int dc_delta_q = j == 0 ? cm->y_dc_delta_q : cm->uv_dc_delta_q;
        const int ac_delta_q = j == 0 ? 0 : cm->uv_ac_delta_q;

        xd->plane[j].seg_dequant[i][0] =
            av1_dc_quant(current_qindex, dc_delta_q, cm->bit_depth);
        xd->plane[j].seg_dequant[i][1] =
            av1_ac_quant(current_qindex, ac_delta_q, cm->bit_depth);
      }
    }
  }
  if (mbmi->skip) av1_reset_skip_context(xd, mi_row, mi_col, bsize);

  if (!is_inter_block(mbmi)) {
    int plane;

    for (plane = 0; plane <= 1; ++plane) {
      if (mbmi->palette_mode_info.palette_size[plane])
        av1_decode_palette_tokens(xd, plane, r);
    }

    for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
      const struct macroblockd_plane *const pd = &xd->plane[plane];
      const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
      const int stepr = tx_size_high_unit[tx_size];
      const int stepc = tx_size_wide_unit[tx_size];
      const BLOCK_SIZE plane_bsize =
          AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
      int row, col;
      const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
      const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
      if (!is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
                               pd->subsampling_y))
        continue;
      int blk_row, blk_col;
      const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
      int mu_blocks_wide =
          block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
      int mu_blocks_high =
          block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
      mu_blocks_wide = AOMMIN(max_blocks_wide, mu_blocks_wide);
      mu_blocks_high = AOMMIN(max_blocks_high, mu_blocks_high);

      for (row = 0; row < max_blocks_high; row += mu_blocks_high) {
        const int unit_height = AOMMIN(mu_blocks_high + row, max_blocks_high);
        for (col = 0; col < max_blocks_wide; col += mu_blocks_wide) {
          const int unit_width = AOMMIN(mu_blocks_wide + col, max_blocks_wide);

          for (blk_row = row; blk_row < unit_height; blk_row += stepr)
            for (blk_col = col; blk_col < unit_width; blk_col += stepc)
              predict_and_reconstruct_intra_block(cm, xd, r, mbmi, plane,
                                                  blk_row, blk_col, tx_size);
        }
      }
    }
  } else {
    int ref;

#if CONFIG_COMPOUND_SINGLEREF
    for (ref = 0; ref < 1 + is_inter_anyref_comp_mode(mbmi->mode); ++ref)
#else
    for (ref = 0; ref < 1 + has_second_ref(mbmi); ++ref)
#endif  // CONFIG_COMPOUND_SINGLEREF
    {
      const MV_REFERENCE_FRAME frame =
#if CONFIG_COMPOUND_SINGLEREF
          has_second_ref(mbmi) ? mbmi->ref_frame[ref] : mbmi->ref_frame[0];
#else
          mbmi->ref_frame[ref];
#endif  // CONFIG_COMPOUND_SINGLEREF
      if (frame < LAST_FRAME) {
#if CONFIG_INTRABC
        assert(is_intrabc_block(mbmi));
        assert(frame == INTRA_FRAME);
        assert(ref == 0);
#else
        assert(0);
#endif  // CONFIG_INTRABC
      } else {
        RefBuffer *ref_buf = &cm->frame_refs[frame - LAST_FRAME];

        xd->block_refs[ref] = ref_buf;
        if ((!av1_is_valid_scale(&ref_buf->sf)))
          aom_internal_error(xd->error_info, AOM_CODEC_UNSUP_BITSTREAM,
                             "Reference frame has invalid dimensions");
        av1_setup_pre_planes(xd, ref, ref_buf->buf, mi_row, mi_col,
                             &ref_buf->sf);
      }
    }

    av1_build_inter_predictors_sb(cm, xd, mi_row, mi_col, NULL, bsize);

    if (mbmi->motion_mode == OBMC_CAUSAL) {
#if CONFIG_NCOBMC
      av1_build_ncobmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
#else
      av1_build_obmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
#endif
    }
#if CONFIG_NCOBMC_ADAPT_WEIGHT
    if (mbmi->motion_mode == NCOBMC_ADAPT_WEIGHT) {
      int plane;
      recon_ncobmc_intrpl_pred(cm, xd, mi_row, mi_col, bsize);
      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        get_pred_from_intrpl_buf(xd, mi_row, mi_col, bsize, plane);
      }
    }
#endif
    // Reconstruction
    if (!mbmi->skip) {
      int eobtotal = 0;
      int plane;

      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        const struct macroblockd_plane *const pd = &xd->plane[plane];
        const BLOCK_SIZE plane_bsize =
            AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
        const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
        const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
        int row, col;

        if (!is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
                                 pd->subsampling_y))
          continue;

        const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
        int mu_blocks_wide =
            block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
        int mu_blocks_high =
            block_size_high[max_unit_bsize] >> tx_size_high_log2[0];

        mu_blocks_wide = AOMMIN(max_blocks_wide, mu_blocks_wide);
        mu_blocks_high = AOMMIN(max_blocks_high, mu_blocks_high);

        const TX_SIZE max_tx_size = get_vartx_max_txsize(
            mbmi, plane_bsize, pd->subsampling_x || pd->subsampling_y);
        const int bh_var_tx = tx_size_high_unit[max_tx_size];
        const int bw_var_tx = tx_size_wide_unit[max_tx_size];
        int block = 0;
        int step =
            tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];

        for (row = 0; row < max_blocks_high; row += mu_blocks_high) {
          for (col = 0; col < max_blocks_wide; col += mu_blocks_wide) {
            int blk_row, blk_col;
            const int unit_height =
                AOMMIN(mu_blocks_high + row, max_blocks_high);
            const int unit_width =
                AOMMIN(mu_blocks_wide + col, max_blocks_wide);
            for (blk_row = row; blk_row < unit_height; blk_row += bh_var_tx) {
              for (blk_col = col; blk_col < unit_width; blk_col += bw_var_tx) {
                decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize,
                                      blk_row, blk_col, block, max_tx_size,
                                      &eobtotal);
                block += step;
              }
            }
          }
        }
      }
    }
  }
#if CONFIG_CFL
  if (mbmi->uv_mode != UV_CFL_PRED) {
    if (!cfl->is_chroma_reference && is_inter_block(mbmi)) {
      cfl_store_block(xd, mbmi->sb_type, mbmi->tx_size);
    }
  }
#endif  // CONFIG_CFL

  int reader_corrupted_flag = aom_reader_has_error(r);
  aom_merge_corrupted_flag(&xd->corrupted, reader_corrupted_flag);
}

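// With NC_MODE_INFO, token decoding and reconstruction for a superblock are
// done in a separate pass that walks the partition tree and calls
// decode_token_and_recon_block for each leaf block.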
#if NC_MODE_INFO
static void detoken_and_recon_sb(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                                 int mi_row, int mi_col, aom_reader *r,
                                 BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int hbs = mi_size_wide[bsize] >> 1;
#if CONFIG_EXT_PARTITION_TYPES
  BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
#endif
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  partition = get_partition(cm, mi_row, mi_col, bsize);
  subsize = subsize_lookup[partition][bsize];

  switch (partition) {
    case PARTITION_NONE:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize);
      break;
    case PARTITION_HORZ:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      if (has_rows)
        decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, subsize);
      break;
    case PARTITION_VERT:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      if (has_cols)
        decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, subsize);
      break;
    case PARTITION_SPLIT:
      detoken_and_recon_sb(pbi, xd, mi_row, mi_col, r, subsize);
      detoken_and_recon_sb(pbi, xd, mi_row, mi_col + hbs, r, subsize);
      detoken_and_recon_sb(pbi, xd, mi_row + hbs, mi_col, r, subsize);
      detoken_and_recon_sb(pbi, xd, mi_row + hbs, mi_col + hbs, r, subsize);
      break;
#if CONFIG_EXT_PARTITION_TYPES
#if CONFIG_EXT_PARTITION_TYPES_AB
#error NC_MODE_INFO+MOTION_VAR not yet supported for new HORZ/VERT_AB partitions
#endif
    case PARTITION_HORZ_A:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, subsize);
      break;
    case PARTITION_HORZ_B:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col + hbs, r,
                                   bsize2);
      break;
    case PARTITION_VERT_A:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, subsize);
      break;
    case PARTITION_VERT_B:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col + hbs, r,
                                   bsize2);
      break;
#endif
    default: assert(0 && "Invalid partition type");
  }
}
#endif

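// Decodes one coding block: parse its mode info and, unless NC_MODE_INFO
// defers reconstruction to a later pass, decode the residual and reconstruct.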
static void decode_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                         int mi_row, int mi_col, aom_reader *r,
#if CONFIG_EXT_PARTITION_TYPES
                         PARTITION_TYPE partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                         BLOCK_SIZE bsize) {
  decode_mbmi_block(pbi, xd, mi_row, mi_col, r,
#if CONFIG_EXT_PARTITION_TYPES
                    partition,
#endif
                    bsize);

#if !(NC_MODE_INFO)
  decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize);
#endif
}

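// Reads the partition type of a block. When the block extends past the right
// or bottom frame edge only a restricted set of partitions is legal, so a
// two-entry CDF gathered from the full distribution is read instead; when
// both edges are exceeded PARTITION_SPLIT is inferred.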
static PARTITION_TYPE read_partition(AV1_COMMON *cm, MACROBLOCKD *xd,
                                     int mi_row, int mi_col, aom_reader *r,
                                     int has_rows, int has_cols,
                                     BLOCK_SIZE bsize) {
#if CONFIG_UNPOISON_PARTITION_CTX
  const int ctx =
      partition_plane_context(xd, mi_row, mi_col, has_rows, has_cols, bsize);
#else
  const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
#endif
  PARTITION_TYPE p;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;

  aom_cdf_prob *partition_cdf = (ctx >= 0) ? ec_ctx->partition_cdf[ctx] : NULL;

  if (has_rows && has_cols) {
#if CONFIG_EXT_PARTITION_TYPES
    const int num_partition_types =
        (mi_width_log2_lookup[bsize] > mi_width_log2_lookup[BLOCK_8X8])
            ? EXT_PARTITION_TYPES
            : PARTITION_TYPES;
#else
    const int num_partition_types = PARTITION_TYPES;
#endif  // CONFIG_EXT_PARTITION_TYPES
    p = (PARTITION_TYPE)aom_read_symbol(r, partition_cdf, num_partition_types,
                                        ACCT_STR);
  } else if (!has_rows && has_cols) {
    assert(bsize > BLOCK_8X8);
    aom_cdf_prob cdf[2];
    partition_gather_vert_alike(cdf, partition_cdf);
    assert(cdf[1] == AOM_ICDF(CDF_PROB_TOP));
    p = aom_read_cdf(r, cdf, 2, ACCT_STR) ? PARTITION_SPLIT : PARTITION_HORZ;
    // gather cols
  } else if (has_rows && !has_cols) {
    assert(bsize > BLOCK_8X8);
    aom_cdf_prob cdf[2];
    partition_gather_horz_alike(cdf, partition_cdf);
    assert(cdf[1] == AOM_ICDF(CDF_PROB_TOP));
    p = aom_read_cdf(r, cdf, 2, ACCT_STR) ? PARTITION_SPLIT : PARTITION_VERT;
  } else {
    p = PARTITION_SPLIT;
  }

  return p;
}

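// Recursively decodes one node of the partition tree: read the partition
// type, decode the resulting block(s) or sub-partitions, update the partition
// context, and read the per-superblock syntax that follows (LPF_SB filter
// levels, CDEF strengths, loop-restoration info) for the tools that are
// configured.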
// TODO(slavarnway): eliminate bsize and subsize in future commits
static void decode_partition(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                             int mi_row, int mi_col, aom_reader *r,
                             BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int num_8x8_wh = mi_size_wide[bsize];
  const int hbs = num_8x8_wh >> 1;
#if CONFIG_EXT_PARTITION_TYPES && CONFIG_EXT_PARTITION_TYPES_AB
  const int qbs = num_8x8_wh >> 2;
#endif
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
#if CONFIG_EXT_PARTITION_TYPES
  const int quarter_step = num_8x8_wh / 4;
  int i;
#if !CONFIG_EXT_PARTITION_TYPES_AB
  BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
#endif
#endif
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  partition = (bsize < BLOCK_8X8) ? PARTITION_NONE
                                  : read_partition(cm, xd, mi_row, mi_col, r,
                                                   has_rows, has_cols, bsize);
  subsize = subsize_lookup[partition][bsize];  // get_subsize(bsize, partition);

  // Check the bitstream is conformant: if there is subsampling on the
  // chroma planes, subsize must subsample to a valid block size.
  const struct macroblockd_plane *const pd_u = &xd->plane[1];
  if (get_plane_block_size(subsize, pd_u) == BLOCK_INVALID) {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Block size %dx%d invalid with this subsampling mode",
                       block_size_wide[subsize], block_size_high[subsize]);
  }

#define DEC_BLOCK_STX_ARG
#if CONFIG_EXT_PARTITION_TYPES
#define DEC_BLOCK_EPT_ARG partition,
#else
#define DEC_BLOCK_EPT_ARG
#endif
#define DEC_BLOCK(db_r, db_c, db_subsize)                   \
  decode_block(pbi, xd, DEC_BLOCK_STX_ARG(db_r), (db_c), r, \
               DEC_BLOCK_EPT_ARG(db_subsize))
#define DEC_PARTITION(db_r, db_c, db_subsize) \
  decode_partition(pbi, xd, DEC_BLOCK_STX_ARG(db_r), (db_c), r, (db_subsize))

  switch (partition) {
    case PARTITION_NONE: DEC_BLOCK(mi_row, mi_col, subsize); break;
    case PARTITION_HORZ:
      DEC_BLOCK(mi_row, mi_col, subsize);
      if (has_rows) DEC_BLOCK(mi_row + hbs, mi_col, subsize);
      break;
    case PARTITION_VERT:
      DEC_BLOCK(mi_row, mi_col, subsize);
      if (has_cols) DEC_BLOCK(mi_row, mi_col + hbs, subsize);
      break;
    case PARTITION_SPLIT:
      DEC_PARTITION(mi_row, mi_col, subsize);
      DEC_PARTITION(mi_row, mi_col + hbs, subsize);
      DEC_PARTITION(mi_row + hbs, mi_col, subsize);
      DEC_PARTITION(mi_row + hbs, mi_col + hbs, subsize);
      break;
#if CONFIG_EXT_PARTITION_TYPES
#if CONFIG_EXT_PARTITION_TYPES_AB
    case PARTITION_HORZ_A:
      DEC_BLOCK(mi_row, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
      DEC_BLOCK(mi_row + qbs, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
      DEC_BLOCK(mi_row + hbs, mi_col, subsize);
      break;
    case PARTITION_HORZ_B:
      DEC_BLOCK(mi_row, mi_col, subsize);
      DEC_BLOCK(mi_row + hbs, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
      if (mi_row + 3 * qbs < cm->mi_rows)
        DEC_BLOCK(mi_row + 3 * qbs, mi_col,
                  get_subsize(bsize, PARTITION_HORZ_4));
      break;
    case PARTITION_VERT_A:
      DEC_BLOCK(mi_row, mi_col, get_subsize(bsize, PARTITION_VERT_4));
      DEC_BLOCK(mi_row, mi_col + qbs, get_subsize(bsize, PARTITION_VERT_4));
      DEC_BLOCK(mi_row, mi_col + hbs, subsize);
      break;
    case PARTITION_VERT_B:
      DEC_BLOCK(mi_row, mi_col, subsize);
      DEC_BLOCK(mi_row, mi_col + hbs, get_subsize(bsize, PARTITION_VERT_4));
      if (mi_col + 3 * qbs < cm->mi_cols)
        DEC_BLOCK(mi_row, mi_col + 3 * qbs,
                  get_subsize(bsize, PARTITION_VERT_4));
      break;
#else
    case PARTITION_HORZ_A:
      DEC_BLOCK(mi_row, mi_col, bsize2);
      DEC_BLOCK(mi_row, mi_col + hbs, bsize2);
      DEC_BLOCK(mi_row + hbs, mi_col, subsize);
      break;
    case PARTITION_HORZ_B:
      DEC_BLOCK(mi_row, mi_col, subsize);
      DEC_BLOCK(mi_row + hbs, mi_col, bsize2);
      DEC_BLOCK(mi_row + hbs, mi_col + hbs, bsize2);
      break;
    case PARTITION_VERT_A:
      DEC_BLOCK(mi_row, mi_col, bsize2);
      DEC_BLOCK(mi_row + hbs, mi_col, bsize2);
      DEC_BLOCK(mi_row, mi_col + hbs, subsize);
      break;
    case PARTITION_VERT_B:
      DEC_BLOCK(mi_row, mi_col, subsize);
      DEC_BLOCK(mi_row, mi_col + hbs, bsize2);
      DEC_BLOCK(mi_row + hbs, mi_col + hbs, bsize2);
      break;
#endif
    case PARTITION_HORZ_4:
      for (i = 0; i < 4; ++i) {
        int this_mi_row = mi_row + i * quarter_step;
        if (i > 0 && this_mi_row >= cm->mi_rows) break;
        DEC_BLOCK(this_mi_row, mi_col, subsize);
      }
      break;
    case PARTITION_VERT_4:
      for (i = 0; i < 4; ++i) {
        int this_mi_col = mi_col + i * quarter_step;
        if (i > 0 && this_mi_col >= cm->mi_cols) break;
        DEC_BLOCK(mi_row, this_mi_col, subsize);
      }
      break;
#endif  // CONFIG_EXT_PARTITION_TYPES
    default: assert(0 && "Invalid partition type");
  }

#undef DEC_PARTITION
#undef DEC_BLOCK
#undef DEC_BLOCK_EPT_ARG
#undef DEC_BLOCK_STX_ARG

#if CONFIG_EXT_PARTITION_TYPES
  update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
#else
  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(xd, mi_row, mi_col, subsize, bsize);
#endif  // CONFIG_EXT_PARTITION_TYPES

#if CONFIG_LPF_SB
  if (bsize == cm->sb_size) {
    int filt_lvl;
    if (mi_row == 0 && mi_col == 0) {
      filt_lvl = aom_read_literal(r, 6, ACCT_STR);
      cm->mi_grid_visible[0]->mbmi.reuse_sb_lvl = 0;
      cm->mi_grid_visible[0]->mbmi.delta = 0;
      cm->mi_grid_visible[0]->mbmi.sign = 0;
    } else {
      int prev_mi_row, prev_mi_col;
      if (mi_col - MAX_MIB_SIZE < 0) {
        prev_mi_row = mi_row - MAX_MIB_SIZE;
        prev_mi_col = mi_col;
      } else {
        prev_mi_row = mi_row;
        prev_mi_col = mi_col - MAX_MIB_SIZE;
      }

      MB_MODE_INFO *curr_mbmi =
          &cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi;
      MB_MODE_INFO *prev_mbmi =
          &cm->mi_grid_visible[prev_mi_row * cm->mi_stride + prev_mi_col]->mbmi;
      const uint8_t prev_lvl = prev_mbmi->filt_lvl;

      const int reuse_ctx = prev_mbmi->reuse_sb_lvl;
      const int reuse_prev_lvl = aom_read_symbol(
          r, xd->tile_ctx->lpf_reuse_cdf[reuse_ctx], 2, ACCT_STR);
      curr_mbmi->reuse_sb_lvl = reuse_prev_lvl;

      if (reuse_prev_lvl) {
        filt_lvl = prev_lvl;
        curr_mbmi->delta = 0;
        curr_mbmi->sign = 0;
      } else {
        const int delta_ctx = prev_mbmi->delta;
        unsigned int delta = aom_read_symbol(
            r, xd->tile_ctx->lpf_delta_cdf[delta_ctx], DELTA_RANGE, ACCT_STR);
        curr_mbmi->delta = delta;
        delta *= LPF_STEP;

        if (delta) {
          const int sign_ctx = prev_mbmi->sign;
          const int sign = aom_read_symbol(
              r, xd->tile_ctx->lpf_sign_cdf[reuse_ctx][sign_ctx], 2, ACCT_STR);
          curr_mbmi->sign = sign;
          filt_lvl = sign ? prev_lvl + delta : prev_lvl - delta;
        } else {
          filt_lvl = prev_lvl;
          curr_mbmi->sign = 0;
        }
      }
    }

    av1_loop_filter_sb_level_init(cm, mi_row, mi_col, filt_lvl);
  }
#endif

#if CONFIG_CDEF
  if (bsize == cm->sb_size) {
    int width_step = mi_size_wide[BLOCK_64X64];
    int height_step = mi_size_wide[BLOCK_64X64];
    int w, h;
    for (h = 0; (h < mi_size_high[cm->sb_size]) && (mi_row + h < cm->mi_rows);
         h += height_step) {
      for (w = 0; (w < mi_size_wide[cm->sb_size]) && (mi_col + w < cm->mi_cols);
           w += width_step) {
        if (!cm->all_lossless && !sb_all_skip(cm, mi_row + h, mi_col + w))
          cm->mi_grid_visible[(mi_row + h) * cm->mi_stride + (mi_col + w)]
              ->mbmi.cdef_strength =
              aom_read_literal(r, cm->cdef_bits, ACCT_STR);
        else
          cm->mi_grid_visible[(mi_row + h) * cm->mi_stride + (mi_col + w)]
              ->mbmi.cdef_strength = -1;
      }
    }
  }
#endif  // CONFIG_CDEF
#if CONFIG_LOOP_RESTORATION
  for (int plane = 0; plane < MAX_MB_PLANE; ++plane) {
    int rcol0, rcol1, rrow0, rrow1, tile_tl_idx;
    if (av1_loop_restoration_corners_in_sb(cm, plane, mi_row, mi_col, bsize,
                                           &rcol0, &rcol1, &rrow0, &rrow1,
                                           &tile_tl_idx)) {
      const int rstride = cm->rst_info[plane].horz_units_per_tile;
      for (int rrow = rrow0; rrow < rrow1; ++rrow) {
        for (int rcol = rcol0; rcol < rcol1; ++rcol) {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001128 const int rtile_idx = tile_tl_idx + rcol + rrow * rstride;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001129 loop_restoration_read_sb_coeffs(cm, xd, r, plane, rtile_idx);
1130 }
1131 }
1132 }
1133 }
1134#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001135}
1136
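
// Validates the data range for one tile and initializes the entropy decoder
// 'r' on that range, reporting a decode error if the range is invalid.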
static void setup_bool_decoder(const uint8_t *data, const uint8_t *data_end,
                               const size_t read_size,
                               struct aom_internal_error_info *error_info,
                               aom_reader *r,
#if CONFIG_ANS && ANS_MAX_SYMBOLS
                               int window_size,
#endif  // CONFIG_ANS && ANS_MAX_SYMBOLS
                               aom_decrypt_cb decrypt_cb, void *decrypt_state) {
  // Validate the calculated partition length. If the buffer
  // described by the partition can't be fully read, then restrict
  // it to the portion that can be (for EC mode) or throw an error.
  if (!read_is_valid(data, read_size, data_end))
    aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt tile length");

#if CONFIG_ANS && ANS_MAX_SYMBOLS
  r->window_size = window_size;
#endif
  if (aom_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
    aom_internal_error(error_info, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate bool decoder %d", 1);
}
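
// Reads the segmentation parameters from the uncompressed frame header:
// the enable flag, the map/temporal/data update flags and, when signalled,
// the per-segment feature data.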
static void setup_segmentation(AV1_COMMON *const cm,
                               struct aom_read_bit_buffer *rb) {
  struct segmentation *const seg = &cm->seg;
  int i, j;

  seg->update_map = 0;
  seg->update_data = 0;
  seg->temporal_update = 0;

  seg->enabled = aom_rb_read_bit(rb);
  if (!seg->enabled) return;

  // Segmentation map update
  if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
    seg->update_map = 1;
  } else {
    seg->update_map = aom_rb_read_bit(rb);
  }
  if (seg->update_map) {
    if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
      seg->temporal_update = 0;
    } else {
      seg->temporal_update = aom_rb_read_bit(rb);
    }
  }

  // Segmentation data update
  seg->update_data = aom_rb_read_bit(rb);
  if (seg->update_data) {
    seg->abs_delta = aom_rb_read_bit(rb);

    av1_clearall_segfeatures(seg);

    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        int data = 0;
        const int feature_enabled = aom_rb_read_bit(rb);
        if (feature_enabled) {
          av1_enable_segfeature(seg, i, j);
          data = decode_unsigned_max(rb, av1_seg_feature_data_max(j));
          if (av1_is_segfeature_signed(j))
            data = aom_rb_read_bit(rb) ? -data : data;
        }
        av1_set_segdata(seg, i, j, data);
      }
    }
  }
}

#if CONFIG_LOOP_RESTORATION
static void decode_restoration_mode(AV1_COMMON *cm,
                                    struct aom_read_bit_buffer *rb) {
  int p;
  RestorationInfo *rsi;
  for (p = 0; p < MAX_MB_PLANE; ++p) {
    rsi = &cm->rst_info[p];
    if (aom_rb_read_bit(rb)) {
      rsi->frame_restoration_type =
          aom_rb_read_bit(rb) ? RESTORE_SGRPROJ : RESTORE_WIENER;
    } else {
      rsi->frame_restoration_type =
          aom_rb_read_bit(rb) ? RESTORE_SWITCHABLE : RESTORE_NONE;
    }
  }
  cm->rst_info[0].restoration_unit_size = RESTORATION_TILESIZE_MAX;
  cm->rst_info[1].restoration_unit_size = RESTORATION_TILESIZE_MAX;
  cm->rst_info[2].restoration_unit_size = RESTORATION_TILESIZE_MAX;
  if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
    cm->rst_info[0].restoration_unit_size = RESTORATION_TILESIZE_MAX >> 2;
    cm->rst_info[1].restoration_unit_size = RESTORATION_TILESIZE_MAX >> 2;
    cm->rst_info[2].restoration_unit_size = RESTORATION_TILESIZE_MAX >> 2;
    rsi = &cm->rst_info[0];
    rsi->restoration_unit_size <<= aom_rb_read_bit(rb);
    if (rsi->restoration_unit_size != (RESTORATION_TILESIZE_MAX >> 2)) {
      rsi->restoration_unit_size <<= aom_rb_read_bit(rb);
    }
  }
  int s = AOMMIN(cm->subsampling_x, cm->subsampling_y);
  if (s && (cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
            cm->rst_info[2].frame_restoration_type != RESTORE_NONE)) {
    cm->rst_info[1].restoration_unit_size =
        cm->rst_info[0].restoration_unit_size >> (aom_rb_read_bit(rb) * s);
  } else {
    cm->rst_info[1].restoration_unit_size =
        cm->rst_info[0].restoration_unit_size;
  }
  cm->rst_info[2].restoration_unit_size = cm->rst_info[1].restoration_unit_size;
}
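
// Reads one set of Wiener filter taps. Each tap is coded as a difference
// against the previously decoded set (ref_wiener_info) using a subexponential
// code; the filter is symmetric, so only half of the taps are signalled and
// the central tap is derived from the others.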
static void read_wiener_filter(int wiener_win, WienerInfo *wiener_info,
                               WienerInfo *ref_wiener_info, aom_reader *rb) {
  memset(wiener_info->vfilter, 0, sizeof(wiener_info->vfilter));
  memset(wiener_info->hfilter, 0, sizeof(wiener_info->hfilter));

  if (wiener_win == WIENER_WIN)
    wiener_info->vfilter[0] = wiener_info->vfilter[WIENER_WIN - 1] =
        aom_read_primitive_refsubexpfin(
            rb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
            WIENER_FILT_TAP0_SUBEXP_K,
            ref_wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV, ACCT_STR) +
        WIENER_FILT_TAP0_MINV;
  else
    wiener_info->vfilter[0] = wiener_info->vfilter[WIENER_WIN - 1] = 0;
  wiener_info->vfilter[1] = wiener_info->vfilter[WIENER_WIN - 2] =
      aom_read_primitive_refsubexpfin(
          rb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
          WIENER_FILT_TAP1_SUBEXP_K,
          ref_wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV, ACCT_STR) +
      WIENER_FILT_TAP1_MINV;
  wiener_info->vfilter[2] = wiener_info->vfilter[WIENER_WIN - 3] =
      aom_read_primitive_refsubexpfin(
          rb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
          WIENER_FILT_TAP2_SUBEXP_K,
          ref_wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV, ACCT_STR) +
      WIENER_FILT_TAP2_MINV;
  // The central element has an implicit +WIENER_FILT_STEP
  wiener_info->vfilter[WIENER_HALFWIN] =
      -2 * (wiener_info->vfilter[0] + wiener_info->vfilter[1] +
            wiener_info->vfilter[2]);

  if (wiener_win == WIENER_WIN)
    wiener_info->hfilter[0] = wiener_info->hfilter[WIENER_WIN - 1] =
        aom_read_primitive_refsubexpfin(
            rb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
            WIENER_FILT_TAP0_SUBEXP_K,
            ref_wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV, ACCT_STR) +
        WIENER_FILT_TAP0_MINV;
  else
    wiener_info->hfilter[0] = wiener_info->hfilter[WIENER_WIN - 1] = 0;
  wiener_info->hfilter[1] = wiener_info->hfilter[WIENER_WIN - 2] =
      aom_read_primitive_refsubexpfin(
          rb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
          WIENER_FILT_TAP1_SUBEXP_K,
          ref_wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV, ACCT_STR) +
      WIENER_FILT_TAP1_MINV;
  wiener_info->hfilter[2] = wiener_info->hfilter[WIENER_WIN - 3] =
      aom_read_primitive_refsubexpfin(
          rb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
          WIENER_FILT_TAP2_SUBEXP_K,
          ref_wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV, ACCT_STR) +
      WIENER_FILT_TAP2_MINV;
  // The central element has an implicit +WIENER_FILT_STEP
  wiener_info->hfilter[WIENER_HALFWIN] =
      -2 * (wiener_info->hfilter[0] + wiener_info->hfilter[1] +
            wiener_info->hfilter[2]);
  memcpy(ref_wiener_info, wiener_info, sizeof(*wiener_info));
}

static void read_sgrproj_filter(SgrprojInfo *sgrproj_info,
                                SgrprojInfo *ref_sgrproj_info, aom_reader *rb) {
  sgrproj_info->ep = aom_read_literal(rb, SGRPROJ_PARAMS_BITS, ACCT_STR);
  sgrproj_info->xqd[0] =
      aom_read_primitive_refsubexpfin(
          rb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1, SGRPROJ_PRJ_SUBEXP_K,
          ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0, ACCT_STR) +
      SGRPROJ_PRJ_MIN0;
  sgrproj_info->xqd[1] =
      aom_read_primitive_refsubexpfin(
          rb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1, SGRPROJ_PRJ_SUBEXP_K,
          ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1, ACCT_STR) +
      SGRPROJ_PRJ_MIN1;
  memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
}
static void loop_restoration_read_sb_coeffs(const AV1_COMMON *const cm,
                                            MACROBLOCKD *xd,
                                            aom_reader *const r, int plane,
                                            int rtile_idx) {
  const RestorationInfo *rsi = &cm->rst_info[plane];
  RestorationUnitInfo *rui = &rsi->unit_info[rtile_idx];
  if (rsi->frame_restoration_type == RESTORE_NONE) return;

  const int wiener_win = (plane > 0) ? WIENER_WIN_CHROMA : WIENER_WIN;
  WienerInfo *wiener_info = xd->wiener_info + plane;
  SgrprojInfo *sgrproj_info = xd->sgrproj_info + plane;

  if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) {
    rui->restoration_type =
        aom_read_symbol(r, xd->tile_ctx->switchable_restore_cdf,
                        RESTORE_SWITCHABLE_TYPES, ACCT_STR);
    switch (rui->restoration_type) {
      case RESTORE_WIENER:
        read_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, r);
        break;
      case RESTORE_SGRPROJ:
        read_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, r);
        break;
      default: assert(rui->restoration_type == RESTORE_NONE); break;
    }
  } else if (rsi->frame_restoration_type == RESTORE_WIENER) {
#if CONFIG_NEW_MULTISYMBOL
    if (aom_read_symbol(r, xd->tile_ctx->wiener_restore_cdf, 2, ACCT_STR)) {
#else
    if (aom_read(r, RESTORE_NONE_WIENER_PROB, ACCT_STR)) {
#endif  // CONFIG_NEW_MULTISYMBOL
      rui->restoration_type = RESTORE_WIENER;
      read_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, r);
    } else {
      rui->restoration_type = RESTORE_NONE;
    }
  } else if (rsi->frame_restoration_type == RESTORE_SGRPROJ) {
#if CONFIG_NEW_MULTISYMBOL
    if (aom_read_symbol(r, xd->tile_ctx->sgrproj_restore_cdf, 2, ACCT_STR)) {
#else
    if (aom_read(r, RESTORE_NONE_SGRPROJ_PROB, ACCT_STR)) {
#endif  // CONFIG_NEW_MULTISYMBOL
      rui->restoration_type = RESTORE_SGRPROJ;
      read_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, r);
    } else {
      rui->restoration_type = RESTORE_NONE;
    }
  }
}
#endif  // CONFIG_LOOP_RESTORATION
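
// Reads the loop filter levels, the sharpness level and the optional
// mode/reference deltas from the uncompressed frame header.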
static void setup_loopfilter(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  struct loopfilter *lf = &cm->lf;
#if !CONFIG_LPF_SB
#if CONFIG_LOOPFILTER_LEVEL
  lf->filter_level[0] = aom_rb_read_literal(rb, 6);
  lf->filter_level[1] = aom_rb_read_literal(rb, 6);
  if (lf->filter_level[0] || lf->filter_level[1]) {
    lf->filter_level_u = aom_rb_read_literal(rb, 6);
    lf->filter_level_v = aom_rb_read_literal(rb, 6);
  }
#else
  lf->filter_level = aom_rb_read_literal(rb, 6);
#endif
#endif  // CONFIG_LPF_SB
  lf->sharpness_level = aom_rb_read_literal(rb, 3);

  // Read in loop filter deltas applied at the MB level based on mode or ref
  // frame.
  lf->mode_ref_delta_update = 0;

  lf->mode_ref_delta_enabled = aom_rb_read_bit(rb);
  if (lf->mode_ref_delta_enabled) {
    lf->mode_ref_delta_update = aom_rb_read_bit(rb);
    if (lf->mode_ref_delta_update) {
      int i;

      for (i = 0; i < TOTAL_REFS_PER_FRAME; i++)
        if (aom_rb_read_bit(rb))
          lf->ref_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
        if (aom_rb_read_bit(rb))
          lf->mode_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);
    }
  }
}
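
// CDEF syntax: damping, the number of signalled strength indices
// (1 << cdef_bits) and the per-index luma/chroma strengths.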
#if CONFIG_CDEF
static void setup_cdef(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  int i;
#if CONFIG_CDEF_SINGLEPASS
  cm->cdef_pri_damping = cm->cdef_sec_damping = aom_rb_read_literal(rb, 2) + 3;
#else
  cm->cdef_pri_damping = aom_rb_read_literal(rb, 1) + 5;
  cm->cdef_sec_damping = aom_rb_read_literal(rb, 2) + 3;
#endif
  cm->cdef_bits = aom_rb_read_literal(rb, 2);
  cm->nb_cdef_strengths = 1 << cm->cdef_bits;
  for (i = 0; i < cm->nb_cdef_strengths; i++) {
    cm->cdef_strengths[i] = aom_rb_read_literal(rb, CDEF_STRENGTH_BITS);
    cm->cdef_uv_strengths[i] = cm->subsampling_x == cm->subsampling_y
                                   ? aom_rb_read_literal(rb, CDEF_STRENGTH_BITS)
                                   : 0;
  }
}
#endif  // CONFIG_CDEF

static INLINE int read_delta_q(struct aom_read_bit_buffer *rb) {
  return aom_rb_read_bit(rb) ? aom_rb_read_inv_signed_literal(rb, 6) : 0;
}
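
// Reads the base quantizer index and the DC/AC delta-q values (plus the
// quant-matrix levels when CONFIG_AOM_QM is enabled).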
static void setup_quantization(AV1_COMMON *const cm,
                               struct aom_read_bit_buffer *rb) {
  cm->base_qindex = aom_rb_read_literal(rb, QINDEX_BITS);
  cm->y_dc_delta_q = read_delta_q(rb);
  cm->uv_dc_delta_q = read_delta_q(rb);
  cm->uv_ac_delta_q = read_delta_q(rb);
  cm->dequant_bit_depth = cm->bit_depth;
#if CONFIG_AOM_QM
  cm->using_qmatrix = aom_rb_read_bit(rb);
  if (cm->using_qmatrix) {
    cm->min_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
    cm->max_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
  } else {
    cm->min_qmlevel = 0;
    cm->max_qmlevel = 0;
  }
#endif
}

// Build y/uv dequant values based on segmentation.
static void setup_segmentation_dequant(AV1_COMMON *const cm) {
#if CONFIG_AOM_QM
  const int using_qm = cm->using_qmatrix;
  const int minqm = cm->min_qmlevel;
  const int maxqm = cm->max_qmlevel;
#endif
  // When segmentation is disabled, only the first value is used. The
  // remaining are don't cares.
  const int max_segments = cm->seg.enabled ? MAX_SEGMENTS : 1;
  for (int i = 0; i < max_segments; ++i) {
    const int qindex = av1_get_qindex(&cm->seg, i, cm->base_qindex);
    cm->y_dequant[i][0] = av1_dc_quant(qindex, cm->y_dc_delta_q, cm->bit_depth);
    cm->y_dequant[i][1] = av1_ac_quant(qindex, 0, cm->bit_depth);
    cm->uv_dequant[i][0] =
        av1_dc_quant(qindex, cm->uv_dc_delta_q, cm->bit_depth);
    cm->uv_dequant[i][1] =
        av1_ac_quant(qindex, cm->uv_ac_delta_q, cm->bit_depth);
#if CONFIG_AOM_QM
    const int lossless = qindex == 0 && cm->y_dc_delta_q == 0 &&
                         cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
    // NB: depends on base index so there is only 1 set per frame
    // No quant weighting when lossless or signalled not using QM
    const int qmlevel = (lossless || using_qm == 0)
                            ? NUM_QM_LEVELS - 1
                            : aom_get_qmlevel(cm->base_qindex, minqm, maxqm);
    for (int j = 0; j < TX_SIZES_ALL; ++j) {
      cm->y_iqmatrix[i][j] = aom_iqmatrix(cm, qmlevel, 0, j);
      cm->uv_iqmatrix[i][j] = aom_iqmatrix(cm, qmlevel, 1, j);
    }
#endif  // CONFIG_AOM_QM
#if CONFIG_NEW_QUANT
    for (int dq = 0; dq < QUANT_PROFILES; dq++) {
      for (int b = 0; b < COEF_BANDS; ++b) {
        av1_get_dequant_val_nuq(cm->y_dequant[i][b != 0], b,
                                cm->y_dequant_nuq[i][dq][b], NULL, dq);
        av1_get_dequant_val_nuq(cm->uv_dequant[i][b != 0], b,
                                cm->uv_dequant_nuq[i][dq][b], NULL, dq);
      }
    }
#endif  // CONFIG_NEW_QUANT
  }
}

static InterpFilter read_frame_interp_filter(struct aom_read_bit_buffer *rb) {
  return aom_rb_read_bit(rb) ? SWITCHABLE
                             : aom_rb_read_literal(rb, LOG_SWITCHABLE_FILTERS);
}
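
// Reads the optional render size; it defaults to the coded size (or the
// superres-upscaled size when CONFIG_FRAME_SUPERRES is enabled).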
static void setup_render_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
#if CONFIG_FRAME_SUPERRES
  cm->render_width = cm->superres_upscaled_width;
  cm->render_height = cm->superres_upscaled_height;
#else
  cm->render_width = cm->width;
  cm->render_height = cm->height;
#endif  // CONFIG_FRAME_SUPERRES
  if (aom_rb_read_bit(rb))
#if CONFIG_FRAME_SIZE
    av1_read_frame_size(rb, 16, 16, &cm->render_width, &cm->render_height);
#else
    av1_read_frame_size(rb, &cm->render_width, &cm->render_height);
#endif
}

#if CONFIG_FRAME_SUPERRES
// TODO(afergs): make "struct aom_read_bit_buffer *const rb"?
static void setup_superres(AV1_COMMON *const cm, struct aom_read_bit_buffer *rb,
                           int *width, int *height) {
  cm->superres_upscaled_width = *width;
  cm->superres_upscaled_height = *height;
  if (aom_rb_read_bit(rb)) {
    cm->superres_scale_denominator =
        (uint8_t)aom_rb_read_literal(rb, SUPERRES_SCALE_BITS);
    cm->superres_scale_denominator += SUPERRES_SCALE_DENOMINATOR_MIN;
    // Don't edit cm->width or cm->height directly, or the buffers won't get
    // resized correctly
    av1_calculate_scaled_superres_size(width, height,
                                       cm->superres_scale_denominator);
  } else {
    // 1:1 scaling - i.e. no scaling, scale not provided
    cm->superres_scale_denominator = SCALE_NUMERATOR;
  }
}
#endif  // CONFIG_FRAME_SUPERRES
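
// Reallocates the mode-info context buffers when the coded frame dimensions
// change and keeps the motion vector buffer of the current frame in sync.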
static void resize_context_buffers(AV1_COMMON *cm, int width, int height) {
#if CONFIG_SIZE_LIMIT
  if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Dimensions of %dx%d beyond allowed size of %dx%d.",
                       width, height, DECODE_WIDTH_LIMIT, DECODE_HEIGHT_LIMIT);
#endif
  if (cm->width != width || cm->height != height) {
    const int new_mi_rows =
        ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
    const int new_mi_cols =
        ALIGN_POWER_OF_TWO(width, MI_SIZE_LOG2) >> MI_SIZE_LOG2;

    // Allocations in av1_alloc_context_buffers() depend on individual
    // dimensions as well as the overall size.
    if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
      if (av1_alloc_context_buffers(cm, width, height))
        aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                           "Failed to allocate context buffers");
    } else {
      av1_set_mb_mi(cm, width, height);
    }
    av1_init_context_buffers(cm);
    cm->width = width;
    cm->height = height;
  }

  ensure_mv_buffer(cm->cur_frame, cm);
  cm->cur_frame->width = cm->width;
  cm->cur_frame->height = cm->height;
}
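
// Reads the frame size (and, when enabled, the superres and render sizes),
// resizes the context buffers and reallocates the output frame buffer.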
#if CONFIG_FRAME_SIZE
static void setup_frame_size(AV1_COMMON *cm, int frame_size_override_flag,
                             struct aom_read_bit_buffer *rb) {
#else
static void setup_frame_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
#endif
  int width, height;
  BufferPool *const pool = cm->buffer_pool;
#if CONFIG_FRAME_SIZE
  if (frame_size_override_flag) {
    int num_bits_width = cm->seq_params.num_bits_width;
    int num_bits_height = cm->seq_params.num_bits_height;
    av1_read_frame_size(rb, num_bits_width, num_bits_height, &width, &height);
  } else {
    width = cm->seq_params.max_frame_width;
    height = cm->seq_params.max_frame_height;
  }
#else
  av1_read_frame_size(rb, &width, &height);
#endif
#if CONFIG_FRAME_SUPERRES
  setup_superres(cm, rb, &width, &height);
#endif  // CONFIG_FRAME_SUPERRES
  setup_render_size(cm, rb);
  resize_context_buffers(cm, width, height);

  lock_buffer_pool(pool);
  if (aom_realloc_frame_buffer(
          get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
          cm->subsampling_y,
#if CONFIG_HIGHBITDEPTH
          cm->use_highbitdepth,
#endif
          AOM_BORDER_IN_PIXELS, cm->byte_alignment,
          &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
          pool->cb_priv)) {
    unlock_buffer_pool(pool);
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate frame buffer");
  }
  unlock_buffer_pool(pool);

  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
  pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
  pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
#if CONFIG_COLORSPACE_HEADERS
  pool->frame_bufs[cm->new_fb_idx].buf.transfer_function =
      cm->transfer_function;
  pool->frame_bufs[cm->new_fb_idx].buf.chroma_sample_position =
      cm->chroma_sample_position;
#endif
  pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
  pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
  pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
}

static void setup_sb_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  (void)rb;
#if CONFIG_EXT_PARTITION
  set_sb_size(cm, aom_rb_read_bit(rb) ? BLOCK_128X128 : BLOCK_64X64);
#else
  set_sb_size(cm, BLOCK_64X64);
#endif  // CONFIG_EXT_PARTITION
}

static INLINE int valid_ref_frame_img_fmt(aom_bit_depth_t ref_bit_depth,
                                          int ref_xss, int ref_yss,
                                          aom_bit_depth_t this_bit_depth,
                                          int this_xss, int this_yss) {
  return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
         ref_yss == this_yss;
}
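
// For inter frames: the frame size is either inherited from one of the
// reference frames or coded explicitly. The chosen size is then validated
// against the reference frames' dimensions and color format before the
// frame buffer is (re)allocated.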
static void setup_frame_size_with_refs(AV1_COMMON *cm,
                                       struct aom_read_bit_buffer *rb) {
  int width, height;
  int found = 0, i;
  int has_valid_ref_frame = 0;
  BufferPool *const pool = cm->buffer_pool;
  for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
    if (aom_rb_read_bit(rb)) {
      YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf;
      width = buf->y_crop_width;
      height = buf->y_crop_height;
      cm->render_width = buf->render_width;
      cm->render_height = buf->render_height;
#if CONFIG_FRAME_SUPERRES
      setup_superres(cm, rb, &width, &height);
#endif  // CONFIG_FRAME_SUPERRES
      found = 1;
      break;
    }
  }

  if (!found) {
#if CONFIG_FRAME_SIZE
    int num_bits_width = cm->seq_params.num_bits_width;
    int num_bits_height = cm->seq_params.num_bits_height;
    av1_read_frame_size(rb, num_bits_width, num_bits_height, &width, &height);
#else
    av1_read_frame_size(rb, &width, &height);
#endif
#if CONFIG_FRAME_SUPERRES
    setup_superres(cm, rb, &width, &height);
#endif  // CONFIG_FRAME_SUPERRES
    setup_render_size(cm, rb);
  }

  if (width <= 0 || height <= 0)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Invalid frame size");

  // Check to make sure at least one of frames that this frame references
  // has valid dimensions.
  for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
    RefBuffer *const ref_frame = &cm->frame_refs[i];
    has_valid_ref_frame |=
        valid_ref_frame_size(ref_frame->buf->y_crop_width,
                             ref_frame->buf->y_crop_height, width, height);
  }
  if (!has_valid_ref_frame)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Referenced frame has invalid size");
  for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
    RefBuffer *const ref_frame = &cm->frame_refs[i];
    if (!valid_ref_frame_img_fmt(ref_frame->buf->bit_depth,
                                 ref_frame->buf->subsampling_x,
                                 ref_frame->buf->subsampling_y, cm->bit_depth,
                                 cm->subsampling_x, cm->subsampling_y))
      aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                         "Referenced frame has incompatible color format");
  }

  resize_context_buffers(cm, width, height);

  lock_buffer_pool(pool);
  if (aom_realloc_frame_buffer(
          get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
          cm->subsampling_y,
#if CONFIG_HIGHBITDEPTH
          cm->use_highbitdepth,
#endif
          AOM_BORDER_IN_PIXELS, cm->byte_alignment,
          &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
          pool->cb_priv)) {
    unlock_buffer_pool(pool);
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate frame buffer");
  }
  unlock_buffer_pool(pool);

  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
  pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
  pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
#if CONFIG_COLORSPACE_HEADERS
  pool->frame_bufs[cm->new_fb_idx].buf.transfer_function =
      cm->transfer_function;
  pool->frame_bufs[cm->new_fb_idx].buf.chroma_sample_position =
      cm->chroma_sample_position;
#endif
  pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
  pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
  pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
}
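
// Reads the tile-group start index and size and checks that the group does
// not extend past the last tile of the frame.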
#if !CONFIG_OBU
static void read_tile_group_range(AV1Decoder *pbi,
                                  struct aom_read_bit_buffer *const rb) {
  AV1_COMMON *const cm = &pbi->common;
  const int num_bits = cm->log2_tile_rows + cm->log2_tile_cols;
  const int num_tiles =
      cm->tile_rows * cm->tile_cols;  // Note: May be < (1<<num_bits)
  pbi->tg_start = aom_rb_read_literal(rb, num_bits);
  pbi->tg_size = 1 + aom_rb_read_literal(rb, num_bits);
  if (pbi->tg_start + pbi->tg_size > num_tiles)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Tile group extends past last tile in frame");
}
#endif  // !CONFIG_OBU

#if CONFIG_MAX_TILE

// Same function as av1_read_uniform but reading from uncompressed header wb
static int rb_read_uniform(struct aom_read_bit_buffer *const rb, int n) {
  const int l = get_unsigned_bits(n);
  const int m = (1 << l) - n;
  const int v = aom_rb_read_literal(rb, l - 1);
  assert(l != 0);
  if (v < m)
    return v;
  else
    return (v << 1) - m + aom_rb_read_literal(rb, 1);
}

static void read_tile_info_max_tile(AV1_COMMON *const cm,
                                    struct aom_read_bit_buffer *const rb) {
  int width_mi = ALIGN_POWER_OF_TWO(cm->mi_cols, MAX_MIB_SIZE_LOG2);
  int height_mi = ALIGN_POWER_OF_TWO(cm->mi_rows, MAX_MIB_SIZE_LOG2);
  int width_sb = width_mi >> MAX_MIB_SIZE_LOG2;
  int height_sb = height_mi >> MAX_MIB_SIZE_LOG2;
  int start_sb, size_sb, i;

  av1_get_tile_limits(cm);
  cm->uniform_tile_spacing_flag = aom_rb_read_bit(rb);

  // Read tile columns
  if (cm->uniform_tile_spacing_flag) {
    cm->log2_tile_cols = cm->min_log2_tile_cols;
    while (cm->log2_tile_cols < cm->max_log2_tile_cols) {
      if (!aom_rb_read_bit(rb)) {
        break;
      }
      cm->log2_tile_cols++;
    }
  } else {
    for (i = 0, start_sb = 0; width_sb > 0 && i < MAX_TILE_COLS; i++) {
      size_sb = 1 + rb_read_uniform(rb, AOMMIN(width_sb, MAX_TILE_WIDTH_SB));
      cm->tile_col_start_sb[i] = start_sb;
      start_sb += size_sb;
      width_sb -= size_sb;
    }
    cm->tile_cols = i;
    cm->tile_col_start_sb[i] = start_sb + width_sb;
  }
  av1_calculate_tile_cols(cm);

  // Read tile rows
  if (cm->uniform_tile_spacing_flag) {
    cm->log2_tile_rows = cm->min_log2_tile_rows;
    while (cm->log2_tile_rows < cm->max_log2_tile_rows) {
      if (!aom_rb_read_bit(rb)) {
        break;
      }
      cm->log2_tile_rows++;
    }
  } else {
    for (i = 0, start_sb = 0; height_sb > 0 && i < MAX_TILE_ROWS; i++) {
      size_sb =
          1 + rb_read_uniform(rb, AOMMIN(height_sb, cm->max_tile_height_sb));
      cm->tile_row_start_sb[i] = start_sb;
      start_sb += size_sb;
      height_sb -= size_sb;
    }
    cm->tile_rows = i;
    cm->tile_row_start_sb[i] = start_sb + height_sb;
  }
  av1_calculate_tile_rows(cm);
}
#endif
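
// Reads the tile configuration from the uncompressed frame header: tile
// dimensions or counts, the dependent-horizontal-tiles and
// loop-filter-across-tiles flags, and the tile size byte counts.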
static void read_tile_info(AV1Decoder *const pbi,
                           struct aom_read_bit_buffer *const rb) {
  AV1_COMMON *const cm = &pbi->common;
#if CONFIG_EXT_TILE
  cm->single_tile_decoding = 0;
  if (cm->large_scale_tile) {
    struct loopfilter *lf = &cm->lf;

// Figure out single_tile_decoding by loopfilter_level.
#if CONFIG_LOOPFILTER_LEVEL
    const int no_loopfilter = !(lf->filter_level[0] || lf->filter_level[1]);
#else
    const int no_loopfilter = !lf->filter_level;
#endif
    cm->single_tile_decoding = no_loopfilter ? 1 : 0;
// Read the tile width/height
#if CONFIG_EXT_PARTITION
    if (cm->sb_size == BLOCK_128X128) {
      cm->tile_width = aom_rb_read_literal(rb, 5) + 1;
      cm->tile_height = aom_rb_read_literal(rb, 5) + 1;
    } else {
#endif  // CONFIG_EXT_PARTITION
      cm->tile_width = aom_rb_read_literal(rb, 6) + 1;
      cm->tile_height = aom_rb_read_literal(rb, 6) + 1;
#if CONFIG_EXT_PARTITION
    }
#endif  // CONFIG_EXT_PARTITION

    cm->tile_width <<= cm->mib_size_log2;
    cm->tile_height <<= cm->mib_size_log2;

    cm->tile_width = AOMMIN(cm->tile_width, cm->mi_cols);
    cm->tile_height = AOMMIN(cm->tile_height, cm->mi_rows);

    // Get the number of tiles
    cm->tile_cols = 1;
    while (cm->tile_cols * cm->tile_width < cm->mi_cols) ++cm->tile_cols;

    cm->tile_rows = 1;
    while (cm->tile_rows * cm->tile_height < cm->mi_rows) ++cm->tile_rows;

#if CONFIG_DEPENDENT_HORZTILES
    cm->dependent_horz_tiles = 0;
#endif
#if CONFIG_LOOPFILTERING_ACROSS_TILES
    if (cm->tile_cols * cm->tile_rows > 1)
      cm->loop_filter_across_tiles_enabled = aom_rb_read_bit(rb);
    else
      cm->loop_filter_across_tiles_enabled = 1;
#endif  // CONFIG_LOOPFILTERING_ACROSS_TILES

    if (cm->tile_cols * cm->tile_rows > 1) {
      // Read the number of bytes used to store tile size
      pbi->tile_col_size_bytes = aom_rb_read_literal(rb, 2) + 1;
      pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
    }
  } else {
#endif  // CONFIG_EXT_TILE

#if CONFIG_MAX_TILE
    read_tile_info_max_tile(cm, rb);
#else
  int min_log2_tile_cols, max_log2_tile_cols, max_ones;
  av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);

  // columns
  max_ones = max_log2_tile_cols - min_log2_tile_cols;
  cm->log2_tile_cols = min_log2_tile_cols;
  while (max_ones-- && aom_rb_read_bit(rb)) cm->log2_tile_cols++;

  if (cm->log2_tile_cols > 6)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Invalid number of tile columns");

  // rows
  cm->log2_tile_rows = aom_rb_read_bit(rb);
  if (cm->log2_tile_rows) cm->log2_tile_rows += aom_rb_read_bit(rb);

  cm->tile_width =
      get_tile_size(cm->mi_cols, cm->log2_tile_cols, &cm->tile_cols);
  cm->tile_height =
      get_tile_size(cm->mi_rows, cm->log2_tile_rows, &cm->tile_rows);

#endif  // CONFIG_MAX_TILE
#if CONFIG_DEPENDENT_HORZTILES
  if (cm->tile_rows > 1)
    cm->dependent_horz_tiles = aom_rb_read_bit(rb);
  else
    cm->dependent_horz_tiles = 0;
#endif
#if CONFIG_LOOPFILTERING_ACROSS_TILES
  if (cm->tile_cols * cm->tile_rows > 1)
    cm->loop_filter_across_tiles_enabled = aom_rb_read_bit(rb);
  else
    cm->loop_filter_across_tiles_enabled = 1;
#endif  // CONFIG_LOOPFILTERING_ACROSS_TILES

  // tile size magnitude
  pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
#if CONFIG_EXT_TILE
  }
#endif  // CONFIG_EXT_TILE

// each tile group header is in its own tile group OBU
#if !CONFIG_OBU
  // Store an index to the location of the tile group information
  pbi->tg_size_bit_offset = rb->bit_offset;
  read_tile_group_range(pbi, rb);
#endif
}
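
// Reads a little-endian value of 'sz' (1-4) bytes from 'src'.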
Yaowu Xu4ff59b52017-04-24 12:41:56 -07001938static int mem_get_varsize(const uint8_t *src, int sz) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001939 switch (sz) {
1940 case 1: return src[0];
1941 case 2: return mem_get_le16(src);
1942 case 3: return mem_get_le24(src);
1943 case 4: return mem_get_le32(src);
James Zern88896732017-06-23 15:55:09 -07001944 default: assert(0 && "Invalid size"); return -1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001945 }
1946}
1947
1948#if CONFIG_EXT_TILE
1949// Reads the next tile returning its size and adjusting '*data' accordingly
1950// based on 'is_last'.
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001951static void get_ls_tile_buffer(
1952 const uint8_t *const data_end, struct aom_internal_error_info *error_info,
1953 const uint8_t **data, aom_decrypt_cb decrypt_cb, void *decrypt_state,
1954 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS], int tile_size_bytes,
1955 int col, int row, int tile_copy_mode) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001956 size_t size;
1957
1958 size_t copy_size = 0;
1959 const uint8_t *copy_data = NULL;
1960
1961 if (!read_is_valid(*data, tile_size_bytes, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07001962 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001963 "Truncated packet or corrupt tile length");
1964 if (decrypt_cb) {
1965 uint8_t be_data[4];
1966 decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
1967
1968 // Only read number of bytes in cm->tile_size_bytes.
1969 size = mem_get_varsize(be_data, tile_size_bytes);
1970 } else {
1971 size = mem_get_varsize(*data, tile_size_bytes);
1972 }
1973
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001974 // If tile_copy_mode = 1, then the top bit of the tile header indicates copy
1975 // mode.
1976 if (tile_copy_mode && (size >> (tile_size_bytes * 8 - 1)) == 1) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001977 // The remaining bits in the top byte signal the row offset
1978 int offset = (size >> (tile_size_bytes - 1) * 8) & 0x7f;
1979
1980 // Currently, only use tiles in same column as reference tiles.
1981 copy_data = tile_buffers[row - offset][col].data;
1982 copy_size = tile_buffers[row - offset][col].size;
1983 size = 0;
1984 }
1985
1986 *data += tile_size_bytes;
1987
1988 if (size > (size_t)(data_end - *data))
Yaowu Xuf883b422016-08-30 14:01:10 -07001989 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001990 "Truncated packet or corrupt tile size");
1991
1992 if (size > 0) {
1993 tile_buffers[row][col].data = *data;
1994 tile_buffers[row][col].size = size;
1995 } else {
1996 tile_buffers[row][col].data = copy_data;
1997 tile_buffers[row][col].size = copy_size;
1998 }
1999
2000 *data += size;
2001
2002 tile_buffers[row][col].raw_data_end = *data;
2003}
2004
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002005static void get_ls_tile_buffers(
Yaowu Xuf883b422016-08-30 14:01:10 -07002006 AV1Decoder *pbi, const uint8_t *data, const uint8_t *data_end,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002007 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS]) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002008 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002009 const int tile_cols = cm->tile_cols;
2010 const int tile_rows = cm->tile_rows;
2011 const int have_tiles = tile_cols * tile_rows > 1;
2012
2013 if (!have_tiles) {
Jingning Han99ffce62017-04-25 15:48:41 -07002014 const size_t tile_size = data_end - data;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002015 tile_buffers[0][0].data = data;
2016 tile_buffers[0][0].size = tile_size;
2017 tile_buffers[0][0].raw_data_end = NULL;
2018 } else {
2019 // We locate only the tile buffers that are required, which are the ones
2020 // specified by pbi->dec_tile_col and pbi->dec_tile_row. Also, we always
2021 // need the last (bottom right) tile buffer, as we need to know where the
2022 // end of the compressed frame buffer is for proper superframe decoding.
2023
2024 const uint8_t *tile_col_data_end[MAX_TILE_COLS];
2025 const uint8_t *const data_start = data;
2026
Yaowu Xuf883b422016-08-30 14:01:10 -07002027 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002028 const int single_row = pbi->dec_tile_row >= 0;
2029 const int tile_rows_start = single_row ? dec_tile_row : 0;
2030 const int tile_rows_end = single_row ? tile_rows_start + 1 : tile_rows;
Yaowu Xuf883b422016-08-30 14:01:10 -07002031 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002032 const int single_col = pbi->dec_tile_col >= 0;
2033 const int tile_cols_start = single_col ? dec_tile_col : 0;
2034 const int tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
2035
2036 const int tile_col_size_bytes = pbi->tile_col_size_bytes;
2037 const int tile_size_bytes = pbi->tile_size_bytes;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002038 const int tile_copy_mode =
2039 ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256) ? 1
2040 : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002041 size_t tile_col_size;
2042 int r, c;
2043
2044 // Read tile column sizes for all columns (we need the last tile buffer)
2045 for (c = 0; c < tile_cols; ++c) {
2046 const int is_last = c == tile_cols - 1;
2047 if (!is_last) {
2048 tile_col_size = mem_get_varsize(data, tile_col_size_bytes);
2049 data += tile_col_size_bytes;
2050 tile_col_data_end[c] = data + tile_col_size;
2051 } else {
2052 tile_col_size = data_end - data;
2053 tile_col_data_end[c] = data_end;
2054 }
2055 data += tile_col_size;
2056 }
2057
2058 data = data_start;
2059
2060 // Read the required tile sizes.
2061 for (c = tile_cols_start; c < tile_cols_end; ++c) {
2062 const int is_last = c == tile_cols - 1;
2063
2064 if (c > 0) data = tile_col_data_end[c - 1];
2065
2066 if (!is_last) data += tile_col_size_bytes;
2067
2068 // Get the whole of the last column, otherwise stop at the required tile.
2069 for (r = 0; r < (is_last ? tile_rows : tile_rows_end); ++r) {
2070 tile_buffers[r][c].col = c;
2071
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002072 get_ls_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
2073 pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
2074 tile_size_bytes, c, r, tile_copy_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002075 }
2076 }
2077
2078 // If we have not read the last column, then read it to get the last tile.
2079 if (tile_cols_end != tile_cols) {
2080 c = tile_cols - 1;
2081
2082 data = tile_col_data_end[c - 1];
2083
2084 for (r = 0; r < tile_rows; ++r) {
2085 tile_buffers[r][c].col = c;
2086
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002087 get_ls_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
2088 pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
2089 tile_size_bytes, c, r, tile_copy_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002090 }
2091 }
2092 }
2093}
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002094#endif // CONFIG_EXT_TILE
2095
Yaowu Xuc27fc142016-08-22 16:08:15 -07002096// Reads the next tile returning its size and adjusting '*data' accordingly
2097// based on 'is_last'.
2098static void get_tile_buffer(const uint8_t *const data_end,
2099 const int tile_size_bytes, int is_last,
Yaowu Xuf883b422016-08-30 14:01:10 -07002100 struct aom_internal_error_info *error_info,
2101 const uint8_t **data, aom_decrypt_cb decrypt_cb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002102 void *decrypt_state, TileBufferDec *const buf) {
2103 size_t size;
2104
2105 if (!is_last) {
Yaowu Xu0a79a1b2017-02-17 13:04:54 -08002106 if (!read_is_valid(*data, tile_size_bytes, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07002107 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002108 "Truncated packet or corrupt tile length");
2109
2110 if (decrypt_cb) {
2111 uint8_t be_data[4];
2112 decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
2113 size = mem_get_varsize(be_data, tile_size_bytes);
2114 } else {
2115 size = mem_get_varsize(*data, tile_size_bytes);
2116 }
2117 *data += tile_size_bytes;
2118
2119 if (size > (size_t)(data_end - *data))
Yaowu Xuf883b422016-08-30 14:01:10 -07002120 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002121 "Truncated packet or corrupt tile size");
2122 } else {
Soo-Chul Han38427e82017-09-27 15:06:13 -04002123#if !CONFIG_OBU || CONFIG_ADD_4BYTES_OBUSIZE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002124 size = data_end - *data;
Soo-Chul Han38427e82017-09-27 15:06:13 -04002125#else
2126 size = mem_get_varsize(*data, tile_size_bytes);
2127 *data += tile_size_bytes;
2128#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002129 }
2130
2131 buf->data = *data;
2132 buf->size = size;
2133
2134 *data += size;
2135}
2136
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002137static void get_tile_buffers(AV1Decoder *pbi, const uint8_t *data,
2138 const uint8_t *data_end,
2139 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS],
2140 int startTile, int endTile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002141 AV1_COMMON *const cm = &pbi->common;
Thomas Davies80188d12016-10-26 16:08:35 -07002142 int r, c;
2143 const int tile_cols = cm->tile_cols;
2144 const int tile_rows = cm->tile_rows;
2145 int tc = 0;
2146 int first_tile_in_tg = 0;
David Barker01563082017-10-09 13:59:16 +01002147#if !CONFIG_OBU
Thomas Davies80188d12016-10-26 16:08:35 -07002148 struct aom_read_bit_buffer rb_tg_hdr;
2149 uint8_t clear_data[MAX_AV1_HEADER_SIZE];
James Zern6efba482017-04-20 20:53:49 -07002150 const size_t hdr_size = pbi->uncomp_hdr_size + pbi->first_partition_size;
Thomas Davies80188d12016-10-26 16:08:35 -07002151 const int tg_size_bit_offset = pbi->tg_size_bit_offset;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002152#endif
2153
Fangwen Fu73126c02017-02-08 22:37:47 -08002154#if CONFIG_DEPENDENT_HORZTILES
2155 int tile_group_start_col = 0;
2156 int tile_group_start_row = 0;
2157#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002158
Thomas Davies4822e142017-10-10 11:30:36 +01002159#if CONFIG_SIMPLE_BWD_ADAPT
2160 size_t max_tile_size = 0;
2161 cm->largest_tile_id = 0;
2162#endif
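  // Track which tile has the largest coded size; its index is recorded in
  // cm->largest_tile_id, presumably for backward adaptation of the frame
  // context (hence the CONFIG_SIMPLE_BWD_ADAPT guard).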
Thomas Davies80188d12016-10-26 16:08:35 -07002163 for (r = 0; r < tile_rows; ++r) {
2164 for (c = 0; c < tile_cols; ++c, ++tc) {
Thomas Davies80188d12016-10-26 16:08:35 -07002165 TileBufferDec *const buf = &tile_buffers[r][c];
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002166#if CONFIG_OBU
2167 const int is_last = (tc == endTile);
2168 const size_t hdr_offset = 0;
2169#else
Thomas Daviesa0de6d52017-01-20 14:45:25 +00002170 const int is_last = (r == tile_rows - 1) && (c == tile_cols - 1);
James Zern6efba482017-04-20 20:53:49 -07002171 const size_t hdr_offset = (tc && tc == first_tile_in_tg) ? hdr_size : 0;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002172#endif
2173
2174 if (tc < startTile || tc > endTile) continue;
Thomas Davies80188d12016-10-26 16:08:35 -07002175
Rupert Swarbrickcd757392017-09-01 13:57:53 +01002176 if (data + hdr_offset >= data_end)
2177 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2178 "Data ended before all tiles were read.");
Thomas Davies80188d12016-10-26 16:08:35 -07002179 buf->col = c;
David Barker01563082017-10-09 13:59:16 +01002180#if CONFIG_OBU
2181#if CONFIG_DEPENDENT_HORZTILES
2182 if (tc == startTile) {
2183 tile_group_start_row = r;
2184 tile_group_start_col = c;
2185 }
2186#endif // CONFIG_DEPENDENT_HORZTILES
2187#else // CONFIG_OBU
Thomas Davies80188d12016-10-26 16:08:35 -07002188 if (hdr_offset) {
2189 init_read_bit_buffer(pbi, &rb_tg_hdr, data, data_end, clear_data);
2190 rb_tg_hdr.bit_offset = tg_size_bit_offset;
David Barker1a191122017-09-06 15:24:16 +01002191 read_tile_group_range(pbi, &rb_tg_hdr);
Fangwen Fu73126c02017-02-08 22:37:47 -08002192#if CONFIG_DEPENDENT_HORZTILES
David Barker1a191122017-09-06 15:24:16 +01002193 tile_group_start_row = r;
2194 tile_group_start_col = c;
Fangwen Fu73126c02017-02-08 22:37:47 -08002195#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002196 }
David Barker01563082017-10-09 13:59:16 +01002197#endif // CONFIG_OBU
Thomas Davies80188d12016-10-26 16:08:35 -07002198 first_tile_in_tg += tc == first_tile_in_tg ? pbi->tg_size : 0;
2199 data += hdr_offset;
Thomas Daviesa0de6d52017-01-20 14:45:25 +00002200 get_tile_buffer(data_end, pbi->tile_size_bytes, is_last,
2201 &pbi->common.error, &data, pbi->decrypt_cb,
2202 pbi->decrypt_state, buf);
Fangwen Fu73126c02017-02-08 22:37:47 -08002203#if CONFIG_DEPENDENT_HORZTILES
2204 cm->tile_group_start_row[r][c] = tile_group_start_row;
2205 cm->tile_group_start_col[r][c] = tile_group_start_col;
2206#endif
Thomas Davies4822e142017-10-10 11:30:36 +01002207#if CONFIG_SIMPLE_BWD_ADAPT
2208 if (buf->size > max_tile_size) {
2209 max_tile_size = buf->size;
2210 cm->largest_tile_id = r * tile_cols + c;
2211 }
2212#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002213 }
2214 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002215}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002216
David Barker5c06a642017-08-18 13:18:16 +01002217#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07002218static void dec_setup_across_tile_boundary_info(
2219 const AV1_COMMON *const cm, const TileInfo *const tile_info) {
Frederic Barbier94e38562017-08-16 14:38:48 +02002220 if (tile_info->mi_row_start >= tile_info->mi_row_end ||
2221 tile_info->mi_col_start >= tile_info->mi_col_end)
2222 return;
2223
David Barker5c06a642017-08-18 13:18:16 +01002224 if (!cm->loop_filter_across_tiles_enabled) {
Yi Luo10e23002017-07-31 11:54:43 -07002225 av1_setup_across_tile_boundary_info(cm, tile_info);
2226 }
2227}
David Barker5c06a642017-08-18 13:18:16 +01002228#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07002229
Yaowu Xuf883b422016-08-30 14:01:10 -07002230static const uint8_t *decode_tiles(AV1Decoder *pbi, const uint8_t *data,
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002231 const uint8_t *data_end, int startTile,
2232 int endTile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002233 AV1_COMMON *const cm = &pbi->common;
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002234#if !CONFIG_LOOPFILTER_LEVEL
Yaowu Xuf883b422016-08-30 14:01:10 -07002235 const AVxWorkerInterface *const winterface = aom_get_worker_interface();
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002236#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002237 const int tile_cols = cm->tile_cols;
2238 const int tile_rows = cm->tile_rows;
2239 const int n_tiles = tile_cols * tile_rows;
clang-format67948d32016-09-07 22:40:40 -07002240 TileBufferDec(*const tile_buffers)[MAX_TILE_COLS] = pbi->tile_buffers;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002241#if CONFIG_EXT_TILE
Yaowu Xuf883b422016-08-30 14:01:10 -07002242 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002243 const int single_row = pbi->dec_tile_row >= 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002244 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002245 const int single_col = pbi->dec_tile_col >= 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002246#endif // CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002247 int tile_rows_start;
2248 int tile_rows_end;
2249 int tile_cols_start;
2250 int tile_cols_end;
2251 int inv_col_order;
2252 int inv_row_order;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002253 int tile_row, tile_col;
2254
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002255#if CONFIG_EXT_TILE
2256 if (cm->large_scale_tile) {
2257 tile_rows_start = single_row ? dec_tile_row : 0;
2258 tile_rows_end = single_row ? dec_tile_row + 1 : tile_rows;
2259 tile_cols_start = single_col ? dec_tile_col : 0;
2260 tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
2261 inv_col_order = pbi->inv_tile_order && !single_col;
2262 inv_row_order = pbi->inv_tile_order && !single_row;
2263 } else {
2264#endif // CONFIG_EXT_TILE
2265 tile_rows_start = 0;
2266 tile_rows_end = tile_rows;
2267 tile_cols_start = 0;
2268 tile_cols_end = tile_cols;
2269 inv_col_order = pbi->inv_tile_order;
2270 inv_row_order = pbi->inv_tile_order;
2271#if CONFIG_EXT_TILE
2272 }
2273#endif // CONFIG_EXT_TILE
2274
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002275#if !CONFIG_LOOPFILTER_LEVEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07002276 if (cm->lf.filter_level && !cm->skip_loop_filter &&
2277 pbi->lf_worker.data1 == NULL) {
2278 CHECK_MEM_ERROR(cm, pbi->lf_worker.data1,
Yaowu Xuf883b422016-08-30 14:01:10 -07002279 aom_memalign(32, sizeof(LFWorkerData)));
2280 pbi->lf_worker.hook = (AVxWorkerHook)av1_loop_filter_worker;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002281 if (pbi->max_threads > 1 && !winterface->reset(&pbi->lf_worker)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002282 aom_internal_error(&cm->error, AOM_CODEC_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002283 "Loop filter thread creation failed");
2284 }
2285 }
2286
2287 if (cm->lf.filter_level && !cm->skip_loop_filter) {
2288 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
2289 // Be sure to sync as we might be resuming after a failed frame decode.
2290 winterface->sync(&pbi->lf_worker);
Yaowu Xuf883b422016-08-30 14:01:10 -07002291 av1_loop_filter_data_reset(lf_data, get_frame_new_buffer(cm), cm,
2292 pbi->mb.plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002293 }
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002294#endif // CONFIG_LOOPFILTER_LEVEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07002295
2296 assert(tile_rows <= MAX_TILE_ROWS);
2297 assert(tile_cols <= MAX_TILE_COLS);
2298
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002299#if CONFIG_EXT_TILE
2300 if (cm->large_scale_tile)
2301 get_ls_tile_buffers(pbi, data, data_end, tile_buffers);
2302 else
2303#endif // CONFIG_EXT_TILE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002304 get_tile_buffers(pbi, data, data_end, tile_buffers, startTile, endTile);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002305
2306 if (pbi->tile_data == NULL || n_tiles != pbi->allocated_tiles) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002307 aom_free(pbi->tile_data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002308 CHECK_MEM_ERROR(cm, pbi->tile_data,
Yaowu Xuf883b422016-08-30 14:01:10 -07002309 aom_memalign(32, n_tiles * (sizeof(*pbi->tile_data))));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002310 pbi->allocated_tiles = n_tiles;
2311 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002312#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002313 if (pbi->acct_enabled) {
2314 aom_accounting_reset(&pbi->accounting);
2315 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002316#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002317 // Load all tile information into tile_data.
2318 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
2319 for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
2320 const TileBufferDec *const buf = &tile_buffers[tile_row][tile_col];
2321 TileData *const td = pbi->tile_data + tile_cols * tile_row + tile_col;
2322
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002323 if (tile_row * cm->tile_cols + tile_col < startTile ||
2324 tile_row * cm->tile_cols + tile_col > endTile)
2325 continue;
2326
Yaowu Xuc27fc142016-08-22 16:08:15 -07002327 td->cm = cm;
2328 td->xd = pbi->mb;
2329 td->xd.corrupted = 0;
2330 td->xd.counts =
2331 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD
2332 ? &cm->counts
2333 : NULL;
Yaowu Xuf883b422016-08-30 14:01:10 -07002334 av1_zero(td->dqcoeff);
2335 av1_tile_init(&td->xd.tile, td->cm, tile_row, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002336 setup_bool_decoder(buf->data, data_end, buf->size, &cm->error,
Alex Converseeb780e72016-12-13 12:46:41 -08002337 &td->bit_reader,
2338#if CONFIG_ANS && ANS_MAX_SYMBOLS
2339 1 << cm->ans_window_size_log2,
2340#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
2341 pbi->decrypt_cb, pbi->decrypt_state);
Michael Bebenita6048d052016-08-25 14:40:54 -07002342#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002343 if (pbi->acct_enabled) {
David Barkerd971f402016-10-25 13:52:07 +01002344 td->bit_reader.accounting = &pbi->accounting;
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002345 } else {
David Barkerd971f402016-10-25 13:52:07 +01002346 td->bit_reader.accounting = NULL;
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002347 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002348#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07002349 av1_init_macroblockd(cm, &td->xd,
Luc Trudeauf8164152017-04-11 16:20:51 -04002350#if CONFIG_CFL
2351 &td->cfl,
2352#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07002353 td->dqcoeff);
Yushin Choc49ef3a2017-03-13 17:27:25 -07002354
Thomas Daviesf77d4ad2017-01-10 18:55:42 +00002355 // Initialise the tile context from the frame context
2356 td->tctx = *cm->fc;
2357 td->xd.tile_ctx = &td->tctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002358 td->xd.plane[0].color_index_map = td->color_index_map[0];
2359 td->xd.plane[1].color_index_map = td->color_index_map[1];
Sarah Parker5c6744b2017-08-25 17:27:45 -07002360#if CONFIG_MRC_TX
2361 td->xd.mrc_mask = td->mrc_mask;
2362#endif // CONFIG_MRC_TX
Yaowu Xuc27fc142016-08-22 16:08:15 -07002363 }
2364 }
2365
2366 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
2367 const int row = inv_row_order ? tile_rows - 1 - tile_row : tile_row;
2368 int mi_row = 0;
2369 TileInfo tile_info;
2370
Yaowu Xuf883b422016-08-30 14:01:10 -07002371 av1_tile_set_row(&tile_info, cm, row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002372
2373 for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
2374 const int col = inv_col_order ? tile_cols - 1 - tile_col : tile_col;
2375 TileData *const td = pbi->tile_data + tile_cols * row + col;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002376
2377 if (tile_row * cm->tile_cols + tile_col < startTile ||
2378 tile_row * cm->tile_cols + tile_col > endTile)
2379 continue;
2380
Michael Bebenita6048d052016-08-25 14:40:54 -07002381#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002382 if (pbi->acct_enabled) {
David Barkerd971f402016-10-25 13:52:07 +01002383 td->bit_reader.accounting->last_tell_frac =
2384 aom_reader_tell_frac(&td->bit_reader);
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002385 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002386#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002387
Yaowu Xuf883b422016-08-30 14:01:10 -07002388 av1_tile_set_col(&tile_info, cm, col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002389
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002390#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu73126c02017-02-08 22:37:47 -08002391 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
2392 if (!cm->dependent_horz_tiles || tile_row == 0 ||
2393 tile_info.tg_horz_boundary) {
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002394 av1_zero_above_context(cm, tile_info.mi_col_start,
2395 tile_info.mi_col_end);
2396 }
2397#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002398 av1_zero_above_context(cm, tile_info.mi_col_start, tile_info.mi_col_end);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002399#endif
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002400#if CONFIG_LOOP_RESTORATION
2401 for (int p = 0; p < MAX_MB_PLANE; ++p) {
2402 set_default_wiener(td->xd.wiener_info + p);
2403 set_default_sgrproj(td->xd.sgrproj_info + p);
2404 }
2405#endif // CONFIG_LOOP_RESTORATION
Yaowu Xuc27fc142016-08-22 16:08:15 -07002406
David Barker5c06a642017-08-18 13:18:16 +01002407#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07002408 dec_setup_across_tile_boundary_info(cm, &tile_info);
David Barker5c06a642017-08-18 13:18:16 +01002409#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luof190a162017-07-13 16:16:56 -07002410
Yaowu Xuc27fc142016-08-22 16:08:15 -07002411 for (mi_row = tile_info.mi_row_start; mi_row < tile_info.mi_row_end;
2412 mi_row += cm->mib_size) {
2413 int mi_col;
2414
Yaowu Xuf883b422016-08-30 14:01:10 -07002415 av1_zero_left_context(&td->xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002416
2417 for (mi_col = tile_info.mi_col_start; mi_col < tile_info.mi_col_end;
2418 mi_col += cm->mib_size) {
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07002419#if CONFIG_NCOBMC_ADAPT_WEIGHT
2420 alloc_ncobmc_pred_buffer(&td->xd);
2421 set_sb_mi_boundaries(cm, &td->xd, mi_row, mi_col);
2422#endif
Angie Chiangd9af8ac2017-10-25 10:48:53 -07002423#if CONFIG_SYMBOLRATE
2424 av1_record_superblock(td->xd.counts);
2425#endif
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02002426 decode_partition(pbi, &td->xd, mi_row, mi_col, &td->bit_reader,
2427 cm->sb_size);
Sebastien Alaiwan1bc94fc2017-10-31 10:25:17 +01002428#if NC_MODE_INFO
Yue Chen9ab6d712017-01-12 15:50:46 -08002429 detoken_and_recon_sb(pbi, &td->xd, mi_row, mi_col, &td->bit_reader,
2430 cm->sb_size);
2431#endif
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07002432#if CONFIG_NCOBMC_ADAPT_WEIGHT
2433 free_ncobmc_pred_buffer(&td->xd);
2434#endif
Cheng Chen5ad5b282017-10-05 16:36:06 -07002435#if CONFIG_LPF_SB
2436 if (USE_LOOP_FILTER_SUPERBLOCK) {
2437 // apply deblocking filtering right after each superblock is decoded
2438 const int guess_filter_lvl = FAKE_FILTER_LEVEL;
2439 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2440 guess_filter_lvl, 0, 1, mi_row, mi_col);
2441 }
2442#endif // CONFIG_LPF_SB
Yaowu Xuc27fc142016-08-22 16:08:15 -07002443 }
Angie Chiangd0916d92017-03-10 17:54:18 -08002444 aom_merge_corrupted_flag(&pbi->mb.corrupted, td->xd.corrupted);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002445 if (pbi->mb.corrupted)
Yaowu Xuf883b422016-08-30 14:01:10 -07002446 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002447 "Failed to decode tile data");
Yaowu Xuc27fc142016-08-22 16:08:15 -07002448 }
2449 }
2450
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002451#if !CONFIG_OBU
Yaowu Xuc27fc142016-08-22 16:08:15 -07002452 assert(mi_row > 0);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002453#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002454
Yaowu Xuc27fc142016-08-22 16:08:15 -07002455 // After loopfiltering, the last 7 rows of pixels in each superblock row may
2456 // still be changed by the longest loopfilter of the next superblock row.
2457 if (cm->frame_parallel_decode)
Yaowu Xuf883b422016-08-30 14:01:10 -07002458 av1_frameworker_broadcast(pbi->cur_buf, mi_row << cm->mib_size_log2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002459 }
2460
Cheng Chen5ad5b282017-10-05 16:36:06 -07002461#if CONFIG_INTRABC
2462// When intraBC is on, loop filtering is done per superblock,
2463// instead of after the whole frame has been decoded,
2464// as in the else branch below.
2465#else
Cheng Chene94df5c2017-07-19 17:25:33 -07002466// Loopfilter the whole frame.
Cheng Chenf572cd32017-08-25 18:34:51 -07002467#if CONFIG_LPF_SB
2468 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2469 cm->lf.filter_level, 0, 0, 0, 0);
2470#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002471#if CONFIG_OBU
2472 if (endTile == cm->tile_rows * cm->tile_cols - 1)
2473#endif
David Barker3dffa272017-10-18 17:07:26 +01002474#if CONFIG_LOOPFILTER_LEVEL
2475 if (cm->lf.filter_level[0] || cm->lf.filter_level[1]) {
2476 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2477 cm->lf.filter_level[0], cm->lf.filter_level[1], 0,
2478 0);
2479 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2480 cm->lf.filter_level_u, cm->lf.filter_level_u, 1, 0);
2481 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2482 cm->lf.filter_level_v, cm->lf.filter_level_v, 2, 0);
2483 }
2484#else
2485 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2486 cm->lf.filter_level, 0, 0);
Cheng Chen13fc8192017-08-19 11:49:28 -07002487#endif // CONFIG_LOOPFILTER_LEVEL
Cheng Chenf572cd32017-08-25 18:34:51 -07002488#endif // CONFIG_LPF_SB
Cheng Chen5ad5b282017-10-05 16:36:06 -07002489#endif // CONFIG_INTRABC
Yaowu Xuc27fc142016-08-22 16:08:15 -07002490 if (cm->frame_parallel_decode)
Yaowu Xuf883b422016-08-30 14:01:10 -07002491 av1_frameworker_broadcast(pbi->cur_buf, INT_MAX);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002492
2493#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002494 if (cm->large_scale_tile) {
2495 if (n_tiles == 1) {
2496#if CONFIG_ANS
2497 return data_end;
2498#else
2499 // Find the end of the single tile buffer
2500 return aom_reader_find_end(&pbi->tile_data->bit_reader);
2501#endif // CONFIG_ANS
2502 } else {
2503 // Return the end of the last tile buffer
2504 return tile_buffers[tile_rows - 1][tile_cols - 1].raw_data_end;
2505 }
2506 } else {
2507#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002508#if CONFIG_ANS
2509 return data_end;
2510#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002511#if !CONFIG_OBU
Yaowu Xuc27fc142016-08-22 16:08:15 -07002512 {
2513 // Get last tile data.
2514 TileData *const td = pbi->tile_data + tile_cols * tile_rows - 1;
Yaowu Xuf883b422016-08-30 14:01:10 -07002515 return aom_reader_find_end(&td->bit_reader);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002516 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002517#else
2518 TileData *const td = pbi->tile_data + endTile;
2519 return aom_reader_find_end(&td->bit_reader);
2520#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002521#endif // CONFIG_ANS
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002522#if CONFIG_EXT_TILE
2523 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002524#endif // CONFIG_EXT_TILE
2525}
2526
Yaowu Xuc27fc142016-08-22 16:08:15 -07002527static void error_handler(void *data) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002528 AV1_COMMON *const cm = (AV1_COMMON *)data;
2529 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME, "Truncated packet");
Yaowu Xuc27fc142016-08-22 16:08:15 -07002530}
2531
Yaowu Xuf883b422016-08-30 14:01:10 -07002532static void read_bitdepth_colorspace_sampling(AV1_COMMON *cm,
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02002533 struct aom_read_bit_buffer *rb,
2534 int allow_lowbitdepth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002535 if (cm->profile >= PROFILE_2) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002536 cm->bit_depth = aom_rb_read_bit(rb) ? AOM_BITS_12 : AOM_BITS_10;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002537 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002538 cm->bit_depth = AOM_BITS_8;
Sebastien Alaiwan98378132017-01-04 11:23:09 +01002539 }
2540
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02002541#if CONFIG_HIGHBITDEPTH
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02002542 cm->use_highbitdepth = cm->bit_depth > AOM_BITS_8 || !allow_lowbitdepth;
James Zern91adea52017-06-15 23:27:26 -07002543#else
2544 (void)allow_lowbitdepth;
Sebastien Alaiwan98378132017-01-04 11:23:09 +01002545#endif
anorkin76fb1262017-03-22 15:12:12 -07002546#if CONFIG_COLORSPACE_HEADERS
2547 cm->color_space = aom_rb_read_literal(rb, 5);
2548 cm->transfer_function = aom_rb_read_literal(rb, 5);
2549#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002550 cm->color_space = aom_rb_read_literal(rb, 3);
anorkin76fb1262017-03-22 15:12:12 -07002551#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07002552 if (cm->color_space != AOM_CS_SRGB) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002553 // [16,235] (including xvycc) vs [0,255] range
Yaowu Xuf883b422016-08-30 14:01:10 -07002554 cm->color_range = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002555 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002556 cm->subsampling_x = aom_rb_read_bit(rb);
2557 cm->subsampling_y = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002558 if (cm->subsampling_x == 1 && cm->subsampling_y == 1)
Yaowu Xuf883b422016-08-30 14:01:10 -07002559 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002560 "4:2:0 color not supported in profile 1 or 3");
Yaowu Xuf883b422016-08-30 14:01:10 -07002561 if (aom_rb_read_bit(rb))
2562 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002563 "Reserved bit set");
2564 } else {
2565 cm->subsampling_y = cm->subsampling_x = 1;
2566 }
anorkin76fb1262017-03-22 15:12:12 -07002567#if CONFIG_COLORSPACE_HEADERS
2568 if (cm->subsampling_x == 1 && cm->subsampling_y == 1) {
2569 cm->chroma_sample_position = aom_rb_read_literal(rb, 2);
2570 }
2571#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002572 } else {
2573 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
2574 // Note: if colorspace is SRGB, then 4:4:4 chroma sampling is assumed.
2575 // 4:2:2 or 4:4:0 chroma sampling is not allowed.
2576 cm->subsampling_y = cm->subsampling_x = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002577 if (aom_rb_read_bit(rb))
2578 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002579 "Reserved bit set");
2580 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002581 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002582 "4:4:4 color not supported in profile 0 or 2");
2583 }
2584 }
2585}
2586
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002587#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01002588void read_sequence_header(SequenceHeader *seq_params,
2589 struct aom_read_bit_buffer *rb) {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002590#if CONFIG_FRAME_SIZE
2591 int num_bits_width = aom_rb_read_literal(rb, 4) + 1;
2592 int num_bits_height = aom_rb_read_literal(rb, 4) + 1;
2593 int max_frame_width = aom_rb_read_literal(rb, num_bits_width) + 1;
2594 int max_frame_height = aom_rb_read_literal(rb, num_bits_height) + 1;
2595
2596 seq_params->num_bits_width = num_bits_width;
2597 seq_params->num_bits_height = num_bits_height;
2598 seq_params->max_frame_width = max_frame_width;
2599 seq_params->max_frame_height = max_frame_height;
2600#endif
2601
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002602 /* Placeholder for actually reading from the bitstream */
David Barker5e70a112017-10-03 14:28:17 +01002603 seq_params->frame_id_numbers_present_flag = aom_rb_read_bit(rb);
2604 if (seq_params->frame_id_numbers_present_flag) {
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02002605 // We must always have delta_frame_id_length < frame_id_length,
2606 // in order for a frame to be referenced with a unique delta.
2607 // Avoid wasting bits by using a coding that enforces this restriction.
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02002608 seq_params->delta_frame_id_length = aom_rb_read_literal(rb, 4) + 2;
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02002609 seq_params->frame_id_length =
2610 aom_rb_read_literal(rb, 3) + seq_params->delta_frame_id_length + 1;
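    // Example: a coded value of 4 gives delta_frame_id_length = 6, and a
    // subsequent coded value of 7 gives frame_id_length = 7 + 6 + 1 = 14, so
    // frame_id_length always exceeds delta_frame_id_length.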
David Barker5e70a112017-10-03 14:28:17 +01002611 }
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002612}
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002613#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002614
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002615static void read_compound_tools(AV1_COMMON *cm,
2616 struct aom_read_bit_buffer *rb) {
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002617 if (!frame_is_intra_only(cm) && cm->reference_mode != COMPOUND_REFERENCE) {
2618 cm->allow_interintra_compound = aom_rb_read_bit(rb);
2619 } else {
2620 cm->allow_interintra_compound = 0;
2621 }
Zoe Liu85b66462017-04-20 14:28:19 -07002622#if CONFIG_COMPOUND_SINGLEREF
2623 if (!frame_is_intra_only(cm)) {
2624#else // !CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002625 if (!frame_is_intra_only(cm) && cm->reference_mode != SINGLE_REFERENCE) {
Zoe Liu85b66462017-04-20 14:28:19 -07002626#endif // CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002627 cm->allow_masked_compound = aom_rb_read_bit(rb);
2628 } else {
2629 cm->allow_masked_compound = 0;
2630 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002631}
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002632
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07002633#if CONFIG_VAR_REFS
2634static void check_valid_ref_frames(AV1_COMMON *cm) {
2635 MV_REFERENCE_FRAME ref_frame;
2636 // TODO(zoeliu): Handle ALTREF_FRAME the same way as the other
2637 // reference frames: the current encoder invalidates ALTREF when ALTREF
2638 // is the same as LAST, but invalidates all the other references
2639 // when they are the same as ALTREF.
2640 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
2641 RefBuffer *const ref_buf = &cm->frame_refs[ref_frame - LAST_FRAME];
2642
2643 if (ref_buf->idx != INVALID_IDX) {
2644 ref_buf->is_valid = 1;
2645
2646 MV_REFERENCE_FRAME ref;
2647 for (ref = LAST_FRAME; ref < ref_frame; ++ref) {
2648 RefBuffer *const buf = &cm->frame_refs[ref - LAST_FRAME];
2649 if (buf->is_valid && buf->idx == ref_buf->idx) {
2650 if (ref_frame != ALTREF_FRAME || ref == LAST_FRAME) {
2651 ref_buf->is_valid = 0;
2652 break;
2653 } else {
2654 buf->is_valid = 0;
2655 }
2656 }
2657 }
2658 } else {
2659 ref_buf->is_valid = 0;
2660 }
2661 }
2662}
2663#endif // CONFIG_VAR_REFS
2664
Sarah Parker3e579a62017-08-23 16:53:20 -07002665static int read_global_motion_params(WarpedMotionParams *params,
David Barkerd7c8bd52017-09-25 14:47:29 +01002666 const WarpedMotionParams *ref_params,
Sarah Parker3e579a62017-08-23 16:53:20 -07002667 struct aom_read_bit_buffer *rb,
2668 int allow_hp) {
2669 TransformationType type = aom_rb_read_bit(rb);
2670 if (type != IDENTITY) {
2671#if GLOBAL_TRANS_TYPES > 4
2672 type += aom_rb_read_literal(rb, GLOBAL_TYPE_BITS);
2673#else
2674 if (aom_rb_read_bit(rb))
2675 type = ROTZOOM;
2676 else
2677 type = aom_rb_read_bit(rb) ? TRANSLATION : AFFINE;
2678#endif // GLOBAL_TRANS_TYPES > 4
2679 }
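  // With GLOBAL_TRANS_TYPES <= 4, and assuming IDENTITY == 0, the bits above
  // decode as: 0 -> IDENTITY, 1 1 -> ROTZOOM, 1 0 1 -> TRANSLATION,
  // 1 0 0 -> AFFINE.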
2680
David Barkerd7c8bd52017-09-25 14:47:29 +01002681 *params = default_warp_params;
Sarah Parker3e579a62017-08-23 16:53:20 -07002682 params->wmtype = type;
Sebastien Alaiwan238a6d62017-11-01 11:29:46 +01002683
2684 if (type >= ROTZOOM) {
2685 params->wmmat[2] = aom_rb_read_signed_primitive_refsubexpfin(
2686 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2687 (ref_params->wmmat[2] >> GM_ALPHA_PREC_DIFF) -
2688 (1 << GM_ALPHA_PREC_BITS)) *
2689 GM_ALPHA_DECODE_FACTOR +
2690 (1 << WARPEDMODEL_PREC_BITS);
2691 params->wmmat[3] = aom_rb_read_signed_primitive_refsubexpfin(
2692 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2693 (ref_params->wmmat[3] >> GM_ALPHA_PREC_DIFF)) *
2694 GM_ALPHA_DECODE_FACTOR;
Sarah Parker3e579a62017-08-23 16:53:20 -07002695 }
Sebastien Alaiwan238a6d62017-11-01 11:29:46 +01002696
2697 if (type >= AFFINE) {
2698 params->wmmat[4] = aom_rb_read_signed_primitive_refsubexpfin(
2699 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2700 (ref_params->wmmat[4] >> GM_ALPHA_PREC_DIFF)) *
2701 GM_ALPHA_DECODE_FACTOR;
2702 params->wmmat[5] = aom_rb_read_signed_primitive_refsubexpfin(
2703 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2704 (ref_params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
2705 (1 << GM_ALPHA_PREC_BITS)) *
2706 GM_ALPHA_DECODE_FACTOR +
2707 (1 << WARPEDMODEL_PREC_BITS);
2708 } else {
2709 params->wmmat[4] = -params->wmmat[3];
2710 params->wmmat[5] = params->wmmat[2];
2711 }
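  // For non-AFFINE types the second row mirrors the first
  // (wmmat[4] = -wmmat[3], wmmat[5] = wmmat[2]), which for ROTZOOM keeps the
  // model a scaled rotation.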
2712
2713 if (type >= TRANSLATION) {
2714 const int trans_bits = (type == TRANSLATION)
2715 ? GM_ABS_TRANS_ONLY_BITS - !allow_hp
2716 : GM_ABS_TRANS_BITS;
2717 const int trans_dec_factor =
2718 (type == TRANSLATION) ? GM_TRANS_ONLY_DECODE_FACTOR * (1 << !allow_hp)
2719 : GM_TRANS_DECODE_FACTOR;
2720 const int trans_prec_diff = (type == TRANSLATION)
2721 ? GM_TRANS_ONLY_PREC_DIFF + !allow_hp
2722 : GM_TRANS_PREC_DIFF;
2723 params->wmmat[0] = aom_rb_read_signed_primitive_refsubexpfin(
2724 rb, (1 << trans_bits) + 1, SUBEXPFIN_K,
2725 (ref_params->wmmat[0] >> trans_prec_diff)) *
2726 trans_dec_factor;
2727 params->wmmat[1] = aom_rb_read_signed_primitive_refsubexpfin(
2728 rb, (1 << trans_bits) + 1, SUBEXPFIN_K,
2729 (ref_params->wmmat[1] >> trans_prec_diff)) *
2730 trans_dec_factor;
2731 }
2732
Sarah Parker3e579a62017-08-23 16:53:20 -07002733 if (params->wmtype <= AFFINE) {
2734 int good_shear_params = get_shear_params(params);
2735 if (!good_shear_params) return 0;
2736 }
2737
2738 return 1;
2739}
2740
2741static void read_global_motion(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
2742 int frame;
2743 for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
David Barkerd7c8bd52017-09-25 14:47:29 +01002744 const WarpedMotionParams *ref_params =
2745 cm->error_resilient_mode ? &default_warp_params
2746 : &cm->prev_frame->global_motion[frame];
Sarah Parker3e579a62017-08-23 16:53:20 -07002747 int good_params = read_global_motion_params(
David Barkerd7c8bd52017-09-25 14:47:29 +01002748 &cm->global_motion[frame], ref_params, rb, cm->allow_high_precision_mv);
Sarah Parker3e579a62017-08-23 16:53:20 -07002749 if (!good_params)
2750 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2751 "Invalid shear parameters for global motion.");
2752
2753 // TODO(sarahparker, debargha): The logic in the commented out code below
2754 // does not work currently and causes mismatches when resize is on. Fix it
2755 // before turning the optimization back on.
2756 /*
2757 YV12_BUFFER_CONFIG *ref_buf = get_ref_frame(cm, frame);
2758 if (cm->width == ref_buf->y_crop_width &&
2759 cm->height == ref_buf->y_crop_height) {
2760 read_global_motion_params(&cm->global_motion[frame],
2761 &cm->prev_frame->global_motion[frame], rb,
2762 cm->allow_high_precision_mv);
2763 } else {
David Barkerd7c8bd52017-09-25 14:47:29 +01002764 cm->global_motion[frame] = default_warp_params;
Sarah Parker3e579a62017-08-23 16:53:20 -07002765 }
2766 */
2767 /*
2768 printf("Dec Ref %d [%d/%d]: %d %d %d %d\n",
2769 frame, cm->current_video_frame, cm->show_frame,
2770 cm->global_motion[frame].wmmat[0],
2771 cm->global_motion[frame].wmmat[1],
2772 cm->global_motion[frame].wmmat[2],
2773 cm->global_motion[frame].wmmat[3]);
2774 */
2775 }
David Barkercba7da72017-09-14 11:24:27 +01002776 memcpy(cm->cur_frame->global_motion, cm->global_motion,
2777 TOTAL_REFS_PER_FRAME * sizeof(WarpedMotionParams));
Sarah Parker3e579a62017-08-23 16:53:20 -07002778}
Sarah Parker3e579a62017-08-23 16:53:20 -07002779
Yaowu Xuf883b422016-08-30 14:01:10 -07002780static size_t read_uncompressed_header(AV1Decoder *pbi,
2781 struct aom_read_bit_buffer *rb) {
2782 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002783 MACROBLOCKD *const xd = &pbi->mb;
2784 BufferPool *const pool = cm->buffer_pool;
2785 RefCntBuffer *const frame_bufs = pool->frame_bufs;
2786 int i, mask, ref_index = 0;
2787 size_t sz;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002788
Yaowu Xuc27fc142016-08-22 16:08:15 -07002789 cm->last_frame_type = cm->frame_type;
2790 cm->last_intra_only = cm->intra_only;
2791
Yaowu Xuc27fc142016-08-22 16:08:15 -07002792 // NOTE: By default, all coded frames are to be used as reference frames.
2793 cm->is_reference_frame = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002794
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002795#if !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07002796 if (aom_rb_read_literal(rb, 2) != AOM_FRAME_MARKER)
2797 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002798 "Invalid frame marker");
2799
Yaowu Xuf883b422016-08-30 14:01:10 -07002800 cm->profile = av1_read_profile(rb);
Sebastien Alaiwanb9c652a2017-05-03 15:44:28 +02002801
2802 const BITSTREAM_PROFILE MAX_SUPPORTED_PROFILE =
2803 CONFIG_HIGHBITDEPTH ? MAX_PROFILES : PROFILE_2;
2804
2805 if (cm->profile >= MAX_SUPPORTED_PROFILE)
Yaowu Xuf883b422016-08-30 14:01:10 -07002806 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002807 "Unsupported bitstream profile");
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002808#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002809
Yunqing Wangc2502b52017-07-19 17:44:18 -07002810#if CONFIG_EXT_TILE
2811 cm->large_scale_tile = aom_rb_read_literal(rb, 1);
2812#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01002813 if (cm->large_scale_tile) cm->seq_params.frame_id_numbers_present_flag = 0;
Yunqing Wangc2502b52017-07-19 17:44:18 -07002814#endif // CONFIG_REFERENCE_BUFFER
2815#endif // CONFIG_EXT_TILE
2816
Yaowu Xuf883b422016-08-30 14:01:10 -07002817 cm->show_existing_frame = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002818
2819 if (cm->show_existing_frame) {
Yaowu Xu415ba932016-12-27 11:17:32 -08002820 // Show an existing frame directly.
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01002821 const int existing_frame_idx = aom_rb_read_literal(rb, 3);
2822 const int frame_to_show = cm->ref_frame_map[existing_frame_idx];
Yaowu Xu415ba932016-12-27 11:17:32 -08002823#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01002824 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02002825 int frame_id_length = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01002826 int display_frame_id = aom_rb_read_literal(rb, frame_id_length);
2827 /* Compare display_frame_id with ref_frame_id and check that the frame
2828 * is valid for referencing */
2829 if (display_frame_id != cm->ref_frame_id[existing_frame_idx] ||
2830 cm->valid_for_referencing[existing_frame_idx] == 0)
2831 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2832 "Reference buffer frame ID mismatch");
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002833 }
2834#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002835 lock_buffer_pool(pool);
2836 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
2837 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07002838 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002839 "Buffer %d does not contain a decoded frame",
2840 frame_to_show);
2841 }
2842 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
2843 unlock_buffer_pool(pool);
2844
Cheng Chen13fc8192017-08-19 11:49:28 -07002845#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07002846 cm->lf.filter_level[0] = 0;
2847 cm->lf.filter_level[1] = 0;
2848#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07002849 cm->lf.filter_level = 0;
Cheng Chen179479f2017-08-04 10:56:39 -07002850#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002851 cm->show_frame = 1;
2852 pbi->refresh_frame_flags = 0;
2853
2854 if (cm->frame_parallel_decode) {
2855 for (i = 0; i < REF_FRAMES; ++i)
2856 cm->next_ref_frame_map[i] = cm->ref_frame_map[i];
2857 }
2858
2859 return 0;
2860 }
2861
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002862#if !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07002863 cm->frame_type = (FRAME_TYPE)aom_rb_read_bit(rb);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002864 cm->show_frame = aom_rb_read_bit(rb);
2865 if (cm->frame_type != KEY_FRAME)
2866 cm->intra_only = cm->show_frame ? 0 : aom_rb_read_bit(rb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002867#else
2868 cm->frame_type = (FRAME_TYPE)aom_rb_read_literal(rb, 2); // 2 bits
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002869 cm->show_frame = aom_rb_read_bit(rb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002870 cm->intra_only = cm->frame_type == INTRA_ONLY_FRAME;
2871#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07002872 cm->error_resilient_mode = aom_rb_read_bit(rb);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002873#if CONFIG_REFERENCE_BUFFER
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002874#if !CONFIG_OBU
David Barker5e70a112017-10-03 14:28:17 +01002875 if (frame_is_intra_only(cm)) read_sequence_header(&cm->seq_params, rb);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002876#endif // !CONFIG_OBU
David Barker5e70a112017-10-03 14:28:17 +01002877 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02002878 int frame_id_length = cm->seq_params.frame_id_length;
2879 int diff_len = cm->seq_params.delta_frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01002880 int prev_frame_id = 0;
2881 if (cm->frame_type != KEY_FRAME) {
2882 prev_frame_id = cm->current_frame_id;
2883 }
2884 cm->current_frame_id = aom_rb_read_literal(rb, frame_id_length);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002885
David Barker5e70a112017-10-03 14:28:17 +01002886 if (cm->frame_type != KEY_FRAME) {
2887 int diff_frame_id;
2888 if (cm->current_frame_id > prev_frame_id) {
2889 diff_frame_id = cm->current_frame_id - prev_frame_id;
2890 } else {
2891 diff_frame_id =
2892 (1 << frame_id_length) + cm->current_frame_id - prev_frame_id;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002893 }
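      /* Example with frame_id_length == 8: prev_frame_id == 250 and
       * current_frame_id == 3 give diff_frame_id == 256 + 3 - 250 == 9,
       * i.e. the difference is taken modulo 2^frame_id_length. */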
David Barker5e70a112017-10-03 14:28:17 +01002894 /* Check current_frame_id for conformance */
2895 if (prev_frame_id == cm->current_frame_id ||
2896 diff_frame_id >= (1 << (frame_id_length - 1))) {
2897 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2898 "Invalid value of current_frame_id");
2899 }
2900 }
2901 /* Check if some frames need to be marked as not valid for referencing */
2902 for (i = 0; i < REF_FRAMES; i++) {
2903 if (cm->frame_type == KEY_FRAME) {
2904 cm->valid_for_referencing[i] = 0;
2905 } else if (cm->current_frame_id - (1 << diff_len) > 0) {
2906 if (cm->ref_frame_id[i] > cm->current_frame_id ||
2907 cm->ref_frame_id[i] < cm->current_frame_id - (1 << diff_len))
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002908 cm->valid_for_referencing[i] = 0;
David Barker5e70a112017-10-03 14:28:17 +01002909 } else {
2910 if (cm->ref_frame_id[i] > cm->current_frame_id &&
2911 cm->ref_frame_id[i] <
2912 (1 << frame_id_length) + cm->current_frame_id - (1 << diff_len))
2913 cm->valid_for_referencing[i] = 0;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002914 }
2915 }
2916 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002917#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002918
2919#if CONFIG_FRAME_SIZE
2920 int frame_size_override_flag = aom_rb_read_literal(rb, 1);
2921#endif
2922
Yaowu Xuc27fc142016-08-22 16:08:15 -07002923 if (cm->frame_type == KEY_FRAME) {
Jingning Hand8a15a62017-10-30 10:53:42 -07002924 cm->current_video_frame = 0;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002925#if !CONFIG_OBU
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02002926 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002927#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002928 pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1;
2929
2930 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
2931 cm->frame_refs[i].idx = INVALID_IDX;
2932 cm->frame_refs[i].buf = NULL;
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07002933#if CONFIG_VAR_REFS
2934 cm->frame_refs[i].is_valid = 0;
2935#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07002936 }
2937
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002938#if CONFIG_FRAME_SIZE
2939 setup_frame_size(cm, frame_size_override_flag, rb);
2940#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07002941 setup_frame_size(cm, rb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002942#endif
Debargha Mukherjeed2630fa2017-09-22 10:32:51 -07002943 setup_sb_size(cm, rb);
2944
Yaowu Xuc27fc142016-08-22 16:08:15 -07002945 if (pbi->need_resync) {
2946 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
2947 pbi->need_resync = 0;
2948 }
Alex Converseeb780e72016-12-13 12:46:41 -08002949#if CONFIG_ANS && ANS_MAX_SYMBOLS
2950 cm->ans_window_size_log2 = aom_rb_read_literal(rb, 4) + 8;
2951#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
hui su24f7b072016-10-12 11:36:24 -07002952 cm->allow_screen_content_tools = aom_rb_read_bit(rb);
RogerZhou3b635242017-09-19 10:06:46 -07002953#if CONFIG_AMVR
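    // seq_force_integer_mv: 0 means integer MVs are never forced, 1 means
    // they are always forced, and 2 means the choice is signalled per frame
    // (see cur_frame_force_integer_mv later in this function).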
2954 if (cm->allow_screen_content_tools) {
2955 if (aom_rb_read_bit(rb)) {
RogerZhou10a03802017-10-26 11:49:48 -07002956 cm->seq_force_integer_mv = 2;
RogerZhou3b635242017-09-19 10:06:46 -07002957 } else {
RogerZhou10a03802017-10-26 11:49:48 -07002958 cm->seq_force_integer_mv = aom_rb_read_bit(rb);
RogerZhou3b635242017-09-19 10:06:46 -07002959 }
2960 } else {
RogerZhou10a03802017-10-26 11:49:48 -07002961 cm->seq_force_integer_mv = 0;
RogerZhou3b635242017-09-19 10:06:46 -07002962 }
2963#endif
Fangwen Fu930c51c2017-05-07 20:39:17 -07002964#if CONFIG_TEMPMV_SIGNALING
2965 cm->use_prev_frame_mvs = 0;
2966#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002967 } else {
hui su24f7b072016-10-12 11:36:24 -07002968 if (cm->intra_only) cm->allow_screen_content_tools = aom_rb_read_bit(rb);
Thomas Daedea6a854b2017-06-22 17:49:11 -07002969#if CONFIG_TEMPMV_SIGNALING
2970 if (cm->intra_only || cm->error_resilient_mode) cm->use_prev_frame_mvs = 0;
2971#endif
2972#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
2973// The only way to reset all frame contexts to their default values is with a
2974// keyframe.
2975#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07002976 if (cm->error_resilient_mode) {
2977 cm->reset_frame_context = RESET_FRAME_CONTEXT_ALL;
2978 } else {
2979 if (cm->intra_only) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002980 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002981 ? RESET_FRAME_CONTEXT_ALL
2982 : RESET_FRAME_CONTEXT_CURRENT;
2983 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002984 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002985 ? RESET_FRAME_CONTEXT_CURRENT
2986 : RESET_FRAME_CONTEXT_NONE;
2987 if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT)
Yaowu Xuf883b422016-08-30 14:01:10 -07002988 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002989 ? RESET_FRAME_CONTEXT_ALL
2990 : RESET_FRAME_CONTEXT_CURRENT;
2991 }
2992 }
Thomas Daedea6a854b2017-06-22 17:49:11 -07002993#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002994
2995 if (cm->intra_only) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002996#if !CONFIG_OBU
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02002997 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002998#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002999
Yaowu Xuf883b422016-08-30 14:01:10 -07003000 pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003001#if CONFIG_FRAME_SIZE
3002 setup_frame_size(cm, frame_size_override_flag, rb);
3003#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003004 setup_frame_size(cm, rb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003005#endif
Pavel Frolovea3dd3a2017-09-25 16:06:19 +03003006 setup_sb_size(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003007 if (pbi->need_resync) {
3008 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
3009 pbi->need_resync = 0;
3010 }
Alex Converseeb780e72016-12-13 12:46:41 -08003011#if CONFIG_ANS && ANS_MAX_SYMBOLS
3012 cm->ans_window_size_log2 = aom_rb_read_literal(rb, 4) + 8;
3013#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003014 } else if (pbi->need_resync != 1) { /* Skip if need resync */
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003015#if CONFIG_OBU
3016 pbi->refresh_frame_flags = (cm->frame_type == S_FRAME)
3017 ? ~(1 << REF_FRAMES)
3018 : aom_rb_read_literal(rb, REF_FRAMES);
3019#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003020 pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003021#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003022
Yaowu Xuc27fc142016-08-22 16:08:15 -07003023 if (!pbi->refresh_frame_flags) {
3024 // NOTE: "pbi->refresh_frame_flags == 0" indicates that the coded frame
3025 // will not be used as a reference
3026 cm->is_reference_frame = 0;
3027 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003028
3029 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003030 const int ref = aom_rb_read_literal(rb, REF_FRAMES_LOG2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003031 const int idx = cm->ref_frame_map[ref];
Rupert Swarbrick5eb471c2017-10-02 16:06:54 +01003032
3033 // Most of the time, streams start with a keyframe. In that case,
3034 // ref_frame_map will have been filled in at that point and will not
3035 // contain any -1's. However, streams are explicitly allowed to start
3036 // with an intra-only frame, so long as they don't then signal a
3037 // reference to a slot that hasn't been set yet. That's what we are
3038 // checking here.
3039 if (idx == -1)
3040 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3041 "Inter frame requests nonexistent reference");
3042
Yaowu Xuc27fc142016-08-22 16:08:15 -07003043 RefBuffer *const ref_frame = &cm->frame_refs[i];
3044 ref_frame->idx = idx;
3045 ref_frame->buf = &frame_bufs[idx].buf;
Zoe Liu17af2742017-10-06 10:36:42 -07003046#if CONFIG_FRAME_SIGN_BIAS
3047#if CONFIG_OBU
3048 // NOTE: For the scenario of (cm->frame_type != S_FRAME),
3049 // ref_frame_sign_bias will be reset based on frame offsets.
3050 cm->ref_frame_sign_bias[LAST_FRAME + i] = 0;
3051#endif // CONFIG_OBU
3052#else // !CONFIG_FRAME_SIGN_BIAS
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003053#if CONFIG_OBU
3054 cm->ref_frame_sign_bias[LAST_FRAME + i] =
3055 (cm->frame_type == S_FRAME) ? 0 : aom_rb_read_bit(rb);
Zoe Liu17af2742017-10-06 10:36:42 -07003056#else // !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07003057 cm->ref_frame_sign_bias[LAST_FRAME + i] = aom_rb_read_bit(rb);
Zoe Liu17af2742017-10-06 10:36:42 -07003058#endif // CONFIG_OBU
3059#endif // CONFIG_FRAME_SIGN_BIAS
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003060#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003061 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003062 int frame_id_length = cm->seq_params.frame_id_length;
3063 int diff_len = cm->seq_params.delta_frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003064 int delta_frame_id_minus1 = aom_rb_read_literal(rb, diff_len);
3065 int ref_frame_id =
3066 ((cm->current_frame_id - (delta_frame_id_minus1 + 1) +
3067 (1 << frame_id_length)) %
3068 (1 << frame_id_length));
3069 /* Compare values derived from delta_frame_id_minus1 and
3070 * refresh_frame_flags. Also, check that the frame is valid for referencing */
3071 if (ref_frame_id != cm->ref_frame_id[ref] ||
3072 cm->valid_for_referencing[ref] == 0)
3073 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3074 "Reference buffer frame ID mismatch");
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003075 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003076#endif // CONFIG_REFERENCE_BUFFER
Yaowu Xuc27fc142016-08-22 16:08:15 -07003077 }
3078
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07003079#if CONFIG_VAR_REFS
3080 check_valid_ref_frames(cm);
3081#endif // CONFIG_VAR_REFS
3082
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003083#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003084 if (cm->error_resilient_mode == 0 && frame_size_override_flag) {
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003085 setup_frame_size_with_refs(cm, rb);
3086 } else {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003087 setup_frame_size(cm, frame_size_override_flag, rb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003088 }
3089#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003090 setup_frame_size_with_refs(cm, rb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003091#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003092
RogerZhou3b635242017-09-19 10:06:46 -07003093#if CONFIG_AMVR
RogerZhou10a03802017-10-26 11:49:48 -07003094 if (cm->seq_force_integer_mv == 2) {
3095 cm->cur_frame_force_integer_mv = aom_rb_read_bit(rb);
RogerZhou3b635242017-09-19 10:06:46 -07003096 } else {
RogerZhou10a03802017-10-26 11:49:48 -07003097 cm->cur_frame_force_integer_mv = cm->seq_force_integer_mv;
RogerZhou3b635242017-09-19 10:06:46 -07003098 }
RogerZhou10a03802017-10-26 11:49:48 -07003099
3100 if (cm->cur_frame_force_integer_mv) {
3101 cm->allow_high_precision_mv = 0;
3102 } else {
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003103#if CONFIG_EIGHTH_PEL_MV_ONLY
3104 cm->allow_high_precision_mv = 1;
3105#else
RogerZhou10a03802017-10-26 11:49:48 -07003106 cm->allow_high_precision_mv = aom_rb_read_bit(rb);
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003107#endif // CONFIG_EIGHTH_PEL_MV_ONLY
RogerZhou10a03802017-10-26 11:49:48 -07003108 }
3109#else
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003110#if CONFIG_EIGHTH_PEL_MV_ONLY
3111 cm->allow_high_precision_mv = 1;
3112#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003113 cm->allow_high_precision_mv = aom_rb_read_bit(rb);
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003114#endif // CONFIG_EIGHTH_PEL_MV_ONLY
RogerZhou10a03802017-10-26 11:49:48 -07003115#endif
Angie Chiang5678ad92016-11-21 09:38:40 -08003116 cm->interp_filter = read_frame_interp_filter(rb);
Fangwen Fu8d164de2016-12-14 13:40:54 -08003117#if CONFIG_TEMPMV_SIGNALING
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01003118 if (frame_might_use_prev_frame_mvs(cm))
Fangwen Fu8d164de2016-12-14 13:40:54 -08003119 cm->use_prev_frame_mvs = aom_rb_read_bit(rb);
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01003120 else
3121 cm->use_prev_frame_mvs = 0;
Fangwen Fu8d164de2016-12-14 13:40:54 -08003122#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003123 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
3124 RefBuffer *const ref_buf = &cm->frame_refs[i];
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02003125#if CONFIG_HIGHBITDEPTH
Yaowu Xuf883b422016-08-30 14:01:10 -07003126 av1_setup_scale_factors_for_frame(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003127 &ref_buf->sf, ref_buf->buf->y_crop_width,
3128 ref_buf->buf->y_crop_height, cm->width, cm->height,
3129 cm->use_highbitdepth);
3130#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003131 av1_setup_scale_factors_for_frame(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003132 &ref_buf->sf, ref_buf->buf->y_crop_width,
3133 ref_buf->buf->y_crop_height, cm->width, cm->height);
3134#endif
3135 }
3136 }
3137 }
Jingning Hanc723b342017-08-24 11:19:46 -07003138
Jingning Hanea255c92017-09-29 08:12:09 -07003139#if CONFIG_FRAME_MARKER
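  // For frames that are not shown, the frame offset is coded as a 4-bit delta
  // added to cm->current_video_frame; shown frames use the frame counter
  // directly.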
Jingning Hanc723b342017-08-24 11:19:46 -07003140 if (cm->show_frame == 0) {
3141 cm->frame_offset = cm->current_video_frame + aom_rb_read_literal(rb, 4);
3142 } else {
3143 cm->frame_offset = cm->current_video_frame;
3144 }
Zoe Liu17af2742017-10-06 10:36:42 -07003145 av1_setup_frame_buf_refs(cm);
3146
3147#if CONFIG_FRAME_SIGN_BIAS
3148#if CONFIG_OBU
3149 if (cm->frame_type != S_FRAME)
3150#endif // CONFIG_OBU
3151 av1_setup_frame_sign_bias(cm);
Zoe Liu17af2742017-10-06 10:36:42 -07003152#endif // CONFIG_FRAME_SIGN_BIAS
3153#endif // CONFIG_FRAME_MARKER
Jingning Hanc723b342017-08-24 11:19:46 -07003154
Fangwen Fu8d164de2016-12-14 13:40:54 -08003155#if CONFIG_TEMPMV_SIGNALING
3156 cm->cur_frame->intra_only = cm->frame_type == KEY_FRAME || cm->intra_only;
3157#endif
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003158
3159#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003160 if (cm->seq_params.frame_id_numbers_present_flag) {
3161 /* If bitmask is set, update reference frame id values and
3162 mark frames as valid for reference */
3163 int refresh_frame_flags =
3164 cm->frame_type == KEY_FRAME ? 0xFF : pbi->refresh_frame_flags;
3165 for (i = 0; i < REF_FRAMES; i++) {
3166 if ((refresh_frame_flags >> i) & 1) {
3167 cm->ref_frame_id[i] = cm->current_frame_id;
3168 cm->valid_for_referencing[i] = 1;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003169 }
3170 }
3171 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003172#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003173
Yaowu Xuc27fc142016-08-22 16:08:15 -07003174 get_frame_new_buffer(cm)->bit_depth = cm->bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003175 get_frame_new_buffer(cm)->color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07003176#if CONFIG_COLORSPACE_HEADERS
3177 get_frame_new_buffer(cm)->transfer_function = cm->transfer_function;
3178 get_frame_new_buffer(cm)->chroma_sample_position = cm->chroma_sample_position;
3179#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003180 get_frame_new_buffer(cm)->color_range = cm->color_range;
3181 get_frame_new_buffer(cm)->render_width = cm->render_width;
3182 get_frame_new_buffer(cm)->render_height = cm->render_height;
3183
3184 if (pbi->need_resync) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003185 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003186 "Keyframe / intra-only frame required to reset decoder"
3187 " state");
3188 }
3189
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01003190#if CONFIG_EXT_TILE
3191 const int might_bwd_adapt =
3192 !(cm->error_resilient_mode || cm->large_scale_tile);
3193#else
3194 const int might_bwd_adapt = !cm->error_resilient_mode;
3195#endif // CONFIG_EXT_TILE
3196 if (might_bwd_adapt) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003197 cm->refresh_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07003198 ? REFRESH_FRAME_CONTEXT_FORWARD
3199 : REFRESH_FRAME_CONTEXT_BACKWARD;
3200 } else {
3201 cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_FORWARD;
3202 }
Thomas Daededa4d8b92017-06-05 15:44:14 -07003203#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuf883b422016-08-30 14:01:10 -07003204 // This flag will be overridden by the call to av1_setup_past_independence
Yaowu Xuc27fc142016-08-22 16:08:15 -07003205 // below, forcing the use of context 0 for those frame types.
Yaowu Xuf883b422016-08-30 14:01:10 -07003206 cm->frame_context_idx = aom_rb_read_literal(rb, FRAME_CONTEXTS_LOG2);
Thomas Daededa4d8b92017-06-05 15:44:14 -07003207#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003208
3209 // Generate next_ref_frame_map.
3210 lock_buffer_pool(pool);
3211 for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
3212 if (mask & 1) {
3213 cm->next_ref_frame_map[ref_index] = cm->new_fb_idx;
3214 ++frame_bufs[cm->new_fb_idx].ref_count;
3215 } else {
3216 cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
3217 }
3218 // Current thread holds the reference frame.
3219 if (cm->ref_frame_map[ref_index] >= 0)
3220 ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
3221 ++ref_index;
3222 }
3223
3224 for (; ref_index < REF_FRAMES; ++ref_index) {
3225 cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
3226
3227 // Current thread holds the reference frame.
3228 if (cm->ref_frame_map[ref_index] >= 0)
3229 ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
3230 }
3231 unlock_buffer_pool(pool);
3232 pbi->hold_ref_buf = 1;
3233
3234 if (frame_is_intra_only(cm) || cm->error_resilient_mode)
Yaowu Xuf883b422016-08-30 14:01:10 -07003235 av1_setup_past_independence(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003236
Yaowu Xuc27fc142016-08-22 16:08:15 -07003237 setup_loopfilter(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003238 setup_quantization(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003239 xd->bd = (int)cm->bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003240
hui su0d103572017-03-01 17:58:01 -08003241#if CONFIG_Q_ADAPT_PROBS
Yaowu Xuf883b422016-08-30 14:01:10 -07003242 av1_default_coef_probs(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003243 if (cm->frame_type == KEY_FRAME || cm->error_resilient_mode ||
3244 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL) {
3245 for (i = 0; i < FRAME_CONTEXTS; ++i) cm->frame_contexts[i] = *cm->fc;
3246 } else if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT) {
Thomas Daededa4d8b92017-06-05 15:44:14 -07003247#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
3248 if (cm->frame_refs[0].idx <= 0) {
3249 cm->frame_contexts[cm->frame_refs[0].idx] = *cm->fc;
3250 }
3251#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003252 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
Thomas Daededa4d8b92017-06-05 15:44:14 -07003253#endif // CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuc27fc142016-08-22 16:08:15 -07003254 }
hui su0d103572017-03-01 17:58:01 -08003255#endif // CONFIG_Q_ADAPT_PROBS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003256
3257 setup_segmentation(cm, rb);
3258
Arild Fuldseth07441162016-08-15 15:07:52 +02003259 {
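    // Delta-Q signaling: without CONFIG_EXT_DELTA_Q, per-superblock delta-Q is
    // only permitted when no segment-level quantizer feature is active.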
Thomas Davies28444be2017-10-13 18:12:25 +01003260 int delta_q_allowed = 1;
3261#if !CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02003262 struct segmentation *const seg = &cm->seg;
3263 int segment_quantizer_active = 0;
3264 for (i = 0; i < MAX_SEGMENTS; i++) {
3265 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
3266 segment_quantizer_active = 1;
3267 }
3268 }
Thomas Davies28444be2017-10-13 18:12:25 +01003269 delta_q_allowed = !segment_quantizer_active;
3270#endif
Arild Fuldseth07441162016-08-15 15:07:52 +02003271
Thomas Daviesf6936102016-09-05 16:51:31 +01003272 cm->delta_q_res = 1;
Fangwen Fu231fe422017-04-24 17:52:29 -07003273#if CONFIG_EXT_DELTA_Q
3274 cm->delta_lf_res = 1;
Jonathan Matthewsa48b1e62017-09-01 14:58:47 +01003275 cm->delta_lf_present_flag = 0;
Cheng Chen880166a2017-10-02 17:48:48 -07003276#if CONFIG_LOOPFILTER_LEVEL
3277 cm->delta_lf_multi = 0;
3278#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07003279#endif
Thomas Davies28444be2017-10-13 18:12:25 +01003280 if (delta_q_allowed == 1 && cm->base_qindex > 0) {
Arild Fuldseth07441162016-08-15 15:07:52 +02003281 cm->delta_q_present_flag = aom_rb_read_bit(rb);
3282 } else {
3283 cm->delta_q_present_flag = 0;
3284 }
3285 if (cm->delta_q_present_flag) {
3286 xd->prev_qindex = cm->base_qindex;
Thomas Daviesf6936102016-09-05 16:51:31 +01003287 cm->delta_q_res = 1 << aom_rb_read_literal(rb, 2);
Fangwen Fu231fe422017-04-24 17:52:29 -07003288#if CONFIG_EXT_DELTA_Q
Fangwen Fu231fe422017-04-24 17:52:29 -07003289 cm->delta_lf_present_flag = aom_rb_read_bit(rb);
3290 if (cm->delta_lf_present_flag) {
Cheng Chen880166a2017-10-02 17:48:48 -07003291 xd->prev_delta_lf_from_base = 0;
3292 cm->delta_lf_res = 1 << aom_rb_read_literal(rb, 2);
Cheng Chena97394f2017-09-27 15:05:14 -07003293#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen880166a2017-10-02 17:48:48 -07003294 cm->delta_lf_multi = aom_rb_read_bit(rb);
Cheng Chena97394f2017-09-27 15:05:14 -07003295 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id)
3296 xd->prev_delta_lf[lf_id] = 0;
3297#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07003298 }
3299#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02003300 }
3301 }
RogerZhou3b635242017-09-19 10:06:46 -07003302#if CONFIG_AMVR
RogerZhou10a03802017-10-26 11:49:48 -07003303 xd->cur_frame_force_integer_mv = cm->cur_frame_force_integer_mv;
RogerZhou3b635242017-09-19 10:06:46 -07003304#endif
Thomas Davies3ab20b42017-09-19 10:30:53 +01003305
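  // Derive the effective qindex for each segment and flag segments as lossless
  // when the qindex and all DC/AC delta-Q values are zero.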
Urvang Joshi454280d2016-10-14 16:51:44 -07003306 for (i = 0; i < MAX_SEGMENTS; ++i) {
3307 const int qindex = cm->seg.enabled
3308 ? av1_get_qindex(&cm->seg, i, cm->base_qindex)
3309 : cm->base_qindex;
3310 xd->lossless[i] = qindex == 0 && cm->y_dc_delta_q == 0 &&
3311 cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
3312 xd->qindex[i] = qindex;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003313 }
Thomas Daedef636d5c2017-06-29 13:48:27 -07003314 cm->all_lossless = all_lossless(cm, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003315 setup_segmentation_dequant(cm);
Thomas Daedef636d5c2017-06-29 13:48:27 -07003316#if CONFIG_CDEF
3317 if (!cm->all_lossless) {
3318 setup_cdef(cm, rb);
3319 }
3320#endif
3321#if CONFIG_LOOP_RESTORATION
3322 decode_restoration_mode(cm, rb);
3323#endif // CONFIG_LOOP_RESTORATION
3324 cm->tx_mode = read_tx_mode(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003325 cm->reference_mode = read_frame_reference_mode(cm, rb);
Debargha Mukherjee6f3c8982017-09-22 21:14:01 -07003326 if (cm->reference_mode != SINGLE_REFERENCE) setup_compound_reference_mode(cm);
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003327 read_compound_tools(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003328
Sarah Parkere68a3e42017-02-16 14:03:24 -08003329 cm->reduced_tx_set_used = aom_rb_read_bit(rb);
Sarah Parkere68a3e42017-02-16 14:03:24 -08003330
Angie Chiang6dbffbf2017-10-06 16:59:54 -07003331#if CONFIG_ADAPT_SCAN
3332 cm->use_adapt_scan = aom_rb_read_bit(rb);
3333 // TODO(angiebird): call av1_init_scan_order only when use_adapt_scan
3334 // switches from 1 to 0
3335 if (cm->use_adapt_scan == 0) av1_init_scan_order(cm);
3336#endif // CONFIG_ADAPT_SCAN
3337
Pavel Frolov57c36e12017-09-12 15:00:40 +03003338 // NOTE(zoeliu): Since cm->prev_frame can be neither a frame with
3339 // show_existing_frame=1 nor a frame that is not used as
3340 // a reference, it is possible that by the time it is being
3341 // referred to, the frame buffer it originally points to has
3342 // already expired and been reassigned to the current
3343 // newly coded frame. Hence, we need to check whether this is
3344 // the case, and if so, we have two choices:
3345 // (1) Simply disable the use of previous frame MVs; or
3346 // (2) Have cm->prev_frame point to one reference frame buffer,
3347 // e.g. LAST_FRAME.
3348 if (!dec_is_ref_frame_buf(pbi, cm->prev_frame)) {
3349 // Reassign the LAST_FRAME buffer to cm->prev_frame.
3350 cm->prev_frame =
3351 cm->frame_refs[LAST_FRAME - LAST_FRAME].idx != INVALID_IDX
3352 ? &cm->buffer_pool
3353 ->frame_bufs[cm->frame_refs[LAST_FRAME - LAST_FRAME].idx]
3354 : NULL;
3355 }
Pavel Frolov57c36e12017-09-12 15:00:40 +03003356
3357#if CONFIG_TEMPMV_SIGNALING
3358 if (cm->use_prev_frame_mvs && !frame_can_use_prev_frame_mvs(cm)) {
3359 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3360 "Frame wrongly requests previous frame MVs");
3361 }
3362#else
3363 cm->use_prev_frame_mvs = !cm->error_resilient_mode && cm->prev_frame &&
3364#if CONFIG_FRAME_SUPERRES
3365 cm->width == cm->last_width &&
3366 cm->height == cm->last_height &&
3367#else
3368 cm->width == cm->prev_frame->buf.y_crop_width &&
3369 cm->height == cm->prev_frame->buf.y_crop_height &&
3370#endif // CONFIG_FRAME_SUPERRES
3371 !cm->last_intra_only && cm->last_show_frame &&
3372 (cm->last_frame_type != KEY_FRAME);
3373#endif // CONFIG_TEMPMV_SIGNALING
3374
Sarah Parkerf289f9f2017-09-12 18:50:02 -07003375 if (!frame_is_intra_only(cm)) read_global_motion(cm, rb);
Sarah Parker3e579a62017-08-23 16:53:20 -07003376
Yaowu Xuc27fc142016-08-22 16:08:15 -07003377 read_tile_info(pbi, rb);
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003378 if (use_compressed_header(cm)) {
3379 sz = aom_rb_read_literal(rb, 16);
3380 if (sz == 0)
3381 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3382 "Invalid header size");
3383 } else {
3384 sz = 0;
3385 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003386 return sz;
3387}
3388
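// Reads the probability updates carried in the compressed frame header. With
// CONFIG_NEW_MULTISYMBOL there is nothing left to parse and the function is a
// no-op.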
Yaowu Xuf883b422016-08-30 14:01:10 -07003389static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003390 size_t partition_size) {
Thomas Davies2e868ab2017-10-24 10:42:27 +01003391#if CONFIG_NEW_MULTISYMBOL
Thomas Daviese7154832017-10-03 10:12:17 +01003392 (void)pbi;
3393 (void)data;
3394 (void)partition_size;
3395 return 0;
3396#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003397 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuf883b422016-08-30 14:01:10 -07003398 aom_reader r;
Ryanf0e39192017-10-09 09:45:13 -07003399
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02003400#if ((CONFIG_RECT_TX_EXT) || (!CONFIG_NEW_MULTISYMBOL || CONFIG_LV_MAP) || \
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02003401 (CONFIG_COMPOUND_SINGLEREF))
Thomas Davies599395e2017-07-21 18:02:48 +01003402 FRAME_CONTEXT *const fc = cm->fc;
Thomas Davies599395e2017-07-21 18:02:48 +01003403#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003404
Alex Converse2cdf0d82016-12-13 13:53:09 -08003405#if CONFIG_ANS && ANS_MAX_SYMBOLS
Alex Converseeb780e72016-12-13 12:46:41 -08003406 r.window_size = 1 << cm->ans_window_size_log2;
Alex Converse2cdf0d82016-12-13 13:53:09 -08003407#endif
Alex Converse346440b2017-01-03 13:47:37 -08003408 if (aom_reader_init(&r, data, partition_size, pbi->decrypt_cb,
3409 pbi->decrypt_state))
Yaowu Xuf883b422016-08-30 14:01:10 -07003410 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003411 "Failed to allocate bool decoder 0");
Yaowu Xuc27fc142016-08-22 16:08:15 -07003412
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02003413#if CONFIG_RECT_TX_EXT
Yue Chen56e226e2017-05-02 16:21:40 -07003414 if (cm->tx_mode == TX_MODE_SELECT)
3415 av1_diff_update_prob(&r, &fc->quarter_tx_size_prob, ACCT_STR);
Yue Chend6bdd462017-07-19 16:05:43 -07003416#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003417
Thomas Davies985bfc32017-06-27 16:51:26 +01003418#if !CONFIG_NEW_MULTISYMBOL
David Barker16c64e32017-08-23 16:54:59 +01003419 if (cm->tx_mode == TX_MODE_SELECT)
Ryanf0e39192017-10-09 09:45:13 -07003420 for (int i = 0; i < TXFM_PARTITION_CONTEXTS; ++i)
David Barker16c64e32017-08-23 16:54:59 +01003421 av1_diff_update_prob(&r, &fc->txfm_partition_prob[i], ACCT_STR);
Ryanf0e39192017-10-09 09:45:13 -07003422 for (int i = 0; i < SKIP_CONTEXTS; ++i)
Thomas Davies61e3e372017-04-04 16:10:23 +01003423 av1_diff_update_prob(&r, &fc->skip_probs[i], ACCT_STR);
Cheng Chen0a7f2f52017-10-10 15:16:09 -07003424
3425#if CONFIG_JNT_COMP
3426 for (int i = 0; i < COMP_INDEX_CONTEXTS; ++i)
3427 av1_diff_update_prob(&r, &fc->compound_index_probs[i], ACCT_STR);
3428#endif // CONFIG_JNT_COMP
Thomas Davies61e3e372017-04-04 16:10:23 +01003429#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003430
Debargha Mukherjee801cc922017-09-22 17:22:50 -07003431 if (!frame_is_intra_only(cm)) {
Thomas Davies149eda52017-06-12 18:11:55 +01003432#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003433 read_inter_mode_probs(fc, &r);
Thomas Davies149eda52017-06-12 18:11:55 +01003434#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003435
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003436 if (cm->reference_mode != COMPOUND_REFERENCE &&
3437 cm->allow_interintra_compound) {
Thomas Daviescff91712017-07-07 11:49:55 +01003438#if !CONFIG_NEW_MULTISYMBOL
Ryanf0e39192017-10-09 09:45:13 -07003439 for (int i = 0; i < BLOCK_SIZE_GROUPS; i++) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003440 if (is_interintra_allowed_bsize_group(i)) {
Michael Bebenita6048d052016-08-25 14:40:54 -07003441 av1_diff_update_prob(&r, &fc->interintra_prob[i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003442 }
3443 }
Thomas Daviescff91712017-07-07 11:49:55 +01003444#endif
Debargha Mukherjee371968c2017-10-29 12:30:04 -07003445#if !CONFIG_NEW_MULTISYMBOL
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01003446#if CONFIG_EXT_PARTITION_TYPES
3447 int block_sizes_to_update = BLOCK_SIZES_ALL;
3448#else
3449 int block_sizes_to_update = BLOCK_SIZES;
3450#endif
Ryanf0e39192017-10-09 09:45:13 -07003451 for (int i = 0; i < block_sizes_to_update; i++) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003452 if (is_interintra_allowed_bsize(i) && is_interintra_wedge_used(i)) {
Michael Bebenita6048d052016-08-25 14:40:54 -07003453 av1_diff_update_prob(&r, &fc->wedge_interintra_prob[i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003454 }
3455 }
Debargha Mukherjee371968c2017-10-29 12:30:04 -07003456#endif // !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003457 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003458
Thomas Daviesf6ad9352017-04-19 11:38:06 +01003459#if !CONFIG_NEW_MULTISYMBOL
Ryanf0e39192017-10-09 09:45:13 -07003460 for (int i = 0; i < INTRA_INTER_CONTEXTS; i++)
Michael Bebenita6048d052016-08-25 14:40:54 -07003461 av1_diff_update_prob(&r, &fc->intra_inter_prob[i], ACCT_STR);
Thomas Daviesf6ad9352017-04-19 11:38:06 +01003462#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003463
David Barker037ee412017-09-19 12:43:46 +01003464#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003465 read_frame_reference_mode_probs(cm, &r);
David Barker037ee412017-09-19 12:43:46 +01003466#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003467
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02003468#if CONFIG_COMPOUND_SINGLEREF
Ryanf0e39192017-10-09 09:45:13 -07003469 for (int i = 0; i < COMP_INTER_MODE_CONTEXTS; i++)
Zoe Liu85b66462017-04-20 14:28:19 -07003470 av1_diff_update_prob(&r, &fc->comp_inter_mode_prob[i], ACCT_STR);
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02003471#endif // CONFIG_COMPOUND_SINGLEREF
Zoe Liu85b66462017-04-20 14:28:19 -07003472
Thomas Davies599395e2017-07-21 18:02:48 +01003473#if !CONFIG_NEW_MULTISYMBOL
RogerZhou3b635242017-09-19 10:06:46 -07003474#if CONFIG_AMVR
RogerZhou10a03802017-10-26 11:49:48 -07003475 if (cm->cur_frame_force_integer_mv == 0) {
RogerZhou3b635242017-09-19 10:06:46 -07003476#endif
Ryanf0e39192017-10-09 09:45:13 -07003477 for (int i = 0; i < NMV_CONTEXTS; ++i)
RogerZhou3b635242017-09-19 10:06:46 -07003478 read_mv_probs(&fc->nmvc[i], cm->allow_high_precision_mv, &r);
3479#if CONFIG_AMVR
3480 }
3481#endif
Thomas Davies599395e2017-07-21 18:02:48 +01003482#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003483 }
3484
Yaowu Xuf883b422016-08-30 14:01:10 -07003485 return aom_reader_has_error(&r);
Thomas Davies2e868ab2017-10-24 10:42:27 +01003486#endif // CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003487}
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003488
Yaowu Xuc27fc142016-08-22 16:08:15 -07003489#ifdef NDEBUG
3490#define debug_check_frame_counts(cm) (void)0
3491#else // !NDEBUG
3492// Counts should only be incremented when frame_parallel_decoding_mode and
3493// error_resilient_mode are disabled.
Yaowu Xuf883b422016-08-30 14:01:10 -07003494static void debug_check_frame_counts(const AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003495 FRAME_COUNTS zero_counts;
Yaowu Xuf883b422016-08-30 14:01:10 -07003496 av1_zero(zero_counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003497 assert(cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD ||
3498 cm->error_resilient_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003499 assert(!memcmp(cm->counts.partition, zero_counts.partition,
3500 sizeof(cm->counts.partition)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003501 assert(!memcmp(cm->counts.switchable_interp, zero_counts.switchable_interp,
3502 sizeof(cm->counts.switchable_interp)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003503 assert(!memcmp(cm->counts.inter_compound_mode,
3504 zero_counts.inter_compound_mode,
3505 sizeof(cm->counts.inter_compound_mode)));
3506 assert(!memcmp(cm->counts.interintra, zero_counts.interintra,
3507 sizeof(cm->counts.interintra)));
3508 assert(!memcmp(cm->counts.wedge_interintra, zero_counts.wedge_interintra,
3509 sizeof(cm->counts.wedge_interintra)));
Sarah Parker6fddd182016-11-10 20:57:20 -08003510 assert(!memcmp(cm->counts.compound_interinter,
3511 zero_counts.compound_interinter,
3512 sizeof(cm->counts.compound_interinter)));
Yue Chencb60b182016-10-13 15:18:22 -07003513 assert(!memcmp(cm->counts.motion_mode, zero_counts.motion_mode,
3514 sizeof(cm->counts.motion_mode)));
Sebastien Alaiwan1bc94fc2017-10-31 10:25:17 +01003515#if CONFIG_NCOBMC_ADAPT_WEIGHT
Wei-Ting Lin85a8f702017-06-22 13:55:15 -07003516 assert(!memcmp(cm->counts.ncobmc_mode, zero_counts.ncobmc_mode,
3517 sizeof(cm->counts.ncobmc_mode)));
3518#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003519 assert(!memcmp(cm->counts.intra_inter, zero_counts.intra_inter,
3520 sizeof(cm->counts.intra_inter)));
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02003521#if CONFIG_COMPOUND_SINGLEREF
Zoe Liu85b66462017-04-20 14:28:19 -07003522 assert(!memcmp(cm->counts.comp_inter_mode, zero_counts.comp_inter_mode,
3523 sizeof(cm->counts.comp_inter_mode)));
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02003524#endif // CONFIG_COMPOUND_SINGLEREF
Yaowu Xuc27fc142016-08-22 16:08:15 -07003525 assert(!memcmp(cm->counts.comp_inter, zero_counts.comp_inter,
3526 sizeof(cm->counts.comp_inter)));
Zoe Liuc082bbc2017-05-17 13:31:37 -07003527#if CONFIG_EXT_COMP_REFS
3528 assert(!memcmp(cm->counts.comp_ref_type, zero_counts.comp_ref_type,
3529 sizeof(cm->counts.comp_ref_type)));
3530 assert(!memcmp(cm->counts.uni_comp_ref, zero_counts.uni_comp_ref,
3531 sizeof(cm->counts.uni_comp_ref)));
3532#endif // CONFIG_EXT_COMP_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003533 assert(!memcmp(cm->counts.single_ref, zero_counts.single_ref,
3534 sizeof(cm->counts.single_ref)));
3535 assert(!memcmp(cm->counts.comp_ref, zero_counts.comp_ref,
3536 sizeof(cm->counts.comp_ref)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003537 assert(!memcmp(cm->counts.comp_bwdref, zero_counts.comp_bwdref,
3538 sizeof(cm->counts.comp_bwdref)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003539 assert(!memcmp(cm->counts.skip, zero_counts.skip, sizeof(cm->counts.skip)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003540 assert(
3541 !memcmp(&cm->counts.mv[0], &zero_counts.mv[0], sizeof(cm->counts.mv[0])));
3542 assert(
3543 !memcmp(&cm->counts.mv[1], &zero_counts.mv[1], sizeof(cm->counts.mv[0])));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003544}
3545#endif // NDEBUG
3546
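// Sets up a bit reader over the header bytes; when a decrypt callback is
// registered, up to MAX_AV1_HEADER_SIZE bytes are first decrypted into
// clear_data and the reader is pointed at that buffer instead.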
Yaowu Xuf883b422016-08-30 14:01:10 -07003547static struct aom_read_bit_buffer *init_read_bit_buffer(
3548 AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
3549 const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003550 rb->bit_offset = 0;
3551 rb->error_handler = error_handler;
3552 rb->error_handler_data = &pbi->common;
3553 if (pbi->decrypt_cb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003554 const int n = (int)AOMMIN(MAX_AV1_HEADER_SIZE, data_end - data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003555 pbi->decrypt_cb(pbi->decrypt_state, data, clear_data, n);
3556 rb->bit_buffer = clear_data;
3557 rb->bit_buffer_end = clear_data + n;
3558 } else {
3559 rb->bit_buffer = data;
3560 rb->bit_buffer_end = data_end;
3561 }
3562 return rb;
3563}
3564
3565//------------------------------------------------------------------------------
3566
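// Frame dimensions are coded minus one. With CONFIG_FRAME_SIZE the number of
// bits used for each dimension is taken from the sequence header rather than
// being fixed at 16.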
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003567#if CONFIG_FRAME_SIZE
3568void av1_read_frame_size(struct aom_read_bit_buffer *rb, int num_bits_width,
3569 int num_bits_height, int *width, int *height) {
3570 *width = aom_rb_read_literal(rb, num_bits_width) + 1;
3571 *height = aom_rb_read_literal(rb, num_bits_height) + 1;
3572#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003573void av1_read_frame_size(struct aom_read_bit_buffer *rb, int *width,
3574 int *height) {
3575 *width = aom_rb_read_literal(rb, 16) + 1;
3576 *height = aom_rb_read_literal(rb, 16) + 1;
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003577#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003578}
3579
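// The bitstream profile is coded in two bits, with one extra bit appended for
// values above 2.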
Yaowu Xuf883b422016-08-30 14:01:10 -07003580BITSTREAM_PROFILE av1_read_profile(struct aom_read_bit_buffer *rb) {
3581 int profile = aom_rb_read_bit(rb);
3582 profile |= aom_rb_read_bit(rb) << 1;
3583 if (profile > 2) profile += aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003584 return (BITSTREAM_PROFILE)profile;
3585}
3586
Thomas Davies4822e142017-10-10 11:30:36 +01003587static void make_update_tile_list_dec(AV1Decoder *pbi, int start_tile,
3588 int num_tile, FRAME_CONTEXT *ec_ctxs[]) {
Thomas Davies028b57f2017-02-22 16:42:11 +00003589 int i;
Thomas Davies4822e142017-10-10 11:30:36 +01003590 for (i = start_tile; i < start_tile + num_tile; ++i)
3591 ec_ctxs[i - start_tile] = &pbi->tile_data[i].tctx;
Thomas Davies028b57f2017-02-22 16:42:11 +00003592}
Thomas Davies028b57f2017-02-22 16:42:11 +00003593
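// Upscales the decoded frame back to full resolution when super-resolution
// coding was used for this frame; a no-op for unscaled frames.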
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003594#if CONFIG_FRAME_SUPERRES
3595void superres_post_decode(AV1Decoder *pbi) {
3596 AV1_COMMON *const cm = &pbi->common;
3597 BufferPool *const pool = cm->buffer_pool;
3598
3599 if (av1_superres_unscaled(cm)) return;
3600
3601 lock_buffer_pool(pool);
3602 av1_superres_upscale(cm, pool);
3603 unlock_buffer_pool(pool);
3604}
3605#endif // CONFIG_FRAME_SUPERRES
3606
Yi Luo10e23002017-07-31 11:54:43 -07003607static void dec_setup_frame_boundary_info(AV1_COMMON *const cm) {
David Barker5c06a642017-08-18 13:18:16 +01003608// Note: When LOOPFILTERING_ACROSS_TILES is enabled, we need to clear the
3609// boundary information every frame, since the tile boundaries may
3610// change every frame (particularly when dependent-horztiles is also
3611// enabled); when it is disabled, the only information stored is the frame
3612// boundaries, which only depend on the frame size.
3613#if !CONFIG_LOOPFILTERING_ACROSS_TILES
3614 if (cm->width != cm->last_width || cm->height != cm->last_height)
3615#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
3616 {
Yi Luo10e23002017-07-31 11:54:43 -07003617 int row, col;
3618 for (row = 0; row < cm->mi_rows; ++row) {
3619 MODE_INFO *mi = cm->mi + row * cm->mi_stride;
3620 for (col = 0; col < cm->mi_cols; ++col) {
3621 mi->mbmi.boundary_info = 0;
3622 mi++;
3623 }
3624 }
3625 av1_setup_frame_boundary_info(cm);
3626 }
3627}
3628
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003629size_t av1_decode_frame_headers_and_setup(AV1Decoder *pbi, const uint8_t *data,
3630 const uint8_t *data_end,
3631 const uint8_t **p_data_end) {
3632 AV1_COMMON *const cm = &pbi->common;
3633 MACROBLOCKD *const xd = &pbi->mb;
3634 struct aom_read_bit_buffer rb;
3635 uint8_t clear_data[MAX_AV1_HEADER_SIZE];
3636 size_t first_partition_size;
3637 YV12_BUFFER_CONFIG *new_fb;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003638 RefBuffer *last_fb_ref_buf = &cm->frame_refs[LAST_FRAME - LAST_FRAME];
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003639
3640#if CONFIG_ADAPT_SCAN
3641 av1_deliver_eob_threshold(cm, xd);
3642#endif
3643#if CONFIG_BITSTREAM_DEBUG
3644 bitstream_queue_set_frame_read(cm->current_video_frame * 2 + cm->show_frame);
3645#endif
3646
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003647 int i;
3648 for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
David Barkerd7c8bd52017-09-25 14:47:29 +01003649 cm->global_motion[i] = default_warp_params;
3650 cm->cur_frame->global_motion[i] = default_warp_params;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003651 }
3652 xd->global_motion = cm->global_motion;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003653
3654 first_partition_size = read_uncompressed_header(
3655 pbi, init_read_bit_buffer(pbi, &rb, data, data_end, clear_data));
3656
3657#if CONFIG_EXT_TILE
3658 // If cm->single_tile_decoding = 0, the independent decoding of a single tile
3659 // or a section of a frame is not allowed.
3660 if (!cm->single_tile_decoding &&
3661 (pbi->dec_tile_row >= 0 || pbi->dec_tile_col >= 0)) {
3662 pbi->dec_tile_row = -1;
3663 pbi->dec_tile_col = -1;
3664 }
3665#endif // CONFIG_EXT_TILE
3666
3667 pbi->first_partition_size = first_partition_size;
3668 pbi->uncomp_hdr_size = aom_rb_bytes_read(&rb);
3669 new_fb = get_frame_new_buffer(cm);
3670 xd->cur_buf = new_fb;
3671#if CONFIG_INTRABC
3672#if CONFIG_HIGHBITDEPTH
3673 av1_setup_scale_factors_for_frame(
3674 &xd->sf_identity, xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
3675 xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
3676 cm->use_highbitdepth);
3677#else
3678 av1_setup_scale_factors_for_frame(
3679 &xd->sf_identity, xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
3680 xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height);
3681#endif // CONFIG_HIGHBITDEPTH
3682#endif // CONFIG_INTRABC
3683
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003684 if (cm->show_existing_frame) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003685 // showing a frame directly
3686 *p_data_end = data + aom_rb_bytes_read(&rb);
3687 return 0;
3688 }
3689
3690 data += aom_rb_bytes_read(&rb);
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003691 if (first_partition_size)
3692 if (!read_is_valid(data, first_partition_size, data_end))
3693 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3694 "Truncated packet or corrupt header length");
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003695
3696 cm->setup_mi(cm);
3697
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003698 // NOTE(zoeliu): Since cm->prev_frame can be neither a frame with
3699 // show_existing_frame=1 nor a frame that is not used as
3700 // a reference, it is possible that by the time it is being
3701 // referred to, the frame buffer it originally points to has
3702 // already expired and been reassigned to the current
3703 // newly coded frame. Hence, we need to check whether this is
3704 // the case, and if so, we have two choices:
3705 // (1) Simply disable the use of previous frame MVs; or
3706 // (2) Have cm->prev_frame point to one reference frame buffer,
3707 // e.g. LAST_FRAME.
3708 if (!dec_is_ref_frame_buf(pbi, cm->prev_frame)) {
3709 // Reassign the LAST_FRAME buffer to cm->prev_frame.
3710 cm->prev_frame = last_fb_ref_buf->idx != INVALID_IDX
3711 ? &cm->buffer_pool->frame_bufs[last_fb_ref_buf->idx]
3712 : NULL;
3713 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003714
3715#if CONFIG_TEMPMV_SIGNALING
3716 if (cm->use_prev_frame_mvs && !frame_can_use_prev_frame_mvs(cm)) {
3717 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3718 "Frame wrongly requests previous frame MVs");
3719 }
3720#else
3721 cm->use_prev_frame_mvs = !cm->error_resilient_mode && cm->prev_frame &&
3722#if CONFIG_FRAME_SUPERRES
3723 cm->width == cm->last_width &&
3724 cm->height == cm->last_height &&
3725#else
3726 cm->width == cm->prev_frame->buf.y_crop_width &&
3727 cm->height == cm->prev_frame->buf.y_crop_height &&
3728#endif // CONFIG_FRAME_SUPERRES
3729 !cm->last_intra_only && cm->last_show_frame &&
3730 (cm->last_frame_type != KEY_FRAME);
3731#endif // CONFIG_TEMPMV_SIGNALING
3732
Zoe Liuf704a1c2017-10-02 16:55:59 -07003733#if CONFIG_EXT_SKIP
3734 av1_setup_skip_mode_allowed(cm);
3735#if 0
3736 printf("\nDECODER: Frame=%d, frame_offset=%d, show_frame=%d, "
3737 "is_skip_mode_allowed=%d, ref_frame_idx=(%d,%d)\n",
3738 cm->current_video_frame, cm->frame_offset, cm->show_frame,
3739 cm->is_skip_mode_allowed, cm->ref_frame_idx_0, cm->ref_frame_idx_1);
3740#endif // 0
3741#endif // CONFIG_EXT_SKIP
3742
Jingning Hanea255c92017-09-29 08:12:09 -07003743#if CONFIG_MFMV
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003744 av1_setup_motion_field(cm);
Jingning Hanea255c92017-09-29 08:12:09 -07003745#endif // CONFIG_MFMV
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003746
3747 av1_setup_block_planes(xd, cm->subsampling_x, cm->subsampling_y);
3748#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
3749 if (cm->error_resilient_mode || frame_is_intra_only(cm)) {
3750 // use the default frame context values
3751 *cm->fc = cm->frame_contexts[FRAME_CONTEXT_DEFAULTS];
3752 cm->pre_fc = &cm->frame_contexts[FRAME_CONTEXT_DEFAULTS];
3753 } else {
3754 *cm->fc = cm->frame_contexts[cm->frame_refs[0].idx];
3755 cm->pre_fc = &cm->frame_contexts[cm->frame_refs[0].idx];
3756 }
3757#else
3758 *cm->fc = cm->frame_contexts[cm->frame_context_idx];
3759 cm->pre_fc = &cm->frame_contexts[cm->frame_context_idx];
3760#endif // CONFIG_NO_FRAME_CONTEXT_SIGNALING
3761 if (!cm->fc->initialized)
3762 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3763 "Uninitialized entropy context.");
3764
3765 av1_zero(cm->counts);
3766
3767 xd->corrupted = 0;
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003768 if (first_partition_size) {
3769 new_fb->corrupted = read_compressed_header(pbi, data, first_partition_size);
3770 if (new_fb->corrupted)
3771 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3772 "Decode failed. Frame data header is corrupted.");
3773 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003774 return first_partition_size;
3775}
3776
David Barker01563082017-10-09 13:59:16 +01003777// Once-per-frame initialization
3778static void setup_frame_info(AV1Decoder *pbi) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003779 AV1_COMMON *const cm = &pbi->common;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003780
3781#if CONFIG_LOOP_RESTORATION
3782 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
3783 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
3784 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
3785 av1_alloc_restoration_buffers(cm);
3786 }
3787#endif
3788
Cheng Chend8184da2017-09-26 18:15:22 -07003789#if !CONFIG_LOOPFILTER_LEVEL
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003790 if (cm->lf.filter_level && !cm->skip_loop_filter) {
3791 av1_loop_filter_frame_init(cm, cm->lf.filter_level, cm->lf.filter_level);
3792 }
3793#endif
3794
3795 // If encoded in frame parallel mode, frame context is ready after decoding
3796 // the frame header.
David Barker01563082017-10-09 13:59:16 +01003797 if (cm->frame_parallel_decode &&
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003798 cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD) {
3799 AVxWorker *const worker = pbi->frame_worker_owner;
3800 FrameWorkerData *const frame_worker_data = worker->data1;
3801 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003802#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
3803 cm->frame_contexts[cm->new_fb_idx] = *cm->fc;
3804#else
3805 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
3806#endif // CONFIG_NO_FRAME_CONTEXT_SIGNALING
3807 }
3808 av1_frameworker_lock_stats(worker);
3809 pbi->cur_buf->row = -1;
3810 pbi->cur_buf->col = -1;
3811 frame_worker_data->frame_context_ready = 1;
3812 // Signal the main thread that context is ready.
3813 av1_frameworker_signal_stats(worker);
3814 av1_frameworker_unlock_stats(worker);
3815 }
3816
3817 dec_setup_frame_boundary_info(cm);
David Barker01563082017-10-09 13:59:16 +01003818}
3819
3820void av1_decode_tg_tiles_and_wrapup(AV1Decoder *pbi, const uint8_t *data,
3821 const uint8_t *data_end,
3822 const uint8_t **p_data_end, int startTile,
3823 int endTile, int initialize_flag) {
3824 AV1_COMMON *const cm = &pbi->common;
3825 MACROBLOCKD *const xd = &pbi->mb;
3826
3827 if (initialize_flag) setup_frame_info(pbi);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003828
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003829#if CONFIG_OBU
Debargha Mukherjee6ea917e2017-10-19 09:31:29 -07003830 *p_data_end = decode_tiles(pbi, data, data_end, startTile, endTile);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003831#else
Debargha Mukherjee6ea917e2017-10-19 09:31:29 -07003832 *p_data_end =
3833 decode_tiles(pbi, data + pbi->uncomp_hdr_size + pbi->first_partition_size,
3834 data_end, startTile, endTile);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003835#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003836
3837 if (endTile != cm->tile_rows * cm->tile_cols - 1) {
3838 return;
3839 }
3840
Ola Hugosson1e7f2d02017-09-22 21:36:26 +02003841#if CONFIG_STRIPED_LOOP_RESTORATION
3842 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
3843 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
3844 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
Rupert Swarbrick76c78002017-11-02 17:26:35 +00003845#if CONFIG_FRAME_SUPERRES && CONFIG_HORZONLY_FRAME_SUPERRES
3846 aom_extend_frame_borders(&pbi->cur_buf->buf);
3847#endif
Ola Hugosson1e7f2d02017-09-22 21:36:26 +02003848 av1_loop_restoration_save_boundary_lines(&pbi->cur_buf->buf, cm);
3849 }
3850#endif
3851
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003852#if CONFIG_CDEF
3853 if (!cm->skip_loop_filter && !cm->all_lossless) {
3854 av1_cdef_frame(&pbi->cur_buf->buf, cm, &pbi->mb);
3855 }
3856#endif // CONFIG_CDEF
3857
3858#if CONFIG_FRAME_SUPERRES
3859 superres_post_decode(pbi);
3860#endif // CONFIG_FRAME_SUPERRES
3861
3862#if CONFIG_LOOP_RESTORATION
3863 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
3864 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
3865 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01003866 av1_loop_restoration_filter_frame((YV12_BUFFER_CONFIG *)xd->cur_buf, cm,
3867 cm->rst_info, 7, NULL);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003868 }
3869#endif // CONFIG_LOOP_RESTORATION
3870
3871 if (!xd->corrupted) {
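    // Backward adaptation: gather the per-tile entropy contexts (only the
    // largest tile with CONFIG_SIMPLE_BWD_ADAPT) and average their CDFs back
    // into the frame context.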
3872 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
Thomas Davies4822e142017-10-10 11:30:36 +01003873#if CONFIG_SIMPLE_BWD_ADAPT
3874 const int num_bwd_ctxs = 1;
3875#else
3876 const int num_bwd_ctxs = cm->tile_rows * cm->tile_cols;
3877#endif
3878 FRAME_CONTEXT **tile_ctxs =
3879 aom_malloc(num_bwd_ctxs * sizeof(&pbi->tile_data[0].tctx));
3880 aom_cdf_prob **cdf_ptrs = aom_malloc(
3881 num_bwd_ctxs * sizeof(&pbi->tile_data[0].tctx.partition_cdf[0][0]));
3882#if CONFIG_SIMPLE_BWD_ADAPT
3883 make_update_tile_list_dec(pbi, cm->largest_tile_id, num_bwd_ctxs,
3884 tile_ctxs);
3885#else
3886 make_update_tile_list_dec(pbi, 0, num_bwd_ctxs, tile_ctxs);
3887#endif
Angie Chiang85e3b962017-10-01 16:04:43 -07003888#if CONFIG_SYMBOLRATE
3889 av1_dump_symbol_rate(cm);
3890#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003891 av1_adapt_intra_frame_probs(cm);
3892 av1_average_tile_coef_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
Thomas Davies4822e142017-10-10 11:30:36 +01003893 num_bwd_ctxs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003894 av1_average_tile_intra_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
Thomas Davies4822e142017-10-10 11:30:36 +01003895 num_bwd_ctxs);
Debargha Mukherjee43061b32017-10-13 16:50:17 -07003896 av1_average_tile_loopfilter_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
3897 num_bwd_ctxs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003898#if CONFIG_ADAPT_SCAN
3899 av1_adapt_scan_order(cm);
3900#endif // CONFIG_ADAPT_SCAN
3901
3902 if (!frame_is_intra_only(cm)) {
3903 av1_adapt_inter_frame_probs(cm);
Thomas Davies0e7b1d72017-10-02 10:54:24 +01003904#if !CONFIG_NEW_MULTISYMBOL
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003905 av1_adapt_mv_probs(cm, cm->allow_high_precision_mv);
Thomas Davies0e7b1d72017-10-02 10:54:24 +01003906#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003907 av1_average_tile_inter_cdfs(&pbi->common, pbi->common.fc, tile_ctxs,
Thomas Davies4822e142017-10-10 11:30:36 +01003908 cdf_ptrs, num_bwd_ctxs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003909 av1_average_tile_mv_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
Thomas Davies4822e142017-10-10 11:30:36 +01003910 num_bwd_ctxs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003911 }
3912 aom_free(tile_ctxs);
3913 aom_free(cdf_ptrs);
3914 } else {
3915 debug_check_frame_counts(cm);
3916 }
3917 } else {
3918 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3919 "Decode failed. Frame data is corrupted.");
3920 }
3921
3922#if CONFIG_INSPECTION
3923 if (pbi->inspect_cb != NULL) {
3924 (*pbi->inspect_cb)(pbi, pbi->inspect_ctx);
3925 }
3926#endif
3927
David Barker01563082017-10-09 13:59:16 +01003928 // For non-frame-parallel decoding, update the frame context here.
3929 if (cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_FORWARD) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003930#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
David Barker01563082017-10-09 13:59:16 +01003931 cm->frame_contexts[cm->new_fb_idx] = *cm->fc;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003932#else
David Barker01563082017-10-09 13:59:16 +01003933 if (!cm->error_resilient_mode)
3934 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003935#endif
David Barker01563082017-10-09 13:59:16 +01003936 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003937}
3938
3939#if CONFIG_OBU
3940
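// Parses the OBU header: a forbidden bit, a 4-bit OBU type, two reserved bits
// and an extension flag. When the extension flag is set, an extra byte
// carrying the temporal ID and further reserved fields follows.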
3941static OBU_TYPE read_obu_header(struct aom_read_bit_buffer *rb,
3942 uint32_t *header_size) {
3943 OBU_TYPE obu_type;
3944 int obu_extension_flag;
3945
3946 *header_size = 1;
3947
Soo-Chul Han38427e82017-09-27 15:06:13 -04003948 // The first bit is obu_forbidden_bit (0) according to R19.
3949 aom_rb_read_bit(rb);
3950
3951 obu_type = (OBU_TYPE)aom_rb_read_literal(rb, 4);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003952 aom_rb_read_literal(rb, 2); // reserved
3953 obu_extension_flag = aom_rb_read_bit(rb);
3954 if (obu_extension_flag) {
3955 *header_size += 1;
3956 aom_rb_read_literal(rb, 3); // temporal_id
3957 aom_rb_read_literal(rb, 2);
3958 aom_rb_read_literal(rb, 2);
3959 aom_rb_read_literal(rb, 1); // reserved
3960 }
3961
3962 return obu_type;
3963}
3964
3965static uint32_t read_temporal_delimiter_obu(void) { return 0; }
3966
3967static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
3968 struct aom_read_bit_buffer *rb) {
3969 AV1_COMMON *const cm = &pbi->common;
David Barker5e70a112017-10-03 14:28:17 +01003970 SequenceHeader *const seq_params = &cm->seq_params;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003971 uint32_t saved_bit_offset = rb->bit_offset;
3972
3973 cm->profile = av1_read_profile(rb);
3974 aom_rb_read_literal(rb, 4); // level
3975
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003976#if CONFIG_FRAME_SIZE
3977 int num_bits_width = aom_rb_read_literal(rb, 4) + 1;
3978 int num_bits_height = aom_rb_read_literal(rb, 4) + 1;
3979 int max_frame_width = aom_rb_read_literal(rb, num_bits_width) + 1;
3980 int max_frame_height = aom_rb_read_literal(rb, num_bits_height) + 1;
3981
3982 seq_params->num_bits_width = num_bits_width;
3983 seq_params->num_bits_height = num_bits_height;
3984 seq_params->max_frame_width = max_frame_width;
3985 seq_params->max_frame_height = max_frame_height;
3986#endif
3987
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003988 seq_params->frame_id_numbers_present_flag = aom_rb_read_bit(rb);
3989 if (seq_params->frame_id_numbers_present_flag) {
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02003990 // We must always have delta_frame_id_length < frame_id_length,
3991 // in order for a frame to be referenced with a unique delta.
3992 // Avoid wasting bits by using a coding that enforces this restriction.
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003993 seq_params->delta_frame_id_length = aom_rb_read_literal(rb, 4) + 2;
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02003994 seq_params->frame_id_length =
3995 aom_rb_read_literal(rb, 3) + seq_params->delta_frame_id_length + 1;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003996 }
3997
3998 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
3999
4000 return ((rb->bit_offset - saved_bit_offset + 7) >> 3);
4001}
4002
4003static uint32_t read_frame_header_obu(AV1Decoder *pbi, const uint8_t *data,
4004 const uint8_t *data_end,
4005 const uint8_t **p_data_end) {
4006 size_t header_size;
4007
4008 header_size =
4009 av1_decode_frame_headers_and_setup(pbi, data, data_end, p_data_end);
4010 return (uint32_t)(pbi->uncomp_hdr_size + header_size);
4011}
4012
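// Reads the first and last tile indices covered by this tile group; each index
// is coded with log2_tile_rows + log2_tile_cols bits.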
4013static uint32_t read_tile_group_header(AV1Decoder *pbi,
4014 struct aom_read_bit_buffer *rb,
4015 int *startTile, int *endTile) {
4016 AV1_COMMON *const cm = &pbi->common;
4017 uint32_t saved_bit_offset = rb->bit_offset;
4018
4019 *startTile = aom_rb_read_literal(rb, cm->log2_tile_rows + cm->log2_tile_cols);
4020 *endTile = aom_rb_read_literal(rb, cm->log2_tile_rows + cm->log2_tile_cols);
4021
4022 return ((rb->bit_offset - saved_bit_offset + 7) >> 3);
4023}
4024
4025static uint32_t read_one_tile_group_obu(AV1Decoder *pbi,
4026 struct aom_read_bit_buffer *rb,
4027 int is_first_tg, const uint8_t *data,
4028 const uint8_t *data_end,
4029 const uint8_t **p_data_end,
4030 int *is_last_tg) {
4031 AV1_COMMON *const cm = &pbi->common;
4032 int startTile, endTile;
4033 uint32_t header_size, tg_payload_size;
4034
4035 header_size = read_tile_group_header(pbi, rb, &startTile, &endTile);
4036 data += header_size;
4037 av1_decode_tg_tiles_and_wrapup(pbi, data, data_end, p_data_end, startTile,
4038 endTile, is_first_tg);
4039 tg_payload_size = (uint32_t)(*p_data_end - data);
4040
4041 // TODO(shan): For now, assume all tile groups are received in order.
4042 *is_last_tg = endTile == cm->tile_rows * cm->tile_cols - 1;
4043
4044 return header_size + tg_payload_size;
4045}
4046
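// The metadata OBU payloads below are parsed and discarded: the private-data,
// HDR CLL and HDR MDCV fields are walked without being stored.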
Soo-Chul Han38427e82017-09-27 15:06:13 -04004047static void read_metadata_private_data(const uint8_t *data, uint32_t sz) {
4048 int i;
4049
4050 for (i = 0; i < (int)sz; i++) {
4051 mem_get_le16(data);
4052 data += 2;
4053 }
4054}
4055
4056static void read_metadata_hdr_cll(const uint8_t *data) {
4057 mem_get_le16(data);
4058 mem_get_le16(data + 2);
4059}
4060
4061static void read_metadata_hdr_mdcv(const uint8_t *data) {
4062 int i;
4063
4064 for (i = 0; i < 3; i++) {
4065 mem_get_le16(data);
4066 data += 2;
4067 mem_get_le16(data);
4068 data += 2;
4069 }
4070
4071 mem_get_le16(data);
4072 data += 2;
4073 mem_get_le16(data);
4074 data += 2;
4075 mem_get_le16(data);
4076 data += 2;
4077 mem_get_le16(data);
4078}
4079
4080static uint32_t read_metadata(const uint8_t *data, uint32_t sz) {
4081 METADATA_TYPE metadata_type;
4082
4083 metadata_type = (METADATA_TYPE)mem_get_le16(data);
4084
4085 if (metadata_type == METADATA_TYPE_PRIVATE_DATA) {
4086 read_metadata_private_data(data + 2, sz - 2);
4087 } else if (metadata_type == METADATA_TYPE_HDR_CLL) {
4088 read_metadata_hdr_cll(data + 2);
4089 } else if (metadata_type == METADATA_TYPE_HDR_MDCV) {
4090 read_metadata_hdr_mdcv(data + 2);
4091 }
4092
4093 return sz;
4094}
4095
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004096void av1_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
4097 const uint8_t *data_end,
4098 const uint8_t **p_data_end) {
4099 AV1_COMMON *const cm = &pbi->common;
4100 int frame_decoding_finished = 0;
4101 int is_first_tg_obu_received = 1;
4102 int frame_header_received = 0;
4103 int frame_header_size = 0;
4104
4105 // decode frame as a series of OBUs
4106 while (!frame_decoding_finished && !cm->error.error_code) {
4107 struct aom_read_bit_buffer rb;
4108 uint8_t clear_data[80];
4109 uint32_t obu_size, obu_header_size, obu_payload_size = 0;
4110 OBU_TYPE obu_type;
4111
Soo-Chul Han38427e82017-09-27 15:06:13 -04004112 init_read_bit_buffer(pbi, &rb, data + PRE_OBU_SIZE_BYTES, data_end,
4113 clear_data);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004114
Soo-Chul Han38427e82017-09-27 15:06:13 -04004115// Every OBU is preceded by a PRE_OBU_SIZE_BYTES-byte size field covering the
4116// OBU header plus payload size.
4117// The OBU size is only needed for tile group OBUs.
4118#if CONFIG_ADD_4BYTES_OBUSIZE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004119 obu_size = mem_get_le32(data);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004120#else
4121 obu_size = data_end - data;
4122#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004123 obu_type = read_obu_header(&rb, &obu_header_size);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004124 data += (PRE_OBU_SIZE_BYTES + obu_header_size);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004125
4126 switch (obu_type) {
4127 case OBU_TD: obu_payload_size = read_temporal_delimiter_obu(); break;
4128 case OBU_SEQUENCE_HEADER:
4129 obu_payload_size = read_sequence_header_obu(pbi, &rb);
4130 break;
4131 case OBU_FRAME_HEADER:
4132 // Only decode first frame header received
4133 if (!frame_header_received) {
4134 frame_header_size = obu_payload_size =
4135 read_frame_header_obu(pbi, data, data_end, p_data_end);
4136 frame_header_received = 1;
4137 } else {
4138 obu_payload_size = frame_header_size;
4139 }
4140 if (cm->show_existing_frame) frame_decoding_finished = 1;
4141 break;
4142 case OBU_TILE_GROUP:
David Barker01563082017-10-09 13:59:16 +01004143 obu_payload_size =
4144 read_one_tile_group_obu(pbi, &rb, is_first_tg_obu_received, data,
4145 data + obu_size - obu_header_size,
4146 p_data_end, &frame_decoding_finished);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004147 is_first_tg_obu_received = 0;
4148 break;
Soo-Chul Han38427e82017-09-27 15:06:13 -04004149 case OBU_METADATA:
4150 obu_payload_size = read_metadata(data, obu_size);
4151 break;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004152 default: break;
4153 }
4154 data += obu_payload_size;
4155 }
4156}
4157#endif