/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <stdlib.h>  // qsort()

#include "./aom_config.h"
#include "./aom_dsp_rtcd.h"
#include "./aom_scale_rtcd.h"
#include "./av1_rtcd.h"

#include "aom/aom_codec.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_dsp/binary_codes_reader.h"
#include "aom_dsp/bitreader.h"
#include "aom_dsp/bitreader_buffer.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem.h"
#include "aom_ports/mem_ops.h"
#include "aom_scale/aom_scale.h"
#include "aom_util/aom_thread.h"

#if CONFIG_BITSTREAM_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_BITSTREAM_DEBUG

#include "av1/common/alloccommon.h"
#if CONFIG_CDEF
#include "av1/common/cdef.h"
#endif
#if CONFIG_INSPECTION
#include "av1/decoder/inspection.h"
#endif
#include "av1/common/common.h"
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/idct.h"
#include "av1/common/mvref_common.h"
#include "av1/common/pred_common.h"
#include "av1/common/quant_common.h"
#include "av1/common/reconinter.h"
#include "av1/common/reconintra.h"
#if CONFIG_FRAME_SUPERRES
#include "av1/common/resize.h"
#endif  // CONFIG_FRAME_SUPERRES
#include "av1/common/seg_common.h"
#include "av1/common/thread_common.h"
#include "av1/common/tile_common.h"

#include "av1/decoder/decodeframe.h"
#include "av1/decoder/decodemv.h"
#include "av1/decoder/decoder.h"
#if CONFIG_LV_MAP
#include "av1/decoder/decodetxb.h"
#endif
#include "av1/decoder/detokenize.h"
#include "av1/decoder/dsubexp.h"
#include "av1/decoder/symbolrate.h"

#include "av1/common/warped_motion.h"

#define MAX_AV1_HEADER_SIZE 80
#define ACCT_STR __func__

#if CONFIG_CFL
#include "av1/common/cfl.h"
#endif

#if CONFIG_STRIPED_LOOP_RESTORATION && !CONFIG_LOOP_RESTORATION
#error "striped_loop_restoration requires loop_restoration"
#endif

#if CONFIG_LOOP_RESTORATION
static void loop_restoration_read_sb_coeffs(const AV1_COMMON *const cm,
                                            MACROBLOCKD *xd,
                                            aom_reader *const r, int plane,
                                            int rtile_idx);
#endif

static struct aom_read_bit_buffer *init_read_bit_buffer(
    AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
    const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]);
static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
                                  size_t partition_size);
static size_t read_uncompressed_header(AV1Decoder *pbi,
                                       struct aom_read_bit_buffer *rb);

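// Returns 1 if compound (two-reference) prediction may be used for the
// current frame. It is never allowed for intra-only frames; without
// CONFIG_ONE_SIDED_COMPOUND it additionally requires references with
// differing sign bias, i.e. references on both temporal sides.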
static int is_compound_reference_allowed(const AV1_COMMON *cm) {
#if CONFIG_ONE_SIDED_COMPOUND  // Normative in decoder
  return !frame_is_intra_only(cm);
#else
  int i;
  if (frame_is_intra_only(cm)) return 0;
  for (i = 1; i < INTER_REFS_PER_FRAME; ++i)
    if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1]) return 1;

  return 0;
#endif  // CONFIG_ONE_SIDED_COMPOUND
}

static void setup_compound_reference_mode(AV1_COMMON *cm) {
  cm->comp_fwd_ref[0] = LAST_FRAME;
  cm->comp_fwd_ref[1] = LAST2_FRAME;
  cm->comp_fwd_ref[2] = LAST3_FRAME;
  cm->comp_fwd_ref[3] = GOLDEN_FRAME;

  cm->comp_bwd_ref[0] = BWDREF_FRAME;
  cm->comp_bwd_ref[1] = ALTREF2_FRAME;
  cm->comp_bwd_ref[2] = ALTREF_FRAME;
}

static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return len != 0 && len <= (size_t)(end - start);
}

static int decode_unsigned_max(struct aom_read_bit_buffer *rb, int max) {
  const int data = aom_rb_read_literal(rb, get_unsigned_bits(max));
  return data > max ? max : data;
}

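// Reads the frame-level transform mode from the uncompressed header. Fully
// lossless frames are restricted to 4x4 transforms.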
static TX_MODE read_tx_mode(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
#if CONFIG_TX64X64
  TX_MODE tx_mode;
#endif
  if (cm->all_lossless) return ONLY_4X4;
#if CONFIG_VAR_TX_NO_TX_MODE
  (void)rb;
  return TX_MODE_SELECT;
#else
#if CONFIG_TX64X64
  tx_mode = aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
  if (tx_mode == ALLOW_32X32) tx_mode += aom_rb_read_bit(rb);
  return tx_mode;
#else
  return aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
#endif  // CONFIG_TX64X64
#endif  // CONFIG_VAR_TX_NO_TX_MODE
}

#if !CONFIG_NEW_MULTISYMBOL
static void read_inter_mode_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i;
  for (i = 0; i < NEWMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->newmv_prob[i], ACCT_STR);
  for (i = 0; i < ZEROMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->zeromv_prob[i], ACCT_STR);
  for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->refmv_prob[i], ACCT_STR);
  for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->drl_prob[i], ACCT_STR);
}
#endif

static REFERENCE_MODE read_frame_reference_mode(
    const AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  if (is_compound_reference_allowed(cm)) {
#if CONFIG_REF_ADAPT
    return aom_rb_read_bit(rb) ? REFERENCE_MODE_SELECT : SINGLE_REFERENCE;
#else
    return aom_rb_read_bit(rb)
               ? REFERENCE_MODE_SELECT
               : (aom_rb_read_bit(rb) ? COMPOUND_REFERENCE : SINGLE_REFERENCE);
#endif  // CONFIG_REF_ADAPT
  } else {
    return SINGLE_REFERENCE;
  }
}

#if !CONFIG_NEW_MULTISYMBOL
static void read_frame_reference_mode_probs(AV1_COMMON *cm, aom_reader *r) {
  FRAME_CONTEXT *const fc = cm->fc;
  int i;

  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    for (i = 0; i < COMP_INTER_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_inter_prob[i], ACCT_STR);

  if (cm->reference_mode != COMPOUND_REFERENCE) {
    for (i = 0; i < REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (SINGLE_REFS - 1); ++j) {
        av1_diff_update_prob(r, &fc->single_ref_prob[i][j], ACCT_STR);
      }
    }
  }

  if (cm->reference_mode != SINGLE_REFERENCE) {
#if CONFIG_EXT_COMP_REFS
    for (i = 0; i < COMP_REF_TYPE_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_ref_type_prob[i], ACCT_STR);

    for (i = 0; i < UNI_COMP_REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (UNIDIR_COMP_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->uni_comp_ref_prob[i][j], ACCT_STR);
    }
#endif  // CONFIG_EXT_COMP_REFS

    for (i = 0; i < REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (FWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_ref_prob[i][j], ACCT_STR);
      for (j = 0; j < (BWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_bwdref_prob[i][j], ACCT_STR);
    }
  }
}

static void update_mv_probs(aom_prob *p, int n, aom_reader *r) {
  int i;
  for (i = 0; i < n; ++i) av1_diff_update_prob(r, &p[i], ACCT_STR);
}

static void read_mv_probs(nmv_context *ctx, int allow_hp, aom_reader *r) {
  int i;
  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      nmv_component *const comp_ctx = &ctx->comps[i];
      update_mv_probs(&comp_ctx->class0_hp, 1, r);
      update_mv_probs(&comp_ctx->hp, 1, r);
    }
  }
}
#endif

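// Applies the inverse transform for one transform block, adding the result to
// the destination buffer, then clears the dequantized coefficients that were
// used (up to and including scan_line).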
static void inverse_transform_block(MACROBLOCKD *xd, int plane,
#if CONFIG_LGT_FROM_PRED
                                    PREDICTION_MODE mode,
#endif
                                    const TX_TYPE tx_type,
                                    const TX_SIZE tx_size, uint8_t *dst,
                                    int stride, int16_t scan_line, int eob) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  tran_low_t *const dqcoeff = pd->dqcoeff;
  av1_inverse_transform_block(xd, dqcoeff,
#if CONFIG_LGT_FROM_PRED
                              mode,
#endif
#if CONFIG_MRC_TX && SIGNAL_ANY_MRC_MASK
                              xd->mrc_mask,
#endif  // CONFIG_MRC_TX && SIGNAL_ANY_MRC_MASK
                              tx_type, tx_size, dst, stride, eob);
  memset(dqcoeff, 0, (scan_line + 1) * sizeof(dqcoeff[0]));
}

static int get_block_idx(const MACROBLOCKD *xd, int plane, int row, int col) {
  const int bsize = xd->mi[0]->mbmi.sb_type;
  const struct macroblockd_plane *pd = &xd->plane[plane];
  const BLOCK_SIZE plane_bsize =
      AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
  const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
  const uint8_t txh_unit = tx_size_high_unit[tx_size];
  return row * max_blocks_wide + col * txh_unit;
}

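// Decodes one intra transform block: forms the intra prediction, reads the
// coefficients (unless the block is skipped) and applies the inverse
// transform to reconstruct the pixels.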
static void predict_and_reconstruct_intra_block(
    AV1_COMMON *cm, MACROBLOCKD *const xd, aom_reader *const r,
    MB_MODE_INFO *const mbmi, int plane, int row, int col, TX_SIZE tx_size) {
  PLANE_TYPE plane_type = get_plane_type(plane);
  const int block_idx = get_block_idx(xd, plane, row, col);
  av1_predict_intra_block_facade(cm, xd, plane, block_idx, col, row, tx_size);

  if (!mbmi->skip) {
    struct macroblockd_plane *const pd = &xd->plane[plane];
#if CONFIG_LV_MAP
    int16_t max_scan_line = 0;
    int eob;
    av1_read_coeffs_txb_facade(cm, xd, r, row, col, block_idx, plane,
                               pd->dqcoeff, tx_size, &max_scan_line, &eob);
    // tx_type will be read out in av1_read_coeffs_txb_facade
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
#else  // CONFIG_LV_MAP
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
    const SCAN_ORDER *scan_order = get_scan(cm, tx_size, tx_type, mbmi);
    int16_t max_scan_line = 0;
    const int eob =
        av1_decode_block_tokens(cm, xd, plane, scan_order, col, row, tx_size,
                                tx_type, &max_scan_line, r, mbmi->segment_id);
#endif  // CONFIG_LV_MAP
    if (eob) {
      uint8_t *dst =
          &pd->dst.buf[(row * pd->dst.stride + col) << tx_size_wide_log2[0]];
      inverse_transform_block(xd, plane,
#if CONFIG_LGT_FROM_PRED
                              mbmi->mode,
#endif
                              tx_type, tx_size, dst, pd->dst.stride,
                              max_scan_line, eob);
    }
  }
#if CONFIG_CFL
  if (plane == AOM_PLANE_Y && xd->cfl->store_y) {
    cfl_store_tx(xd, row, col, tx_size, mbmi->sb_type);
  }
#endif  // CONFIG_CFL
}

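// Recursively decodes and reconstructs the transform tree of an inter block.
// When the current transform size matches the signalled size, the
// coefficients are read and inverse transformed; otherwise the block is split
// into four sub-blocks and each is processed recursively.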
static void decode_reconstruct_tx(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                  aom_reader *r, MB_MODE_INFO *const mbmi,
                                  int plane, BLOCK_SIZE plane_bsize,
                                  int blk_row, int blk_col, int block,
                                  TX_SIZE tx_size, int *eob_total) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
  const int tx_row = blk_row >> (1 - pd->subsampling_y);
  const int tx_col = blk_col >> (1 - pd->subsampling_x);
  const TX_SIZE plane_tx_size =
      plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
            : mbmi->inter_tx_size[tx_row][tx_col];
  // Scale to match transform block unit.
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (tx_size == plane_tx_size) {
    PLANE_TYPE plane_type = get_plane_type(plane);
#if CONFIG_LV_MAP
    int16_t max_scan_line = 0;
    int eob;
    av1_read_coeffs_txb_facade(cm, xd, r, blk_row, blk_col, block, plane,
                               pd->dqcoeff, tx_size, &max_scan_line, &eob);
    // tx_type will be read out in av1_read_coeffs_txb_facade
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, blk_row, blk_col, block, plane_tx_size);
#else  // CONFIG_LV_MAP
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, blk_row, blk_col, block, plane_tx_size);
    const SCAN_ORDER *sc = get_scan(cm, plane_tx_size, tx_type, mbmi);
    int16_t max_scan_line = 0;
    const int eob = av1_decode_block_tokens(
        cm, xd, plane, sc, blk_col, blk_row, plane_tx_size, tx_type,
        &max_scan_line, r, mbmi->segment_id);
#endif  // CONFIG_LV_MAP
    inverse_transform_block(xd, plane,
#if CONFIG_LGT_FROM_PRED
                            mbmi->mode,
#endif
                            tx_type, plane_tx_size,
                            &pd->dst.buf[(blk_row * pd->dst.stride + blk_col)
                                         << tx_size_wide_log2[0]],
                            pd->dst.stride, max_scan_line, eob);
    *eob_total += eob;
  } else {
#if CONFIG_RECT_TX_EXT
    int is_qttx = plane_tx_size == quarter_txsize_lookup[plane_bsize];
    const TX_SIZE sub_txs = is_qttx ? plane_tx_size : sub_tx_size_map[tx_size];
    if (is_qttx) assert(blk_row == 0 && blk_col == 0 && block == 0);
#else
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    assert(IMPLIES(tx_size <= TX_4X4, sub_txs == tx_size));
    assert(IMPLIES(tx_size > TX_4X4, sub_txs < tx_size));
#endif
    const int bsl = tx_size_wide_unit[sub_txs];
    int sub_step = tx_size_wide_unit[sub_txs] * tx_size_high_unit[sub_txs];
    int i;

    assert(bsl > 0);

    for (i = 0; i < 4; ++i) {
#if CONFIG_RECT_TX_EXT
      int is_wide_tx = tx_size_wide_unit[sub_txs] > tx_size_high_unit[sub_txs];
      const int offsetr =
          is_qttx ? (is_wide_tx ? i * tx_size_high_unit[sub_txs] : 0)
                  : blk_row + ((i >> 1) * bsl);
      const int offsetc =
          is_qttx ? (is_wide_tx ? 0 : i * tx_size_wide_unit[sub_txs])
                  : blk_col + (i & 0x01) * bsl;
#else
      const int offsetr = blk_row + (i >> 1) * bsl;
      const int offsetc = blk_col + (i & 0x01) * bsl;
#endif

      if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

      decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize, offsetr,
                            offsetc, block, sub_txs, eob_total);
      block += sub_step;
    }
  }
}

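// Points xd->mi at the mode-info entries for the current block, propagates
// the first mode-info pointer across the block's mi grid, and sets up the
// plane and destination-buffer state for decoding at (mi_row, mi_col).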
static void set_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                        BLOCK_SIZE bsize, int mi_row, int mi_col, int bw,
                        int bh, int x_mis, int y_mis) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;
  const TileInfo *const tile = &xd->tile;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
  // TODO(slavarnway): Generate sb_type based on bwl and bhl, instead of
  // passing bsize from decode_partition().
  xd->mi[0]->mbmi.sb_type = bsize;
#if CONFIG_RD_DEBUG
  xd->mi[0]->mbmi.mi_row = mi_row;
  xd->mi[0]->mbmi.mi_col = mi_col;
#endif
#if CONFIG_CFL
  xd->cfl->mi_row = mi_row;
  xd->cfl->mi_col = mi_col;
#endif

  assert(x_mis && y_mis);
  for (x = 1; x < x_mis; ++x) xd->mi[x] = xd->mi[0];
  int idx = cm->mi_stride;
  for (y = 1; y < y_mis; ++y) {
    memcpy(&xd->mi[idx], &xd->mi[0], x_mis * sizeof(xd->mi[0]));
    idx += cm->mi_stride;
  }

  set_plane_n4(xd, bw, bh);
  set_skip_context(xd, mi_row, mi_col);

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);

  av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
                       mi_col);
}

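// Reads the mode information (but not the residual) for one block.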
static void decode_mbmi_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                              int mi_row, int mi_col, aom_reader *r,
#if CONFIG_EXT_PARTITION_TYPES
                              PARTITION_TYPE partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                              BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);

#if CONFIG_ACCOUNTING
  aom_accounting_set_context(&pbi->accounting, mi_col, mi_row);
#endif
  set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
#if CONFIG_EXT_PARTITION_TYPES
  xd->mi[0]->mbmi.partition = partition;
#endif
  av1_read_mode_info(pbi, xd, mi_row, mi_col, r, x_mis, y_mis);
  if (bsize >= BLOCK_8X8 && (cm->subsampling_x || cm->subsampling_y)) {
    const BLOCK_SIZE uv_subsize =
        ss_size_lookup[bsize][cm->subsampling_x][cm->subsampling_y];
    if (uv_subsize == BLOCK_INVALID)
      aom_internal_error(xd->error_info, AOM_CODEC_CORRUPT_FRAME,
                         "Invalid block size.");
  }

  int reader_corrupted_flag = aom_reader_has_error(r);
  aom_merge_corrupted_flag(&xd->corrupted, reader_corrupted_flag);
}

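// Helpers used only with NCOBMC_ADAPT_WEIGHT: they rebuild the
// neighbour-interpolated prediction for a block once its mode info and the
// modes of its neighbours are known.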
#if CONFIG_NCOBMC_ADAPT_WEIGHT
static void set_mode_info_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                  int mi_row, int mi_col) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
}

static void get_ncobmc_recon(AV1_COMMON *const cm, MACROBLOCKD *xd, int mi_row,
                             int mi_col, int bsize, int mode) {
  uint8_t *pred_buf[4][MAX_MB_PLANE];
  int pred_stride[MAX_MB_PLANE] = { MAX_SB_SIZE, MAX_SB_SIZE, MAX_SB_SIZE };
  // target block in pxl
  int pxl_row = mi_row << MI_SIZE_LOG2;
  int pxl_col = mi_col << MI_SIZE_LOG2;

  int plane;
#if CONFIG_HIGHBITDEPTH
  if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
    int len = sizeof(uint16_t);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[0], cm->ncobmcaw_buf[0], MAX_SB_SQUARE,
                            len);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[1], cm->ncobmcaw_buf[1], MAX_SB_SQUARE,
                            len);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[2], cm->ncobmcaw_buf[2], MAX_SB_SQUARE,
                            len);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[3], cm->ncobmcaw_buf[3], MAX_SB_SQUARE,
                            len);
  } else {
#endif  // CONFIG_HIGHBITDEPTH
    ASSIGN_ALIGNED_PTRS(pred_buf[0], cm->ncobmcaw_buf[0], MAX_SB_SQUARE);
    ASSIGN_ALIGNED_PTRS(pred_buf[1], cm->ncobmcaw_buf[1], MAX_SB_SQUARE);
    ASSIGN_ALIGNED_PTRS(pred_buf[2], cm->ncobmcaw_buf[2], MAX_SB_SQUARE);
    ASSIGN_ALIGNED_PTRS(pred_buf[3], cm->ncobmcaw_buf[3], MAX_SB_SQUARE);
#if CONFIG_HIGHBITDEPTH
  }
#endif
  av1_get_ext_blk_preds(cm, xd, bsize, mi_row, mi_col, pred_buf, pred_stride);
  av1_get_ori_blk_pred(cm, xd, bsize, mi_row, mi_col, pred_buf[3], pred_stride);
  for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
    build_ncobmc_intrpl_pred(cm, xd, plane, pxl_row, pxl_col, bsize, pred_buf,
                             pred_stride, mode);
  }
}

static void av1_get_ncobmc_recon(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                 int bsize, const int mi_row, const int mi_col,
                                 const NCOBMC_MODE modes) {
  const int mi_width = mi_size_wide[bsize];
  const int mi_height = mi_size_high[bsize];

  assert(bsize >= BLOCK_8X8);

  reset_xd_boundary(xd, mi_row, mi_height, mi_col, mi_width, cm->mi_rows,
                    cm->mi_cols);
  get_ncobmc_recon(cm, xd, mi_row, mi_col, bsize, modes);
}

static void recon_ncobmc_intrpl_pred(AV1_COMMON *const cm,
                                     MACROBLOCKD *const xd, int mi_row,
                                     int mi_col, BLOCK_SIZE bsize) {
  MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const int mi_width = mi_size_wide[bsize];
  const int mi_height = mi_size_high[bsize];
  const int hbs = AOMMAX(mi_size_wide[bsize] / 2, mi_size_high[bsize] / 2);
  const BLOCK_SIZE sqr_blk = bsize_2_sqr_bsize[bsize];
  if (mi_width > mi_height) {
    // horizontal partition
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
    xd->mi += hbs;
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col + hbs,
                         mbmi->ncobmc_mode[1]);
  } else if (mi_height > mi_width) {
    // vertical partition
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
    xd->mi += hbs * xd->mi_stride;
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row + hbs, mi_col,
                         mbmi->ncobmc_mode[1]);
  } else {
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
  }
  set_mode_info_offsets(cm, xd, mi_row, mi_col);
  // restore dst buffer and mode info
  av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
                       mi_col);
}
#endif  // CONFIG_NCOBMC_ADAPT_WEIGHT

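// Reads the residual tokens for one block and reconstructs it: intra blocks
// are predicted and reconstructed per transform block, while inter blocks
// first build their motion-compensated prediction and then add the decoded
// residual.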
static void decode_token_and_recon_block(AV1Decoder *const pbi,
                                         MACROBLOCKD *const xd, int mi_row,
                                         int mi_col, aom_reader *r,
                                         BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);

  set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
  MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
#if CONFIG_CFL
  CFL_CTX *const cfl = xd->cfl;
  cfl->is_chroma_reference = is_chroma_reference(
      mi_row, mi_col, bsize, cfl->subsampling_x, cfl->subsampling_y);
#endif  // CONFIG_CFL

  if (cm->delta_q_present_flag) {
    int i;
    for (i = 0; i < MAX_SEGMENTS; i++) {
#if CONFIG_EXT_DELTA_Q
      const int current_qindex =
          av1_get_qindex(&cm->seg, i, xd->current_qindex);
#else
      const int current_qindex = xd->current_qindex;
#endif  // CONFIG_EXT_DELTA_Q
      int j;
      for (j = 0; j < MAX_MB_PLANE; ++j) {
        const int dc_delta_q = j == 0 ? cm->y_dc_delta_q : cm->uv_dc_delta_q;
        const int ac_delta_q = j == 0 ? 0 : cm->uv_ac_delta_q;

        xd->plane[j].seg_dequant[i][0] =
            av1_dc_quant(current_qindex, dc_delta_q, cm->bit_depth);
        xd->plane[j].seg_dequant[i][1] =
            av1_ac_quant(current_qindex, ac_delta_q, cm->bit_depth);
      }
    }
  }
  if (mbmi->skip) av1_reset_skip_context(xd, mi_row, mi_col, bsize);

  if (!is_inter_block(mbmi)) {
    int plane;

    for (plane = 0; plane <= 1; ++plane) {
      if (mbmi->palette_mode_info.palette_size[plane])
        av1_decode_palette_tokens(xd, plane, r);
    }

    for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
      const struct macroblockd_plane *const pd = &xd->plane[plane];
      const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
      const int stepr = tx_size_high_unit[tx_size];
      const int stepc = tx_size_wide_unit[tx_size];
      const BLOCK_SIZE plane_bsize =
          AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
      int row, col;
      const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
      const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
      if (!is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
                               pd->subsampling_y))
        continue;
      int blk_row, blk_col;
      const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
      int mu_blocks_wide =
          block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
      int mu_blocks_high =
          block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
      mu_blocks_wide = AOMMIN(max_blocks_wide, mu_blocks_wide);
      mu_blocks_high = AOMMIN(max_blocks_high, mu_blocks_high);

      for (row = 0; row < max_blocks_high; row += mu_blocks_high) {
        const int unit_height = AOMMIN(mu_blocks_high + row, max_blocks_high);
        for (col = 0; col < max_blocks_wide; col += mu_blocks_wide) {
          const int unit_width = AOMMIN(mu_blocks_wide + col, max_blocks_wide);

          for (blk_row = row; blk_row < unit_height; blk_row += stepr)
            for (blk_col = col; blk_col < unit_width; blk_col += stepc)
              predict_and_reconstruct_intra_block(cm, xd, r, mbmi, plane,
                                                  blk_row, blk_col, tx_size);
        }
      }
    }
  } else {
    int ref;

#if CONFIG_COMPOUND_SINGLEREF
    for (ref = 0; ref < 1 + is_inter_anyref_comp_mode(mbmi->mode); ++ref)
#else
    for (ref = 0; ref < 1 + has_second_ref(mbmi); ++ref)
#endif  // CONFIG_COMPOUND_SINGLEREF
    {
      const MV_REFERENCE_FRAME frame =
#if CONFIG_COMPOUND_SINGLEREF
          has_second_ref(mbmi) ? mbmi->ref_frame[ref] : mbmi->ref_frame[0];
#else
          mbmi->ref_frame[ref];
#endif  // CONFIG_COMPOUND_SINGLEREF
      if (frame < LAST_FRAME) {
#if CONFIG_INTRABC
        assert(is_intrabc_block(mbmi));
        assert(frame == INTRA_FRAME);
        assert(ref == 0);
#else
        assert(0);
#endif  // CONFIG_INTRABC
      } else {
        RefBuffer *ref_buf = &cm->frame_refs[frame - LAST_FRAME];

        xd->block_refs[ref] = ref_buf;
        if ((!av1_is_valid_scale(&ref_buf->sf)))
          aom_internal_error(xd->error_info, AOM_CODEC_UNSUP_BITSTREAM,
                             "Reference frame has invalid dimensions");
        av1_setup_pre_planes(xd, ref, ref_buf->buf, mi_row, mi_col,
                             &ref_buf->sf);
      }
    }

    av1_build_inter_predictors_sb(cm, xd, mi_row, mi_col, NULL, bsize);

    if (mbmi->motion_mode == OBMC_CAUSAL) {
#if CONFIG_NCOBMC
      av1_build_ncobmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
#else
      av1_build_obmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
#endif
    }
#if CONFIG_NCOBMC_ADAPT_WEIGHT
    if (mbmi->motion_mode == NCOBMC_ADAPT_WEIGHT) {
      int plane;
      recon_ncobmc_intrpl_pred(cm, xd, mi_row, mi_col, bsize);
      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        get_pred_from_intrpl_buf(xd, mi_row, mi_col, bsize, plane);
      }
    }
#endif
    // Reconstruction
    if (!mbmi->skip) {
      int eobtotal = 0;
      int plane;

      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        const struct macroblockd_plane *const pd = &xd->plane[plane];
        const BLOCK_SIZE plane_bsize =
            AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
        const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
        const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
        int row, col;

        if (!is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
                                 pd->subsampling_y))
          continue;

        const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
        int mu_blocks_wide =
            block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
        int mu_blocks_high =
            block_size_high[max_unit_bsize] >> tx_size_high_log2[0];

        mu_blocks_wide = AOMMIN(max_blocks_wide, mu_blocks_wide);
        mu_blocks_high = AOMMIN(max_blocks_high, mu_blocks_high);

        const TX_SIZE max_tx_size = get_vartx_max_txsize(
            mbmi, plane_bsize, pd->subsampling_x || pd->subsampling_y);
        const int bh_var_tx = tx_size_high_unit[max_tx_size];
        const int bw_var_tx = tx_size_wide_unit[max_tx_size];
        int block = 0;
        int step =
            tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];

        for (row = 0; row < max_blocks_high; row += mu_blocks_high) {
          for (col = 0; col < max_blocks_wide; col += mu_blocks_wide) {
            int blk_row, blk_col;
            const int unit_height =
                AOMMIN(mu_blocks_high + row, max_blocks_high);
            const int unit_width =
                AOMMIN(mu_blocks_wide + col, max_blocks_wide);
            for (blk_row = row; blk_row < unit_height; blk_row += bh_var_tx) {
              for (blk_col = col; blk_col < unit_width; blk_col += bw_var_tx) {
                decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize,
                                      blk_row, blk_col, block, max_tx_size,
                                      &eobtotal);
                block += step;
              }
            }
          }
        }
      }
    }
  }
#if CONFIG_CFL
  if (mbmi->uv_mode != UV_CFL_PRED) {
    if (!cfl->is_chroma_reference && is_inter_block(mbmi)) {
      cfl_store_block(xd, mbmi->sb_type, mbmi->tx_size);
    }
  }
#endif  // CONFIG_CFL

  int reader_corrupted_flag = aom_reader_has_error(r);
  aom_merge_corrupted_flag(&xd->corrupted, reader_corrupted_flag);
}

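// With NC_MODE_INFO, token decoding and reconstruction happen in a second
// pass over the superblock after all mode info has been read; this walks the
// partition tree again and reconstructs each leaf block.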
#if NC_MODE_INFO
static void detoken_and_recon_sb(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                                 int mi_row, int mi_col, aom_reader *r,
                                 BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int hbs = mi_size_wide[bsize] >> 1;
#if CONFIG_EXT_PARTITION_TYPES
  BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
#endif
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  partition = get_partition(cm, mi_row, mi_col, bsize);
  subsize = subsize_lookup[partition][bsize];

  switch (partition) {
    case PARTITION_NONE:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize);
      break;
    case PARTITION_HORZ:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      if (has_rows)
        decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, subsize);
      break;
    case PARTITION_VERT:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      if (has_cols)
        decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, subsize);
      break;
    case PARTITION_SPLIT:
      detoken_and_recon_sb(pbi, xd, mi_row, mi_col, r, subsize);
      detoken_and_recon_sb(pbi, xd, mi_row, mi_col + hbs, r, subsize);
      detoken_and_recon_sb(pbi, xd, mi_row + hbs, mi_col, r, subsize);
      detoken_and_recon_sb(pbi, xd, mi_row + hbs, mi_col + hbs, r, subsize);
      break;
#if CONFIG_EXT_PARTITION_TYPES
#if CONFIG_EXT_PARTITION_TYPES_AB
#error NC_MODE_INFO+MOTION_VAR not yet supported for new HORZ/VERT_AB partitions
#endif
    case PARTITION_HORZ_A:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, subsize);
      break;
    case PARTITION_HORZ_B:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col + hbs, r,
                                   bsize2);
      break;
    case PARTITION_VERT_A:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, subsize);
      break;
    case PARTITION_VERT_B:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col + hbs, r,
                                   bsize2);
      break;
#endif
    default: assert(0 && "Invalid partition type");
  }
}
#endif

static void decode_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                         int mi_row, int mi_col, aom_reader *r,
#if CONFIG_EXT_PARTITION_TYPES
                         PARTITION_TYPE partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                         BLOCK_SIZE bsize) {
  decode_mbmi_block(pbi, xd, mi_row, mi_col, r,
#if CONFIG_EXT_PARTITION_TYPES
                    partition,
#endif
                    bsize);

#if !(NC_MODE_INFO)
  decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize);
#endif
}

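// Reads the partition type for a block. When the block extends beyond the
// frame edge only a reduced set of partitions is possible, so a binary
// symbol is read (or PARTITION_SPLIT is inferred) instead of the full symbol.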
static PARTITION_TYPE read_partition(AV1_COMMON *cm, MACROBLOCKD *xd,
                                     int mi_row, int mi_col, aom_reader *r,
                                     int has_rows, int has_cols,
                                     BLOCK_SIZE bsize) {
#if CONFIG_UNPOISON_PARTITION_CTX
  const int ctx =
      partition_plane_context(xd, mi_row, mi_col, has_rows, has_cols, bsize);
#else
  const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
#endif
  PARTITION_TYPE p;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;

  aom_cdf_prob *partition_cdf = (ctx >= 0) ? ec_ctx->partition_cdf[ctx] : NULL;

  if (has_rows && has_cols) {
#if CONFIG_EXT_PARTITION_TYPES
    const int num_partition_types =
        (mi_width_log2_lookup[bsize] > mi_width_log2_lookup[BLOCK_8X8])
            ? EXT_PARTITION_TYPES
            : PARTITION_TYPES;
#else
    const int num_partition_types = PARTITION_TYPES;
#endif  // CONFIG_EXT_PARTITION_TYPES
    p = (PARTITION_TYPE)aom_read_symbol(r, partition_cdf, num_partition_types,
                                        ACCT_STR);
  } else if (!has_rows && has_cols) {
    assert(bsize > BLOCK_8X8);
    aom_cdf_prob cdf[2];
    partition_gather_vert_alike(cdf, partition_cdf);
    assert(cdf[1] == AOM_ICDF(CDF_PROB_TOP));
    p = aom_read_cdf(r, cdf, 2, ACCT_STR) ? PARTITION_SPLIT : PARTITION_HORZ;
    // gather cols
  } else if (has_rows && !has_cols) {
    assert(bsize > BLOCK_8X8);
    aom_cdf_prob cdf[2];
    partition_gather_horz_alike(cdf, partition_cdf);
    assert(cdf[1] == AOM_ICDF(CDF_PROB_TOP));
    p = aom_read_cdf(r, cdf, 2, ACCT_STR) ? PARTITION_SPLIT : PARTITION_VERT;
  } else {
    p = PARTITION_SPLIT;
  }

  return p;
}

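// Recursively decodes one partition of a superblock: reads the partition
// type, decodes the resulting blocks or sub-partitions, and at the superblock
// root also reads the per-superblock loop-filter, CDEF and loop-restoration
// syntax elements that are present under the corresponding config flags.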
// TODO(slavarnway): eliminate bsize and subsize in future commits
static void decode_partition(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                             int mi_row, int mi_col, aom_reader *r,
                             BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int num_8x8_wh = mi_size_wide[bsize];
  const int hbs = num_8x8_wh >> 1;
#if CONFIG_EXT_PARTITION_TYPES && CONFIG_EXT_PARTITION_TYPES_AB
  const int qbs = num_8x8_wh >> 2;
#endif
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
#if CONFIG_EXT_PARTITION_TYPES
  const int quarter_step = num_8x8_wh / 4;
  int i;
#if !CONFIG_EXT_PARTITION_TYPES_AB
  BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
#endif
#endif
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  partition = (bsize < BLOCK_8X8) ? PARTITION_NONE
                                  : read_partition(cm, xd, mi_row, mi_col, r,
                                                   has_rows, has_cols, bsize);
  subsize = subsize_lookup[partition][bsize];  // get_subsize(bsize, partition);

  // Check the bitstream is conformant: if there is subsampling on the
  // chroma planes, subsize must subsample to a valid block size.
  const struct macroblockd_plane *const pd_u = &xd->plane[1];
  if (get_plane_block_size(subsize, pd_u) == BLOCK_INVALID) {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Block size %dx%d invalid with this subsampling mode",
                       block_size_wide[subsize], block_size_high[subsize]);
  }

#define DEC_BLOCK_STX_ARG
#if CONFIG_EXT_PARTITION_TYPES
#define DEC_BLOCK_EPT_ARG partition,
#else
#define DEC_BLOCK_EPT_ARG
#endif
#define DEC_BLOCK(db_r, db_c, db_subsize)                   \
  decode_block(pbi, xd, DEC_BLOCK_STX_ARG(db_r), (db_c), r, \
               DEC_BLOCK_EPT_ARG(db_subsize))
#define DEC_PARTITION(db_r, db_c, db_subsize) \
  decode_partition(pbi, xd, DEC_BLOCK_STX_ARG(db_r), (db_c), r, (db_subsize))

  switch (partition) {
    case PARTITION_NONE: DEC_BLOCK(mi_row, mi_col, subsize); break;
    case PARTITION_HORZ:
      DEC_BLOCK(mi_row, mi_col, subsize);
      if (has_rows) DEC_BLOCK(mi_row + hbs, mi_col, subsize);
      break;
    case PARTITION_VERT:
      DEC_BLOCK(mi_row, mi_col, subsize);
      if (has_cols) DEC_BLOCK(mi_row, mi_col + hbs, subsize);
      break;
    case PARTITION_SPLIT:
      DEC_PARTITION(mi_row, mi_col, subsize);
      DEC_PARTITION(mi_row, mi_col + hbs, subsize);
      DEC_PARTITION(mi_row + hbs, mi_col, subsize);
      DEC_PARTITION(mi_row + hbs, mi_col + hbs, subsize);
      break;
#if CONFIG_EXT_PARTITION_TYPES
#if CONFIG_EXT_PARTITION_TYPES_AB
    case PARTITION_HORZ_A:
      DEC_BLOCK(mi_row, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
      DEC_BLOCK(mi_row + qbs, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
      DEC_BLOCK(mi_row + hbs, mi_col, subsize);
      break;
    case PARTITION_HORZ_B:
      DEC_BLOCK(mi_row, mi_col, subsize);
      DEC_BLOCK(mi_row + hbs, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
      if (mi_row + 3 * qbs < cm->mi_rows)
        DEC_BLOCK(mi_row + 3 * qbs, mi_col,
                  get_subsize(bsize, PARTITION_HORZ_4));
      break;
    case PARTITION_VERT_A:
      DEC_BLOCK(mi_row, mi_col, get_subsize(bsize, PARTITION_VERT_4));
      DEC_BLOCK(mi_row, mi_col + qbs, get_subsize(bsize, PARTITION_VERT_4));
      DEC_BLOCK(mi_row, mi_col + hbs, subsize);
      break;
    case PARTITION_VERT_B:
      DEC_BLOCK(mi_row, mi_col, subsize);
      DEC_BLOCK(mi_row, mi_col + hbs, get_subsize(bsize, PARTITION_VERT_4));
      if (mi_col + 3 * qbs < cm->mi_cols)
        DEC_BLOCK(mi_row, mi_col + 3 * qbs,
                  get_subsize(bsize, PARTITION_VERT_4));
      break;
#else
    case PARTITION_HORZ_A:
      DEC_BLOCK(mi_row, mi_col, bsize2);
      DEC_BLOCK(mi_row, mi_col + hbs, bsize2);
      DEC_BLOCK(mi_row + hbs, mi_col, subsize);
      break;
    case PARTITION_HORZ_B:
      DEC_BLOCK(mi_row, mi_col, subsize);
      DEC_BLOCK(mi_row + hbs, mi_col, bsize2);
      DEC_BLOCK(mi_row + hbs, mi_col + hbs, bsize2);
      break;
    case PARTITION_VERT_A:
      DEC_BLOCK(mi_row, mi_col, bsize2);
      DEC_BLOCK(mi_row + hbs, mi_col, bsize2);
      DEC_BLOCK(mi_row, mi_col + hbs, subsize);
      break;
    case PARTITION_VERT_B:
      DEC_BLOCK(mi_row, mi_col, subsize);
      DEC_BLOCK(mi_row, mi_col + hbs, bsize2);
      DEC_BLOCK(mi_row + hbs, mi_col + hbs, bsize2);
      break;
#endif
    case PARTITION_HORZ_4:
      for (i = 0; i < 4; ++i) {
        int this_mi_row = mi_row + i * quarter_step;
        if (i > 0 && this_mi_row >= cm->mi_rows) break;
        DEC_BLOCK(this_mi_row, mi_col, subsize);
      }
      break;
    case PARTITION_VERT_4:
      for (i = 0; i < 4; ++i) {
        int this_mi_col = mi_col + i * quarter_step;
        if (i > 0 && this_mi_col >= cm->mi_cols) break;
        DEC_BLOCK(mi_row, this_mi_col, subsize);
      }
      break;
#endif  // CONFIG_EXT_PARTITION_TYPES
    default: assert(0 && "Invalid partition type");
  }

#undef DEC_PARTITION
#undef DEC_BLOCK
#undef DEC_BLOCK_EPT_ARG
#undef DEC_BLOCK_STX_ARG

#if CONFIG_EXT_PARTITION_TYPES
  update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
#else
  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(xd, mi_row, mi_col, subsize, bsize);
#endif  // CONFIG_EXT_PARTITION_TYPES

Cheng Chenf572cd32017-08-25 18:34:51 -07001042#if CONFIG_LPF_SB
1043 if (bsize == cm->sb_size) {
Cheng Chena4b27de2017-08-31 16:05:19 -07001044 int filt_lvl;
1045 if (mi_row == 0 && mi_col == 0) {
1046 filt_lvl = aom_read_literal(r, 6, ACCT_STR);
Cheng Chen41d37c22017-09-08 19:00:21 -07001047 cm->mi_grid_visible[0]->mbmi.reuse_sb_lvl = 0;
1048 cm->mi_grid_visible[0]->mbmi.delta = 0;
1049 cm->mi_grid_visible[0]->mbmi.sign = 0;
Cheng Chena4b27de2017-08-31 16:05:19 -07001050 } else {
1051 int prev_mi_row, prev_mi_col;
1052 if (mi_col - MAX_MIB_SIZE < 0) {
1053 prev_mi_row = mi_row - MAX_MIB_SIZE;
1054 prev_mi_col = mi_col;
1055 } else {
1056 prev_mi_row = mi_row;
1057 prev_mi_col = mi_col - MAX_MIB_SIZE;
1058 }
Cheng Chenc7855b12017-09-05 10:49:08 -07001059
Cheng Chen41d37c22017-09-08 19:00:21 -07001060 MB_MODE_INFO *curr_mbmi =
1061 &cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi;
1062 MB_MODE_INFO *prev_mbmi =
1063 &cm->mi_grid_visible[prev_mi_row * cm->mi_stride + prev_mi_col]->mbmi;
1064 const uint8_t prev_lvl = prev_mbmi->filt_lvl;
Cheng Chena4b27de2017-08-31 16:05:19 -07001065
Cheng Chen41d37c22017-09-08 19:00:21 -07001066 const int reuse_ctx = prev_mbmi->reuse_sb_lvl;
1067 const int reuse_prev_lvl = aom_read_symbol(
1068 r, xd->tile_ctx->lpf_reuse_cdf[reuse_ctx], 2, ACCT_STR);
1069 curr_mbmi->reuse_sb_lvl = reuse_prev_lvl;
1070
Cheng Chenc7855b12017-09-05 10:49:08 -07001071 if (reuse_prev_lvl) {
Cheng Chena4b27de2017-08-31 16:05:19 -07001072 filt_lvl = prev_lvl;
Cheng Chen41d37c22017-09-08 19:00:21 -07001073 curr_mbmi->delta = 0;
1074 curr_mbmi->sign = 0;
Cheng Chenc7855b12017-09-05 10:49:08 -07001075 } else {
Cheng Chen41d37c22017-09-08 19:00:21 -07001076 const int delta_ctx = prev_mbmi->delta;
1077 unsigned int delta = aom_read_symbol(
1078 r, xd->tile_ctx->lpf_delta_cdf[delta_ctx], DELTA_RANGE, ACCT_STR);
1079 curr_mbmi->delta = delta;
Cheng Chenf89ca3e2017-09-07 14:47:47 -07001080 delta *= LPF_STEP;
Cheng Chenc7855b12017-09-05 10:49:08 -07001081
1082 if (delta) {
Cheng Chen41d37c22017-09-08 19:00:21 -07001083 const int sign_ctx = prev_mbmi->sign;
1084 const int sign = aom_read_symbol(
1085 r, xd->tile_ctx->lpf_sign_cdf[reuse_ctx][sign_ctx], 2, ACCT_STR);
1086 curr_mbmi->sign = sign;
Cheng Chenc7855b12017-09-05 10:49:08 -07001087 filt_lvl = sign ? prev_lvl + delta : prev_lvl - delta;
1088 } else {
1089 filt_lvl = prev_lvl;
Cheng Chen41d37c22017-09-08 19:00:21 -07001090 curr_mbmi->sign = 0;
Cheng Chenc7855b12017-09-05 10:49:08 -07001091 }
Cheng Chena4b27de2017-08-31 16:05:19 -07001092 }
1093 }
Cheng Chen5589d712017-09-05 12:03:25 -07001094
1095 av1_loop_filter_sb_level_init(cm, mi_row, mi_col, filt_lvl);
Cheng Chenf572cd32017-08-25 18:34:51 -07001096 }
1097#endif
1098
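// CDEF strength signaling: once per superblock, a cdef_strength index is read
// for each 64x64 unit that is not entirely skipped (and when the frame is not
// all lossless); all-skip units are marked with -1 instead.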
Jean-Marc Valin01435132017-02-18 14:12:53 -05001099#if CONFIG_CDEF
Jingning Handf068332017-05-09 09:03:17 -07001100 if (bsize == cm->sb_size) {
Cheng Chenf5bdeac2017-07-24 14:31:30 -07001101 int width_step = mi_size_wide[BLOCK_64X64];
1102    int height_step = mi_size_high[BLOCK_64X64];
1103 int w, h;
1104 for (h = 0; (h < mi_size_high[cm->sb_size]) && (mi_row + h < cm->mi_rows);
1105 h += height_step) {
1106 for (w = 0; (w < mi_size_wide[cm->sb_size]) && (mi_col + w < cm->mi_cols);
1107 w += width_step) {
1108 if (!cm->all_lossless && !sb_all_skip(cm, mi_row + h, mi_col + w))
1109 cm->mi_grid_visible[(mi_row + h) * cm->mi_stride + (mi_col + w)]
1110 ->mbmi.cdef_strength =
1111 aom_read_literal(r, cm->cdef_bits, ACCT_STR);
1112 else
1113 cm->mi_grid_visible[(mi_row + h) * cm->mi_stride + (mi_col + w)]
1114 ->mbmi.cdef_strength = -1;
1115 }
Yaowu Xud71be782016-10-14 08:47:03 -07001116 }
1117 }
Jean-Marc Valin01435132017-02-18 14:12:53 -05001118#endif // CONFIG_CDEF
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001119#if CONFIG_LOOP_RESTORATION
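  // Loop restoration: for each plane, read coefficients for every restoration
  // unit whose top-left corner falls inside this superblock.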
1120 for (int plane = 0; plane < MAX_MB_PLANE; ++plane) {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001121 int rcol0, rcol1, rrow0, rrow1, tile_tl_idx;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001122 if (av1_loop_restoration_corners_in_sb(cm, plane, mi_row, mi_col, bsize,
1123 &rcol0, &rcol1, &rrow0, &rrow1,
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001124 &tile_tl_idx)) {
1125 const int rstride = cm->rst_info[plane].horz_units_per_tile;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001126 for (int rrow = rrow0; rrow < rrow1; ++rrow) {
1127 for (int rcol = rcol0; rcol < rcol1; ++rcol) {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001128 const int rtile_idx = tile_tl_idx + rcol + rrow * rstride;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001129 loop_restoration_read_sb_coeffs(cm, xd, r, plane, rtile_idx);
1130 }
1131 }
1132 }
1133 }
1134#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001135}
1136
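// Initializes a symbol reader over one tile's data. The partition length is
// validated against the end of the buffer before aom_reader_init() is called.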
Yaowu Xuc27fc142016-08-22 16:08:15 -07001137static void setup_bool_decoder(const uint8_t *data, const uint8_t *data_end,
1138 const size_t read_size,
Yaowu Xuf883b422016-08-30 14:01:10 -07001139 struct aom_internal_error_info *error_info,
Alex Converseeb780e72016-12-13 12:46:41 -08001140 aom_reader *r,
1141#if CONFIG_ANS && ANS_MAX_SYMBOLS
1142 int window_size,
1143#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
1144 aom_decrypt_cb decrypt_cb, void *decrypt_state) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001145 // Validate the calculated partition length. If the buffer
1146 // described by the partition can't be fully read, then restrict
1147 // it to the portion that can be (for EC mode) or throw an error.
1148 if (!read_is_valid(data, read_size, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07001149 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001150 "Truncated packet or corrupt tile length");
1151
Alex Converse2cdf0d82016-12-13 13:53:09 -08001152#if CONFIG_ANS && ANS_MAX_SYMBOLS
Alex Converseeb780e72016-12-13 12:46:41 -08001153 r->window_size = window_size;
Alex Converse2cdf0d82016-12-13 13:53:09 -08001154#endif
Alex Converse346440b2017-01-03 13:47:37 -08001155 if (aom_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
Yaowu Xuf883b422016-08-30 14:01:10 -07001156 aom_internal_error(error_info, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001157 "Failed to allocate bool decoder %d", 1);
1158}
Yaowu Xuc27fc142016-08-22 16:08:15 -07001159
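// Reads the segmentation syntax from the uncompressed header: the enabled
// flag, whether the segmentation map is updated (and, if so, temporally), and
// optional per-segment feature data.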
Yaowu Xuf883b422016-08-30 14:01:10 -07001160static void setup_segmentation(AV1_COMMON *const cm,
1161 struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001162 struct segmentation *const seg = &cm->seg;
1163 int i, j;
1164
1165 seg->update_map = 0;
1166 seg->update_data = 0;
Ryandd8df162017-09-27 15:40:13 -07001167 seg->temporal_update = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001168
Yaowu Xuf883b422016-08-30 14:01:10 -07001169 seg->enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001170 if (!seg->enabled) return;
1171
1172 // Segmentation map update
1173 if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
1174 seg->update_map = 1;
1175 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001176 seg->update_map = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001177 }
1178 if (seg->update_map) {
1179 if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
1180 seg->temporal_update = 0;
1181 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001182 seg->temporal_update = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001183 }
1184 }
1185
1186 // Segmentation data update
Yaowu Xuf883b422016-08-30 14:01:10 -07001187 seg->update_data = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001188 if (seg->update_data) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001189 seg->abs_delta = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001190
Yaowu Xuf883b422016-08-30 14:01:10 -07001191 av1_clearall_segfeatures(seg);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001192
1193 for (i = 0; i < MAX_SEGMENTS; i++) {
1194 for (j = 0; j < SEG_LVL_MAX; j++) {
1195 int data = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07001196 const int feature_enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001197 if (feature_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001198 av1_enable_segfeature(seg, i, j);
1199 data = decode_unsigned_max(rb, av1_seg_feature_data_max(j));
1200 if (av1_is_segfeature_signed(j))
1201 data = aom_rb_read_bit(rb) ? -data : data;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001202 }
Yaowu Xuf883b422016-08-30 14:01:10 -07001203 av1_set_segdata(seg, i, j, data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001204 }
1205 }
1206 }
1207}
1208
1209#if CONFIG_LOOP_RESTORATION
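// Reads the per-plane frame restoration type (NONE, WIENER, SGRPROJ or
// SWITCHABLE) and, when any plane uses restoration, the luma/chroma
// restoration unit sizes; processing unit sizes are derived from the
// chroma subsampling.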
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07001210static void decode_restoration_mode(AV1_COMMON *cm,
1211 struct aom_read_bit_buffer *rb) {
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001212 int p;
Debargha Mukherjeea3d4fe52017-05-19 16:22:54 -07001213 RestorationInfo *rsi;
1214 for (p = 0; p < MAX_MB_PLANE; ++p) {
Debargha Mukherjeed23ceea2017-05-18 20:33:52 -07001215 rsi = &cm->rst_info[p];
1216 if (aom_rb_read_bit(rb)) {
1217 rsi->frame_restoration_type =
1218 aom_rb_read_bit(rb) ? RESTORE_SGRPROJ : RESTORE_WIENER;
1219 } else {
Debargha Mukherjeea3d4fe52017-05-19 16:22:54 -07001220 rsi->frame_restoration_type =
1221 aom_rb_read_bit(rb) ? RESTORE_SWITCHABLE : RESTORE_NONE;
Debargha Mukherjeed23ceea2017-05-18 20:33:52 -07001222 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001223 }
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001224 cm->rst_info[0].restoration_unit_size = RESTORATION_TILESIZE_MAX;
1225 cm->rst_info[1].restoration_unit_size = RESTORATION_TILESIZE_MAX;
1226 cm->rst_info[2].restoration_unit_size = RESTORATION_TILESIZE_MAX;
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08001227 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
1228 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
1229 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001230 cm->rst_info[0].restoration_unit_size = RESTORATION_TILESIZE_MAX >> 2;
1231 cm->rst_info[1].restoration_unit_size = RESTORATION_TILESIZE_MAX >> 2;
1232 cm->rst_info[2].restoration_unit_size = RESTORATION_TILESIZE_MAX >> 2;
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08001233 rsi = &cm->rst_info[0];
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001234 rsi->restoration_unit_size <<= aom_rb_read_bit(rb);
1235 if (rsi->restoration_unit_size != (RESTORATION_TILESIZE_MAX >> 2)) {
1236 rsi->restoration_unit_size <<= aom_rb_read_bit(rb);
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08001237 }
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08001238 }
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07001239 int s = AOMMIN(cm->subsampling_x, cm->subsampling_y);
1240 if (s && (cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
1241 cm->rst_info[2].frame_restoration_type != RESTORE_NONE)) {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001242 cm->rst_info[1].restoration_unit_size =
1243 cm->rst_info[0].restoration_unit_size >> (aom_rb_read_bit(rb) * s);
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07001244 } else {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001245 cm->rst_info[1].restoration_unit_size =
1246 cm->rst_info[0].restoration_unit_size;
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07001247 }
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01001248 cm->rst_info[2].restoration_unit_size = cm->rst_info[1].restoration_unit_size;
Debargha Mukherjee7a5587a2017-08-31 07:41:30 -07001249
1250 cm->rst_info[0].procunit_width = cm->rst_info[0].procunit_height =
1251 RESTORATION_PROC_UNIT_SIZE;
1252 cm->rst_info[1].procunit_width = cm->rst_info[2].procunit_width =
1253 RESTORATION_PROC_UNIT_SIZE >> cm->subsampling_x;
1254 cm->rst_info[1].procunit_height = cm->rst_info[2].procunit_height =
1255 RESTORATION_PROC_UNIT_SIZE >> cm->subsampling_y;
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07001256}
1257
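// Reads one Wiener filter. Each signalled tap is coded with
// aom_read_primitive_refsubexpfin() relative to the corresponding tap of the
// previously decoded filter; the central tap is not signalled but derived as
// -2 * (sum of the outer taps), with an implicit +WIENER_FILT_STEP. For
// chroma (wiener_win != WIENER_WIN) the outermost taps are forced to zero.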
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07001258static void read_wiener_filter(int wiener_win, WienerInfo *wiener_info,
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001259 WienerInfo *ref_wiener_info, aom_reader *rb) {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001260 memset(wiener_info->vfilter, 0, sizeof(wiener_info->vfilter));
1261 memset(wiener_info->hfilter, 0, sizeof(wiener_info->hfilter));
1262
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07001263 if (wiener_win == WIENER_WIN)
1264 wiener_info->vfilter[0] = wiener_info->vfilter[WIENER_WIN - 1] =
1265 aom_read_primitive_refsubexpfin(
1266 rb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
1267 WIENER_FILT_TAP0_SUBEXP_K,
1268 ref_wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV, ACCT_STR) +
1269 WIENER_FILT_TAP0_MINV;
1270 else
1271 wiener_info->vfilter[0] = wiener_info->vfilter[WIENER_WIN - 1] = 0;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001272 wiener_info->vfilter[1] = wiener_info->vfilter[WIENER_WIN - 2] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001273 aom_read_primitive_refsubexpfin(
1274 rb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
1275 WIENER_FILT_TAP1_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07001276 ref_wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001277 WIENER_FILT_TAP1_MINV;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001278 wiener_info->vfilter[2] = wiener_info->vfilter[WIENER_WIN - 3] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001279 aom_read_primitive_refsubexpfin(
1280 rb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
1281 WIENER_FILT_TAP2_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07001282 ref_wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001283 WIENER_FILT_TAP2_MINV;
David Barker1e8e6b92017-01-13 13:45:51 +00001284 // The central element has an implicit +WIENER_FILT_STEP
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001285 wiener_info->vfilter[WIENER_HALFWIN] =
David Barker1e8e6b92017-01-13 13:45:51 +00001286 -2 * (wiener_info->vfilter[0] + wiener_info->vfilter[1] +
1287 wiener_info->vfilter[2]);
1288
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07001289 if (wiener_win == WIENER_WIN)
1290 wiener_info->hfilter[0] = wiener_info->hfilter[WIENER_WIN - 1] =
1291 aom_read_primitive_refsubexpfin(
1292 rb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
1293 WIENER_FILT_TAP0_SUBEXP_K,
1294 ref_wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV, ACCT_STR) +
1295 WIENER_FILT_TAP0_MINV;
1296 else
1297 wiener_info->hfilter[0] = wiener_info->hfilter[WIENER_WIN - 1] = 0;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001298 wiener_info->hfilter[1] = wiener_info->hfilter[WIENER_WIN - 2] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001299 aom_read_primitive_refsubexpfin(
1300 rb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
1301 WIENER_FILT_TAP1_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07001302 ref_wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001303 WIENER_FILT_TAP1_MINV;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001304 wiener_info->hfilter[2] = wiener_info->hfilter[WIENER_WIN - 3] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001305 aom_read_primitive_refsubexpfin(
1306 rb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
1307 WIENER_FILT_TAP2_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07001308 ref_wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV, ACCT_STR) +
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001309 WIENER_FILT_TAP2_MINV;
David Barker1e8e6b92017-01-13 13:45:51 +00001310 // The central element has an implicit +WIENER_FILT_STEP
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001311 wiener_info->hfilter[WIENER_HALFWIN] =
David Barker1e8e6b92017-01-13 13:45:51 +00001312 -2 * (wiener_info->hfilter[0] + wiener_info->hfilter[1] +
1313 wiener_info->hfilter[2]);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001314 memcpy(ref_wiener_info, wiener_info, sizeof(*wiener_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001315}
1316
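// Reads one self-guided projection filter: a SGRPROJ_PARAMS_BITS literal for
// the parameter set index (ep) and the two projection coefficients xqd[0] and
// xqd[1], each coded relative to the previously decoded filter.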
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001317static void read_sgrproj_filter(SgrprojInfo *sgrproj_info,
1318 SgrprojInfo *ref_sgrproj_info, aom_reader *rb) {
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001319 sgrproj_info->ep = aom_read_literal(rb, SGRPROJ_PARAMS_BITS, ACCT_STR);
1320 sgrproj_info->xqd[0] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001321 aom_read_primitive_refsubexpfin(
1322 rb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1, SGRPROJ_PRJ_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07001323 ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0, ACCT_STR) +
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001324 SGRPROJ_PRJ_MIN0;
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001325 sgrproj_info->xqd[1] =
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001326 aom_read_primitive_refsubexpfin(
1327 rb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1, SGRPROJ_PRJ_SUBEXP_K,
Debargha Mukherjeee23d5c32017-04-13 15:33:58 -07001328 ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1, ACCT_STR) +
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07001329 SGRPROJ_PRJ_MIN1;
1330 memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001331}
1332
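// Reads the restoration type and filter coefficients for a single restoration
// unit. With RESTORE_SWITCHABLE the type is coded as a symbol; with a
// frame-level WIENER or SGRPROJ type a single flag selects between that type
// and RESTORE_NONE.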
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001333static void loop_restoration_read_sb_coeffs(const AV1_COMMON *const cm,
1334 MACROBLOCKD *xd,
1335 aom_reader *const r, int plane,
1336 int rtile_idx) {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001337 const RestorationInfo *rsi = &cm->rst_info[plane];
1338 RestorationUnitInfo *rui = &rsi->unit_info[rtile_idx];
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001339 if (rsi->frame_restoration_type == RESTORE_NONE) return;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01001340
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001341 const int wiener_win = (plane > 0) ? WIENER_WIN_CHROMA : WIENER_WIN;
1342 WienerInfo *wiener_info = xd->wiener_info + plane;
1343 SgrprojInfo *sgrproj_info = xd->sgrproj_info + plane;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01001344
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001345 if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001346 rui->restoration_type =
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07001347 aom_read_symbol(r, xd->tile_ctx->switchable_restore_cdf,
1348 RESTORE_SWITCHABLE_TYPES, ACCT_STR);
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001349 switch (rui->restoration_type) {
1350 case RESTORE_WIENER:
1351 read_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, r);
1352 break;
1353 case RESTORE_SGRPROJ:
1354 read_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, r);
1355 break;
1356 default: assert(rui->restoration_type == RESTORE_NONE); break;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01001357 }
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001358 } else if (rsi->frame_restoration_type == RESTORE_WIENER) {
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07001359#if CONFIG_NEW_MULTISYMBOL
1360 if (aom_read_symbol(r, xd->tile_ctx->wiener_restore_cdf, 2, ACCT_STR)) {
1361#else
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001362 if (aom_read(r, RESTORE_NONE_WIENER_PROB, ACCT_STR)) {
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07001363#endif // CONFIG_NEW_MULTISYMBOL
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001364 rui->restoration_type = RESTORE_WIENER;
1365 read_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, r);
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001366 } else {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001367 rui->restoration_type = RESTORE_NONE;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001368 }
1369 } else if (rsi->frame_restoration_type == RESTORE_SGRPROJ) {
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07001370#if CONFIG_NEW_MULTISYMBOL
1371 if (aom_read_symbol(r, xd->tile_ctx->sgrproj_restore_cdf, 2, ACCT_STR)) {
1372#else
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001373 if (aom_read(r, RESTORE_NONE_SGRPROJ_PROB, ACCT_STR)) {
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07001374#endif // CONFIG_NEW_MULTISYMBOL
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001375 rui->restoration_type = RESTORE_SGRPROJ;
1376 read_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, r);
Rupert Swarbrick6c545212017-09-01 17:17:25 +01001377 } else {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01001378 rui->restoration_type = RESTORE_NONE;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001379 }
1380 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001381}
1382#endif // CONFIG_LOOP_RESTORATION
1383
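// Reads the frame-level loop filter parameters: the filter level(s) (omitted
// with CONFIG_LPF_SB, which signals levels per superblock), the sharpness
// level, and the optional mode/ref delta updates.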
Yaowu Xuf883b422016-08-30 14:01:10 -07001384static void setup_loopfilter(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001385 struct loopfilter *lf = &cm->lf;
Cheng Chenf572cd32017-08-25 18:34:51 -07001386#if !CONFIG_LPF_SB
Cheng Chen13fc8192017-08-19 11:49:28 -07001387#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07001388 lf->filter_level[0] = aom_rb_read_literal(rb, 6);
1389 lf->filter_level[1] = aom_rb_read_literal(rb, 6);
1390 if (lf->filter_level[0] || lf->filter_level[1]) {
Cheng Chene94df5c2017-07-19 17:25:33 -07001391 lf->filter_level_u = aom_rb_read_literal(rb, 6);
1392 lf->filter_level_v = aom_rb_read_literal(rb, 6);
1393 }
Cheng Chen179479f2017-08-04 10:56:39 -07001394#else
1395 lf->filter_level = aom_rb_read_literal(rb, 6);
Cheng Chene94df5c2017-07-19 17:25:33 -07001396#endif
Cheng Chenf572cd32017-08-25 18:34:51 -07001397#endif // CONFIG_LPF_SB
Yaowu Xuf883b422016-08-30 14:01:10 -07001398 lf->sharpness_level = aom_rb_read_literal(rb, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001399
1400 // Read in loop filter deltas applied at the MB level based on mode or ref
1401 // frame.
1402 lf->mode_ref_delta_update = 0;
1403
Yaowu Xuf883b422016-08-30 14:01:10 -07001404 lf->mode_ref_delta_enabled = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001405 if (lf->mode_ref_delta_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001406 lf->mode_ref_delta_update = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001407 if (lf->mode_ref_delta_update) {
1408 int i;
1409
1410 for (i = 0; i < TOTAL_REFS_PER_FRAME; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07001411 if (aom_rb_read_bit(rb))
1412 lf->ref_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001413
1414 for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07001415 if (aom_rb_read_bit(rb))
1416 lf->mode_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001417 }
1418 }
1419}
1420
Jean-Marc Valin01435132017-02-18 14:12:53 -05001421#if CONFIG_CDEF
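// Reads the frame-level CDEF parameters: the damping value(s), the number of
// strength presets (1 << cdef_bits), and a luma/chroma strength pair for each
// preset.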
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01001422static void setup_cdef(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04001423 int i;
Steinar Midtskogen59782122017-07-20 08:49:43 +02001424#if CONFIG_CDEF_SINGLEPASS
1425 cm->cdef_pri_damping = cm->cdef_sec_damping = aom_rb_read_literal(rb, 2) + 3;
1426#else
Steinar Midtskogen94de0aa2017-08-02 10:30:12 +02001427 cm->cdef_pri_damping = aom_rb_read_literal(rb, 1) + 5;
1428 cm->cdef_sec_damping = aom_rb_read_literal(rb, 2) + 3;
Steinar Midtskogen59782122017-07-20 08:49:43 +02001429#endif
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04001430 cm->cdef_bits = aom_rb_read_literal(rb, 2);
1431 cm->nb_cdef_strengths = 1 << cm->cdef_bits;
1432 for (i = 0; i < cm->nb_cdef_strengths; i++) {
1433 cm->cdef_strengths[i] = aom_rb_read_literal(rb, CDEF_STRENGTH_BITS);
Steinar Midtskogen1c1161f2017-09-08 15:03:51 +02001434 cm->cdef_uv_strengths[i] = cm->subsampling_x == cm->subsampling_y
1435 ? aom_rb_read_literal(rb, CDEF_STRENGTH_BITS)
1436 : 0;
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04001437 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001438}
Jean-Marc Valin01435132017-02-18 14:12:53 -05001439#endif // CONFIG_CDEF
Yaowu Xuc27fc142016-08-22 16:08:15 -07001440
Yaowu Xuf883b422016-08-30 14:01:10 -07001441static INLINE int read_delta_q(struct aom_read_bit_buffer *rb) {
1442 return aom_rb_read_bit(rb) ? aom_rb_read_inv_signed_literal(rb, 6) : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001443}
1444
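// Reads the base quantizer index and the DC/AC delta-Q values (and, with
// CONFIG_AOM_QM, the quantization-matrix level range) from the header.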
Yaowu Xuf883b422016-08-30 14:01:10 -07001445static void setup_quantization(AV1_COMMON *const cm,
1446 struct aom_read_bit_buffer *rb) {
1447 cm->base_qindex = aom_rb_read_literal(rb, QINDEX_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001448 cm->y_dc_delta_q = read_delta_q(rb);
1449 cm->uv_dc_delta_q = read_delta_q(rb);
1450 cm->uv_ac_delta_q = read_delta_q(rb);
1451 cm->dequant_bit_depth = cm->bit_depth;
1452#if CONFIG_AOM_QM
Yaowu Xuf883b422016-08-30 14:01:10 -07001453 cm->using_qmatrix = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001454 if (cm->using_qmatrix) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001455 cm->min_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
1456 cm->max_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001457 } else {
1458 cm->min_qmlevel = 0;
1459 cm->max_qmlevel = 0;
1460 }
1461#endif
1462}
1463
Alex Converse05a3e7d2017-05-16 12:20:07 -07001464// Build y/uv dequant values based on segmentation.
Yaowu Xuf883b422016-08-30 14:01:10 -07001465static void setup_segmentation_dequant(AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001466#if CONFIG_AOM_QM
Alex Converse05a3e7d2017-05-16 12:20:07 -07001467 const int using_qm = cm->using_qmatrix;
1468 const int minqm = cm->min_qmlevel;
1469 const int maxqm = cm->max_qmlevel;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001470#endif
Alex Converse05a3e7d2017-05-16 12:20:07 -07001471 // When segmentation is disabled, only the first value is used. The
1472 // remaining are don't cares.
1473 const int max_segments = cm->seg.enabled ? MAX_SEGMENTS : 1;
1474 for (int i = 0; i < max_segments; ++i) {
1475 const int qindex = av1_get_qindex(&cm->seg, i, cm->base_qindex);
1476 cm->y_dequant[i][0] = av1_dc_quant(qindex, cm->y_dc_delta_q, cm->bit_depth);
1477 cm->y_dequant[i][1] = av1_ac_quant(qindex, 0, cm->bit_depth);
1478 cm->uv_dequant[i][0] =
Yaowu Xuf883b422016-08-30 14:01:10 -07001479 av1_dc_quant(qindex, cm->uv_dc_delta_q, cm->bit_depth);
Alex Converse05a3e7d2017-05-16 12:20:07 -07001480 cm->uv_dequant[i][1] =
Yaowu Xuf883b422016-08-30 14:01:10 -07001481 av1_ac_quant(qindex, cm->uv_ac_delta_q, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001482#if CONFIG_AOM_QM
Alex Converse05a3e7d2017-05-16 12:20:07 -07001483 const int lossless = qindex == 0 && cm->y_dc_delta_q == 0 &&
1484 cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
1485 // NB: depends on base index so there is only 1 set per frame
Yaowu Xuc27fc142016-08-22 16:08:15 -07001486 // No quant weighting when lossless or signalled not using QM
Alex Converse05a3e7d2017-05-16 12:20:07 -07001487 const int qmlevel = (lossless || using_qm == 0)
1488 ? NUM_QM_LEVELS - 1
1489 : aom_get_qmlevel(cm->base_qindex, minqm, maxqm);
Thomas Davies6675adf2017-05-04 17:39:21 +01001490 for (int j = 0; j < TX_SIZES_ALL; ++j) {
Thomas Daviesdd3cf832017-10-20 15:49:57 +01001491 cm->y_iqmatrix[i][j] = aom_iqmatrix(cm, qmlevel, 0, j);
1492 cm->uv_iqmatrix[i][j] = aom_iqmatrix(cm, qmlevel, 1, j);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001493 }
Alex Converse05a3e7d2017-05-16 12:20:07 -07001494#endif // CONFIG_AOM_QM
Yaowu Xuc27fc142016-08-22 16:08:15 -07001495#if CONFIG_NEW_QUANT
Alex Converse05a3e7d2017-05-16 12:20:07 -07001496 for (int dq = 0; dq < QUANT_PROFILES; dq++) {
1497 for (int b = 0; b < COEF_BANDS; ++b) {
1498 av1_get_dequant_val_nuq(cm->y_dequant[i][b != 0], b,
1499 cm->y_dequant_nuq[i][dq][b], NULL, dq);
1500 av1_get_dequant_val_nuq(cm->uv_dequant[i][b != 0], b,
1501 cm->uv_dequant_nuq[i][dq][b], NULL, dq);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001502 }
1503 }
1504#endif // CONFIG_NEW_QUANT
1505 }
1506}
1507
Angie Chiang5678ad92016-11-21 09:38:40 -08001508static InterpFilter read_frame_interp_filter(struct aom_read_bit_buffer *rb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001509 return aom_rb_read_bit(rb) ? SWITCHABLE
Angie Chiang6305abe2016-10-24 12:24:44 -07001510 : aom_rb_read_literal(rb, LOG_SWITCHABLE_FILTERS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001511}
1512
Yaowu Xuf883b422016-08-30 14:01:10 -07001513static void setup_render_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001514#if CONFIG_FRAME_SUPERRES
1515 cm->render_width = cm->superres_upscaled_width;
1516 cm->render_height = cm->superres_upscaled_height;
1517#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001518 cm->render_width = cm->width;
1519 cm->render_height = cm->height;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001520#endif // CONFIG_FRAME_SUPERRES
Yaowu Xuf883b422016-08-30 14:01:10 -07001521 if (aom_rb_read_bit(rb))
1522 av1_read_frame_size(rb, &cm->render_width, &cm->render_height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001523}
1524
Fergus Simpsond91c8c92017-04-07 12:12:00 -07001525#if CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07001526// TODO(afergs): make "struct aom_read_bit_buffer *const rb"?
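// Reads the optional superres scale denominator and shrinks *width/*height to
// the coded (downscaled) size via av1_calculate_scaled_superres_size(); the
// display dimensions are kept in cm->superres_upscaled_width/height.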
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001527static void setup_superres(AV1_COMMON *const cm, struct aom_read_bit_buffer *rb,
1528 int *width, int *height) {
1529 cm->superres_upscaled_width = *width;
1530 cm->superres_upscaled_height = *height;
Fergus Simpsone7508412017-03-14 18:14:09 -07001531 if (aom_rb_read_bit(rb)) {
Urvang Joshide71d142017-10-05 12:12:15 -07001532 cm->superres_scale_denominator =
Fergus Simpsone7508412017-03-14 18:14:09 -07001533 (uint8_t)aom_rb_read_literal(rb, SUPERRES_SCALE_BITS);
Urvang Joshide71d142017-10-05 12:12:15 -07001534 cm->superres_scale_denominator += SUPERRES_SCALE_DENOMINATOR_MIN;
Fergus Simpson7b2d1442017-05-22 17:18:33 -07001535 // Don't edit cm->width or cm->height directly, or the buffers won't get
1536 // resized correctly
Urvang Joshi69fde2e2017-10-09 15:34:18 -07001537 av1_calculate_scaled_superres_size(width, height,
1538 cm->superres_scale_denominator);
Fergus Simpsone7508412017-03-14 18:14:09 -07001539 } else {
1540 // 1:1 scaling - ie. no scaling, scale not provided
Urvang Joshide71d142017-10-05 12:12:15 -07001541 cm->superres_scale_denominator = SCALE_NUMERATOR;
Fergus Simpsone7508412017-03-14 18:14:09 -07001542 }
1543}
Fergus Simpsond91c8c92017-04-07 12:12:00 -07001544#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07001545
Yaowu Xuf883b422016-08-30 14:01:10 -07001546static void resize_context_buffers(AV1_COMMON *cm, int width, int height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001547#if CONFIG_SIZE_LIMIT
1548 if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
Yaowu Xuf883b422016-08-30 14:01:10 -07001549 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001550 "Dimensions of %dx%d beyond allowed size of %dx%d.",
1551 width, height, DECODE_WIDTH_LIMIT, DECODE_HEIGHT_LIMIT);
1552#endif
1553 if (cm->width != width || cm->height != height) {
1554 const int new_mi_rows =
1555 ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
1556 const int new_mi_cols =
1557 ALIGN_POWER_OF_TWO(width, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
1558
Yaowu Xuf883b422016-08-30 14:01:10 -07001559 // Allocations in av1_alloc_context_buffers() depend on individual
Yaowu Xuc27fc142016-08-22 16:08:15 -07001560 // dimensions as well as the overall size.
1561 if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001562 if (av1_alloc_context_buffers(cm, width, height))
1563 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001564 "Failed to allocate context buffers");
1565 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001566 av1_set_mb_mi(cm, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001567 }
Yaowu Xuf883b422016-08-30 14:01:10 -07001568 av1_init_context_buffers(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001569 cm->width = width;
1570 cm->height = height;
1571 }
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01001572
1573 ensure_mv_buffer(cm->cur_frame, cm);
1574 cm->cur_frame->width = cm->width;
1575 cm->cur_frame->height = cm->height;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001576}
1577
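// Reads the frame size (plus optional superres and render size), resizes the
// context buffers and (re)allocates the frame buffer with the signalled
// subsampling, bit depth and color space.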
Yaowu Xuf883b422016-08-30 14:01:10 -07001578static void setup_frame_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001579 int width, height;
1580 BufferPool *const pool = cm->buffer_pool;
Yaowu Xuf883b422016-08-30 14:01:10 -07001581 av1_read_frame_size(rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07001582#if CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001583 setup_superres(cm, rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07001584#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001585 setup_render_size(cm, rb);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07001586 resize_context_buffers(cm, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001587
1588 lock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07001589 if (aom_realloc_frame_buffer(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001590 get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
1591 cm->subsampling_y,
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02001592#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07001593 cm->use_highbitdepth,
1594#endif
Yaowu Xu671f2bd2016-09-30 15:07:57 -07001595 AOM_BORDER_IN_PIXELS, cm->byte_alignment,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001596 &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
1597 pool->cb_priv)) {
1598 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07001599 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001600 "Failed to allocate frame buffer");
1601 }
1602 unlock_buffer_pool(pool);
1603
1604 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
1605 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
1606 pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
1607 pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07001608#if CONFIG_COLORSPACE_HEADERS
1609 pool->frame_bufs[cm->new_fb_idx].buf.transfer_function =
1610 cm->transfer_function;
1611 pool->frame_bufs[cm->new_fb_idx].buf.chroma_sample_position =
1612 cm->chroma_sample_position;
1613#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001614 pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
1615 pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
1616 pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
1617}
1618
Debargha Mukherjeed2630fa2017-09-22 10:32:51 -07001619static void setup_sb_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
1620 (void)rb;
1621#if CONFIG_EXT_PARTITION
1622 set_sb_size(cm, aom_rb_read_bit(rb) ? BLOCK_128X128 : BLOCK_64X64);
1623#else
1624 set_sb_size(cm, BLOCK_64X64);
1625#endif // CONFIG_EXT_PARTITION
1626}
1627
Yaowu Xuf883b422016-08-30 14:01:10 -07001628static INLINE int valid_ref_frame_img_fmt(aom_bit_depth_t ref_bit_depth,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001629 int ref_xss, int ref_yss,
Yaowu Xuf883b422016-08-30 14:01:10 -07001630 aom_bit_depth_t this_bit_depth,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001631 int this_xss, int this_yss) {
1632 return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
1633 ref_yss == this_yss;
1634}
1635
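// As setup_frame_size(), except the size may instead be inherited from one of
// the reference frames; the size is checked for compatibility with the
// reference frames' dimensions and color format before the frame buffer is
// reallocated.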
Yaowu Xuf883b422016-08-30 14:01:10 -07001636static void setup_frame_size_with_refs(AV1_COMMON *cm,
1637 struct aom_read_bit_buffer *rb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001638 int width, height;
1639 int found = 0, i;
1640 int has_valid_ref_frame = 0;
1641 BufferPool *const pool = cm->buffer_pool;
1642 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001643 if (aom_rb_read_bit(rb)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001644 YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf;
1645 width = buf->y_crop_width;
1646 height = buf->y_crop_height;
1647 cm->render_width = buf->render_width;
1648 cm->render_height = buf->render_height;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001649#if CONFIG_FRAME_SUPERRES
1650 setup_superres(cm, rb, &width, &height);
1651#endif // CONFIG_FRAME_SUPERRES
Yaowu Xuc27fc142016-08-22 16:08:15 -07001652 found = 1;
1653 break;
1654 }
1655 }
1656
1657 if (!found) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001658 av1_read_frame_size(rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07001659#if CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001660 setup_superres(cm, rb, &width, &height);
Fergus Simpson7b2d1442017-05-22 17:18:33 -07001661#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07001662 setup_render_size(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001663 }
1664
1665 if (width <= 0 || height <= 0)
Yaowu Xuf883b422016-08-30 14:01:10 -07001666 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001667 "Invalid frame size");
1668
1669 // Check to make sure at least one of frames that this frame references
1670 // has valid dimensions.
1671 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
1672 RefBuffer *const ref_frame = &cm->frame_refs[i];
1673 has_valid_ref_frame |=
1674 valid_ref_frame_size(ref_frame->buf->y_crop_width,
1675 ref_frame->buf->y_crop_height, width, height);
1676 }
1677 if (!has_valid_ref_frame)
Yaowu Xuf883b422016-08-30 14:01:10 -07001678 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001679 "Referenced frame has invalid size");
1680 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
1681 RefBuffer *const ref_frame = &cm->frame_refs[i];
1682 if (!valid_ref_frame_img_fmt(ref_frame->buf->bit_depth,
1683 ref_frame->buf->subsampling_x,
1684 ref_frame->buf->subsampling_y, cm->bit_depth,
1685 cm->subsampling_x, cm->subsampling_y))
Yaowu Xuf883b422016-08-30 14:01:10 -07001686 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001687 "Referenced frame has incompatible color format");
1688 }
1689
1690 resize_context_buffers(cm, width, height);
1691
1692 lock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07001693 if (aom_realloc_frame_buffer(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001694 get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
1695 cm->subsampling_y,
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02001696#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -07001697 cm->use_highbitdepth,
1698#endif
Yaowu Xu671f2bd2016-09-30 15:07:57 -07001699 AOM_BORDER_IN_PIXELS, cm->byte_alignment,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001700 &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
1701 pool->cb_priv)) {
1702 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07001703 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001704 "Failed to allocate frame buffer");
1705 }
1706 unlock_buffer_pool(pool);
1707
1708 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
1709 pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
1710 pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
1711 pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07001712#if CONFIG_COLORSPACE_HEADERS
1713 pool->frame_bufs[cm->new_fb_idx].buf.transfer_function =
1714 cm->transfer_function;
1715 pool->frame_bufs[cm->new_fb_idx].buf.chroma_sample_position =
1716 cm->chroma_sample_position;
1717#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001718 pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
1719 pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
1720 pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
1721}
1722
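// Reads the first tile index and tile count of the current tile group and
// checks that the range does not extend past the last tile in the frame.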
David Barker1a191122017-09-06 15:24:16 +01001723static void read_tile_group_range(AV1Decoder *pbi,
1724 struct aom_read_bit_buffer *const rb) {
1725 AV1_COMMON *const cm = &pbi->common;
1726 const int num_bits = cm->log2_tile_rows + cm->log2_tile_cols;
1727 const int num_tiles =
1728 cm->tile_rows * cm->tile_cols; // Note: May be < (1<<num_bits)
1729 pbi->tg_start = aom_rb_read_literal(rb, num_bits);
1730 pbi->tg_size = 1 + aom_rb_read_literal(rb, num_bits);
1731 if (pbi->tg_start + pbi->tg_size > num_tiles)
1732 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
1733 "Tile group extends past last tile in frame");
1734}
1735
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001736#if CONFIG_MAX_TILE
1737
1738// Same as av1_read_uniform, but reading from the uncompressed header's
// bit buffer (struct aom_read_bit_buffer).
1739static int rb_read_uniform(struct aom_read_bit_buffer *const rb, int n) {
1740 const int l = get_unsigned_bits(n);
1741 const int m = (1 << l) - n;
1742 const int v = aom_rb_read_literal(rb, l - 1);
1743 assert(l != 0);
1744 if (v < m)
1745 return v;
1746 else
1747 return (v << 1) - m + aom_rb_read_literal(rb, 1);
1748}
1749
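// Reads the CONFIG_MAX_TILE tile layout: either uniform spacing (log2 tile
// counts) or explicit per-tile column widths and row heights in superblock
// units.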
1750static void read_tile_info_max_tile(AV1_COMMON *const cm,
1751 struct aom_read_bit_buffer *const rb) {
1752 int width_mi = ALIGN_POWER_OF_TWO(cm->mi_cols, MAX_MIB_SIZE_LOG2);
1753 int height_mi = ALIGN_POWER_OF_TWO(cm->mi_rows, MAX_MIB_SIZE_LOG2);
1754 int width_sb = width_mi >> MAX_MIB_SIZE_LOG2;
1755 int height_sb = height_mi >> MAX_MIB_SIZE_LOG2;
1756 int start_sb, size_sb, i;
1757
1758 av1_get_tile_limits(cm);
1759 cm->uniform_tile_spacing_flag = aom_rb_read_bit(rb);
1760
1761 // Read tile columns
1762 if (cm->uniform_tile_spacing_flag) {
1763 cm->log2_tile_cols = cm->min_log2_tile_cols;
1764 while (cm->log2_tile_cols < cm->max_log2_tile_cols) {
1765 if (!aom_rb_read_bit(rb)) {
1766 break;
1767 }
1768 cm->log2_tile_cols++;
1769 }
1770 } else {
Dominic Symesf58f1112017-09-25 12:47:40 +02001771 for (i = 0, start_sb = 0; width_sb > 0 && i < MAX_TILE_COLS; i++) {
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001772 size_sb = 1 + rb_read_uniform(rb, AOMMIN(width_sb, MAX_TILE_WIDTH_SB));
1773 cm->tile_col_start_sb[i] = start_sb;
1774 start_sb += size_sb;
1775 width_sb -= size_sb;
1776 }
1777 cm->tile_cols = i;
Dominic Symesf58f1112017-09-25 12:47:40 +02001778 cm->tile_col_start_sb[i] = start_sb + width_sb;
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001779 }
1780 av1_calculate_tile_cols(cm);
1781
1782 // Read tile rows
1783 if (cm->uniform_tile_spacing_flag) {
1784 cm->log2_tile_rows = cm->min_log2_tile_rows;
1785 while (cm->log2_tile_rows < cm->max_log2_tile_rows) {
1786 if (!aom_rb_read_bit(rb)) {
1787 break;
1788 }
1789 cm->log2_tile_rows++;
1790 }
1791 } else {
Dominic Symesf58f1112017-09-25 12:47:40 +02001792 for (i = 0, start_sb = 0; height_sb > 0 && i < MAX_TILE_ROWS; i++) {
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001793 size_sb =
1794 1 + rb_read_uniform(rb, AOMMIN(height_sb, cm->max_tile_height_sb));
1795 cm->tile_row_start_sb[i] = start_sb;
1796 start_sb += size_sb;
1797 height_sb -= size_sb;
1798 }
1799 cm->tile_rows = i;
Dominic Symesf58f1112017-09-25 12:47:40 +02001800 cm->tile_row_start_sb[i] = start_sb + height_sb;
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001801 }
1802 av1_calculate_tile_rows(cm);
1803}
1804#endif
1805
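// Reads the tile partitioning for the frame. Large-scale (EXT_TILE) streams
// signal an explicit tile width/height in superblocks; otherwise the layout
// comes from read_tile_info_max_tile() or the log2 tile counts, along with
// the dependent-horizontal-tile, loop-filter-across-tiles and tile size
// byte-count fields.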
Yaowu Xuf883b422016-08-30 14:01:10 -07001806static void read_tile_info(AV1Decoder *const pbi,
1807 struct aom_read_bit_buffer *const rb) {
1808 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001809#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001810 cm->single_tile_decoding = 0;
1811 if (cm->large_scale_tile) {
1812 struct loopfilter *lf = &cm->lf;
1813
Rupert Swarbrick566155f2017-10-27 11:59:17 +01001814// Derive single_tile_decoding from the loop filter level: it is enabled only
// when loop filtering is disabled.
1815#if CONFIG_LOOPFILTER_LEVEL
1816 const int no_loopfilter = !(lf->filter_level[0] || lf->filter_level[1]);
1817#else
1818 const int no_loopfilter = !lf->filter_level;
1819#endif
1820 cm->single_tile_decoding = no_loopfilter ? 1 : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001821// Read the tile width/height
1822#if CONFIG_EXT_PARTITION
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001823 if (cm->sb_size == BLOCK_128X128) {
1824 cm->tile_width = aom_rb_read_literal(rb, 5) + 1;
1825 cm->tile_height = aom_rb_read_literal(rb, 5) + 1;
1826 } else {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001827#endif // CONFIG_EXT_PARTITION
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001828 cm->tile_width = aom_rb_read_literal(rb, 6) + 1;
1829 cm->tile_height = aom_rb_read_literal(rb, 6) + 1;
1830#if CONFIG_EXT_PARTITION
1831 }
1832#endif // CONFIG_EXT_PARTITION
Yaowu Xuc27fc142016-08-22 16:08:15 -07001833
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001834 cm->tile_width <<= cm->mib_size_log2;
1835 cm->tile_height <<= cm->mib_size_log2;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001836
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001837 cm->tile_width = AOMMIN(cm->tile_width, cm->mi_cols);
1838 cm->tile_height = AOMMIN(cm->tile_height, cm->mi_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001839
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001840 // Get the number of tiles
1841 cm->tile_cols = 1;
1842 while (cm->tile_cols * cm->tile_width < cm->mi_cols) ++cm->tile_cols;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001843
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001844 cm->tile_rows = 1;
1845 while (cm->tile_rows * cm->tile_height < cm->mi_rows) ++cm->tile_rows;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001846
Yunqing Wang42015d12017-10-17 15:43:49 -07001847#if CONFIG_DEPENDENT_HORZTILES
1848 cm->dependent_horz_tiles = 0;
1849#endif
1850#if CONFIG_LOOPFILTERING_ACROSS_TILES
1851 if (cm->tile_cols * cm->tile_rows > 1)
1852 cm->loop_filter_across_tiles_enabled = aom_rb_read_bit(rb);
1853 else
1854 cm->loop_filter_across_tiles_enabled = 1;
1855#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
1856
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001857 if (cm->tile_cols * cm->tile_rows > 1) {
1858 // Read the number of bytes used to store tile size
1859 pbi->tile_col_size_bytes = aom_rb_read_literal(rb, 2) + 1;
1860 pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
1861 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001862 } else {
1863#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001864
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001865#if CONFIG_MAX_TILE
1866 read_tile_info_max_tile(cm, rb);
1867#else
1868 int min_log2_tile_cols, max_log2_tile_cols, max_ones;
1869 av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001870
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001871 // columns
1872 max_ones = max_log2_tile_cols - min_log2_tile_cols;
1873 cm->log2_tile_cols = min_log2_tile_cols;
1874 while (max_ones-- && aom_rb_read_bit(rb)) cm->log2_tile_cols++;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001875
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001876 if (cm->log2_tile_cols > 6)
1877 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
1878 "Invalid number of tile columns");
1879
1880 // rows
1881 cm->log2_tile_rows = aom_rb_read_bit(rb);
1882 if (cm->log2_tile_rows) cm->log2_tile_rows += aom_rb_read_bit(rb);
1883
Rupert Swarbrick5a010aa2017-09-26 16:16:48 +01001884 cm->tile_width =
1885 get_tile_size(cm->mi_cols, cm->log2_tile_cols, &cm->tile_cols);
1886 cm->tile_height =
1887 get_tile_size(cm->mi_rows, cm->log2_tile_rows, &cm->tile_rows);
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001888
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001889#endif // CONFIG_MAX_TILE
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08001890#if CONFIG_DEPENDENT_HORZTILES
Dominic Symesdb5d66f2017-08-18 18:11:34 +02001891 if (cm->tile_rows > 1)
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001892 cm->dependent_horz_tiles = aom_rb_read_bit(rb);
1893 else
1894 cm->dependent_horz_tiles = 0;
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08001895#endif
Ryan Lei9b02b0e2017-01-30 15:52:20 -08001896#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yunqing Wang42015d12017-10-17 15:43:49 -07001897 if (cm->tile_cols * cm->tile_rows > 1)
1898 cm->loop_filter_across_tiles_enabled = aom_rb_read_bit(rb);
1899 else
1900 cm->loop_filter_across_tiles_enabled = 1;
Ryan Lei9b02b0e2017-01-30 15:52:20 -08001901#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Ryan Lei7386eda2016-12-08 21:08:31 -08001902
Thomas Daviesb25ba502017-07-18 10:18:24 +01001903 // tile size magnitude
1904 pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001905#if CONFIG_EXT_TILE
1906 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001907#endif // CONFIG_EXT_TILE
Thomas Davies4974e522016-11-07 17:44:05 +00001908
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04001909// each tile group header is in its own tile group OBU
1910#if !CONFIG_OBU
Thomas Davies80188d12016-10-26 16:08:35 -07001911 // Store an index to the location of the tile group information
1912 pbi->tg_size_bit_offset = rb->bit_offset;
David Barker1a191122017-09-06 15:24:16 +01001913 read_tile_group_range(pbi, rb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04001914#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001915}
1916
Yaowu Xu4ff59b52017-04-24 12:41:56 -07001917static int mem_get_varsize(const uint8_t *src, int sz) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001918 switch (sz) {
1919 case 1: return src[0];
1920 case 2: return mem_get_le16(src);
1921 case 3: return mem_get_le24(src);
1922 case 4: return mem_get_le32(src);
James Zern88896732017-06-23 15:55:09 -07001923 default: assert(0 && "Invalid size"); return -1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001924 }
1925}
1926
1927#if CONFIG_EXT_TILE
1928// Reads the next tile, returning its size and advancing '*data'. When
1929// 'tile_copy_mode' is set, the tile may instead reference an earlier tile
// in the same column rather than carrying its own data.
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001930static void get_ls_tile_buffer(
1931 const uint8_t *const data_end, struct aom_internal_error_info *error_info,
1932 const uint8_t **data, aom_decrypt_cb decrypt_cb, void *decrypt_state,
1933 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS], int tile_size_bytes,
1934 int col, int row, int tile_copy_mode) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001935 size_t size;
1936
1937 size_t copy_size = 0;
1938 const uint8_t *copy_data = NULL;
1939
1940 if (!read_is_valid(*data, tile_size_bytes, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07001941 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001942 "Truncated packet or corrupt tile length");
1943 if (decrypt_cb) {
1944 uint8_t be_data[4];
1945 decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
1946
1947 // Only read number of bytes in cm->tile_size_bytes.
1948 size = mem_get_varsize(be_data, tile_size_bytes);
1949 } else {
1950 size = mem_get_varsize(*data, tile_size_bytes);
1951 }
1952
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001953 // If tile_copy_mode = 1, then the top bit of the tile header indicates copy
1954 // mode.
1955 if (tile_copy_mode && (size >> (tile_size_bytes * 8 - 1)) == 1) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001956 // The remaining bits in the top byte signal the row offset
1957 int offset = (size >> (tile_size_bytes - 1) * 8) & 0x7f;
1958
1959 // Currently, only use tiles in same column as reference tiles.
1960 copy_data = tile_buffers[row - offset][col].data;
1961 copy_size = tile_buffers[row - offset][col].size;
1962 size = 0;
1963 }
1964
1965 *data += tile_size_bytes;
1966
1967 if (size > (size_t)(data_end - *data))
Yaowu Xuf883b422016-08-30 14:01:10 -07001968 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001969 "Truncated packet or corrupt tile size");
1970
1971 if (size > 0) {
1972 tile_buffers[row][col].data = *data;
1973 tile_buffers[row][col].size = size;
1974 } else {
1975 tile_buffers[row][col].data = copy_data;
1976 tile_buffers[row][col].size = copy_size;
1977 }
1978
1979 *data += size;
1980
1981 tile_buffers[row][col].raw_data_end = *data;
1982}
1983
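// Locates the tile buffers for a large-scale tile frame. Only the tiles
// selected by pbi->dec_tile_row/dec_tile_col are located, plus the last
// column, which is always needed to find the end of the compressed frame
// data.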
Yunqing Wangeeb08a92017-07-07 21:25:18 -07001984static void get_ls_tile_buffers(
Yaowu Xuf883b422016-08-30 14:01:10 -07001985 AV1Decoder *pbi, const uint8_t *data, const uint8_t *data_end,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001986 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS]) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001987 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001988 const int tile_cols = cm->tile_cols;
1989 const int tile_rows = cm->tile_rows;
1990 const int have_tiles = tile_cols * tile_rows > 1;
1991
1992 if (!have_tiles) {
Jingning Han99ffce62017-04-25 15:48:41 -07001993 const size_t tile_size = data_end - data;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001994 tile_buffers[0][0].data = data;
1995 tile_buffers[0][0].size = tile_size;
1996 tile_buffers[0][0].raw_data_end = NULL;
1997 } else {
1998 // We locate only the tile buffers that are required, which are the ones
1999 // specified by pbi->dec_tile_col and pbi->dec_tile_row. Also, we always
2000 // need the last (bottom right) tile buffer, as we need to know where the
2001 // end of the compressed frame buffer is for proper superframe decoding.
2002
2003 const uint8_t *tile_col_data_end[MAX_TILE_COLS];
2004 const uint8_t *const data_start = data;
2005
Yaowu Xuf883b422016-08-30 14:01:10 -07002006 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002007 const int single_row = pbi->dec_tile_row >= 0;
2008 const int tile_rows_start = single_row ? dec_tile_row : 0;
2009 const int tile_rows_end = single_row ? tile_rows_start + 1 : tile_rows;
Yaowu Xuf883b422016-08-30 14:01:10 -07002010 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002011 const int single_col = pbi->dec_tile_col >= 0;
2012 const int tile_cols_start = single_col ? dec_tile_col : 0;
2013 const int tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
2014
2015 const int tile_col_size_bytes = pbi->tile_col_size_bytes;
2016 const int tile_size_bytes = pbi->tile_size_bytes;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002017 const int tile_copy_mode =
2018 ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256) ? 1
2019 : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002020 size_t tile_col_size;
2021 int r, c;
2022
2023 // Read tile column sizes for all columns (we need the last tile buffer)
2024 for (c = 0; c < tile_cols; ++c) {
2025 const int is_last = c == tile_cols - 1;
2026 if (!is_last) {
2027 tile_col_size = mem_get_varsize(data, tile_col_size_bytes);
2028 data += tile_col_size_bytes;
2029 tile_col_data_end[c] = data + tile_col_size;
2030 } else {
2031 tile_col_size = data_end - data;
2032 tile_col_data_end[c] = data_end;
2033 }
2034 data += tile_col_size;
2035 }
2036
2037 data = data_start;
2038
2039 // Read the required tile sizes.
2040 for (c = tile_cols_start; c < tile_cols_end; ++c) {
2041 const int is_last = c == tile_cols - 1;
2042
2043 if (c > 0) data = tile_col_data_end[c - 1];
2044
2045 if (!is_last) data += tile_col_size_bytes;
2046
2047 // Get the whole of the last column, otherwise stop at the required tile.
2048 for (r = 0; r < (is_last ? tile_rows : tile_rows_end); ++r) {
2049 tile_buffers[r][c].col = c;
2050
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002051 get_ls_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
2052 pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
2053 tile_size_bytes, c, r, tile_copy_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002054 }
2055 }
2056
2057 // If we have not read the last column, then read it to get the last tile.
2058 if (tile_cols_end != tile_cols) {
2059 c = tile_cols - 1;
2060
2061 data = tile_col_data_end[c - 1];
2062
2063 for (r = 0; r < tile_rows; ++r) {
2064 tile_buffers[r][c].col = c;
2065
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002066 get_ls_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
2067 pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
2068 tile_size_bytes, c, r, tile_copy_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002069 }
2070 }
2071 }
2072}
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002073#endif // CONFIG_EXT_TILE
2074
Yaowu Xuc27fc142016-08-22 16:08:15 -07002075// Reads the next tile returning its size and adjusting '*data' accordingly
2076// based on 'is_last'.
2077static void get_tile_buffer(const uint8_t *const data_end,
2078 const int tile_size_bytes, int is_last,
Yaowu Xuf883b422016-08-30 14:01:10 -07002079 struct aom_internal_error_info *error_info,
2080 const uint8_t **data, aom_decrypt_cb decrypt_cb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002081 void *decrypt_state, TileBufferDec *const buf) {
2082 size_t size;
2083
2084 if (!is_last) {
Yaowu Xu0a79a1b2017-02-17 13:04:54 -08002085 if (!read_is_valid(*data, tile_size_bytes, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07002086 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002087 "Truncated packet or corrupt tile length");
2088
2089 if (decrypt_cb) {
2090 uint8_t be_data[4];
2091 decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
2092 size = mem_get_varsize(be_data, tile_size_bytes);
2093 } else {
2094 size = mem_get_varsize(*data, tile_size_bytes);
2095 }
2096 *data += tile_size_bytes;
2097
2098 if (size > (size_t)(data_end - *data))
Yaowu Xuf883b422016-08-30 14:01:10 -07002099 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002100 "Truncated packet or corrupt tile size");
2101 } else {
2102 size = data_end - *data;
2103 }
2104
2105 buf->data = *data;
2106 buf->size = size;
2107
2108 *data += size;
2109}
2110
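// Locates the tile buffers for all tiles between startTile and endTile,
// reading each tile group header encountered along the way and, with
// CONFIG_SIMPLE_BWD_ADAPT, recording the index of the largest tile in
// cm->largest_tile_id.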
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002111static void get_tile_buffers(AV1Decoder *pbi, const uint8_t *data,
2112 const uint8_t *data_end,
2113 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS],
2114 int startTile, int endTile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002115 AV1_COMMON *const cm = &pbi->common;
Thomas Davies80188d12016-10-26 16:08:35 -07002116 int r, c;
2117 const int tile_cols = cm->tile_cols;
2118 const int tile_rows = cm->tile_rows;
2119 int tc = 0;
2120 int first_tile_in_tg = 0;
Thomas Davies80188d12016-10-26 16:08:35 -07002121 struct aom_read_bit_buffer rb_tg_hdr;
2122 uint8_t clear_data[MAX_AV1_HEADER_SIZE];
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002123#if !CONFIG_OBU
James Zern6efba482017-04-20 20:53:49 -07002124 const size_t hdr_size = pbi->uncomp_hdr_size + pbi->first_partition_size;
Thomas Davies80188d12016-10-26 16:08:35 -07002125 const int tg_size_bit_offset = pbi->tg_size_bit_offset;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002126#else
2127 const int tg_size_bit_offset = 0;
2128#endif
2129
Fangwen Fu73126c02017-02-08 22:37:47 -08002130#if CONFIG_DEPENDENT_HORZTILES
2131 int tile_group_start_col = 0;
2132 int tile_group_start_row = 0;
2133#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002134
Thomas Davies4822e142017-10-10 11:30:36 +01002135#if CONFIG_SIMPLE_BWD_ADAPT
2136 size_t max_tile_size = 0;
2137 cm->largest_tile_id = 0;
2138#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002139 for (r = 0; r < tile_rows; ++r) {
2140 for (c = 0; c < tile_cols; ++c, ++tc) {
Thomas Davies80188d12016-10-26 16:08:35 -07002141 TileBufferDec *const buf = &tile_buffers[r][c];
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002142#if CONFIG_OBU
2143 const int is_last = (tc == endTile);
2144 const size_t hdr_offset = 0;
2145#else
Thomas Daviesa0de6d52017-01-20 14:45:25 +00002146 const int is_last = (r == tile_rows - 1) && (c == tile_cols - 1);
James Zern6efba482017-04-20 20:53:49 -07002147 const size_t hdr_offset = (tc && tc == first_tile_in_tg) ? hdr_size : 0;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002148#endif
2149
2150 if (tc < startTile || tc > endTile) continue;
Thomas Davies80188d12016-10-26 16:08:35 -07002151
Rupert Swarbrickcd757392017-09-01 13:57:53 +01002152 if (data + hdr_offset >= data_end)
2153 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2154 "Data ended before all tiles were read.");
Thomas Davies80188d12016-10-26 16:08:35 -07002155 buf->col = c;
2156 if (hdr_offset) {
2157 init_read_bit_buffer(pbi, &rb_tg_hdr, data, data_end, clear_data);
2158 rb_tg_hdr.bit_offset = tg_size_bit_offset;
David Barker1a191122017-09-06 15:24:16 +01002159 read_tile_group_range(pbi, &rb_tg_hdr);
Fangwen Fu73126c02017-02-08 22:37:47 -08002160#if CONFIG_DEPENDENT_HORZTILES
David Barker1a191122017-09-06 15:24:16 +01002161 tile_group_start_row = r;
2162 tile_group_start_col = c;
Fangwen Fu73126c02017-02-08 22:37:47 -08002163#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002164 }
2165 first_tile_in_tg += tc == first_tile_in_tg ? pbi->tg_size : 0;
2166 data += hdr_offset;
Thomas Daviesa0de6d52017-01-20 14:45:25 +00002167 get_tile_buffer(data_end, pbi->tile_size_bytes, is_last,
2168 &pbi->common.error, &data, pbi->decrypt_cb,
2169 pbi->decrypt_state, buf);
Fangwen Fu73126c02017-02-08 22:37:47 -08002170#if CONFIG_DEPENDENT_HORZTILES
2171 cm->tile_group_start_row[r][c] = tile_group_start_row;
2172 cm->tile_group_start_col[r][c] = tile_group_start_col;
2173#endif
Thomas Davies4822e142017-10-10 11:30:36 +01002174#if CONFIG_SIMPLE_BWD_ADAPT
2175 if (buf->size > max_tile_size) {
2176 max_tile_size = buf->size;
2177 cm->largest_tile_id = r * tile_cols + c;
2178 }
2179#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002180 }
2181 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002182}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002183
David Barker5c06a642017-08-18 13:18:16 +01002184#if CONFIG_LOOPFILTERING_ACROSS_TILES
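// Decoder-side wrapper: when loop filtering across tile boundaries is
// disabled, record this tile's boundary information so the loop filter can
// avoid crossing it. Degenerate (empty) tiles are skipped.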
Yi Luo10e23002017-07-31 11:54:43 -07002185static void dec_setup_across_tile_boundary_info(
2186 const AV1_COMMON *const cm, const TileInfo *const tile_info) {
Frederic Barbier94e38562017-08-16 14:38:48 +02002187 if (tile_info->mi_row_start >= tile_info->mi_row_end ||
2188 tile_info->mi_col_start >= tile_info->mi_col_end)
2189 return;
2190
David Barker5c06a642017-08-18 13:18:16 +01002191 if (!cm->loop_filter_across_tiles_enabled) {
Yi Luo10e23002017-07-31 11:54:43 -07002192 av1_setup_across_tile_boundary_info(cm, tile_info);
2193 }
2194}
David Barker5c06a642017-08-18 13:18:16 +01002195#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07002196
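// Top-level tile decoding: gathers the tile buffers for the requested tile
// range, sets up a bool decoder and per-tile context for each tile, decodes
// every superblock, optionally applies in-loop filtering, and returns a
// pointer just past the tile data that was consumed.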
Yaowu Xuf883b422016-08-30 14:01:10 -07002197static const uint8_t *decode_tiles(AV1Decoder *pbi, const uint8_t *data,
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002198 const uint8_t *data_end, int startTile,
2199 int endTile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002200 AV1_COMMON *const cm = &pbi->common;
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002201#if !CONFIG_LOOPFILTER_LEVEL
Yaowu Xuf883b422016-08-30 14:01:10 -07002202 const AVxWorkerInterface *const winterface = aom_get_worker_interface();
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002203#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002204 const int tile_cols = cm->tile_cols;
2205 const int tile_rows = cm->tile_rows;
2206 const int n_tiles = tile_cols * tile_rows;
clang-format67948d32016-09-07 22:40:40 -07002207 TileBufferDec(*const tile_buffers)[MAX_TILE_COLS] = pbi->tile_buffers;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002208#if CONFIG_EXT_TILE
Yaowu Xuf883b422016-08-30 14:01:10 -07002209 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002210 const int single_row = pbi->dec_tile_row >= 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002211 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002212 const int single_col = pbi->dec_tile_col >= 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002213#endif // CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002214 int tile_rows_start;
2215 int tile_rows_end;
2216 int tile_cols_start;
2217 int tile_cols_end;
2218 int inv_col_order;
2219 int inv_row_order;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002220 int tile_row, tile_col;
2221
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002222#if CONFIG_EXT_TILE
2223 if (cm->large_scale_tile) {
2224 tile_rows_start = single_row ? dec_tile_row : 0;
2225 tile_rows_end = single_row ? dec_tile_row + 1 : tile_rows;
2226 tile_cols_start = single_col ? dec_tile_col : 0;
2227 tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
2228 inv_col_order = pbi->inv_tile_order && !single_col;
2229 inv_row_order = pbi->inv_tile_order && !single_row;
2230 } else {
2231#endif // CONFIG_EXT_TILE
2232 tile_rows_start = 0;
2233 tile_rows_end = tile_rows;
2234 tile_cols_start = 0;
2235 tile_cols_end = tile_cols;
2236 inv_col_order = pbi->inv_tile_order;
2237 inv_row_order = pbi->inv_tile_order;
2238#if CONFIG_EXT_TILE
2239 }
2240#endif // CONFIG_EXT_TILE
2241
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002242#if !CONFIG_LOOPFILTER_LEVEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07002243 if (cm->lf.filter_level && !cm->skip_loop_filter &&
2244 pbi->lf_worker.data1 == NULL) {
2245 CHECK_MEM_ERROR(cm, pbi->lf_worker.data1,
Yaowu Xuf883b422016-08-30 14:01:10 -07002246 aom_memalign(32, sizeof(LFWorkerData)));
2247 pbi->lf_worker.hook = (AVxWorkerHook)av1_loop_filter_worker;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002248 if (pbi->max_threads > 1 && !winterface->reset(&pbi->lf_worker)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002249 aom_internal_error(&cm->error, AOM_CODEC_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002250 "Loop filter thread creation failed");
2251 }
2252 }
2253
2254 if (cm->lf.filter_level && !cm->skip_loop_filter) {
2255 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
2256 // Be sure to sync as we might be resuming after a failed frame decode.
2257 winterface->sync(&pbi->lf_worker);
Yaowu Xuf883b422016-08-30 14:01:10 -07002258 av1_loop_filter_data_reset(lf_data, get_frame_new_buffer(cm), cm,
2259 pbi->mb.plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002260 }
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002261#endif // CONFIG_LOOPFILTER_LEVEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07002262
2263 assert(tile_rows <= MAX_TILE_ROWS);
2264 assert(tile_cols <= MAX_TILE_COLS);
2265
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002266#if CONFIG_EXT_TILE
2267 if (cm->large_scale_tile)
2268 get_ls_tile_buffers(pbi, data, data_end, tile_buffers);
2269 else
2270#endif // CONFIG_EXT_TILE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002271 get_tile_buffers(pbi, data, data_end, tile_buffers, startTile, endTile);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002272
2273 if (pbi->tile_data == NULL || n_tiles != pbi->allocated_tiles) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002274 aom_free(pbi->tile_data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002275 CHECK_MEM_ERROR(cm, pbi->tile_data,
Yaowu Xuf883b422016-08-30 14:01:10 -07002276 aom_memalign(32, n_tiles * (sizeof(*pbi->tile_data))));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002277 pbi->allocated_tiles = n_tiles;
2278 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002279#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002280 if (pbi->acct_enabled) {
2281 aom_accounting_reset(&pbi->accounting);
2282 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002283#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002284 // Load all tile information into tile_data.
2285 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
2286 for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
2287 const TileBufferDec *const buf = &tile_buffers[tile_row][tile_col];
2288 TileData *const td = pbi->tile_data + tile_cols * tile_row + tile_col;
2289
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002290 if (tile_row * cm->tile_cols + tile_col < startTile ||
2291 tile_row * cm->tile_cols + tile_col > endTile)
2292 continue;
2293
Yaowu Xuc27fc142016-08-22 16:08:15 -07002294 td->cm = cm;
2295 td->xd = pbi->mb;
2296 td->xd.corrupted = 0;
2297 td->xd.counts =
2298 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD
2299 ? &cm->counts
2300 : NULL;
Yaowu Xuf883b422016-08-30 14:01:10 -07002301 av1_zero(td->dqcoeff);
2302 av1_tile_init(&td->xd.tile, td->cm, tile_row, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002303 setup_bool_decoder(buf->data, data_end, buf->size, &cm->error,
Alex Converseeb780e72016-12-13 12:46:41 -08002304 &td->bit_reader,
2305#if CONFIG_ANS && ANS_MAX_SYMBOLS
2306 1 << cm->ans_window_size_log2,
2307#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
2308 pbi->decrypt_cb, pbi->decrypt_state);
Michael Bebenita6048d052016-08-25 14:40:54 -07002309#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002310 if (pbi->acct_enabled) {
David Barkerd971f402016-10-25 13:52:07 +01002311 td->bit_reader.accounting = &pbi->accounting;
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002312 } else {
David Barkerd971f402016-10-25 13:52:07 +01002313 td->bit_reader.accounting = NULL;
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002314 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002315#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07002316 av1_init_macroblockd(cm, &td->xd,
Luc Trudeauf8164152017-04-11 16:20:51 -04002317#if CONFIG_CFL
2318 &td->cfl,
2319#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07002320 td->dqcoeff);
Yushin Choc49ef3a2017-03-13 17:27:25 -07002321
Thomas Daviesf77d4ad2017-01-10 18:55:42 +00002322 // Initialise the tile context from the frame context
2323 td->tctx = *cm->fc;
2324 td->xd.tile_ctx = &td->tctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002325 td->xd.plane[0].color_index_map = td->color_index_map[0];
2326 td->xd.plane[1].color_index_map = td->color_index_map[1];
Sarah Parker5c6744b2017-08-25 17:27:45 -07002327#if CONFIG_MRC_TX
2328 td->xd.mrc_mask = td->mrc_mask;
2329#endif // CONFIG_MRC_TX
Yaowu Xuc27fc142016-08-22 16:08:15 -07002330 }
2331 }
2332
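  // Decode the requested tiles. Tile order may be inverted (inv_row_order /
  // inv_col_order); the above context is cleared at each tile column and the
  // left context at each superblock row before decoding.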
2333 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
2334 const int row = inv_row_order ? tile_rows - 1 - tile_row : tile_row;
2335 int mi_row = 0;
2336 TileInfo tile_info;
2337
Yaowu Xuf883b422016-08-30 14:01:10 -07002338 av1_tile_set_row(&tile_info, cm, row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002339
2340 for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
2341 const int col = inv_col_order ? tile_cols - 1 - tile_col : tile_col;
2342 TileData *const td = pbi->tile_data + tile_cols * row + col;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002343
2344 if (tile_row * cm->tile_cols + tile_col < startTile ||
2345 tile_row * cm->tile_cols + tile_col > endTile)
2346 continue;
2347
Michael Bebenita6048d052016-08-25 14:40:54 -07002348#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002349 if (pbi->acct_enabled) {
David Barkerd971f402016-10-25 13:52:07 +01002350 td->bit_reader.accounting->last_tell_frac =
2351 aom_reader_tell_frac(&td->bit_reader);
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002352 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002353#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002354
Yaowu Xuf883b422016-08-30 14:01:10 -07002355 av1_tile_set_col(&tile_info, cm, col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002356
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002357#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu73126c02017-02-08 22:37:47 -08002358 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
2359 if (!cm->dependent_horz_tiles || tile_row == 0 ||
2360 tile_info.tg_horz_boundary) {
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002361 av1_zero_above_context(cm, tile_info.mi_col_start,
2362 tile_info.mi_col_end);
2363 }
2364#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002365 av1_zero_above_context(cm, tile_info.mi_col_start, tile_info.mi_col_end);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002366#endif
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002367#if CONFIG_LOOP_RESTORATION
2368 for (int p = 0; p < MAX_MB_PLANE; ++p) {
2369 set_default_wiener(td->xd.wiener_info + p);
2370 set_default_sgrproj(td->xd.sgrproj_info + p);
2371 }
2372#endif // CONFIG_LOOP_RESTORATION
Yaowu Xuc27fc142016-08-22 16:08:15 -07002373
David Barker5c06a642017-08-18 13:18:16 +01002374#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07002375 dec_setup_across_tile_boundary_info(cm, &tile_info);
David Barker5c06a642017-08-18 13:18:16 +01002376#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luof190a162017-07-13 16:16:56 -07002377
Yaowu Xuc27fc142016-08-22 16:08:15 -07002378 for (mi_row = tile_info.mi_row_start; mi_row < tile_info.mi_row_end;
2379 mi_row += cm->mib_size) {
2380 int mi_col;
2381
Yaowu Xuf883b422016-08-30 14:01:10 -07002382 av1_zero_left_context(&td->xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002383
2384 for (mi_col = tile_info.mi_col_start; mi_col < tile_info.mi_col_end;
2385 mi_col += cm->mib_size) {
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07002386#if CONFIG_NCOBMC_ADAPT_WEIGHT
2387 alloc_ncobmc_pred_buffer(&td->xd);
2388 set_sb_mi_boundaries(cm, &td->xd, mi_row, mi_col);
2389#endif
Angie Chiangd9af8ac2017-10-25 10:48:53 -07002390#if CONFIG_SYMBOLRATE
2391 av1_record_superblock(td->xd.counts);
2392#endif
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02002393 decode_partition(pbi, &td->xd, mi_row, mi_col, &td->bit_reader,
2394 cm->sb_size);
Sebastien Alaiwan1bc94fc2017-10-31 10:25:17 +01002395#if NC_MODE_INFO
Yue Chen9ab6d712017-01-12 15:50:46 -08002396 detoken_and_recon_sb(pbi, &td->xd, mi_row, mi_col, &td->bit_reader,
2397 cm->sb_size);
2398#endif
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07002399#if CONFIG_NCOBMC_ADAPT_WEIGHT
2400 free_ncobmc_pred_buffer(&td->xd);
2401#endif
Cheng Chen5ad5b282017-10-05 16:36:06 -07002402#if CONFIG_LPF_SB
2403 if (USE_LOOP_FILTER_SUPERBLOCK) {
2404 // apply deblocking filtering right after each superblock is decoded
2405 const int guess_filter_lvl = FAKE_FILTER_LEVEL;
2406 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2407 guess_filter_lvl, 0, 1, mi_row, mi_col);
2408 }
2409#endif // CONFIG_LPF_SB
Yaowu Xuc27fc142016-08-22 16:08:15 -07002410 }
Angie Chiangd0916d92017-03-10 17:54:18 -08002411 aom_merge_corrupted_flag(&pbi->mb.corrupted, td->xd.corrupted);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002412 if (pbi->mb.corrupted)
Yaowu Xuf883b422016-08-30 14:01:10 -07002413 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002414 "Failed to decode tile data");
Yaowu Xuc27fc142016-08-22 16:08:15 -07002415 }
2416 }
2417
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002418#if !CONFIG_OBU
Yaowu Xuc27fc142016-08-22 16:08:15 -07002419 assert(mi_row > 0);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002420#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002421
Yaowu Xuc27fc142016-08-22 16:08:15 -07002422 // After loop filtering, the last 7 rows of pixels in each superblock row
 2423 // may still be changed by the longest loop filter of the next superblock row.
2424 if (cm->frame_parallel_decode)
Yaowu Xuf883b422016-08-30 14:01:10 -07002425 av1_frameworker_broadcast(pbi->cur_buf, mi_row << cm->mib_size_log2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002426 }
2427
Cheng Chen5ad5b282017-10-05 16:36:06 -07002428#if CONFIG_INTRABC
2429// When intraBC is on, do loop filtering per superblock,
2430// instead of do it after the whole frame has been encoded,
2431// as is in the else branch
2432#else
Cheng Chene94df5c2017-07-19 17:25:33 -07002433// Loopfilter the whole frame.
Cheng Chenf572cd32017-08-25 18:34:51 -07002434#if CONFIG_LPF_SB
2435 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2436 cm->lf.filter_level, 0, 0, 0, 0);
2437#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002438#if CONFIG_OBU
2439 if (endTile == cm->tile_rows * cm->tile_cols - 1)
2440#endif
David Barker3dffa272017-10-18 17:07:26 +01002441#if CONFIG_LOOPFILTER_LEVEL
2442 if (cm->lf.filter_level[0] || cm->lf.filter_level[1]) {
2443 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2444 cm->lf.filter_level[0], cm->lf.filter_level[1], 0,
2445 0);
2446 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2447 cm->lf.filter_level_u, cm->lf.filter_level_u, 1, 0);
2448 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2449 cm->lf.filter_level_v, cm->lf.filter_level_v, 2, 0);
2450 }
2451#else
2452 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2453 cm->lf.filter_level, 0, 0);
Cheng Chen13fc8192017-08-19 11:49:28 -07002454#endif // CONFIG_LOOPFILTER_LEVEL
Cheng Chenf572cd32017-08-25 18:34:51 -07002455#endif // CONFIG_LPF_SB
Cheng Chen5ad5b282017-10-05 16:36:06 -07002456#endif // CONFIG_INTRABC
Yaowu Xuc27fc142016-08-22 16:08:15 -07002457 if (cm->frame_parallel_decode)
Yaowu Xuf883b422016-08-30 14:01:10 -07002458 av1_frameworker_broadcast(pbi->cur_buf, INT_MAX);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002459
2460#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002461 if (cm->large_scale_tile) {
2462 if (n_tiles == 1) {
2463#if CONFIG_ANS
2464 return data_end;
2465#else
2466 // Find the end of the single tile buffer
2467 return aom_reader_find_end(&pbi->tile_data->bit_reader);
2468#endif // CONFIG_ANS
2469 } else {
2470 // Return the end of the last tile buffer
2471 return tile_buffers[tile_rows - 1][tile_cols - 1].raw_data_end;
2472 }
2473 } else {
2474#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002475#if CONFIG_ANS
2476 return data_end;
2477#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002478#if !CONFIG_OBU
Yaowu Xuc27fc142016-08-22 16:08:15 -07002479 {
2480 // Get last tile data.
2481 TileData *const td = pbi->tile_data + tile_cols * tile_rows - 1;
Yaowu Xuf883b422016-08-30 14:01:10 -07002482 return aom_reader_find_end(&td->bit_reader);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002483 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002484#else
2485 TileData *const td = pbi->tile_data + endTile;
2486 return aom_reader_find_end(&td->bit_reader);
2487#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002488#endif // CONFIG_ANS
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002489#if CONFIG_EXT_TILE
2490 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002491#endif // CONFIG_EXT_TILE
2492}
2493
Yaowu Xuc27fc142016-08-22 16:08:15 -07002494static void error_handler(void *data) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002495 AV1_COMMON *const cm = (AV1_COMMON *)data;
2496 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME, "Truncated packet");
Yaowu Xuc27fc142016-08-22 16:08:15 -07002497}
2498
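// Reads bit depth, color space, color range and chroma subsampling from the
// uncompressed header. Profiles 1 and 3 signal subsampling explicitly and
// reject 4:2:0; profiles 0 and 2 imply 4:2:0 for non-sRGB content and reject
// sRGB (4:4:4) altogether.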
Yaowu Xuf883b422016-08-30 14:01:10 -07002499static void read_bitdepth_colorspace_sampling(AV1_COMMON *cm,
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02002500 struct aom_read_bit_buffer *rb,
2501 int allow_lowbitdepth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002502 if (cm->profile >= PROFILE_2) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002503 cm->bit_depth = aom_rb_read_bit(rb) ? AOM_BITS_12 : AOM_BITS_10;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002504 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002505 cm->bit_depth = AOM_BITS_8;
Sebastien Alaiwan98378132017-01-04 11:23:09 +01002506 }
2507
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02002508#if CONFIG_HIGHBITDEPTH
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02002509 cm->use_highbitdepth = cm->bit_depth > AOM_BITS_8 || !allow_lowbitdepth;
James Zern91adea52017-06-15 23:27:26 -07002510#else
2511 (void)allow_lowbitdepth;
Sebastien Alaiwan98378132017-01-04 11:23:09 +01002512#endif
anorkin76fb1262017-03-22 15:12:12 -07002513#if CONFIG_COLORSPACE_HEADERS
2514 cm->color_space = aom_rb_read_literal(rb, 5);
2515 cm->transfer_function = aom_rb_read_literal(rb, 5);
2516#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002517 cm->color_space = aom_rb_read_literal(rb, 3);
anorkin76fb1262017-03-22 15:12:12 -07002518#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07002519 if (cm->color_space != AOM_CS_SRGB) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002520 // [16,235] (including xvycc) vs [0,255] range
Yaowu Xuf883b422016-08-30 14:01:10 -07002521 cm->color_range = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002522 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002523 cm->subsampling_x = aom_rb_read_bit(rb);
2524 cm->subsampling_y = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002525 if (cm->subsampling_x == 1 && cm->subsampling_y == 1)
Yaowu Xuf883b422016-08-30 14:01:10 -07002526 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002527 "4:2:0 color not supported in profile 1 or 3");
Yaowu Xuf883b422016-08-30 14:01:10 -07002528 if (aom_rb_read_bit(rb))
2529 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002530 "Reserved bit set");
2531 } else {
2532 cm->subsampling_y = cm->subsampling_x = 1;
2533 }
anorkin76fb1262017-03-22 15:12:12 -07002534#if CONFIG_COLORSPACE_HEADERS
2535 if (cm->subsampling_x == 1 && cm->subsampling_y == 1) {
2536 cm->chroma_sample_position = aom_rb_read_literal(rb, 2);
2537 }
2538#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002539 } else {
2540 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
2541 // Note if colorspace is SRGB then 4:4:4 chroma sampling is assumed.
2542 // 4:2:2 or 4:4:0 chroma sampling is not allowed.
2543 cm->subsampling_y = cm->subsampling_x = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002544 if (aom_rb_read_bit(rb))
2545 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002546 "Reserved bit set");
2547 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002548 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002549 "4:4:4 color not supported in profile 0 or 2");
2550 }
2551 }
2552}
2553
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002554#if CONFIG_REFERENCE_BUFFER
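// Reads the frame-id portion of the sequence header: a flag indicating
// whether frame ids are coded and, if so, delta_frame_id_length (coded as
// value + 2) and frame_id_length (coded relative to the delta length so that
// it is always strictly larger).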
David Barker5e70a112017-10-03 14:28:17 +01002555void read_sequence_header(SequenceHeader *seq_params,
2556 struct aom_read_bit_buffer *rb) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002557 /* Placeholder for actually reading from the bitstream */
David Barker5e70a112017-10-03 14:28:17 +01002558 seq_params->frame_id_numbers_present_flag = aom_rb_read_bit(rb);
2559 if (seq_params->frame_id_numbers_present_flag) {
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02002560 // We must always have delta_frame_id_length < frame_id_length,
2561 // in order for a frame to be referenced with a unique delta.
2562 // Avoid wasting bits by using a coding that enforces this restriction.
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02002563 seq_params->delta_frame_id_length = aom_rb_read_literal(rb, 4) + 2;
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02002564 seq_params->frame_id_length =
2565 aom_rb_read_literal(rb, 3) + seq_params->delta_frame_id_length + 1;
David Barker5e70a112017-10-03 14:28:17 +01002566 }
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002567}
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002568#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002569
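// Reads the frame-level flags enabling interintra compound and masked
// compound prediction. Each flag defaults to 0 whenever the frame cannot use
// the tool (for example intra-only frames, or an incompatible reference
// mode).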
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002570static void read_compound_tools(AV1_COMMON *cm,
2571 struct aom_read_bit_buffer *rb) {
2572 (void)cm;
2573 (void)rb;
2574#if CONFIG_INTERINTRA
2575 if (!frame_is_intra_only(cm) && cm->reference_mode != COMPOUND_REFERENCE) {
2576 cm->allow_interintra_compound = aom_rb_read_bit(rb);
2577 } else {
2578 cm->allow_interintra_compound = 0;
2579 }
2580#endif // CONFIG_INTERINTRA
Zoe Liu85b66462017-04-20 14:28:19 -07002581#if CONFIG_COMPOUND_SINGLEREF
2582 if (!frame_is_intra_only(cm)) {
2583#else // !CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002584 if (!frame_is_intra_only(cm) && cm->reference_mode != SINGLE_REFERENCE) {
Zoe Liu85b66462017-04-20 14:28:19 -07002585#endif // CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002586 cm->allow_masked_compound = aom_rb_read_bit(rb);
2587 } else {
2588 cm->allow_masked_compound = 0;
2589 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002590}
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002591
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07002592#if CONFIG_VAR_REFS
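// Marks each reference with a valid buffer index as usable, then resolves
// duplicates: a reference that reuses an earlier reference's buffer is
// normally invalidated itself, except that ALTREF invalidates the earlier
// duplicate instead (unless that duplicate is LAST).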
2593static void check_valid_ref_frames(AV1_COMMON *cm) {
2594 MV_REFERENCE_FRAME ref_frame;
 2595 // TODO(zoeliu): Handle ALTREF_FRAME the same way as the other reference
 2596 // frames: the current encoder invalidates ALTREF when ALTREF is the same
 2597 // as LAST, but invalidates all the other references when they are the
 2598 // same as ALTREF.
2599 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
2600 RefBuffer *const ref_buf = &cm->frame_refs[ref_frame - LAST_FRAME];
2601
2602 if (ref_buf->idx != INVALID_IDX) {
2603 ref_buf->is_valid = 1;
2604
2605 MV_REFERENCE_FRAME ref;
2606 for (ref = LAST_FRAME; ref < ref_frame; ++ref) {
2607 RefBuffer *const buf = &cm->frame_refs[ref - LAST_FRAME];
2608 if (buf->is_valid && buf->idx == ref_buf->idx) {
2609 if (ref_frame != ALTREF_FRAME || ref == LAST_FRAME) {
2610 ref_buf->is_valid = 0;
2611 break;
2612 } else {
2613 buf->is_valid = 0;
2614 }
2615 }
2616 }
2617 } else {
2618 ref_buf->is_valid = 0;
2619 }
2620 }
2621}
2622#endif // CONFIG_VAR_REFS
2623
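// Decodes one global motion model. The transformation type is read first;
// each non-trivial parameter is then coded as a signed subexponential value
// relative to the corresponding parameter of 'ref_params' and rescaled to
// WARPEDMODEL_PREC_BITS precision. Returns 0 if the resulting affine model
// fails the shear-parameter validity check.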
Sarah Parker3e579a62017-08-23 16:53:20 -07002624static int read_global_motion_params(WarpedMotionParams *params,
David Barkerd7c8bd52017-09-25 14:47:29 +01002625 const WarpedMotionParams *ref_params,
Sarah Parker3e579a62017-08-23 16:53:20 -07002626 struct aom_read_bit_buffer *rb,
2627 int allow_hp) {
2628 TransformationType type = aom_rb_read_bit(rb);
2629 if (type != IDENTITY) {
2630#if GLOBAL_TRANS_TYPES > 4
2631 type += aom_rb_read_literal(rb, GLOBAL_TYPE_BITS);
2632#else
2633 if (aom_rb_read_bit(rb))
2634 type = ROTZOOM;
2635 else
2636 type = aom_rb_read_bit(rb) ? TRANSLATION : AFFINE;
2637#endif // GLOBAL_TRANS_TYPES > 4
2638 }
2639
2640 int trans_bits;
2641 int trans_dec_factor;
2642 int trans_prec_diff;
David Barkerd7c8bd52017-09-25 14:47:29 +01002643 *params = default_warp_params;
Sarah Parker3e579a62017-08-23 16:53:20 -07002644 params->wmtype = type;
2645 switch (type) {
Sarah Parker3e579a62017-08-23 16:53:20 -07002646 case AFFINE:
2647 case ROTZOOM:
2648 params->wmmat[2] = aom_rb_read_signed_primitive_refsubexpfin(
2649 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2650 (ref_params->wmmat[2] >> GM_ALPHA_PREC_DIFF) -
2651 (1 << GM_ALPHA_PREC_BITS)) *
2652 GM_ALPHA_DECODE_FACTOR +
2653 (1 << WARPEDMODEL_PREC_BITS);
Debargha Mukherjee1a2b35f2017-10-21 10:41:46 -07002654 params->wmmat[3] = aom_rb_read_signed_primitive_refsubexpfin(
2655 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2656 (ref_params->wmmat[3] >> GM_ALPHA_PREC_DIFF)) *
2657 GM_ALPHA_DECODE_FACTOR;
Sarah Parker3e579a62017-08-23 16:53:20 -07002658 if (type >= AFFINE) {
Debargha Mukherjee1a2b35f2017-10-21 10:41:46 -07002659 params->wmmat[4] = aom_rb_read_signed_primitive_refsubexpfin(
2660 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2661 (ref_params->wmmat[4] >> GM_ALPHA_PREC_DIFF)) *
2662 GM_ALPHA_DECODE_FACTOR;
Sarah Parker3e579a62017-08-23 16:53:20 -07002663 params->wmmat[5] = aom_rb_read_signed_primitive_refsubexpfin(
2664 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2665 (ref_params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
2666 (1 << GM_ALPHA_PREC_BITS)) *
2667 GM_ALPHA_DECODE_FACTOR +
2668 (1 << WARPEDMODEL_PREC_BITS);
2669 } else {
2670 params->wmmat[4] = -params->wmmat[3];
2671 params->wmmat[5] = params->wmmat[2];
2672 }
2673 // fallthrough intended
2674 case TRANSLATION:
2675 trans_bits = (type == TRANSLATION) ? GM_ABS_TRANS_ONLY_BITS - !allow_hp
2676 : GM_ABS_TRANS_BITS;
2677 trans_dec_factor = (type == TRANSLATION)
2678 ? GM_TRANS_ONLY_DECODE_FACTOR * (1 << !allow_hp)
2679 : GM_TRANS_DECODE_FACTOR;
2680 trans_prec_diff = (type == TRANSLATION)
2681 ? GM_TRANS_ONLY_PREC_DIFF + !allow_hp
2682 : GM_TRANS_PREC_DIFF;
2683 params->wmmat[0] = aom_rb_read_signed_primitive_refsubexpfin(
2684 rb, (1 << trans_bits) + 1, SUBEXPFIN_K,
2685 (ref_params->wmmat[0] >> trans_prec_diff)) *
2686 trans_dec_factor;
2687 params->wmmat[1] = aom_rb_read_signed_primitive_refsubexpfin(
2688 rb, (1 << trans_bits) + 1, SUBEXPFIN_K,
2689 (ref_params->wmmat[1] >> trans_prec_diff)) *
2690 trans_dec_factor;
2691 case IDENTITY: break;
2692 default: assert(0);
2693 }
2694 if (params->wmtype <= AFFINE) {
2695 int good_shear_params = get_shear_params(params);
2696 if (!good_shear_params) return 0;
2697 }
2698
2699 return 1;
2700}
2701
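// Reads a global motion model for every inter reference frame, predicting
// each model from the co-located model of the previous frame (or from the
// default model in error-resilient mode), and stores a copy with the current
// frame so that later frames can use it as a predictor.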
2702static void read_global_motion(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
2703 int frame;
2704 for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
David Barkerd7c8bd52017-09-25 14:47:29 +01002705 const WarpedMotionParams *ref_params =
2706 cm->error_resilient_mode ? &default_warp_params
2707 : &cm->prev_frame->global_motion[frame];
Sarah Parker3e579a62017-08-23 16:53:20 -07002708 int good_params = read_global_motion_params(
David Barkerd7c8bd52017-09-25 14:47:29 +01002709 &cm->global_motion[frame], ref_params, rb, cm->allow_high_precision_mv);
Sarah Parker3e579a62017-08-23 16:53:20 -07002710 if (!good_params)
2711 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2712 "Invalid shear parameters for global motion.");
2713
2714 // TODO(sarahparker, debargha): The logic in the commented out code below
2715 // does not work currently and causes mismatches when resize is on. Fix it
2716 // before turning the optimization back on.
2717 /*
2718 YV12_BUFFER_CONFIG *ref_buf = get_ref_frame(cm, frame);
2719 if (cm->width == ref_buf->y_crop_width &&
2720 cm->height == ref_buf->y_crop_height) {
2721 read_global_motion_params(&cm->global_motion[frame],
2722 &cm->prev_frame->global_motion[frame], rb,
2723 cm->allow_high_precision_mv);
2724 } else {
David Barkerd7c8bd52017-09-25 14:47:29 +01002725 cm->global_motion[frame] = default_warp_params;
Sarah Parker3e579a62017-08-23 16:53:20 -07002726 }
2727 */
2728 /*
2729 printf("Dec Ref %d [%d/%d]: %d %d %d %d\n",
2730 frame, cm->current_video_frame, cm->show_frame,
2731 cm->global_motion[frame].wmmat[0],
2732 cm->global_motion[frame].wmmat[1],
2733 cm->global_motion[frame].wmmat[2],
2734 cm->global_motion[frame].wmmat[3]);
2735 */
2736 }
David Barkercba7da72017-09-14 11:24:27 +01002737 memcpy(cm->cur_frame->global_motion, cm->global_motion,
2738 TOTAL_REFS_PER_FRAME * sizeof(WarpedMotionParams));
Sarah Parker3e579a62017-08-23 16:53:20 -07002739}
Sarah Parker3e579a62017-08-23 16:53:20 -07002740
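// Parses the uncompressed frame header: frame marker and profile (when not
// using OBUs), the show_existing_frame short-cut, frame type and flags,
// optional frame ids, frame size and reference configuration, followed by
// loop filter, quantizer, segmentation, delta-Q, CDEF, loop-restoration,
// transform-mode and reference-mode syntax. Returns 0 in the
// show_existing_frame case.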
Yaowu Xuf883b422016-08-30 14:01:10 -07002741static size_t read_uncompressed_header(AV1Decoder *pbi,
2742 struct aom_read_bit_buffer *rb) {
2743 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002744 MACROBLOCKD *const xd = &pbi->mb;
2745 BufferPool *const pool = cm->buffer_pool;
2746 RefCntBuffer *const frame_bufs = pool->frame_bufs;
2747 int i, mask, ref_index = 0;
2748 size_t sz;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002749
Yaowu Xuc27fc142016-08-22 16:08:15 -07002750 cm->last_frame_type = cm->frame_type;
2751 cm->last_intra_only = cm->intra_only;
2752
Yaowu Xuc27fc142016-08-22 16:08:15 -07002753 // NOTE: By default, every coded frame is allowed to be used as a reference.
2754 cm->is_reference_frame = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002755
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002756#if !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07002757 if (aom_rb_read_literal(rb, 2) != AOM_FRAME_MARKER)
2758 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002759 "Invalid frame marker");
2760
Yaowu Xuf883b422016-08-30 14:01:10 -07002761 cm->profile = av1_read_profile(rb);
Sebastien Alaiwanb9c652a2017-05-03 15:44:28 +02002762
2763 const BITSTREAM_PROFILE MAX_SUPPORTED_PROFILE =
2764 CONFIG_HIGHBITDEPTH ? MAX_PROFILES : PROFILE_2;
2765
2766 if (cm->profile >= MAX_SUPPORTED_PROFILE)
Yaowu Xuf883b422016-08-30 14:01:10 -07002767 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002768 "Unsupported bitstream profile");
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002769#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002770
Yunqing Wangc2502b52017-07-19 17:44:18 -07002771#if CONFIG_EXT_TILE
2772 cm->large_scale_tile = aom_rb_read_literal(rb, 1);
2773#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01002774 if (cm->large_scale_tile) cm->seq_params.frame_id_numbers_present_flag = 0;
Yunqing Wangc2502b52017-07-19 17:44:18 -07002775#endif // CONFIG_REFERENCE_BUFFER
2776#endif // CONFIG_EXT_TILE
2777
Yaowu Xuf883b422016-08-30 14:01:10 -07002778 cm->show_existing_frame = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002779
2780 if (cm->show_existing_frame) {
Yaowu Xu415ba932016-12-27 11:17:32 -08002781 // Show an existing frame directly.
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01002782 const int existing_frame_idx = aom_rb_read_literal(rb, 3);
2783 const int frame_to_show = cm->ref_frame_map[existing_frame_idx];
Yaowu Xu415ba932016-12-27 11:17:32 -08002784#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01002785 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02002786 int frame_id_length = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01002787 int display_frame_id = aom_rb_read_literal(rb, frame_id_length);
2788 /* Compare display_frame_id with ref_frame_id and check valid for
2789 * referencing */
2790 if (display_frame_id != cm->ref_frame_id[existing_frame_idx] ||
2791 cm->valid_for_referencing[existing_frame_idx] == 0)
2792 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2793 "Reference buffer frame ID mismatch");
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002794 }
2795#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002796 lock_buffer_pool(pool);
2797 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
2798 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07002799 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002800 "Buffer %d does not contain a decoded frame",
2801 frame_to_show);
2802 }
2803 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
2804 unlock_buffer_pool(pool);
2805
Cheng Chen13fc8192017-08-19 11:49:28 -07002806#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07002807 cm->lf.filter_level[0] = 0;
2808 cm->lf.filter_level[1] = 0;
2809#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07002810 cm->lf.filter_level = 0;
Cheng Chen179479f2017-08-04 10:56:39 -07002811#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002812 cm->show_frame = 1;
2813 pbi->refresh_frame_flags = 0;
2814
2815 if (cm->frame_parallel_decode) {
2816 for (i = 0; i < REF_FRAMES; ++i)
2817 cm->next_ref_frame_map[i] = cm->ref_frame_map[i];
2818 }
2819
2820 return 0;
2821 }
2822
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002823#if !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07002824 cm->frame_type = (FRAME_TYPE)aom_rb_read_bit(rb);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002825 cm->show_frame = aom_rb_read_bit(rb);
2826 if (cm->frame_type != KEY_FRAME)
2827 cm->intra_only = cm->show_frame ? 0 : aom_rb_read_bit(rb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002828#else
2829 cm->frame_type = (FRAME_TYPE)aom_rb_read_literal(rb, 2); // 2 bits
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002830 cm->show_frame = aom_rb_read_bit(rb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002831 cm->intra_only = cm->frame_type == INTRA_ONLY_FRAME;
2832#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07002833 cm->error_resilient_mode = aom_rb_read_bit(rb);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002834#if CONFIG_REFERENCE_BUFFER
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002835#if !CONFIG_OBU
David Barker5e70a112017-10-03 14:28:17 +01002836 if (frame_is_intra_only(cm)) read_sequence_header(&cm->seq_params, rb);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002837#endif // !CONFIG_OBU
David Barker5e70a112017-10-03 14:28:17 +01002838 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02002839 int frame_id_length = cm->seq_params.frame_id_length;
2840 int diff_len = cm->seq_params.delta_frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01002841 int prev_frame_id = 0;
2842 if (cm->frame_type != KEY_FRAME) {
2843 prev_frame_id = cm->current_frame_id;
2844 }
2845 cm->current_frame_id = aom_rb_read_literal(rb, frame_id_length);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002846
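    // Conformance check on the new frame id: compute the forward (wrapped)
    // distance from the previous frame id; it must be non-zero and smaller
    // than half the frame-id space.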
David Barker5e70a112017-10-03 14:28:17 +01002847 if (cm->frame_type != KEY_FRAME) {
2848 int diff_frame_id;
2849 if (cm->current_frame_id > prev_frame_id) {
2850 diff_frame_id = cm->current_frame_id - prev_frame_id;
2851 } else {
2852 diff_frame_id =
2853 (1 << frame_id_length) + cm->current_frame_id - prev_frame_id;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002854 }
David Barker5e70a112017-10-03 14:28:17 +01002855 /* Check current_frame_id for conformance */
2856 if (prev_frame_id == cm->current_frame_id ||
2857 diff_frame_id >= (1 << (frame_id_length - 1))) {
2858 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2859 "Invalid value of current_frame_id");
2860 }
2861 }
2862 /* Check if some frames need to be marked as not valid for referencing */
2863 for (i = 0; i < REF_FRAMES; i++) {
2864 if (cm->frame_type == KEY_FRAME) {
2865 cm->valid_for_referencing[i] = 0;
2866 } else if (cm->current_frame_id - (1 << diff_len) > 0) {
2867 if (cm->ref_frame_id[i] > cm->current_frame_id ||
2868 cm->ref_frame_id[i] < cm->current_frame_id - (1 << diff_len))
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002869 cm->valid_for_referencing[i] = 0;
David Barker5e70a112017-10-03 14:28:17 +01002870 } else {
2871 if (cm->ref_frame_id[i] > cm->current_frame_id &&
2872 cm->ref_frame_id[i] <
2873 (1 << frame_id_length) + cm->current_frame_id - (1 << diff_len))
2874 cm->valid_for_referencing[i] = 0;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002875 }
2876 }
2877 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002878#endif // CONFIG_REFERENCE_BUFFER
Yaowu Xuc27fc142016-08-22 16:08:15 -07002879 if (cm->frame_type == KEY_FRAME) {
Jingning Hand8a15a62017-10-30 10:53:42 -07002880 cm->current_video_frame = 0;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002881#if !CONFIG_OBU
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02002882 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002883#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002884 pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1;
2885
2886 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
2887 cm->frame_refs[i].idx = INVALID_IDX;
2888 cm->frame_refs[i].buf = NULL;
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07002889#if CONFIG_VAR_REFS
2890 cm->frame_refs[i].is_valid = 0;
2891#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07002892 }
2893
2894 setup_frame_size(cm, rb);
Debargha Mukherjeed2630fa2017-09-22 10:32:51 -07002895 setup_sb_size(cm, rb);
2896
Yaowu Xuc27fc142016-08-22 16:08:15 -07002897 if (pbi->need_resync) {
2898 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
2899 pbi->need_resync = 0;
2900 }
Alex Converseeb780e72016-12-13 12:46:41 -08002901#if CONFIG_ANS && ANS_MAX_SYMBOLS
2902 cm->ans_window_size_log2 = aom_rb_read_literal(rb, 4) + 8;
2903#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
hui su24f7b072016-10-12 11:36:24 -07002904 cm->allow_screen_content_tools = aom_rb_read_bit(rb);
RogerZhou3b635242017-09-19 10:06:46 -07002905#if CONFIG_AMVR
2906 if (cm->allow_screen_content_tools) {
2907 if (aom_rb_read_bit(rb)) {
RogerZhou10a03802017-10-26 11:49:48 -07002908 cm->seq_force_integer_mv = 2;
RogerZhou3b635242017-09-19 10:06:46 -07002909 } else {
RogerZhou10a03802017-10-26 11:49:48 -07002910 cm->seq_force_integer_mv = aom_rb_read_bit(rb);
RogerZhou3b635242017-09-19 10:06:46 -07002911 }
2912 } else {
RogerZhou10a03802017-10-26 11:49:48 -07002913 cm->seq_force_integer_mv = 0;
RogerZhou3b635242017-09-19 10:06:46 -07002914 }
2915#endif
Fangwen Fu930c51c2017-05-07 20:39:17 -07002916#if CONFIG_TEMPMV_SIGNALING
2917 cm->use_prev_frame_mvs = 0;
2918#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002919 } else {
hui su24f7b072016-10-12 11:36:24 -07002920 if (cm->intra_only) cm->allow_screen_content_tools = aom_rb_read_bit(rb);
Thomas Daedea6a854b2017-06-22 17:49:11 -07002921#if CONFIG_TEMPMV_SIGNALING
2922 if (cm->intra_only || cm->error_resilient_mode) cm->use_prev_frame_mvs = 0;
2923#endif
2924#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
2925// The only way to reset all frame contexts to their default values is with a
2926// keyframe.
2927#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07002928 if (cm->error_resilient_mode) {
2929 cm->reset_frame_context = RESET_FRAME_CONTEXT_ALL;
2930 } else {
2931 if (cm->intra_only) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002932 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002933 ? RESET_FRAME_CONTEXT_ALL
2934 : RESET_FRAME_CONTEXT_CURRENT;
2935 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002936 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002937 ? RESET_FRAME_CONTEXT_CURRENT
2938 : RESET_FRAME_CONTEXT_NONE;
2939 if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT)
Yaowu Xuf883b422016-08-30 14:01:10 -07002940 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002941 ? RESET_FRAME_CONTEXT_ALL
2942 : RESET_FRAME_CONTEXT_CURRENT;
2943 }
2944 }
Thomas Daedea6a854b2017-06-22 17:49:11 -07002945#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002946
2947 if (cm->intra_only) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002948#if !CONFIG_OBU
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02002949 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002950#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002951
Yaowu Xuf883b422016-08-30 14:01:10 -07002952 pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002953 setup_frame_size(cm, rb);
Pavel Frolovea3dd3a2017-09-25 16:06:19 +03002954 setup_sb_size(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002955 if (pbi->need_resync) {
2956 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
2957 pbi->need_resync = 0;
2958 }
Alex Converseeb780e72016-12-13 12:46:41 -08002959#if CONFIG_ANS && ANS_MAX_SYMBOLS
2960 cm->ans_window_size_log2 = aom_rb_read_literal(rb, 4) + 8;
2961#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002962 } else if (pbi->need_resync != 1) { /* Skip if need resync */
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002963#if CONFIG_OBU
2964 pbi->refresh_frame_flags = (cm->frame_type == S_FRAME)
2965 ? ~(1 << REF_FRAMES)
2966 : aom_rb_read_literal(rb, REF_FRAMES);
2967#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002968 pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002969#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002970
Yaowu Xuc27fc142016-08-22 16:08:15 -07002971 if (!pbi->refresh_frame_flags) {
2972 // NOTE: "pbi->refresh_frame_flags == 0" indicates that the coded frame
2973 // will not be used as a reference
2974 cm->is_reference_frame = 0;
2975 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002976
2977 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002978 const int ref = aom_rb_read_literal(rb, REF_FRAMES_LOG2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002979 const int idx = cm->ref_frame_map[ref];
Rupert Swarbrick5eb471c2017-10-02 16:06:54 +01002980
2981 // Most of the time, streams start with a keyframe. In that case,
2982 // ref_frame_map will have been filled in at that point and will not
2983 // contain any -1's. However, streams are explicitly allowed to start
2984 // with an intra-only frame, so long as they don't then signal a
2985 // reference to a slot that hasn't been set yet. That's what we are
2986 // checking here.
2987 if (idx == -1)
2988 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2989 "Inter frame requests nonexistent reference");
2990
Yaowu Xuc27fc142016-08-22 16:08:15 -07002991 RefBuffer *const ref_frame = &cm->frame_refs[i];
2992 ref_frame->idx = idx;
2993 ref_frame->buf = &frame_bufs[idx].buf;
Zoe Liu17af2742017-10-06 10:36:42 -07002994#if CONFIG_FRAME_SIGN_BIAS
2995#if CONFIG_OBU
2996 // NOTE: For the scenario of (cm->frame_type != S_FRAME),
2997 // ref_frame_sign_bias will be reset based on frame offsets.
2998 cm->ref_frame_sign_bias[LAST_FRAME + i] = 0;
2999#endif // CONFIG_OBU
3000#else // !CONFIG_FRAME_SIGN_BIAS
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003001#if CONFIG_OBU
3002 cm->ref_frame_sign_bias[LAST_FRAME + i] =
3003 (cm->frame_type == S_FRAME) ? 0 : aom_rb_read_bit(rb);
Zoe Liu17af2742017-10-06 10:36:42 -07003004#else // !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07003005 cm->ref_frame_sign_bias[LAST_FRAME + i] = aom_rb_read_bit(rb);
Zoe Liu17af2742017-10-06 10:36:42 -07003006#endif // CONFIG_OBU
3007#endif // CONFIG_FRAME_SIGN_BIAS
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003008#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003009 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003010 int frame_id_length = cm->seq_params.frame_id_length;
3011 int diff_len = cm->seq_params.delta_frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003012 int delta_frame_id_minus1 = aom_rb_read_literal(rb, diff_len);
3013 int ref_frame_id =
3014 ((cm->current_frame_id - (delta_frame_id_minus1 + 1) +
3015 (1 << frame_id_length)) %
3016 (1 << frame_id_length));
3017 /* Compare values derived from delta_frame_id_minus1 and
3018 * refresh_frame_flags. Also, check valid for referencing */
3019 if (ref_frame_id != cm->ref_frame_id[ref] ||
3020 cm->valid_for_referencing[ref] == 0)
3021 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3022 "Reference buffer frame ID mismatch");
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003023 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003024#endif // CONFIG_REFERENCE_BUFFER
Yaowu Xuc27fc142016-08-22 16:08:15 -07003025 }
3026
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07003027#if CONFIG_VAR_REFS
3028 check_valid_ref_frames(cm);
3029#endif // CONFIG_VAR_REFS
3030
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003031#if CONFIG_FRAME_SIZE
3032 if (cm->error_resilient_mode == 0) {
3033 setup_frame_size_with_refs(cm, rb);
3034 } else {
3035 setup_frame_size(cm, rb);
3036 }
3037#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003038 setup_frame_size_with_refs(cm, rb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003039#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003040
RogerZhou3b635242017-09-19 10:06:46 -07003041#if CONFIG_AMVR
RogerZhou10a03802017-10-26 11:49:48 -07003042 if (cm->seq_force_integer_mv == 2) {
3043 cm->cur_frame_force_integer_mv = aom_rb_read_bit(rb);
RogerZhou3b635242017-09-19 10:06:46 -07003044 } else {
RogerZhou10a03802017-10-26 11:49:48 -07003045 cm->cur_frame_force_integer_mv = cm->seq_force_integer_mv;
RogerZhou3b635242017-09-19 10:06:46 -07003046 }
RogerZhou10a03802017-10-26 11:49:48 -07003047
3048 if (cm->cur_frame_force_integer_mv) {
3049 cm->allow_high_precision_mv = 0;
3050 } else {
3051 cm->allow_high_precision_mv = aom_rb_read_bit(rb);
3052 }
3053#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003054 cm->allow_high_precision_mv = aom_rb_read_bit(rb);
RogerZhou10a03802017-10-26 11:49:48 -07003055#endif
Angie Chiang5678ad92016-11-21 09:38:40 -08003056 cm->interp_filter = read_frame_interp_filter(rb);
Fangwen Fu8d164de2016-12-14 13:40:54 -08003057#if CONFIG_TEMPMV_SIGNALING
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01003058 if (frame_might_use_prev_frame_mvs(cm))
Fangwen Fu8d164de2016-12-14 13:40:54 -08003059 cm->use_prev_frame_mvs = aom_rb_read_bit(rb);
Rupert Swarbrick1f990a62017-07-11 11:09:33 +01003060 else
3061 cm->use_prev_frame_mvs = 0;
Fangwen Fu8d164de2016-12-14 13:40:54 -08003062#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003063 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
3064 RefBuffer *const ref_buf = &cm->frame_refs[i];
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02003065#if CONFIG_HIGHBITDEPTH
Yaowu Xuf883b422016-08-30 14:01:10 -07003066 av1_setup_scale_factors_for_frame(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003067 &ref_buf->sf, ref_buf->buf->y_crop_width,
3068 ref_buf->buf->y_crop_height, cm->width, cm->height,
3069 cm->use_highbitdepth);
3070#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003071 av1_setup_scale_factors_for_frame(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003072 &ref_buf->sf, ref_buf->buf->y_crop_width,
3073 ref_buf->buf->y_crop_height, cm->width, cm->height);
3074#endif
3075 }
3076 }
3077 }
Jingning Hanc723b342017-08-24 11:19:46 -07003078
Jingning Hanea255c92017-09-29 08:12:09 -07003079#if CONFIG_FRAME_MARKER
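  // Derive the frame offset used for reference ordering: frames that are not
  // shown signal an explicit offset ahead of the current display frame,
  // otherwise the current frame count is used directly. Per-buffer reference
  // offsets (and, when enabled, frame sign biases) are then derived from it.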
Jingning Hanc723b342017-08-24 11:19:46 -07003080 if (cm->show_frame == 0) {
3081 cm->frame_offset = cm->current_video_frame + aom_rb_read_literal(rb, 4);
3082 } else {
3083 cm->frame_offset = cm->current_video_frame;
3084 }
Zoe Liu17af2742017-10-06 10:36:42 -07003085 av1_setup_frame_buf_refs(cm);
3086
3087#if CONFIG_FRAME_SIGN_BIAS
3088#if CONFIG_OBU
3089 if (cm->frame_type != S_FRAME)
3090#endif // CONFIG_OBU
3091 av1_setup_frame_sign_bias(cm);
Zoe Liu17af2742017-10-06 10:36:42 -07003092#endif // CONFIG_FRAME_SIGN_BIAS
3093#endif // CONFIG_FRAME_MARKER
Jingning Hanc723b342017-08-24 11:19:46 -07003094
Fangwen Fu8d164de2016-12-14 13:40:54 -08003095#if CONFIG_TEMPMV_SIGNALING
3096 cm->cur_frame->intra_only = cm->frame_type == KEY_FRAME || cm->intra_only;
3097#endif
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003098
3099#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003100 if (cm->seq_params.frame_id_numbers_present_flag) {
3101 /* If bitmask is set, update reference frame id values and
3102 mark frames as valid for reference */
3103 int refresh_frame_flags =
3104 cm->frame_type == KEY_FRAME ? 0xFF : pbi->refresh_frame_flags;
3105 for (i = 0; i < REF_FRAMES; i++) {
3106 if ((refresh_frame_flags >> i) & 1) {
3107 cm->ref_frame_id[i] = cm->current_frame_id;
3108 cm->valid_for_referencing[i] = 1;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003109 }
3110 }
3111 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003112#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003113
Yaowu Xuc27fc142016-08-22 16:08:15 -07003114 get_frame_new_buffer(cm)->bit_depth = cm->bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003115 get_frame_new_buffer(cm)->color_space = cm->color_space;
anorkin76fb1262017-03-22 15:12:12 -07003116#if CONFIG_COLORSPACE_HEADERS
3117 get_frame_new_buffer(cm)->transfer_function = cm->transfer_function;
3118 get_frame_new_buffer(cm)->chroma_sample_position = cm->chroma_sample_position;
3119#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003120 get_frame_new_buffer(cm)->color_range = cm->color_range;
3121 get_frame_new_buffer(cm)->render_width = cm->render_width;
3122 get_frame_new_buffer(cm)->render_height = cm->render_height;
3123
3124 if (pbi->need_resync) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003125 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003126 "Keyframe / intra-only frame required to reset decoder"
3127 " state");
3128 }
3129
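  // Decide whether backward (in-frame) context adaptation is allowed:
  // error-resilient frames (and, with CONFIG_EXT_TILE, large-scale-tile
  // frames) must always use forward context refresh.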
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01003130#if CONFIG_EXT_TILE
3131 const int might_bwd_adapt =
3132 !(cm->error_resilient_mode || cm->large_scale_tile);
3133#else
3134 const int might_bwd_adapt = !cm->error_resilient_mode;
3135#endif // CONFIG_EXT_TILE
3136 if (might_bwd_adapt) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003137 cm->refresh_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07003138 ? REFRESH_FRAME_CONTEXT_FORWARD
3139 : REFRESH_FRAME_CONTEXT_BACKWARD;
3140 } else {
3141 cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_FORWARD;
3142 }
Thomas Daededa4d8b92017-06-05 15:44:14 -07003143#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuf883b422016-08-30 14:01:10 -07003144 // This flag will be overridden by the call to av1_setup_past_independence
Yaowu Xuc27fc142016-08-22 16:08:15 -07003145 // below, forcing the use of context 0 for those frame types.
Yaowu Xuf883b422016-08-30 14:01:10 -07003146 cm->frame_context_idx = aom_rb_read_literal(rb, FRAME_CONTEXTS_LOG2);
Thomas Daededa4d8b92017-06-05 15:44:14 -07003147#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003148
3149 // Generate next_ref_frame_map.
3150 lock_buffer_pool(pool);
3151 for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
3152 if (mask & 1) {
3153 cm->next_ref_frame_map[ref_index] = cm->new_fb_idx;
3154 ++frame_bufs[cm->new_fb_idx].ref_count;
3155 } else {
3156 cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
3157 }
3158 // Current thread holds the reference frame.
3159 if (cm->ref_frame_map[ref_index] >= 0)
3160 ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
3161 ++ref_index;
3162 }
3163
3164 for (; ref_index < REF_FRAMES; ++ref_index) {
3165 cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
3166
3167 // Current thread holds the reference frame.
3168 if (cm->ref_frame_map[ref_index] >= 0)
3169 ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
3170 }
3171 unlock_buffer_pool(pool);
3172 pbi->hold_ref_buf = 1;
3173
3174 if (frame_is_intra_only(cm) || cm->error_resilient_mode)
Yaowu Xuf883b422016-08-30 14:01:10 -07003175 av1_setup_past_independence(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003176
Yaowu Xuc27fc142016-08-22 16:08:15 -07003177 setup_loopfilter(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003178 setup_quantization(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003179 xd->bd = (int)cm->bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003180
hui su0d103572017-03-01 17:58:01 -08003181#if CONFIG_Q_ADAPT_PROBS
Yaowu Xuf883b422016-08-30 14:01:10 -07003182 av1_default_coef_probs(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003183 if (cm->frame_type == KEY_FRAME || cm->error_resilient_mode ||
3184 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL) {
3185 for (i = 0; i < FRAME_CONTEXTS; ++i) cm->frame_contexts[i] = *cm->fc;
3186 } else if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT) {
Thomas Daededa4d8b92017-06-05 15:44:14 -07003187#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
3188 if (cm->frame_refs[0].idx <= 0) {
3189 cm->frame_contexts[cm->frame_refs[0].idx] = *cm->fc;
3190 }
3191#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003192 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
Thomas Daededa4d8b92017-06-05 15:44:14 -07003193#endif // CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuc27fc142016-08-22 16:08:15 -07003194 }
hui su0d103572017-03-01 17:58:01 -08003195#endif // CONFIG_Q_ADAPT_PROBS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003196
3197 setup_segmentation(cm, rb);
3198
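  // Read the delta-Q (and, with CONFIG_EXT_DELTA_Q, delta loop-filter)
  // syntax. Per-superblock deltas are only signalled when the base qindex is
  // non-zero and, without CONFIG_EXT_DELTA_Q, when no segment overrides the
  // quantizer.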
Arild Fuldseth07441162016-08-15 15:07:52 +02003199 {
Thomas Davies28444be2017-10-13 18:12:25 +01003200 int delta_q_allowed = 1;
3201#if !CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02003202 struct segmentation *const seg = &cm->seg;
3203 int segment_quantizer_active = 0;
3204 for (i = 0; i < MAX_SEGMENTS; i++) {
3205 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
3206 segment_quantizer_active = 1;
3207 }
3208 }
Thomas Davies28444be2017-10-13 18:12:25 +01003209 delta_q_allowed = !segment_quantizer_active;
3210#endif
Arild Fuldseth07441162016-08-15 15:07:52 +02003211
Thomas Daviesf6936102016-09-05 16:51:31 +01003212 cm->delta_q_res = 1;
Fangwen Fu231fe422017-04-24 17:52:29 -07003213#if CONFIG_EXT_DELTA_Q
3214 cm->delta_lf_res = 1;
Jonathan Matthewsa48b1e62017-09-01 14:58:47 +01003215 cm->delta_lf_present_flag = 0;
Cheng Chen880166a2017-10-02 17:48:48 -07003216#if CONFIG_LOOPFILTER_LEVEL
3217 cm->delta_lf_multi = 0;
3218#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07003219#endif
Thomas Davies28444be2017-10-13 18:12:25 +01003220 if (delta_q_allowed == 1 && cm->base_qindex > 0) {
Arild Fuldseth07441162016-08-15 15:07:52 +02003221 cm->delta_q_present_flag = aom_rb_read_bit(rb);
3222 } else {
3223 cm->delta_q_present_flag = 0;
3224 }
3225 if (cm->delta_q_present_flag) {
3226 xd->prev_qindex = cm->base_qindex;
Thomas Daviesf6936102016-09-05 16:51:31 +01003227 cm->delta_q_res = 1 << aom_rb_read_literal(rb, 2);
Fangwen Fu231fe422017-04-24 17:52:29 -07003228#if CONFIG_EXT_DELTA_Q
Fangwen Fu231fe422017-04-24 17:52:29 -07003229 cm->delta_lf_present_flag = aom_rb_read_bit(rb);
3230 if (cm->delta_lf_present_flag) {
Cheng Chen880166a2017-10-02 17:48:48 -07003231 xd->prev_delta_lf_from_base = 0;
3232 cm->delta_lf_res = 1 << aom_rb_read_literal(rb, 2);
Cheng Chena97394f2017-09-27 15:05:14 -07003233#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen880166a2017-10-02 17:48:48 -07003234 cm->delta_lf_multi = aom_rb_read_bit(rb);
Cheng Chena97394f2017-09-27 15:05:14 -07003235 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id)
3236 xd->prev_delta_lf[lf_id] = 0;
3237#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07003238 }
3239#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02003240 }
3241 }
RogerZhou3b635242017-09-19 10:06:46 -07003242#if CONFIG_AMVR
RogerZhou10a03802017-10-26 11:49:48 -07003243 xd->cur_frame_force_integer_mv = cm->cur_frame_force_integer_mv;
RogerZhou3b635242017-09-19 10:06:46 -07003244#endif
Thomas Davies3ab20b42017-09-19 10:30:53 +01003245
Urvang Joshi454280d2016-10-14 16:51:44 -07003246 for (i = 0; i < MAX_SEGMENTS; ++i) {
3247 const int qindex = cm->seg.enabled
3248 ? av1_get_qindex(&cm->seg, i, cm->base_qindex)
3249 : cm->base_qindex;
3250 xd->lossless[i] = qindex == 0 && cm->y_dc_delta_q == 0 &&
3251 cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
3252 xd->qindex[i] = qindex;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003253 }
Thomas Daedef636d5c2017-06-29 13:48:27 -07003254 cm->all_lossless = all_lossless(cm, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003255 setup_segmentation_dequant(cm);
Thomas Daedef636d5c2017-06-29 13:48:27 -07003256#if CONFIG_CDEF
3257 if (!cm->all_lossless) {
3258 setup_cdef(cm, rb);
3259 }
3260#endif
3261#if CONFIG_LOOP_RESTORATION
3262 decode_restoration_mode(cm, rb);
3263#endif // CONFIG_LOOP_RESTORATION
3264 cm->tx_mode = read_tx_mode(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003265 cm->reference_mode = read_frame_reference_mode(cm, rb);
Debargha Mukherjee6f3c8982017-09-22 21:14:01 -07003266 if (cm->reference_mode != SINGLE_REFERENCE) setup_compound_reference_mode(cm);
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003267 read_compound_tools(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003268
Sarah Parkere68a3e42017-02-16 14:03:24 -08003269#if CONFIG_EXT_TX
3270 cm->reduced_tx_set_used = aom_rb_read_bit(rb);
3271#endif // CONFIG_EXT_TX
3272
Angie Chiang6dbffbf2017-10-06 16:59:54 -07003273#if CONFIG_ADAPT_SCAN
3274 cm->use_adapt_scan = aom_rb_read_bit(rb);
3275 // TODO(angiebird): call av1_init_scan_order only when use_adapt_scan
3276 // switches from 1 to 0
3277 if (cm->use_adapt_scan == 0) av1_init_scan_order(cm);
3278#endif // CONFIG_ADAPT_SCAN
3279
Pavel Frolov57c36e12017-09-12 15:00:40 +03003280  // NOTE(zoeliu): Since cm->prev_frame can be neither a frame with
 3281  // show_existing_frame=1 nor a frame that is not used as
 3282  // a reference, it is possible that, by the time it is being
 3283  // referred to, the frame buffer it originally pointed to has
 3284  // already expired and been reassigned to the current
 3285  // newly coded frame. Hence, we need to check whether this is
 3286  // the case, and if so, we have 2 choices:
 3287  // (1) Simply disable the use of previous frame mvs; or
 3288  // (2) Have cm->prev_frame point to one reference frame buffer,
 3289  //     e.g. LAST_FRAME.
3290 if (!dec_is_ref_frame_buf(pbi, cm->prev_frame)) {
3291 // Reassign the LAST_FRAME buffer to cm->prev_frame.
3292 cm->prev_frame =
3293 cm->frame_refs[LAST_FRAME - LAST_FRAME].idx != INVALID_IDX
3294 ? &cm->buffer_pool
3295 ->frame_bufs[cm->frame_refs[LAST_FRAME - LAST_FRAME].idx]
3296 : NULL;
3297 }
Pavel Frolov57c36e12017-09-12 15:00:40 +03003298
3299#if CONFIG_TEMPMV_SIGNALING
3300 if (cm->use_prev_frame_mvs && !frame_can_use_prev_frame_mvs(cm)) {
3301 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3302 "Frame wrongly requests previous frame MVs");
3303 }
3304#else
3305 cm->use_prev_frame_mvs = !cm->error_resilient_mode && cm->prev_frame &&
3306#if CONFIG_FRAME_SUPERRES
3307 cm->width == cm->last_width &&
3308 cm->height == cm->last_height &&
3309#else
3310 cm->width == cm->prev_frame->buf.y_crop_width &&
3311 cm->height == cm->prev_frame->buf.y_crop_height &&
3312#endif // CONFIG_FRAME_SUPERRES
3313 !cm->last_intra_only && cm->last_show_frame &&
3314 (cm->last_frame_type != KEY_FRAME);
3315#endif // CONFIG_TEMPMV_SIGNALING
3316
Sarah Parkerf289f9f2017-09-12 18:50:02 -07003317 if (!frame_is_intra_only(cm)) read_global_motion(cm, rb);
Sarah Parker3e579a62017-08-23 16:53:20 -07003318
Yaowu Xuc27fc142016-08-22 16:08:15 -07003319 read_tile_info(pbi, rb);
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003320 if (use_compressed_header(cm)) {
3321 sz = aom_rb_read_literal(rb, 16);
3322 if (sz == 0)
3323 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3324 "Invalid header size");
3325 } else {
3326 sz = 0;
3327 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003328 return sz;
3329}
3330
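// Reads the compressed header (the probability-update partition) with the
// entropy decoder and applies the signalled updates to cm->fc. With
// CONFIG_NEW_MULTISYMBOL the partition carries no updates and the function is
// a no-op. Returns nonzero if the reader detected an error.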
Yaowu Xuf883b422016-08-30 14:01:10 -07003331static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003332 size_t partition_size) {
Thomas Davies2e868ab2017-10-24 10:42:27 +01003333#if CONFIG_NEW_MULTISYMBOL
Thomas Daviese7154832017-10-03 10:12:17 +01003334 (void)pbi;
3335 (void)data;
3336 (void)partition_size;
3337 return 0;
3338#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003339 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuf883b422016-08-30 14:01:10 -07003340 aom_reader r;
Ryanf0e39192017-10-09 09:45:13 -07003341
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02003342#if ((CONFIG_RECT_TX_EXT) || (!CONFIG_NEW_MULTISYMBOL || CONFIG_LV_MAP) || \
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02003343 (CONFIG_COMPOUND_SINGLEREF))
Thomas Davies599395e2017-07-21 18:02:48 +01003344 FRAME_CONTEXT *const fc = cm->fc;
Thomas Davies599395e2017-07-21 18:02:48 +01003345#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003346
Alex Converse2cdf0d82016-12-13 13:53:09 -08003347#if CONFIG_ANS && ANS_MAX_SYMBOLS
Alex Converseeb780e72016-12-13 12:46:41 -08003348 r.window_size = 1 << cm->ans_window_size_log2;
Alex Converse2cdf0d82016-12-13 13:53:09 -08003349#endif
Alex Converse346440b2017-01-03 13:47:37 -08003350 if (aom_reader_init(&r, data, partition_size, pbi->decrypt_cb,
3351 pbi->decrypt_state))
Yaowu Xuf883b422016-08-30 14:01:10 -07003352 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003353 "Failed to allocate bool decoder 0");
Yaowu Xuc27fc142016-08-22 16:08:15 -07003354
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02003355#if CONFIG_RECT_TX_EXT
Yue Chen56e226e2017-05-02 16:21:40 -07003356 if (cm->tx_mode == TX_MODE_SELECT)
3357 av1_diff_update_prob(&r, &fc->quarter_tx_size_prob, ACCT_STR);
Yue Chend6bdd462017-07-19 16:05:43 -07003358#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003359
Thomas Davies985bfc32017-06-27 16:51:26 +01003360#if !CONFIG_NEW_MULTISYMBOL
David Barker16c64e32017-08-23 16:54:59 +01003361 if (cm->tx_mode == TX_MODE_SELECT)
Ryanf0e39192017-10-09 09:45:13 -07003362 for (int i = 0; i < TXFM_PARTITION_CONTEXTS; ++i)
David Barker16c64e32017-08-23 16:54:59 +01003363 av1_diff_update_prob(&r, &fc->txfm_partition_prob[i], ACCT_STR);
Ryanf0e39192017-10-09 09:45:13 -07003364 for (int i = 0; i < SKIP_CONTEXTS; ++i)
Thomas Davies61e3e372017-04-04 16:10:23 +01003365 av1_diff_update_prob(&r, &fc->skip_probs[i], ACCT_STR);
Cheng Chen0a7f2f52017-10-10 15:16:09 -07003366
3367#if CONFIG_JNT_COMP
3368 for (int i = 0; i < COMP_INDEX_CONTEXTS; ++i)
3369 av1_diff_update_prob(&r, &fc->compound_index_probs[i], ACCT_STR);
3370#endif // CONFIG_JNT_COMP
Thomas Davies61e3e372017-04-04 16:10:23 +01003371#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003372
Debargha Mukherjee801cc922017-09-22 17:22:50 -07003373 if (!frame_is_intra_only(cm)) {
Thomas Davies149eda52017-06-12 18:11:55 +01003374#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003375 read_inter_mode_probs(fc, &r);
Thomas Davies149eda52017-06-12 18:11:55 +01003376#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003377
Yue Chen4d26acb2017-05-01 12:28:34 -07003378#if CONFIG_INTERINTRA
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003379 if (cm->reference_mode != COMPOUND_REFERENCE &&
3380 cm->allow_interintra_compound) {
Thomas Daviescff91712017-07-07 11:49:55 +01003381#if !CONFIG_NEW_MULTISYMBOL
Ryanf0e39192017-10-09 09:45:13 -07003382 for (int i = 0; i < BLOCK_SIZE_GROUPS; i++) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003383 if (is_interintra_allowed_bsize_group(i)) {
Michael Bebenita6048d052016-08-25 14:40:54 -07003384 av1_diff_update_prob(&r, &fc->interintra_prob[i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003385 }
3386 }
Thomas Daviescff91712017-07-07 11:49:55 +01003387#endif
Debargha Mukherjee371968c2017-10-29 12:30:04 -07003388#if !CONFIG_NEW_MULTISYMBOL
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01003389#if CONFIG_EXT_PARTITION_TYPES
3390 int block_sizes_to_update = BLOCK_SIZES_ALL;
3391#else
3392 int block_sizes_to_update = BLOCK_SIZES;
3393#endif
Ryanf0e39192017-10-09 09:45:13 -07003394 for (int i = 0; i < block_sizes_to_update; i++) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003395 if (is_interintra_allowed_bsize(i) && is_interintra_wedge_used(i)) {
Michael Bebenita6048d052016-08-25 14:40:54 -07003396 av1_diff_update_prob(&r, &fc->wedge_interintra_prob[i], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003397 }
3398 }
Debargha Mukherjee371968c2017-10-29 12:30:04 -07003399#endif // !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003400 }
Yue Chen4d26acb2017-05-01 12:28:34 -07003401#endif // CONFIG_INTERINTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07003402
Thomas Daviesf6ad9352017-04-19 11:38:06 +01003403#if !CONFIG_NEW_MULTISYMBOL
Ryanf0e39192017-10-09 09:45:13 -07003404 for (int i = 0; i < INTRA_INTER_CONTEXTS; i++)
Michael Bebenita6048d052016-08-25 14:40:54 -07003405 av1_diff_update_prob(&r, &fc->intra_inter_prob[i], ACCT_STR);
Thomas Daviesf6ad9352017-04-19 11:38:06 +01003406#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003407
David Barker037ee412017-09-19 12:43:46 +01003408#if !CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003409 read_frame_reference_mode_probs(cm, &r);
David Barker037ee412017-09-19 12:43:46 +01003410#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003411
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02003412#if CONFIG_COMPOUND_SINGLEREF
Ryanf0e39192017-10-09 09:45:13 -07003413 for (int i = 0; i < COMP_INTER_MODE_CONTEXTS; i++)
Zoe Liu85b66462017-04-20 14:28:19 -07003414 av1_diff_update_prob(&r, &fc->comp_inter_mode_prob[i], ACCT_STR);
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02003415#endif // CONFIG_COMPOUND_SINGLEREF
Zoe Liu85b66462017-04-20 14:28:19 -07003416
Thomas Davies599395e2017-07-21 18:02:48 +01003417#if !CONFIG_NEW_MULTISYMBOL
RogerZhou3b635242017-09-19 10:06:46 -07003418#if CONFIG_AMVR
RogerZhou10a03802017-10-26 11:49:48 -07003419 if (cm->cur_frame_force_integer_mv == 0) {
RogerZhou3b635242017-09-19 10:06:46 -07003420#endif
Ryanf0e39192017-10-09 09:45:13 -07003421 for (int i = 0; i < NMV_CONTEXTS; ++i)
RogerZhou3b635242017-09-19 10:06:46 -07003422 read_mv_probs(&fc->nmvc[i], cm->allow_high_precision_mv, &r);
3423#if CONFIG_AMVR
3424 }
3425#endif
Thomas Davies599395e2017-07-21 18:02:48 +01003426#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003427 }
3428
Yaowu Xuf883b422016-08-30 14:01:10 -07003429 return aom_reader_has_error(&r);
Thomas Davies2e868ab2017-10-24 10:42:27 +01003430#endif // CONFIG_NEW_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003431}
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003432
Yaowu Xuc27fc142016-08-22 16:08:15 -07003433#ifdef NDEBUG
3434#define debug_check_frame_counts(cm) (void)0
3435#else // !NDEBUG
3436// Counts should only be incremented when frame_parallel_decoding_mode and
3437// error_resilient_mode are disabled.
Yaowu Xuf883b422016-08-30 14:01:10 -07003438static void debug_check_frame_counts(const AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003439 FRAME_COUNTS zero_counts;
Yaowu Xuf883b422016-08-30 14:01:10 -07003440 av1_zero(zero_counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003441 assert(cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD ||
3442 cm->error_resilient_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003443 assert(!memcmp(cm->counts.partition, zero_counts.partition,
3444 sizeof(cm->counts.partition)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003445 assert(!memcmp(cm->counts.switchable_interp, zero_counts.switchable_interp,
3446 sizeof(cm->counts.switchable_interp)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003447 assert(!memcmp(cm->counts.inter_compound_mode,
3448 zero_counts.inter_compound_mode,
3449 sizeof(cm->counts.inter_compound_mode)));
Debargha Mukherjeebcfb0e12017-05-11 20:09:16 -07003450#if CONFIG_INTERINTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07003451 assert(!memcmp(cm->counts.interintra, zero_counts.interintra,
3452 sizeof(cm->counts.interintra)));
3453 assert(!memcmp(cm->counts.wedge_interintra, zero_counts.wedge_interintra,
3454 sizeof(cm->counts.wedge_interintra)));
Debargha Mukherjeebcfb0e12017-05-11 20:09:16 -07003455#endif // CONFIG_INTERINTRA
Sarah Parker6fddd182016-11-10 20:57:20 -08003456 assert(!memcmp(cm->counts.compound_interinter,
3457 zero_counts.compound_interinter,
3458 sizeof(cm->counts.compound_interinter)));
Yue Chencb60b182016-10-13 15:18:22 -07003459 assert(!memcmp(cm->counts.motion_mode, zero_counts.motion_mode,
3460 sizeof(cm->counts.motion_mode)));
Sebastien Alaiwan1bc94fc2017-10-31 10:25:17 +01003461#if CONFIG_NCOBMC_ADAPT_WEIGHT
Wei-Ting Lin85a8f702017-06-22 13:55:15 -07003462 assert(!memcmp(cm->counts.ncobmc_mode, zero_counts.ncobmc_mode,
3463 sizeof(cm->counts.ncobmc_mode)));
3464#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003465 assert(!memcmp(cm->counts.intra_inter, zero_counts.intra_inter,
3466 sizeof(cm->counts.intra_inter)));
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02003467#if CONFIG_COMPOUND_SINGLEREF
Zoe Liu85b66462017-04-20 14:28:19 -07003468 assert(!memcmp(cm->counts.comp_inter_mode, zero_counts.comp_inter_mode,
3469 sizeof(cm->counts.comp_inter_mode)));
Sebastien Alaiwan0bdea0d2017-10-02 15:15:05 +02003470#endif // CONFIG_COMPOUND_SINGLEREF
Yaowu Xuc27fc142016-08-22 16:08:15 -07003471 assert(!memcmp(cm->counts.comp_inter, zero_counts.comp_inter,
3472 sizeof(cm->counts.comp_inter)));
Zoe Liuc082bbc2017-05-17 13:31:37 -07003473#if CONFIG_EXT_COMP_REFS
3474 assert(!memcmp(cm->counts.comp_ref_type, zero_counts.comp_ref_type,
3475 sizeof(cm->counts.comp_ref_type)));
3476 assert(!memcmp(cm->counts.uni_comp_ref, zero_counts.uni_comp_ref,
3477 sizeof(cm->counts.uni_comp_ref)));
3478#endif // CONFIG_EXT_COMP_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003479 assert(!memcmp(cm->counts.single_ref, zero_counts.single_ref,
3480 sizeof(cm->counts.single_ref)));
3481 assert(!memcmp(cm->counts.comp_ref, zero_counts.comp_ref,
3482 sizeof(cm->counts.comp_ref)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003483 assert(!memcmp(cm->counts.comp_bwdref, zero_counts.comp_bwdref,
3484 sizeof(cm->counts.comp_bwdref)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003485 assert(!memcmp(cm->counts.skip, zero_counts.skip, sizeof(cm->counts.skip)));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003486 assert(
3487 !memcmp(&cm->counts.mv[0], &zero_counts.mv[0], sizeof(cm->counts.mv[0])));
3488 assert(
3489 !memcmp(&cm->counts.mv[1], &zero_counts.mv[1], sizeof(cm->counts.mv[0])));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003490}
3491#endif // NDEBUG
3492
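// Sets up a bit reader over [data, data_end). If a decrypt callback is
// installed, up to MAX_AV1_HEADER_SIZE bytes are first decrypted into
// clear_data and the reader is pointed at that buffer instead.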
Yaowu Xuf883b422016-08-30 14:01:10 -07003493static struct aom_read_bit_buffer *init_read_bit_buffer(
3494 AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
3495 const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003496 rb->bit_offset = 0;
3497 rb->error_handler = error_handler;
3498 rb->error_handler_data = &pbi->common;
3499 if (pbi->decrypt_cb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003500 const int n = (int)AOMMIN(MAX_AV1_HEADER_SIZE, data_end - data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003501 pbi->decrypt_cb(pbi->decrypt_state, data, clear_data, n);
3502 rb->bit_buffer = clear_data;
3503 rb->bit_buffer_end = clear_data + n;
3504 } else {
3505 rb->bit_buffer = data;
3506 rb->bit_buffer_end = data_end;
3507 }
3508 return rb;
3509}
3510
3511//------------------------------------------------------------------------------
3512
Yaowu Xuf883b422016-08-30 14:01:10 -07003513void av1_read_frame_size(struct aom_read_bit_buffer *rb, int *width,
3514 int *height) {
3515 *width = aom_rb_read_literal(rb, 16) + 1;
3516 *height = aom_rb_read_literal(rb, 16) + 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003517}
3518
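// The profile is coded as two bits, least-significant bit first; values above
// 2 consume one additional bit, so the field is either 2 or 3 bits long.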
Yaowu Xuf883b422016-08-30 14:01:10 -07003519BITSTREAM_PROFILE av1_read_profile(struct aom_read_bit_buffer *rb) {
3520 int profile = aom_rb_read_bit(rb);
3521 profile |= aom_rb_read_bit(rb) << 1;
3522 if (profile > 2) profile += aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003523 return (BITSTREAM_PROFILE)profile;
3524}
3525
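// Gathers pointers to the per-tile entropy contexts of tiles
// [start_tile, start_tile + num_tile) into ec_ctxs, for use by the
// backward-adaptation CDF averaging after the tiles are decoded.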
Thomas Davies4822e142017-10-10 11:30:36 +01003526static void make_update_tile_list_dec(AV1Decoder *pbi, int start_tile,
3527 int num_tile, FRAME_CONTEXT *ec_ctxs[]) {
Thomas Davies028b57f2017-02-22 16:42:11 +00003528 int i;
Thomas Davies4822e142017-10-10 11:30:36 +01003529 for (i = start_tile; i < start_tile + num_tile; ++i)
3530 ec_ctxs[i - start_tile] = &pbi->tile_data[i].tctx;
Thomas Davies028b57f2017-02-22 16:42:11 +00003531}
Thomas Davies028b57f2017-02-22 16:42:11 +00003532
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003533#if CONFIG_FRAME_SUPERRES
3534void superres_post_decode(AV1Decoder *pbi) {
3535 AV1_COMMON *const cm = &pbi->common;
3536 BufferPool *const pool = cm->buffer_pool;
3537
3538 if (av1_superres_unscaled(cm)) return;
3539
3540 lock_buffer_pool(pool);
3541 av1_superres_upscale(cm, pool);
3542 unlock_buffer_pool(pool);
3543}
3544#endif // CONFIG_FRAME_SUPERRES
3545
Yi Luo10e23002017-07-31 11:54:43 -07003546static void dec_setup_frame_boundary_info(AV1_COMMON *const cm) {
David Barker5c06a642017-08-18 13:18:16 +01003547// Note: When LOOPFILTERING_ACROSS_TILES is enabled, we need to clear the
3548// boundary information every frame, since the tile boundaries may
3549// change every frame (particularly when dependent-horztiles is also
3550// enabled); when it is disabled, the only information stored is the frame
3551// boundaries, which only depend on the frame size.
3552#if !CONFIG_LOOPFILTERING_ACROSS_TILES
3553 if (cm->width != cm->last_width || cm->height != cm->last_height)
3554#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
3555 {
Yi Luo10e23002017-07-31 11:54:43 -07003556 int row, col;
3557 for (row = 0; row < cm->mi_rows; ++row) {
3558 MODE_INFO *mi = cm->mi + row * cm->mi_stride;
3559 for (col = 0; col < cm->mi_cols; ++col) {
3560 mi->mbmi.boundary_info = 0;
3561 mi++;
3562 }
3563 }
3564 av1_setup_frame_boundary_info(cm);
3565 }
3566}
3567
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003568size_t av1_decode_frame_headers_and_setup(AV1Decoder *pbi, const uint8_t *data,
3569 const uint8_t *data_end,
3570 const uint8_t **p_data_end) {
3571 AV1_COMMON *const cm = &pbi->common;
3572 MACROBLOCKD *const xd = &pbi->mb;
3573 struct aom_read_bit_buffer rb;
3574 uint8_t clear_data[MAX_AV1_HEADER_SIZE];
3575 size_t first_partition_size;
3576 YV12_BUFFER_CONFIG *new_fb;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003577 RefBuffer *last_fb_ref_buf = &cm->frame_refs[LAST_FRAME - LAST_FRAME];
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003578
3579#if CONFIG_ADAPT_SCAN
3580 av1_deliver_eob_threshold(cm, xd);
3581#endif
3582#if CONFIG_BITSTREAM_DEBUG
3583 bitstream_queue_set_frame_read(cm->current_video_frame * 2 + cm->show_frame);
3584#endif
3585
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003586 int i;
3587 for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
David Barkerd7c8bd52017-09-25 14:47:29 +01003588 cm->global_motion[i] = default_warp_params;
3589 cm->cur_frame->global_motion[i] = default_warp_params;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003590 }
3591 xd->global_motion = cm->global_motion;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003592
3593 first_partition_size = read_uncompressed_header(
3594 pbi, init_read_bit_buffer(pbi, &rb, data, data_end, clear_data));
3595
3596#if CONFIG_EXT_TILE
 3597  // If cm->single_tile_decoding == 0, independent decoding of a single tile
 3598  // or of a section of a frame is not allowed.
3599 if (!cm->single_tile_decoding &&
3600 (pbi->dec_tile_row >= 0 || pbi->dec_tile_col >= 0)) {
3601 pbi->dec_tile_row = -1;
3602 pbi->dec_tile_col = -1;
3603 }
3604#endif // CONFIG_EXT_TILE
3605
3606 pbi->first_partition_size = first_partition_size;
3607 pbi->uncomp_hdr_size = aom_rb_bytes_read(&rb);
3608 new_fb = get_frame_new_buffer(cm);
3609 xd->cur_buf = new_fb;
3610#if CONFIG_INTRABC
3611#if CONFIG_HIGHBITDEPTH
3612 av1_setup_scale_factors_for_frame(
3613 &xd->sf_identity, xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
3614 xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
3615 cm->use_highbitdepth);
3616#else
3617 av1_setup_scale_factors_for_frame(
3618 &xd->sf_identity, xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
3619 xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height);
3620#endif // CONFIG_HIGHBITDEPTH
3621#endif // CONFIG_INTRABC
3622
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003623 if (cm->show_existing_frame) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003624 // showing a frame directly
3625 *p_data_end = data + aom_rb_bytes_read(&rb);
3626 return 0;
3627 }
3628
3629 data += aom_rb_bytes_read(&rb);
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003630 if (first_partition_size)
3631 if (!read_is_valid(data, first_partition_size, data_end))
3632 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3633 "Truncated packet or corrupt header length");
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003634
3635 cm->setup_mi(cm);
3636
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003637  // NOTE(zoeliu): Since cm->prev_frame can be neither a frame with
 3638  // show_existing_frame=1 nor a frame that is not used as
 3639  // a reference, it is possible that, by the time it is being
 3640  // referred to, the frame buffer it originally pointed to has
 3641  // already expired and been reassigned to the current
 3642  // newly coded frame. Hence, we need to check whether this is
 3643  // the case, and if so, we have 2 choices:
 3644  // (1) Simply disable the use of previous frame mvs; or
 3645  // (2) Have cm->prev_frame point to one reference frame buffer,
 3646  //     e.g. LAST_FRAME.
3647 if (!dec_is_ref_frame_buf(pbi, cm->prev_frame)) {
3648 // Reassign the LAST_FRAME buffer to cm->prev_frame.
3649 cm->prev_frame = last_fb_ref_buf->idx != INVALID_IDX
3650 ? &cm->buffer_pool->frame_bufs[last_fb_ref_buf->idx]
3651 : NULL;
3652 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003653
3654#if CONFIG_TEMPMV_SIGNALING
3655 if (cm->use_prev_frame_mvs && !frame_can_use_prev_frame_mvs(cm)) {
3656 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3657 "Frame wrongly requests previous frame MVs");
3658 }
3659#else
3660 cm->use_prev_frame_mvs = !cm->error_resilient_mode && cm->prev_frame &&
3661#if CONFIG_FRAME_SUPERRES
3662 cm->width == cm->last_width &&
3663 cm->height == cm->last_height &&
3664#else
3665 cm->width == cm->prev_frame->buf.y_crop_width &&
3666 cm->height == cm->prev_frame->buf.y_crop_height &&
3667#endif // CONFIG_FRAME_SUPERRES
3668 !cm->last_intra_only && cm->last_show_frame &&
3669 (cm->last_frame_type != KEY_FRAME);
3670#endif // CONFIG_TEMPMV_SIGNALING
3671
Jingning Hanea255c92017-09-29 08:12:09 -07003672#if CONFIG_MFMV
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003673 av1_setup_motion_field(cm);
Jingning Hanea255c92017-09-29 08:12:09 -07003674#endif // CONFIG_MFMV
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003675
3676 av1_setup_block_planes(xd, cm->subsampling_x, cm->subsampling_y);
3677#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
3678 if (cm->error_resilient_mode || frame_is_intra_only(cm)) {
3679 // use the default frame context values
3680 *cm->fc = cm->frame_contexts[FRAME_CONTEXT_DEFAULTS];
3681 cm->pre_fc = &cm->frame_contexts[FRAME_CONTEXT_DEFAULTS];
3682 } else {
3683 *cm->fc = cm->frame_contexts[cm->frame_refs[0].idx];
3684 cm->pre_fc = &cm->frame_contexts[cm->frame_refs[0].idx];
3685 }
3686#else
3687 *cm->fc = cm->frame_contexts[cm->frame_context_idx];
3688 cm->pre_fc = &cm->frame_contexts[cm->frame_context_idx];
3689#endif // CONFIG_NO_FRAME_CONTEXT_SIGNALING
3690 if (!cm->fc->initialized)
3691 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3692 "Uninitialized entropy context.");
3693
3694 av1_zero(cm->counts);
3695
3696 xd->corrupted = 0;
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003697 if (first_partition_size) {
3698 new_fb->corrupted = read_compressed_header(pbi, data, first_partition_size);
3699 if (new_fb->corrupted)
3700 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3701 "Decode failed. Frame data header is corrupted.");
3702 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003703 return first_partition_size;
3704}
3705
3706void av1_decode_tg_tiles_and_wrapup(AV1Decoder *pbi, const uint8_t *data,
3707 const uint8_t *data_end,
3708 const uint8_t **p_data_end, int startTile,
3709 int endTile, int initialize_flag) {
3710 AV1_COMMON *const cm = &pbi->common;
3711 MACROBLOCKD *const xd = &pbi->mb;
3712 int context_updated = 0;
3713
3714#if CONFIG_LOOP_RESTORATION
3715 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
3716 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
3717 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
3718 av1_alloc_restoration_buffers(cm);
3719 }
3720#endif
3721
Cheng Chend8184da2017-09-26 18:15:22 -07003722#if !CONFIG_LOOPFILTER_LEVEL
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003723 if (cm->lf.filter_level && !cm->skip_loop_filter) {
3724 av1_loop_filter_frame_init(cm, cm->lf.filter_level, cm->lf.filter_level);
3725 }
3726#endif
3727
 3728  // If encoded in frame parallel mode, the frame context is ready after
 3729  // decoding the frame header.
3730 if (cm->frame_parallel_decode && initialize_flag &&
3731 cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD) {
3732 AVxWorker *const worker = pbi->frame_worker_owner;
3733 FrameWorkerData *const frame_worker_data = worker->data1;
3734 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD) {
3735 context_updated = 1;
3736#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
3737 cm->frame_contexts[cm->new_fb_idx] = *cm->fc;
3738#else
3739 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
3740#endif // CONFIG_NO_FRAME_CONTEXT_SIGNALING
3741 }
3742 av1_frameworker_lock_stats(worker);
3743 pbi->cur_buf->row = -1;
3744 pbi->cur_buf->col = -1;
3745 frame_worker_data->frame_context_ready = 1;
3746 // Signal the main thread that context is ready.
3747 av1_frameworker_signal_stats(worker);
3748 av1_frameworker_unlock_stats(worker);
3749 }
3750
3751 dec_setup_frame_boundary_info(cm);
3752
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003753#if CONFIG_OBU
Debargha Mukherjee6ea917e2017-10-19 09:31:29 -07003754 *p_data_end = decode_tiles(pbi, data, data_end, startTile, endTile);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003755#else
Debargha Mukherjee6ea917e2017-10-19 09:31:29 -07003756 *p_data_end =
3757 decode_tiles(pbi, data + pbi->uncomp_hdr_size + pbi->first_partition_size,
3758 data_end, startTile, endTile);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003759#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003760
3761 if (endTile != cm->tile_rows * cm->tile_cols - 1) {
3762 return;
3763 }
3764
Ola Hugosson1e7f2d02017-09-22 21:36:26 +02003765#if CONFIG_STRIPED_LOOP_RESTORATION
3766 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
3767 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
3768 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
3769 av1_loop_restoration_save_boundary_lines(&pbi->cur_buf->buf, cm);
3770 }
3771#endif
3772
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003773#if CONFIG_CDEF
3774 if (!cm->skip_loop_filter && !cm->all_lossless) {
3775 av1_cdef_frame(&pbi->cur_buf->buf, cm, &pbi->mb);
3776 }
3777#endif // CONFIG_CDEF
3778
3779#if CONFIG_FRAME_SUPERRES
3780 superres_post_decode(pbi);
3781#endif // CONFIG_FRAME_SUPERRES
3782
3783#if CONFIG_LOOP_RESTORATION
3784 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
3785 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
3786 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
3787 aom_extend_frame_borders((YV12_BUFFER_CONFIG *)xd->cur_buf);
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01003788 av1_loop_restoration_filter_frame((YV12_BUFFER_CONFIG *)xd->cur_buf, cm,
3789 cm->rst_info, 7, NULL);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003790 }
3791#endif // CONFIG_LOOP_RESTORATION
3792
3793 if (!xd->corrupted) {
3794 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
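      // Backward adaptation: gather the per-tile entropy contexts (with
      // CONFIG_SIMPLE_BWD_ADAPT, only the context of the tile selected by
      // cm->largest_tile_id) and average their CDFs into the frame context
      // that later frames will inherit.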
Thomas Davies4822e142017-10-10 11:30:36 +01003795#if CONFIG_SIMPLE_BWD_ADAPT
3796 const int num_bwd_ctxs = 1;
3797#else
3798 const int num_bwd_ctxs = cm->tile_rows * cm->tile_cols;
3799#endif
3800 FRAME_CONTEXT **tile_ctxs =
3801 aom_malloc(num_bwd_ctxs * sizeof(&pbi->tile_data[0].tctx));
3802 aom_cdf_prob **cdf_ptrs = aom_malloc(
3803 num_bwd_ctxs * sizeof(&pbi->tile_data[0].tctx.partition_cdf[0][0]));
3804#if CONFIG_SIMPLE_BWD_ADAPT
3805 make_update_tile_list_dec(pbi, cm->largest_tile_id, num_bwd_ctxs,
3806 tile_ctxs);
3807#else
3808 make_update_tile_list_dec(pbi, 0, num_bwd_ctxs, tile_ctxs);
3809#endif
Angie Chiang85e3b962017-10-01 16:04:43 -07003810#if CONFIG_SYMBOLRATE
3811 av1_dump_symbol_rate(cm);
3812#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003813 av1_adapt_intra_frame_probs(cm);
3814 av1_average_tile_coef_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
Thomas Davies4822e142017-10-10 11:30:36 +01003815 num_bwd_ctxs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003816 av1_average_tile_intra_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
Thomas Davies4822e142017-10-10 11:30:36 +01003817 num_bwd_ctxs);
Debargha Mukherjee43061b32017-10-13 16:50:17 -07003818 av1_average_tile_loopfilter_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
3819 num_bwd_ctxs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003820#if CONFIG_ADAPT_SCAN
3821 av1_adapt_scan_order(cm);
3822#endif // CONFIG_ADAPT_SCAN
3823
3824 if (!frame_is_intra_only(cm)) {
3825 av1_adapt_inter_frame_probs(cm);
Thomas Davies0e7b1d72017-10-02 10:54:24 +01003826#if !CONFIG_NEW_MULTISYMBOL
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003827 av1_adapt_mv_probs(cm, cm->allow_high_precision_mv);
Thomas Davies0e7b1d72017-10-02 10:54:24 +01003828#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003829 av1_average_tile_inter_cdfs(&pbi->common, pbi->common.fc, tile_ctxs,
Thomas Davies4822e142017-10-10 11:30:36 +01003830 cdf_ptrs, num_bwd_ctxs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003831 av1_average_tile_mv_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
Thomas Davies4822e142017-10-10 11:30:36 +01003832 num_bwd_ctxs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003833 }
3834 aom_free(tile_ctxs);
3835 aom_free(cdf_ptrs);
3836 } else {
3837 debug_check_frame_counts(cm);
3838 }
3839 } else {
3840 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3841 "Decode failed. Frame data is corrupted.");
3842 }
3843
3844#if CONFIG_INSPECTION
3845 if (pbi->inspect_cb != NULL) {
3846 (*pbi->inspect_cb)(pbi, pbi->inspect_ctx);
3847 }
3848#endif
3849
 3850// In non-frame-parallel decoding, update the frame context here.
3851#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
3852 if (!context_updated) cm->frame_contexts[cm->new_fb_idx] = *cm->fc;
3853#else
3854 if (!cm->error_resilient_mode && !context_updated)
3855 cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
3856#endif
3857}
3858
3859#if CONFIG_OBU
3860
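// Parses an OBU header as laid out in this (pre-final) bitstream version:
// a 5-bit OBU type, 2 reserved bits and an extension flag; when the flag is
// set, one extension byte follows carrying a 3-bit temporal_id and five
// further extension/reserved bits. *header_size is set to the header length
// in bytes (1 or 2).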
3861static OBU_TYPE read_obu_header(struct aom_read_bit_buffer *rb,
3862 uint32_t *header_size) {
3863 OBU_TYPE obu_type;
3864 int obu_extension_flag;
3865
3866 *header_size = 1;
3867
3868 obu_type = (OBU_TYPE)aom_rb_read_literal(rb, 5);
3869 aom_rb_read_literal(rb, 2); // reserved
3870 obu_extension_flag = aom_rb_read_bit(rb);
3871 if (obu_extension_flag) {
3872 *header_size += 1;
3873 aom_rb_read_literal(rb, 3); // temporal_id
3874 aom_rb_read_literal(rb, 2);
3875 aom_rb_read_literal(rb, 2);
3876 aom_rb_read_literal(rb, 1); // reserved
3877 }
3878
3879 return obu_type;
3880}
3881
 3882static uint32_t read_temporal_delimiter_obu(void) { return 0; }
3883
3884static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
3885 struct aom_read_bit_buffer *rb) {
3886 AV1_COMMON *const cm = &pbi->common;
David Barker5e70a112017-10-03 14:28:17 +01003887 SequenceHeader *const seq_params = &cm->seq_params;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003888 uint32_t saved_bit_offset = rb->bit_offset;
3889
3890 cm->profile = av1_read_profile(rb);
3891 aom_rb_read_literal(rb, 4); // level
3892
3893 seq_params->frame_id_numbers_present_flag = aom_rb_read_bit(rb);
3894 if (seq_params->frame_id_numbers_present_flag) {
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02003895 // We must always have delta_frame_id_length < frame_id_length,
3896 // in order for a frame to be referenced with a unique delta.
3897 // Avoid wasting bits by using a coding that enforces this restriction.
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003898 seq_params->delta_frame_id_length = aom_rb_read_literal(rb, 4) + 2;
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02003899 seq_params->frame_id_length =
3900 aom_rb_read_literal(rb, 3) + seq_params->delta_frame_id_length + 1;
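    // Illustrative values only: a 4-bit field of 5 and a 3-bit field of 4
    // would give delta_frame_id_length = 5 + 2 = 7 and
    // frame_id_length = 4 + 7 + 1 = 12 bits.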
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003901 }
3902
3903 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
3904
3905 return ((rb->bit_offset - saved_bit_offset + 7) >> 3);
3906}
3907
3908static uint32_t read_frame_header_obu(AV1Decoder *pbi, const uint8_t *data,
3909 const uint8_t *data_end,
3910 const uint8_t **p_data_end) {
3911 size_t header_size;
3912
3913 header_size =
3914 av1_decode_frame_headers_and_setup(pbi, data, data_end, p_data_end);
3915 return (uint32_t)(pbi->uncomp_hdr_size + header_size);
3916}
3917
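// Reads the tile group header: the indices of the first and last tile in the
// group, each coded with (log2_tile_rows + log2_tile_cols) bits. Returns the
// number of header bytes consumed (the bit offset rounded up to whole bytes).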
3918static uint32_t read_tile_group_header(AV1Decoder *pbi,
3919 struct aom_read_bit_buffer *rb,
3920 int *startTile, int *endTile) {
3921 AV1_COMMON *const cm = &pbi->common;
3922 uint32_t saved_bit_offset = rb->bit_offset;
3923
3924 *startTile = aom_rb_read_literal(rb, cm->log2_tile_rows + cm->log2_tile_cols);
3925 *endTile = aom_rb_read_literal(rb, cm->log2_tile_rows + cm->log2_tile_cols);
3926
3927 return ((rb->bit_offset - saved_bit_offset + 7) >> 3);
3928}
3929
3930static uint32_t read_one_tile_group_obu(AV1Decoder *pbi,
3931 struct aom_read_bit_buffer *rb,
3932 int is_first_tg, const uint8_t *data,
3933 const uint8_t *data_end,
3934 const uint8_t **p_data_end,
3935 int *is_last_tg) {
3936 AV1_COMMON *const cm = &pbi->common;
3937 int startTile, endTile;
3938 uint32_t header_size, tg_payload_size;
3939
3940 header_size = read_tile_group_header(pbi, rb, &startTile, &endTile);
3941 data += header_size;
3942 av1_decode_tg_tiles_and_wrapup(pbi, data, data_end, p_data_end, startTile,
3943 endTile, is_first_tg);
3944 tg_payload_size = (uint32_t)(*p_data_end - data);
3945
 3946  // TODO(shan): For now, assume all tile groups are received in order
3947 *is_last_tg = endTile == cm->tile_rows * cm->tile_cols - 1;
3948
3949 return header_size + tg_payload_size;
3950}
3951
3952void av1_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
3953 const uint8_t *data_end,
3954 const uint8_t **p_data_end) {
3955 AV1_COMMON *const cm = &pbi->common;
3956 int frame_decoding_finished = 0;
3957 int is_first_tg_obu_received = 1;
3958 int frame_header_received = 0;
3959 int frame_header_size = 0;
3960
3961 // decode frame as a series of OBUs
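  // The stream is assumed to contain an optional temporal delimiter and
  // sequence header, exactly one frame header (only the first one received is
  // parsed; repeats are skipped using its recorded size), and one or more tile
  // group OBUs. Decoding finishes once the tile group holding the last tile
  // has been processed, or immediately for show_existing_frame.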
3962 while (!frame_decoding_finished && !cm->error.error_code) {
3963 struct aom_read_bit_buffer rb;
3964 uint8_t clear_data[80];
3965 uint32_t obu_size, obu_header_size, obu_payload_size = 0;
3966 OBU_TYPE obu_type;
3967
3968 init_read_bit_buffer(pbi, &rb, data + 4, data_end, clear_data);
3969
 3970    // Every OBU is preceded by a 4-byte field giving its size (OBU header
 3971    // plus payload). The size is only needed for tile group OBUs.
3972 obu_size = mem_get_le32(data);
3973 obu_type = read_obu_header(&rb, &obu_header_size);
3974 data += (4 + obu_header_size);
3975
3976 switch (obu_type) {
3977 case OBU_TD: obu_payload_size = read_temporal_delimiter_obu(); break;
3978 case OBU_SEQUENCE_HEADER:
3979 obu_payload_size = read_sequence_header_obu(pbi, &rb);
3980 break;
3981 case OBU_FRAME_HEADER:
3982 // Only decode first frame header received
3983 if (!frame_header_received) {
3984 frame_header_size = obu_payload_size =
3985 read_frame_header_obu(pbi, data, data_end, p_data_end);
3986 frame_header_received = 1;
3987 } else {
3988 obu_payload_size = frame_header_size;
3989 }
3990 if (cm->show_existing_frame) frame_decoding_finished = 1;
3991 break;
3992 case OBU_TILE_GROUP:
3993 obu_payload_size = read_one_tile_group_obu(
3994 pbi, &rb, is_first_tg_obu_received, data, data + obu_size - 1,
3995 p_data_end, &frame_decoding_finished);
3996 is_first_tg_obu_received = 0;
3997 break;
3998 default: break;
3999 }
4000 data += obu_payload_size;
4001 }
4002}
4003#endif