/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <stdlib.h>  // qsort()

#include "./aom_config.h"
#include "./aom_dsp_rtcd.h"
#include "./aom_scale_rtcd.h"
#include "./av1_rtcd.h"

#include "aom/aom_codec.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_dsp/binary_codes_reader.h"
#include "aom_dsp/bitreader.h"
#include "aom_dsp/bitreader_buffer.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem.h"
#include "aom_ports/mem_ops.h"
#include "aom_scale/aom_scale.h"
#include "aom_util/aom_thread.h"

#if CONFIG_BITSTREAM_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_BITSTREAM_DEBUG

#include "av1/common/alloccommon.h"
#if CONFIG_CDEF
#include "av1/common/cdef.h"
#endif
#if CONFIG_INSPECTION
#include "av1/decoder/inspection.h"
#endif
#include "av1/common/common.h"
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/idct.h"
#include "av1/common/mvref_common.h"
#include "av1/common/pred_common.h"
#include "av1/common/quant_common.h"
#include "av1/common/reconinter.h"
#include "av1/common/reconintra.h"
#if CONFIG_FRAME_SUPERRES
#include "av1/common/resize.h"
#endif  // CONFIG_FRAME_SUPERRES
#include "av1/common/seg_common.h"
#include "av1/common/thread_common.h"
#include "av1/common/tile_common.h"

#include "av1/decoder/decodeframe.h"
#include "av1/decoder/decodemv.h"
#include "av1/decoder/decoder.h"
#if CONFIG_LV_MAP
#include "av1/decoder/decodetxb.h"
#endif
#include "av1/decoder/detokenize.h"
#include "av1/decoder/dsubexp.h"
#include "av1/decoder/symbolrate.h"

#if CONFIG_WARPED_MOTION || CONFIG_GLOBAL_MOTION
#include "av1/common/warped_motion.h"
#endif  // CONFIG_WARPED_MOTION || CONFIG_GLOBAL_MOTION

#define MAX_AV1_HEADER_SIZE 80
#define ACCT_STR __func__

#if CONFIG_CFL
#include "av1/common/cfl.h"
#endif

#if CONFIG_STRIPED_LOOP_RESTORATION && !CONFIG_LOOP_RESTORATION
#error "striped_loop_restoration requires loop_restoration"
#endif

#if CONFIG_LOOP_RESTORATION
static void loop_restoration_read_sb_coeffs(const AV1_COMMON *const cm,
                                            MACROBLOCKD *xd,
                                            aom_reader *const r, int plane,
                                            int rtile_idx);
#endif

static struct aom_read_bit_buffer *init_read_bit_buffer(
    AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
    const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]);
static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
                                  size_t partition_size);
static size_t read_uncompressed_header(AV1Decoder *pbi,
                                       struct aom_read_bit_buffer *rb);

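// Compound (two-reference) prediction is a normative decoder decision: with
// CONFIG_ONE_SIDED_COMPOUND any inter frame may use it, otherwise at least
// one reference must have a sign bias different from LAST_FRAME's.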
static int is_compound_reference_allowed(const AV1_COMMON *cm) {
#if CONFIG_ONE_SIDED_COMPOUND  // Normative in decoder
  return !frame_is_intra_only(cm);
#else
  int i;
  if (frame_is_intra_only(cm)) return 0;
  for (i = 1; i < INTER_REFS_PER_FRAME; ++i)
    if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1]) return 1;

  return 0;
#endif  // CONFIG_ONE_SIDED_COMPOUND
}

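// Fixed mapping from compound-prediction slots to the forward (LAST, LAST2,
// LAST3, GOLDEN) and backward (BWDREF, ALTREF2, ALTREF) reference frames.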
static void setup_compound_reference_mode(AV1_COMMON *cm) {
  cm->comp_fwd_ref[0] = LAST_FRAME;
  cm->comp_fwd_ref[1] = LAST2_FRAME;
  cm->comp_fwd_ref[2] = LAST3_FRAME;
  cm->comp_fwd_ref[3] = GOLDEN_FRAME;

  cm->comp_bwd_ref[0] = BWDREF_FRAME;
  cm->comp_bwd_ref[1] = ALTREF2_FRAME;
  cm->comp_bwd_ref[2] = ALTREF_FRAME;
}

static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return len != 0 && len <= (size_t)(end - start);
}

static int decode_unsigned_max(struct aom_read_bit_buffer *rb, int max) {
  const int data = aom_rb_read_literal(rb, get_unsigned_bits(max));
  return data > max ? max : data;
}

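// Reads the frame-level transform mode. Fully lossless frames always use
// ONLY_4X4; otherwise the mode is either fixed to TX_MODE_SELECT or read
// from the bitstream, depending on the experiments compiled in.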
static TX_MODE read_tx_mode(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
#if CONFIG_TX64X64
  TX_MODE tx_mode;
#endif
  if (cm->all_lossless) return ONLY_4X4;
#if CONFIG_VAR_TX_NO_TX_MODE
  (void)rb;
  return TX_MODE_SELECT;
#else
#if CONFIG_TX64X64
  tx_mode = aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
  if (tx_mode == ALLOW_32X32) tx_mode += aom_rb_read_bit(rb);
  return tx_mode;
#else
  return aom_rb_read_bit(rb) ? TX_MODE_SELECT : aom_rb_read_literal(rb, 2);
#endif  // CONFIG_TX64X64
#endif  // CONFIG_VAR_TX_NO_TX_MODE
}

#if !CONFIG_NEW_MULTISYMBOL
static void read_inter_mode_probs(FRAME_CONTEXT *fc, aom_reader *r) {
  int i;
  for (i = 0; i < NEWMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->newmv_prob[i], ACCT_STR);
  for (i = 0; i < ZEROMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->zeromv_prob[i], ACCT_STR);
  for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->refmv_prob[i], ACCT_STR);
  for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
    av1_diff_update_prob(r, &fc->drl_prob[i], ACCT_STR);
}
#endif

static REFERENCE_MODE read_frame_reference_mode(
    const AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  if (is_compound_reference_allowed(cm)) {
#if CONFIG_REF_ADAPT
    return aom_rb_read_bit(rb) ? REFERENCE_MODE_SELECT : SINGLE_REFERENCE;
#else
    return aom_rb_read_bit(rb)
               ? REFERENCE_MODE_SELECT
               : (aom_rb_read_bit(rb) ? COMPOUND_REFERENCE : SINGLE_REFERENCE);
#endif  // CONFIG_REF_ADAPT
  } else {
    return SINGLE_REFERENCE;
  }
}

#if !CONFIG_NEW_MULTISYMBOL
static void read_frame_reference_mode_probs(AV1_COMMON *cm, aom_reader *r) {
  FRAME_CONTEXT *const fc = cm->fc;
  int i;

  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    for (i = 0; i < COMP_INTER_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_inter_prob[i], ACCT_STR);

  if (cm->reference_mode != COMPOUND_REFERENCE) {
    for (i = 0; i < REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (SINGLE_REFS - 1); ++j) {
        av1_diff_update_prob(r, &fc->single_ref_prob[i][j], ACCT_STR);
      }
    }
  }

  if (cm->reference_mode != SINGLE_REFERENCE) {
#if CONFIG_EXT_COMP_REFS
    for (i = 0; i < COMP_REF_TYPE_CONTEXTS; ++i)
      av1_diff_update_prob(r, &fc->comp_ref_type_prob[i], ACCT_STR);

    for (i = 0; i < UNI_COMP_REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (UNIDIR_COMP_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->uni_comp_ref_prob[i][j], ACCT_STR);
    }
#endif  // CONFIG_EXT_COMP_REFS

    for (i = 0; i < REF_CONTEXTS; ++i) {
      int j;
      for (j = 0; j < (FWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_ref_prob[i][j], ACCT_STR);
      for (j = 0; j < (BWD_REFS - 1); ++j)
        av1_diff_update_prob(r, &fc->comp_bwdref_prob[i][j], ACCT_STR);
    }
  }
}

static void update_mv_probs(aom_prob *p, int n, aom_reader *r) {
  int i;
  for (i = 0; i < n; ++i) av1_diff_update_prob(r, &p[i], ACCT_STR);
}

static void read_mv_probs(nmv_context *ctx, int allow_hp, aom_reader *r) {
  int i;
  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      nmv_component *const comp_ctx = &ctx->comps[i];
      update_mv_probs(&comp_ctx->class0_hp, 1, r);
      update_mv_probs(&comp_ctx->hp, 1, r);
    }
  }
}
#endif

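// Applies the inverse transform for a single block, adding the result into
// the destination buffer, then clears the dequantized coefficients that were
// used (up to scan_line) so the buffer is ready for the next block.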
static void inverse_transform_block(MACROBLOCKD *xd, int plane,
#if CONFIG_LGT_FROM_PRED
                                    PREDICTION_MODE mode,
#endif
                                    const TX_TYPE tx_type,
                                    const TX_SIZE tx_size, uint8_t *dst,
                                    int stride, int16_t scan_line, int eob) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  tran_low_t *const dqcoeff = pd->dqcoeff;
  av1_inverse_transform_block(xd, dqcoeff,
#if CONFIG_LGT_FROM_PRED
                              mode,
#endif
#if CONFIG_MRC_TX && SIGNAL_ANY_MRC_MASK
                              xd->mrc_mask,
#endif  // CONFIG_MRC_TX && SIGNAL_ANY_MRC_MASK
                              tx_type, tx_size, dst, stride, eob);
  memset(dqcoeff, 0, (scan_line + 1) * sizeof(dqcoeff[0]));
}

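// Computes the index of the transform block at (row, col) within the current
// plane; the index is used to locate the block's coefficient and
// transform-type data.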
static int get_block_idx(const MACROBLOCKD *xd, int plane, int row, int col) {
  const int bsize = xd->mi[0]->mbmi.sb_type;
  const struct macroblockd_plane *pd = &xd->plane[plane];
#if CONFIG_CHROMA_SUB8X8
  const BLOCK_SIZE plane_bsize =
      AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
#else
  const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
#endif  // CONFIG_CHROMA_SUB8X8
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
  const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
  const uint8_t txh_unit = tx_size_high_unit[tx_size];
  return row * max_blocks_wide + col * txh_unit;
}

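// Decodes one intra transform block: performs the intra prediction, then
// (unless the block is skipped) reads its coefficients and adds the inverse
// transform on top of the prediction. With CFL, reconstructed luma pixels
// are also stored for later chroma-from-luma prediction.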
static void predict_and_reconstruct_intra_block(
    AV1_COMMON *cm, MACROBLOCKD *const xd, aom_reader *const r,
    MB_MODE_INFO *const mbmi, int plane, int row, int col, TX_SIZE tx_size) {
  PLANE_TYPE plane_type = get_plane_type(plane);
  const int block_idx = get_block_idx(xd, plane, row, col);
  av1_predict_intra_block_facade(cm, xd, plane, block_idx, col, row, tx_size);

  if (!mbmi->skip) {
    struct macroblockd_plane *const pd = &xd->plane[plane];
#if CONFIG_LV_MAP
    int16_t max_scan_line = 0;
    int eob;
    av1_read_coeffs_txb_facade(cm, xd, r, row, col, block_idx, plane,
                               pd->dqcoeff, tx_size, &max_scan_line, &eob);
    // tx_type will be read out in av1_read_coeffs_txb_facade
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
#else   // CONFIG_LV_MAP
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
    const SCAN_ORDER *scan_order = get_scan(cm, tx_size, tx_type, mbmi);
    int16_t max_scan_line = 0;
    const int eob =
        av1_decode_block_tokens(cm, xd, plane, scan_order, col, row, tx_size,
                                tx_type, &max_scan_line, r, mbmi->segment_id);
#endif  // CONFIG_LV_MAP
    if (eob) {
      uint8_t *dst =
          &pd->dst.buf[(row * pd->dst.stride + col) << tx_size_wide_log2[0]];
      inverse_transform_block(xd, plane,
#if CONFIG_LGT_FROM_PRED
                              mbmi->mode,
#endif
                              tx_type, tx_size, dst, pd->dst.stride,
                              max_scan_line, eob);
    }
  }
#if CONFIG_CFL
  if (plane == AOM_PLANE_Y && xd->cfl->store_y) {
    cfl_store_tx(xd, row, col, tx_size, mbmi->sb_type);
  }
#endif  // CONFIG_CFL
}

#if !CONFIG_COEF_INTERLEAVE
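// Recursively walks the transform partition tree of an inter block. A leaf
// node reads its coefficients and applies the inverse transform; a split
// node recurses into its four sub-blocks. *eob_total accumulates the EOB
// values of all leaf blocks.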
static void decode_reconstruct_tx(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                  aom_reader *r, MB_MODE_INFO *const mbmi,
                                  int plane, BLOCK_SIZE plane_bsize,
                                  int blk_row, int blk_col, int block,
                                  TX_SIZE tx_size, int *eob_total) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
  const int tx_row = blk_row >> (1 - pd->subsampling_y);
  const int tx_col = blk_col >> (1 - pd->subsampling_x);
  const TX_SIZE plane_tx_size =
      plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
            : mbmi->inter_tx_size[tx_row][tx_col];
  // Scale to match transform block unit.
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (tx_size == plane_tx_size) {
    PLANE_TYPE plane_type = get_plane_type(plane);
#if CONFIG_LV_MAP
    int16_t max_scan_line = 0;
    int eob;
    av1_read_coeffs_txb_facade(cm, xd, r, blk_row, blk_col, block, plane,
                               pd->dqcoeff, tx_size, &max_scan_line, &eob);
    // tx_type will be read out in av1_read_coeffs_txb_facade
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, blk_row, blk_col, block, plane_tx_size);
#else   // CONFIG_LV_MAP
    const TX_TYPE tx_type =
        av1_get_tx_type(plane_type, xd, blk_row, blk_col, block, plane_tx_size);
    const SCAN_ORDER *sc = get_scan(cm, plane_tx_size, tx_type, mbmi);
    int16_t max_scan_line = 0;
    const int eob = av1_decode_block_tokens(
        cm, xd, plane, sc, blk_col, blk_row, plane_tx_size, tx_type,
        &max_scan_line, r, mbmi->segment_id);
#endif  // CONFIG_LV_MAP
    inverse_transform_block(xd, plane,
#if CONFIG_LGT_FROM_PRED
                            mbmi->mode,
#endif
                            tx_type, plane_tx_size,
                            &pd->dst.buf[(blk_row * pd->dst.stride + blk_col)
                                         << tx_size_wide_log2[0]],
                            pd->dst.stride, max_scan_line, eob);
    *eob_total += eob;
  } else {
#if CONFIG_RECT_TX_EXT
    int is_qttx = plane_tx_size == quarter_txsize_lookup[plane_bsize];
    const TX_SIZE sub_txs = is_qttx ? plane_tx_size : sub_tx_size_map[tx_size];
    if (is_qttx) assert(blk_row == 0 && blk_col == 0 && block == 0);
#else
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    assert(IMPLIES(tx_size <= TX_4X4, sub_txs == tx_size));
    assert(IMPLIES(tx_size > TX_4X4, sub_txs < tx_size));
#endif
    const int bsl = tx_size_wide_unit[sub_txs];
    int sub_step = tx_size_wide_unit[sub_txs] * tx_size_high_unit[sub_txs];
    int i;

    assert(bsl > 0);

    for (i = 0; i < 4; ++i) {
#if CONFIG_RECT_TX_EXT
      int is_wide_tx = tx_size_wide_unit[sub_txs] > tx_size_high_unit[sub_txs];
      const int offsetr =
          is_qttx ? (is_wide_tx ? i * tx_size_high_unit[sub_txs] : 0)
                  : blk_row + ((i >> 1) * bsl);
      const int offsetc =
          is_qttx ? (is_wide_tx ? 0 : i * tx_size_wide_unit[sub_txs])
                  : blk_col + (i & 0x01) * bsl;
#else
      const int offsetr = blk_row + (i >> 1) * bsl;
      const int offsetc = blk_col + (i & 0x01) * bsl;
#endif

      if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

      decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize, offsetr,
                            offsetc, block, sub_txs, eob_total);
      block += sub_step;
    }
  }
}
#endif

#if CONFIG_COEF_INTERLEAVE || (!CONFIG_EXT_TX && CONFIG_RECT_TX)
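// Reads the coefficients of one transform block of an inter-coded block and,
// if any are non-zero, adds the inverse transform on top of the inter
// prediction already in the destination buffer. Returns the block's EOB.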
static int reconstruct_inter_block(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                   aom_reader *const r, int segment_id,
                                   int plane, int row, int col,
                                   TX_SIZE tx_size) {
  PLANE_TYPE plane_type = get_plane_type(plane);
  int block_idx = get_block_idx(xd, plane, row, col);
  struct macroblockd_plane *const pd = &xd->plane[plane];

#if CONFIG_LV_MAP
  (void)segment_id;
  int16_t max_scan_line = 0;
  int eob;
  av1_read_coeffs_txb_facade(cm, xd, r, row, col, block_idx, plane, pd->dqcoeff,
                             tx_size, &max_scan_line, &eob);
  // tx_type will be read out in av1_read_coeffs_txb_facade
  const TX_TYPE tx_type =
      av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
#else   // CONFIG_LV_MAP
  int16_t max_scan_line = 0;
  const TX_TYPE tx_type =
      av1_get_tx_type(plane_type, xd, row, col, block_idx, tx_size);
  const SCAN_ORDER *scan_order =
      get_scan(cm, tx_size, tx_type, &xd->mi[0]->mbmi);
  const int eob =
      av1_decode_block_tokens(cm, xd, plane, scan_order, col, row, tx_size,
                              tx_type, &max_scan_line, r, segment_id);
#endif  // CONFIG_LV_MAP
  uint8_t *dst =
      &pd->dst.buf[(row * pd->dst.stride + col) << tx_size_wide_log2[0]];
  if (eob)
    inverse_transform_block(xd, plane,
#if CONFIG_LGT_FROM_PRED
                            xd->mi[0]->mbmi.mode,
#endif
                            tx_type, tx_size, dst, pd->dst.stride,
                            max_scan_line, eob);

  return eob;
}
#endif  // CONFIG_COEF_INTERLEAVE || (!CONFIG_EXT_TX && CONFIG_RECT_TX)

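// Points xd->mi at this block's mode info, replicates that pointer over the
// x_mis by y_mis grid the block covers, and sets up the plane sizes, skip
// context, tile-boundary information and destination buffers for decoding.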
static void set_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                        BLOCK_SIZE bsize, int mi_row, int mi_col, int bw,
                        int bh, int x_mis, int y_mis) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;
  const TileInfo *const tile = &xd->tile;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
  // TODO(slavarnway): Generate sb_type based on bwl and bhl, instead of
  // passing bsize from decode_partition().
  xd->mi[0]->mbmi.sb_type = bsize;
#if CONFIG_RD_DEBUG
  xd->mi[0]->mbmi.mi_row = mi_row;
  xd->mi[0]->mbmi.mi_col = mi_col;
#endif
#if CONFIG_CFL
  xd->cfl->mi_row = mi_row;
  xd->cfl->mi_col = mi_col;
#endif
  for (y = 0; y < y_mis; ++y)
    for (x = !y; x < x_mis; ++x) xd->mi[y * cm->mi_stride + x] = xd->mi[0];

  set_plane_n4(xd, bw, bh);
  set_skip_context(xd, mi_row, mi_col);

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
#if CONFIG_DEPENDENT_HORZTILES
                 cm->dependent_horz_tiles,
#endif  // CONFIG_DEPENDENT_HORZTILES
                 cm->mi_rows, cm->mi_cols);

  av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
                       mi_col);
}

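// Reads the mode information (but not the residual) for one block and checks
// that the block size is legal for the frame's chroma subsampling.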
static void decode_mbmi_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                              int mi_row, int mi_col, aom_reader *r,
#if CONFIG_EXT_PARTITION_TYPES
                              PARTITION_TYPE partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                              BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);

#if CONFIG_ACCOUNTING
  aom_accounting_set_context(&pbi->accounting, mi_col, mi_row);
#endif
  set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
#if CONFIG_EXT_PARTITION_TYPES
  xd->mi[0]->mbmi.partition = partition;
#endif
  av1_read_mode_info(pbi, xd, mi_row, mi_col, r, x_mis, y_mis);
  if (bsize >= BLOCK_8X8 && (cm->subsampling_x || cm->subsampling_y)) {
    const BLOCK_SIZE uv_subsize =
        ss_size_lookup[bsize][cm->subsampling_x][cm->subsampling_y];
    if (uv_subsize == BLOCK_INVALID)
      aom_internal_error(xd->error_info, AOM_CODEC_CORRUPT_FRAME,
                         "Invalid block size.");
  }

  int reader_corrupted_flag = aom_reader_has_error(r);
  aom_merge_corrupted_flag(&xd->corrupted, reader_corrupted_flag);
}

#if CONFIG_NCOBMC_ADAPT_WEIGHT
static void set_mode_info_offsets(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                  int mi_row, int mi_col) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
}

static void get_ncobmc_recon(AV1_COMMON *const cm, MACROBLOCKD *xd, int mi_row,
                             int mi_col, int bsize, int mode) {
  uint8_t *pred_buf[4][MAX_MB_PLANE];
  int pred_stride[MAX_MB_PLANE] = { MAX_SB_SIZE, MAX_SB_SIZE, MAX_SB_SIZE };
  // target block in pxl
  int pxl_row = mi_row << MI_SIZE_LOG2;
  int pxl_col = mi_col << MI_SIZE_LOG2;

  int plane;
#if CONFIG_HIGHBITDEPTH
  if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
    int len = sizeof(uint16_t);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[0], cm->ncobmcaw_buf[0], MAX_SB_SQUARE,
                            len);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[1], cm->ncobmcaw_buf[1], MAX_SB_SQUARE,
                            len);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[2], cm->ncobmcaw_buf[2], MAX_SB_SQUARE,
                            len);
    ASSIGN_ALIGNED_PTRS_HBD(pred_buf[3], cm->ncobmcaw_buf[3], MAX_SB_SQUARE,
                            len);
  } else {
#endif  // CONFIG_HIGHBITDEPTH
    ASSIGN_ALIGNED_PTRS(pred_buf[0], cm->ncobmcaw_buf[0], MAX_SB_SQUARE);
    ASSIGN_ALIGNED_PTRS(pred_buf[1], cm->ncobmcaw_buf[1], MAX_SB_SQUARE);
    ASSIGN_ALIGNED_PTRS(pred_buf[2], cm->ncobmcaw_buf[2], MAX_SB_SQUARE);
    ASSIGN_ALIGNED_PTRS(pred_buf[3], cm->ncobmcaw_buf[3], MAX_SB_SQUARE);
#if CONFIG_HIGHBITDEPTH
  }
#endif
  av1_get_ext_blk_preds(cm, xd, bsize, mi_row, mi_col, pred_buf, pred_stride);
  av1_get_ori_blk_pred(cm, xd, bsize, mi_row, mi_col, pred_buf[3], pred_stride);
  for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
    build_ncobmc_intrpl_pred(cm, xd, plane, pxl_row, pxl_col, bsize, pred_buf,
                             pred_stride, mode);
  }
}

static void av1_get_ncobmc_recon(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                 int bsize, const int mi_row, const int mi_col,
                                 const NCOBMC_MODE modes) {
  const int mi_width = mi_size_wide[bsize];
  const int mi_height = mi_size_high[bsize];

  assert(bsize >= BLOCK_8X8);

  reset_xd_boundary(xd, mi_row, mi_height, mi_col, mi_width, cm->mi_rows,
                    cm->mi_cols);
  get_ncobmc_recon(cm, xd, mi_row, mi_col, bsize, modes);
}

static void recon_ncobmc_intrpl_pred(AV1_COMMON *const cm,
                                     MACROBLOCKD *const xd, int mi_row,
                                     int mi_col, BLOCK_SIZE bsize) {
  MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const int mi_width = mi_size_wide[bsize];
  const int mi_height = mi_size_high[bsize];
  const int hbs = AOMMAX(mi_size_wide[bsize] / 2, mi_size_high[bsize] / 2);
  const BLOCK_SIZE sqr_blk = bsize_2_sqr_bsize[bsize];
  if (mi_width > mi_height) {
    // horizontal partition
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
    xd->mi += hbs;
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col + hbs,
                         mbmi->ncobmc_mode[1]);
  } else if (mi_height > mi_width) {
    // vertical partition
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
    xd->mi += hbs * xd->mi_stride;
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row + hbs, mi_col,
                         mbmi->ncobmc_mode[1]);
  } else {
    av1_get_ncobmc_recon(cm, xd, sqr_blk, mi_row, mi_col, mbmi->ncobmc_mode[0]);
  }
  set_mode_info_offsets(cm, xd, mi_row, mi_col);
  // restore dst buffer and mode info
  av1_setup_dst_planes(xd->plane, bsize, get_frame_new_buffer(cm), mi_row,
                       mi_col);
}
#endif  // CONFIG_NCOBMC_ADAPT_WEIGHT

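// Reads the residual for one block and reconstructs it: refreshes the
// per-segment dequantizers when delta-Q is in use, performs intra or inter
// prediction, then decodes and inverse-transforms each transform block.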
static void decode_token_and_recon_block(AV1Decoder *const pbi,
                                         MACROBLOCKD *const xd, int mi_row,
                                         int mi_col, aom_reader *r,
                                         BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int x_mis = AOMMIN(bw, cm->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, cm->mi_rows - mi_row);

  set_offsets(cm, xd, bsize, mi_row, mi_col, bw, bh, x_mis, y_mis);
  MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
#if CONFIG_CFL && CONFIG_CHROMA_SUB8X8
  CFL_CTX *const cfl = xd->cfl;
  cfl->is_chroma_reference = is_chroma_reference(
      mi_row, mi_col, bsize, cfl->subsampling_x, cfl->subsampling_y);
#endif  // CONFIG_CFL && CONFIG_CHROMA_SUB8X8

  if (cm->delta_q_present_flag) {
    int i;
    for (i = 0; i < MAX_SEGMENTS; i++) {
#if CONFIG_EXT_DELTA_Q
      const int current_qindex =
          av1_get_qindex(&cm->seg, i, xd->current_qindex);
#else
      const int current_qindex = xd->current_qindex;
#endif  // CONFIG_EXT_DELTA_Q
      int j;
      for (j = 0; j < MAX_MB_PLANE; ++j) {
        const int dc_delta_q = j == 0 ? cm->y_dc_delta_q : cm->uv_dc_delta_q;
        const int ac_delta_q = j == 0 ? 0 : cm->uv_ac_delta_q;

        xd->plane[j].seg_dequant[i][0] =
            av1_dc_quant(current_qindex, dc_delta_q, cm->bit_depth);
        xd->plane[j].seg_dequant[i][1] =
            av1_ac_quant(current_qindex, ac_delta_q, cm->bit_depth);
      }
    }
  }
  if (mbmi->skip) av1_reset_skip_context(xd, mi_row, mi_col, bsize);

#if CONFIG_COEF_INTERLEAVE
  {
    const struct macroblockd_plane *const pd_y = &xd->plane[0];
    const struct macroblockd_plane *const pd_c = &xd->plane[1];
    const TX_SIZE tx_log2_y = mbmi->tx_size;
    const TX_SIZE tx_log2_c = av1_get_uv_tx_size(mbmi, pd_c);
    const int tx_sz_y = (1 << tx_log2_y);
    const int tx_sz_c = (1 << tx_log2_c);
    const int num_4x4_w_y = pd_y->n4_w;
    const int num_4x4_h_y = pd_y->n4_h;
    const int num_4x4_w_c = pd_c->n4_w;
    const int num_4x4_h_c = pd_c->n4_h;
    const int max_4x4_w_y = get_max_4x4_size(num_4x4_w_y, xd->mb_to_right_edge,
                                             pd_y->subsampling_x);
    const int max_4x4_h_y = get_max_4x4_size(num_4x4_h_y, xd->mb_to_bottom_edge,
                                             pd_y->subsampling_y);
    const int max_4x4_w_c = get_max_4x4_size(num_4x4_w_c, xd->mb_to_right_edge,
                                             pd_c->subsampling_x);
    const int max_4x4_h_c = get_max_4x4_size(num_4x4_h_c, xd->mb_to_bottom_edge,
                                             pd_c->subsampling_y);

    // The max_4x4_w/h may be smaller than tx_sz under some corner cases,
    // e.g. when the SB is split by tile boundaries.
    const int tu_num_w_y = (max_4x4_w_y + tx_sz_y - 1) / tx_sz_y;
    const int tu_num_h_y = (max_4x4_h_y + tx_sz_y - 1) / tx_sz_y;
    const int tu_num_w_c = (max_4x4_w_c + tx_sz_c - 1) / tx_sz_c;
    const int tu_num_h_c = (max_4x4_h_c + tx_sz_c - 1) / tx_sz_c;
    const int tu_num_c = tu_num_w_c * tu_num_h_c;

    if (!is_inter_block(mbmi)) {
      int tu_idx_c = 0;
      int row_y, col_y, row_c, col_c;
      int plane;

      for (plane = 0; plane <= 1; ++plane) {
        if (mbmi->palette_mode_info.palette_size[plane])
          av1_decode_palette_tokens(xd, plane, r);
      }

      for (row_y = 0; row_y < tu_num_h_y; row_y++) {
        for (col_y = 0; col_y < tu_num_w_y; col_y++) {
          // luma
          predict_and_reconstruct_intra_block(
              cm, xd, r, mbmi, 0, row_y * tx_sz_y, col_y * tx_sz_y, tx_log2_y);
          // chroma
          if (tu_idx_c < tu_num_c) {
            row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
            col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
            predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 1, row_c,
                                                col_c, tx_log2_c);
            predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 2, row_c,
                                                col_c, tx_log2_c);
            tu_idx_c++;
          }
        }
      }

      // In the 4:2:2 case, it's possible that chroma has more TUs than luma.
      while (tu_idx_c < tu_num_c) {
        row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
        col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
        predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 1, row_c, col_c,
                                            tx_log2_c);
        predict_and_reconstruct_intra_block(cm, xd, r, mbmi, 2, row_c, col_c,
                                            tx_log2_c);
        tu_idx_c++;
      }
    } else {
      // Prediction
      av1_build_inter_predictors_sb(cm, xd, mi_row, mi_col, NULL,
                                    AOMMAX(bsize, BLOCK_8X8));

      // Reconstruction
      if (!mbmi->skip) {
        int eobtotal = 0;
        int tu_idx_c = 0;
        int row_y, col_y, row_c, col_c;

        for (row_y = 0; row_y < tu_num_h_y; row_y++) {
          for (col_y = 0; col_y < tu_num_w_y; col_y++) {
            // luma
            eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id, 0,
                                                row_y * tx_sz_y,
                                                col_y * tx_sz_y, tx_log2_y);
            // chroma
            if (tu_idx_c < tu_num_c) {
              row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
              col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
              eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id,
                                                  1, row_c, col_c, tx_log2_c);
              eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id,
                                                  2, row_c, col_c, tx_log2_c);
              tu_idx_c++;
            }
          }
        }

        // In the 4:2:2 case, it's possible that chroma has more TUs than luma.
        while (tu_idx_c < tu_num_c) {
          row_c = (tu_idx_c / tu_num_w_c) * tx_sz_c;
          col_c = (tu_idx_c % tu_num_w_c) * tx_sz_c;
          eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id, 1,
                                              row_c, col_c, tx_log2_c);
          eobtotal += reconstruct_inter_block(cm, xd, r, mbmi->segment_id, 2,
                                              row_c, col_c, tx_log2_c);
          tu_idx_c++;
        }

        // TODO(CONFIG_COEF_INTERLEAVE owners): bring eob == 0 corner case
        // into line with the default configuration
        if (bsize >= BLOCK_8X8 && eobtotal == 0) mbmi->skip = 1;
      }
    }
  }
#else  // CONFIG_COEF_INTERLEAVE
  if (!is_inter_block(mbmi)) {
    int plane;

    for (plane = 0; plane <= 1; ++plane) {
      if (mbmi->palette_mode_info.palette_size[plane])
        av1_decode_palette_tokens(xd, plane, r);
    }

    for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
      const struct macroblockd_plane *const pd = &xd->plane[plane];
      const TX_SIZE tx_size = av1_get_tx_size(plane, xd);
      const int stepr = tx_size_high_unit[tx_size];
      const int stepc = tx_size_wide_unit[tx_size];
#if CONFIG_CHROMA_SUB8X8
      const BLOCK_SIZE plane_bsize =
          AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
#else
      const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
#endif  // CONFIG_CHROMA_SUB8X8
      int row, col;
      const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
      const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
      if (!is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
                               pd->subsampling_y))
        continue;
      int blk_row, blk_col;
      const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
      int mu_blocks_wide =
          block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
      int mu_blocks_high =
          block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
      mu_blocks_wide = AOMMIN(max_blocks_wide, mu_blocks_wide);
      mu_blocks_high = AOMMIN(max_blocks_high, mu_blocks_high);

      for (row = 0; row < max_blocks_high; row += mu_blocks_high) {
        const int unit_height = AOMMIN(mu_blocks_high + row, max_blocks_high);
        for (col = 0; col < max_blocks_wide; col += mu_blocks_wide) {
          const int unit_width = AOMMIN(mu_blocks_wide + col, max_blocks_wide);

          for (blk_row = row; blk_row < unit_height; blk_row += stepr)
            for (blk_col = col; blk_col < unit_width; blk_col += stepc)
              predict_and_reconstruct_intra_block(cm, xd, r, mbmi, plane,
                                                  blk_row, blk_col, tx_size);
        }
      }
    }
  } else {
    int ref;

#if CONFIG_COMPOUND_SINGLEREF
    for (ref = 0; ref < 1 + is_inter_anyref_comp_mode(mbmi->mode); ++ref)
#else
    for (ref = 0; ref < 1 + has_second_ref(mbmi); ++ref)
#endif  // CONFIG_COMPOUND_SINGLEREF
    {
      const MV_REFERENCE_FRAME frame =
#if CONFIG_COMPOUND_SINGLEREF
          has_second_ref(mbmi) ? mbmi->ref_frame[ref] : mbmi->ref_frame[0];
#else
          mbmi->ref_frame[ref];
#endif  // CONFIG_COMPOUND_SINGLEREF
      if (frame < LAST_FRAME) {
#if CONFIG_INTRABC
        assert(is_intrabc_block(mbmi));
        assert(frame == INTRA_FRAME);
        assert(ref == 0);
#else
        assert(0);
#endif  // CONFIG_INTRABC
      } else {
        RefBuffer *ref_buf = &cm->frame_refs[frame - LAST_FRAME];

        xd->block_refs[ref] = ref_buf;
        if ((!av1_is_valid_scale(&ref_buf->sf)))
          aom_internal_error(xd->error_info, AOM_CODEC_UNSUP_BITSTREAM,
                             "Reference frame has invalid dimensions");
        av1_setup_pre_planes(xd, ref, ref_buf->buf, mi_row, mi_col,
                             &ref_buf->sf);
      }
    }

    av1_build_inter_predictors_sb(cm, xd, mi_row, mi_col, NULL, bsize);

#if CONFIG_MOTION_VAR
    if (mbmi->motion_mode == OBMC_CAUSAL) {
#if CONFIG_NCOBMC
      av1_build_ncobmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
#else
      av1_build_obmc_inter_predictors_sb(cm, xd, mi_row, mi_col);
#endif
    }
#endif  // CONFIG_MOTION_VAR
#if CONFIG_NCOBMC_ADAPT_WEIGHT
    if (mbmi->motion_mode == NCOBMC_ADAPT_WEIGHT) {
      int plane;
      recon_ncobmc_intrpl_pred(cm, xd, mi_row, mi_col, bsize);
      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        get_pred_from_intrpl_buf(xd, mi_row, mi_col, bsize, plane);
      }
    }
#endif
    // Reconstruction
    if (!mbmi->skip) {
      int eobtotal = 0;
      int plane;

      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        const struct macroblockd_plane *const pd = &xd->plane[plane];
#if CONFIG_CHROMA_SUB8X8
        const BLOCK_SIZE plane_bsize =
            AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
#else
        const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
#endif  // CONFIG_CHROMA_SUB8X8
        const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
        const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
        int row, col;

        if (!is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
                                 pd->subsampling_y))
          continue;

        const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
        int mu_blocks_wide =
            block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
        int mu_blocks_high =
            block_size_high[max_unit_bsize] >> tx_size_high_log2[0];

        mu_blocks_wide = AOMMIN(max_blocks_wide, mu_blocks_wide);
        mu_blocks_high = AOMMIN(max_blocks_high, mu_blocks_high);

        const TX_SIZE max_tx_size = get_vartx_max_txsize(
            mbmi, plane_bsize, pd->subsampling_x || pd->subsampling_y);
        const int bh_var_tx = tx_size_high_unit[max_tx_size];
        const int bw_var_tx = tx_size_wide_unit[max_tx_size];
        int block = 0;
        int step =
            tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];

        for (row = 0; row < max_blocks_high; row += mu_blocks_high) {
          for (col = 0; col < max_blocks_wide; col += mu_blocks_wide) {
            int blk_row, blk_col;
            const int unit_height =
                AOMMIN(mu_blocks_high + row, max_blocks_high);
            const int unit_width =
                AOMMIN(mu_blocks_wide + col, max_blocks_wide);
            for (blk_row = row; blk_row < unit_height; blk_row += bh_var_tx) {
              for (blk_col = col; blk_col < unit_width; blk_col += bw_var_tx) {
                decode_reconstruct_tx(cm, xd, r, mbmi, plane, plane_bsize,
                                      blk_row, blk_col, block, max_tx_size,
                                      &eobtotal);
                block += step;
              }
            }
          }
        }
      }
    }
  }
#if CONFIG_CFL && CONFIG_CHROMA_SUB8X8
  if (mbmi->uv_mode != UV_CFL_PRED) {
#if CONFIG_DEBUG
    if (cfl->is_chroma_reference) {
      cfl_clear_sub8x8_val(cfl);
    }
#endif
    if (!cfl->is_chroma_reference && is_inter_block(mbmi)) {
      cfl_store_block(xd, mbmi->sb_type, mbmi->tx_size);
    }
  }
#endif  // CONFIG_CFL && CONFIG_CHROMA_SUB8X8
#endif  // CONFIG_COEF_INTERLEAVE

  int reader_corrupted_flag = aom_reader_has_error(r);
  aom_merge_corrupted_flag(&xd->corrupted, reader_corrupted_flag);
}

#if NC_MODE_INFO && CONFIG_MOTION_VAR
static void detoken_and_recon_sb(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                                 int mi_row, int mi_col, aom_reader *r,
                                 BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int hbs = mi_size_wide[bsize] >> 1;
#if CONFIG_EXT_PARTITION_TYPES
  BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
#endif
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  partition = get_partition(cm, mi_row, mi_col, bsize);
  subsize = subsize_lookup[partition][bsize];

  switch (partition) {
    case PARTITION_NONE:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize);
      break;
    case PARTITION_HORZ:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      if (has_rows)
        decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, subsize);
      break;
    case PARTITION_VERT:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      if (has_cols)
        decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, subsize);
      break;
    case PARTITION_SPLIT:
      detoken_and_recon_sb(pbi, xd, mi_row, mi_col, r, subsize);
      detoken_and_recon_sb(pbi, xd, mi_row, mi_col + hbs, r, subsize);
      detoken_and_recon_sb(pbi, xd, mi_row + hbs, mi_col, r, subsize);
      detoken_and_recon_sb(pbi, xd, mi_row + hbs, mi_col + hbs, r, subsize);
      break;
#if CONFIG_EXT_PARTITION_TYPES
#if CONFIG_EXT_PARTITION_TYPES_AB
#error NC_MODE_INFO+MOTION_VAR not yet supported for new HORZ/VERT_AB partitions
#endif
    case PARTITION_HORZ_A:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, subsize);
      break;
    case PARTITION_HORZ_B:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col + hbs, r,
                                   bsize2);
      break;
    case PARTITION_VERT_A:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, subsize);
      break;
    case PARTITION_VERT_B:
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, subsize);
      decode_token_and_recon_block(pbi, xd, mi_row, mi_col + hbs, r, bsize2);
      decode_token_and_recon_block(pbi, xd, mi_row + hbs, mi_col + hbs, r,
                                   bsize2);
      break;
#endif
    default: assert(0 && "Invalid partition type");
  }
}
#endif

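// Decodes a single coding block: mode info first, then (unless
// reconstruction is deferred to a separate pass, as with MOTION_VAR plus
// NC_MODE_INFO) the residual tokens and reconstruction.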
static void decode_block(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                         int mi_row, int mi_col, aom_reader *r,
#if CONFIG_EXT_PARTITION_TYPES
                         PARTITION_TYPE partition,
#endif  // CONFIG_EXT_PARTITION_TYPES
                         BLOCK_SIZE bsize) {
  decode_mbmi_block(pbi, xd, mi_row, mi_col, r,
#if CONFIG_EXT_PARTITION_TYPES
                    partition,
#endif
                    bsize);

#if !(CONFIG_MOTION_VAR && NC_MODE_INFO)
  decode_token_and_recon_block(pbi, xd, mi_row, mi_col, r, bsize);
#endif
}

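// Reads the partition type for a block. When the block extends past the
// bottom or right frame edge only two choices remain (the corresponding
// directional partition or a full split), so a reduced two-entry CDF is
// used; past both edges the partition is forced to PARTITION_SPLIT.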
Yaowu Xuf883b422016-08-30 14:01:10 -07001024static PARTITION_TYPE read_partition(AV1_COMMON *cm, MACROBLOCKD *xd,
1025 int mi_row, int mi_col, aom_reader *r,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001026 int has_rows, int has_cols,
Jingning Han1beb0102016-12-07 11:08:30 -08001027 BLOCK_SIZE bsize) {
Alex Converse55c6bde2017-01-12 15:55:31 -08001028#if CONFIG_UNPOISON_PARTITION_CTX
1029 const int ctx =
1030 partition_plane_context(xd, mi_row, mi_col, has_rows, has_cols, bsize);
Alex Converse55c6bde2017-01-12 15:55:31 -08001031#else
Jingning Han1beb0102016-12-07 11:08:30 -08001032 const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
Alex Converse55c6bde2017-01-12 15:55:31 -08001033#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001034 PARTITION_TYPE p;
Thomas Daviesc2ec0e42017-01-11 16:27:27 +00001035 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1036 (void)cm;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001037
Jingning Han5fe79db2017-03-27 15:10:30 -07001038 aom_cdf_prob *partition_cdf = (ctx >= 0) ? ec_ctx->partition_cdf[ctx] : NULL;
Jingning Han5fe79db2017-03-27 15:10:30 -07001039
Rupert Swarbrickb95cf122017-07-31 16:51:12 +01001040 if (has_rows && has_cols) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001041#if CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrick2fa6e1c2017-09-11 12:38:10 +01001042 const int num_partition_types =
1043 (mi_width_log2_lookup[bsize] > mi_width_log2_lookup[BLOCK_8X8])
1044 ? EXT_PARTITION_TYPES
1045 : PARTITION_TYPES;
Alex Converse57795a42017-03-14 12:18:25 -07001046#else
Rupert Swarbrick2fa6e1c2017-09-11 12:38:10 +01001047 const int num_partition_types = PARTITION_TYPES;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001048#endif // CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrick2fa6e1c2017-09-11 12:38:10 +01001049 p = (PARTITION_TYPE)aom_read_symbol(r, partition_cdf, num_partition_types,
1050 ACCT_STR);
  } else if (!has_rows && has_cols) {
    // The bottom half of the block lies outside the frame, so only HORZ or
    // SPLIT is possible; collapse the partition CDF to that binary choice.
    assert(bsize > BLOCK_8X8);
    aom_cdf_prob cdf[2];
    partition_gather_vert_alike(cdf, partition_cdf);
    assert(cdf[1] == AOM_ICDF(CDF_PROB_TOP));
    p = aom_read_cdf(r, cdf, 2, ACCT_STR) ? PARTITION_SPLIT : PARTITION_HORZ;
  } else if (has_rows && !has_cols) {
    assert(bsize > BLOCK_8X8);
    aom_cdf_prob cdf[2];
    partition_gather_horz_alike(cdf, partition_cdf);
    assert(cdf[1] == AOM_ICDF(CDF_PROB_TOP));
    p = aom_read_cdf(r, cdf, 2, ACCT_STR) ? PARTITION_SPLIT : PARTITION_VERT;
  } else {
    p = PARTITION_SPLIT;
  }

  return p;
}

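// Recursively decodes one partition tree rooted at (mi_row, mi_col): reads
// the partition type, decodes the resulting blocks (recursing for SPLIT),
// updates the partition context, and then reads any syntax that is signalled
// once per superblock (per-SB loop filter level, CDEF strengths and
// loop-restoration unit coefficients, depending on configuration).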
// TODO(slavarnway): eliminate bsize and subsize in future commits
static void decode_partition(AV1Decoder *const pbi, MACROBLOCKD *const xd,
                             int mi_row, int mi_col, aom_reader *r,
                             BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &pbi->common;
  const int num_8x8_wh = mi_size_wide[bsize];
  const int hbs = num_8x8_wh >> 1;
#if CONFIG_EXT_PARTITION_TYPES && CONFIG_EXT_PARTITION_TYPES_AB
  const int qbs = num_8x8_wh >> 2;
#endif
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
#if CONFIG_EXT_PARTITION_TYPES
  const int quarter_step = num_8x8_wh / 4;
  int i;
#if !CONFIG_EXT_PARTITION_TYPES_AB
  BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
#endif
#endif
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  partition = (bsize < BLOCK_8X8) ? PARTITION_NONE
                                  : read_partition(cm, xd, mi_row, mi_col, r,
                                                   has_rows, has_cols, bsize);
  subsize = subsize_lookup[partition][bsize];  // get_subsize(bsize, partition);

  // Check the bitstream is conformant: if there is subsampling on the
  // chroma planes, subsize must subsample to a valid block size.
  const struct macroblockd_plane *const pd_u = &xd->plane[1];
  if (get_plane_block_size(subsize, pd_u) == BLOCK_INVALID) {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Block size %dx%d invalid with this subsampling mode",
                       block_size_wide[subsize], block_size_high[subsize]);
  }

#define DEC_BLOCK_STX_ARG
#if CONFIG_EXT_PARTITION_TYPES
#define DEC_BLOCK_EPT_ARG partition,
#else
#define DEC_BLOCK_EPT_ARG
#endif
#define DEC_BLOCK(db_r, db_c, db_subsize)                   \
  decode_block(pbi, xd, DEC_BLOCK_STX_ARG(db_r), (db_c), r, \
               DEC_BLOCK_EPT_ARG(db_subsize))
#define DEC_PARTITION(db_r, db_c, db_subsize) \
  decode_partition(pbi, xd, DEC_BLOCK_STX_ARG(db_r), (db_c), r, (db_subsize))

  switch (partition) {
    case PARTITION_NONE: DEC_BLOCK(mi_row, mi_col, subsize); break;
    case PARTITION_HORZ:
      DEC_BLOCK(mi_row, mi_col, subsize);
      if (has_rows) DEC_BLOCK(mi_row + hbs, mi_col, subsize);
      break;
    case PARTITION_VERT:
      DEC_BLOCK(mi_row, mi_col, subsize);
      if (has_cols) DEC_BLOCK(mi_row, mi_col + hbs, subsize);
      break;
    case PARTITION_SPLIT:
      DEC_PARTITION(mi_row, mi_col, subsize);
      DEC_PARTITION(mi_row, mi_col + hbs, subsize);
      DEC_PARTITION(mi_row + hbs, mi_col, subsize);
      DEC_PARTITION(mi_row + hbs, mi_col + hbs, subsize);
      break;
#if CONFIG_EXT_PARTITION_TYPES
#if CONFIG_EXT_PARTITION_TYPES_AB
    case PARTITION_HORZ_A:
      DEC_BLOCK(mi_row, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
      DEC_BLOCK(mi_row + qbs, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
      DEC_BLOCK(mi_row + hbs, mi_col, subsize);
      break;
    case PARTITION_HORZ_B:
      DEC_BLOCK(mi_row, mi_col, subsize);
      DEC_BLOCK(mi_row + hbs, mi_col, get_subsize(bsize, PARTITION_HORZ_4));
      if (mi_row + 3 * qbs < cm->mi_rows)
        DEC_BLOCK(mi_row + 3 * qbs, mi_col,
                  get_subsize(bsize, PARTITION_HORZ_4));
      break;
    case PARTITION_VERT_A:
      DEC_BLOCK(mi_row, mi_col, get_subsize(bsize, PARTITION_VERT_4));
      DEC_BLOCK(mi_row, mi_col + qbs, get_subsize(bsize, PARTITION_VERT_4));
      DEC_BLOCK(mi_row, mi_col + hbs, subsize);
      break;
    case PARTITION_VERT_B:
      DEC_BLOCK(mi_row, mi_col, subsize);
      DEC_BLOCK(mi_row, mi_col + hbs, get_subsize(bsize, PARTITION_VERT_4));
      if (mi_col + 3 * qbs < cm->mi_cols)
        DEC_BLOCK(mi_row, mi_col + 3 * qbs,
                  get_subsize(bsize, PARTITION_VERT_4));
      break;
#else
    case PARTITION_HORZ_A:
      DEC_BLOCK(mi_row, mi_col, bsize2);
      DEC_BLOCK(mi_row, mi_col + hbs, bsize2);
      DEC_BLOCK(mi_row + hbs, mi_col, subsize);
      break;
    case PARTITION_HORZ_B:
      DEC_BLOCK(mi_row, mi_col, subsize);
      DEC_BLOCK(mi_row + hbs, mi_col, bsize2);
      DEC_BLOCK(mi_row + hbs, mi_col + hbs, bsize2);
      break;
    case PARTITION_VERT_A:
      DEC_BLOCK(mi_row, mi_col, bsize2);
      DEC_BLOCK(mi_row + hbs, mi_col, bsize2);
      DEC_BLOCK(mi_row, mi_col + hbs, subsize);
      break;
    case PARTITION_VERT_B:
      DEC_BLOCK(mi_row, mi_col, subsize);
      DEC_BLOCK(mi_row, mi_col + hbs, bsize2);
      DEC_BLOCK(mi_row + hbs, mi_col + hbs, bsize2);
      break;
#endif
    case PARTITION_HORZ_4:
      for (i = 0; i < 4; ++i) {
        int this_mi_row = mi_row + i * quarter_step;
        if (i > 0 && this_mi_row >= cm->mi_rows) break;
        DEC_BLOCK(this_mi_row, mi_col, subsize);
      }
      break;
    case PARTITION_VERT_4:
      for (i = 0; i < 4; ++i) {
        int this_mi_col = mi_col + i * quarter_step;
        if (i > 0 && this_mi_col >= cm->mi_cols) break;
        DEC_BLOCK(mi_row, this_mi_col, subsize);
      }
      break;
#endif  // CONFIG_EXT_PARTITION_TYPES
    default: assert(0 && "Invalid partition type");
  }

#undef DEC_PARTITION
#undef DEC_BLOCK
#undef DEC_BLOCK_EPT_ARG
#undef DEC_BLOCK_STX_ARG

#if CONFIG_EXT_PARTITION_TYPES
  update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
#else
  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(xd, mi_row, mi_col, subsize, bsize);
#endif  // CONFIG_EXT_PARTITION_TYPES

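  // With CONFIG_LPF_SB, the loop filter level is coded per superblock: the
  // first superblock codes a 6-bit level directly, and every later superblock
  // either reuses the level of the previously decoded superblock (to the
  // left, or above for the first column) or codes a delta and sign against
  // it.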
#if CONFIG_LPF_SB
  if (bsize == cm->sb_size) {
    int filt_lvl;
    if (mi_row == 0 && mi_col == 0) {
      filt_lvl = aom_read_literal(r, 6, ACCT_STR);
      cm->mi_grid_visible[0]->mbmi.reuse_sb_lvl = 0;
      cm->mi_grid_visible[0]->mbmi.delta = 0;
      cm->mi_grid_visible[0]->mbmi.sign = 0;
    } else {
      int prev_mi_row, prev_mi_col;
      if (mi_col - MAX_MIB_SIZE < 0) {
        prev_mi_row = mi_row - MAX_MIB_SIZE;
        prev_mi_col = mi_col;
      } else {
        prev_mi_row = mi_row;
        prev_mi_col = mi_col - MAX_MIB_SIZE;
      }

      MB_MODE_INFO *curr_mbmi =
          &cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi;
      MB_MODE_INFO *prev_mbmi =
          &cm->mi_grid_visible[prev_mi_row * cm->mi_stride + prev_mi_col]->mbmi;
      const uint8_t prev_lvl = prev_mbmi->filt_lvl;

      const int reuse_ctx = prev_mbmi->reuse_sb_lvl;
      const int reuse_prev_lvl = aom_read_symbol(
          r, xd->tile_ctx->lpf_reuse_cdf[reuse_ctx], 2, ACCT_STR);
      curr_mbmi->reuse_sb_lvl = reuse_prev_lvl;

      if (reuse_prev_lvl) {
        filt_lvl = prev_lvl;
        curr_mbmi->delta = 0;
        curr_mbmi->sign = 0;
      } else {
        const int delta_ctx = prev_mbmi->delta;
        unsigned int delta = aom_read_symbol(
            r, xd->tile_ctx->lpf_delta_cdf[delta_ctx], DELTA_RANGE, ACCT_STR);
        curr_mbmi->delta = delta;
        delta *= LPF_STEP;

        if (delta) {
          const int sign_ctx = prev_mbmi->sign;
          const int sign = aom_read_symbol(
              r, xd->tile_ctx->lpf_sign_cdf[reuse_ctx][sign_ctx], 2, ACCT_STR);
          curr_mbmi->sign = sign;
          filt_lvl = sign ? prev_lvl + delta : prev_lvl - delta;
        } else {
          filt_lvl = prev_lvl;
          curr_mbmi->sign = 0;
        }
      }
    }

    av1_loop_filter_sb_level_init(cm, mi_row, mi_col, filt_lvl);
  }
#endif

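  // CDEF: at superblock level, read a cdef_strength index for each 64x64
  // unit inside the frame. Units in an all-lossless frame or whose blocks are
  // all skipped get -1, i.e. no CDEF filtering.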
#if CONFIG_CDEF
  if (bsize == cm->sb_size) {
    int width_step = mi_size_wide[BLOCK_64X64];
    int height_step = mi_size_high[BLOCK_64X64];
    int w, h;
    for (h = 0; (h < mi_size_high[cm->sb_size]) && (mi_row + h < cm->mi_rows);
         h += height_step) {
      for (w = 0; (w < mi_size_wide[cm->sb_size]) && (mi_col + w < cm->mi_cols);
           w += width_step) {
        if (!cm->all_lossless && !sb_all_skip(cm, mi_row + h, mi_col + w))
          cm->mi_grid_visible[(mi_row + h) * cm->mi_stride + (mi_col + w)]
              ->mbmi.cdef_strength =
              aom_read_literal(r, cm->cdef_bits, ACCT_STR);
        else
          cm->mi_grid_visible[(mi_row + h) * cm->mi_stride + (mi_col + w)]
              ->mbmi.cdef_strength = -1;
      }
    }
  }
#endif  // CONFIG_CDEF
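  // Loop restoration: for each plane, read filter coefficients for the
  // restoration units that this superblock is responsible for, as reported by
  // av1_loop_restoration_corners_in_sb().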
#if CONFIG_LOOP_RESTORATION
  for (int plane = 0; plane < MAX_MB_PLANE; ++plane) {
    int rcol0, rcol1, rrow0, rrow1, nhtiles;
    if (av1_loop_restoration_corners_in_sb(cm, plane, mi_row, mi_col, bsize,
                                           &rcol0, &rcol1, &rrow0, &rrow1,
                                           &nhtiles)) {
      for (int rrow = rrow0; rrow < rrow1; ++rrow) {
        for (int rcol = rcol0; rcol < rcol1; ++rcol) {
          int rtile_idx = rcol + rrow * nhtiles;
          loop_restoration_read_sb_coeffs(cm, xd, r, plane, rtile_idx);
        }
      }
    }
  }
#endif
}

static void setup_bool_decoder(const uint8_t *data, const uint8_t *data_end,
                               const size_t read_size,
                               struct aom_internal_error_info *error_info,
                               aom_reader *r,
#if CONFIG_ANS && ANS_MAX_SYMBOLS
                               int window_size,
#endif  // CONFIG_ANS && ANS_MAX_SYMBOLS
                               aom_decrypt_cb decrypt_cb, void *decrypt_state) {
  // Validate the calculated partition length. If the buffer
  // described by the partition can't be fully read, then restrict
  // it to the portion that can be (for EC mode) or throw an error.
  if (!read_is_valid(data, read_size, data_end))
    aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt tile length");

#if CONFIG_ANS && ANS_MAX_SYMBOLS
  r->window_size = window_size;
#endif
  if (aom_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
    aom_internal_error(error_info, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate bool decoder %d", 1);
}

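// Reads the segmentation syntax from the uncompressed header: the enabled
// flag, whether the segmentation map is updated (and, for inter frames,
// whether the update is temporally predicted), and the per-segment feature
// data.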
static void setup_segmentation(AV1_COMMON *const cm,
                               struct aom_read_bit_buffer *rb) {
  struct segmentation *const seg = &cm->seg;
  int i, j;

  seg->update_map = 0;
  seg->update_data = 0;
  seg->temporal_update = 0;

  seg->enabled = aom_rb_read_bit(rb);
  if (!seg->enabled) return;

  // Segmentation map update
  if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
    seg->update_map = 1;
  } else {
    seg->update_map = aom_rb_read_bit(rb);
  }
  if (seg->update_map) {
    if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
      seg->temporal_update = 0;
    } else {
      seg->temporal_update = aom_rb_read_bit(rb);
    }
  }

  // Segmentation data update
  seg->update_data = aom_rb_read_bit(rb);
  if (seg->update_data) {
    seg->abs_delta = aom_rb_read_bit(rb);

    av1_clearall_segfeatures(seg);

    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        int data = 0;
        const int feature_enabled = aom_rb_read_bit(rb);
        if (feature_enabled) {
          av1_enable_segfeature(seg, i, j);
          data = decode_unsigned_max(rb, av1_seg_feature_data_max(j));
          if (av1_is_segfeature_signed(j))
            data = aom_rb_read_bit(rb) ? -data : data;
        }
        av1_set_segdata(seg, i, j, data);
      }
    }
  }
}

#if CONFIG_LOOP_RESTORATION
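// Reads the frame-level restoration type for each plane (NONE, WIENER,
// SGRPROJ or SWITCHABLE) followed by the restoration tile sizes, with the
// chroma tile size optionally reduced when the chroma planes are subsampled.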
static void decode_restoration_mode(AV1_COMMON *cm,
                                    struct aom_read_bit_buffer *rb) {
  int p;
  RestorationInfo *rsi;
  for (p = 0; p < MAX_MB_PLANE; ++p) {
    rsi = &cm->rst_info[p];
    if (aom_rb_read_bit(rb)) {
      rsi->frame_restoration_type =
          aom_rb_read_bit(rb) ? RESTORE_SGRPROJ : RESTORE_WIENER;
    } else {
      rsi->frame_restoration_type =
          aom_rb_read_bit(rb) ? RESTORE_SWITCHABLE : RESTORE_NONE;
    }
  }
  cm->rst_info[0].restoration_tilesize = RESTORATION_TILESIZE_MAX;
  cm->rst_info[1].restoration_tilesize = RESTORATION_TILESIZE_MAX;
  cm->rst_info[2].restoration_tilesize = RESTORATION_TILESIZE_MAX;
  if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
    cm->rst_info[0].restoration_tilesize = RESTORATION_TILESIZE_MAX >> 2;
    cm->rst_info[1].restoration_tilesize = RESTORATION_TILESIZE_MAX >> 2;
    cm->rst_info[2].restoration_tilesize = RESTORATION_TILESIZE_MAX >> 2;
    rsi = &cm->rst_info[0];
    rsi->restoration_tilesize <<= aom_rb_read_bit(rb);
    if (rsi->restoration_tilesize != (RESTORATION_TILESIZE_MAX >> 2)) {
      rsi->restoration_tilesize <<= aom_rb_read_bit(rb);
    }
  }
  int s = AOMMIN(cm->subsampling_x, cm->subsampling_y);
  if (s && (cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
            cm->rst_info[2].frame_restoration_type != RESTORE_NONE)) {
    cm->rst_info[1].restoration_tilesize =
        cm->rst_info[0].restoration_tilesize >> (aom_rb_read_bit(rb) * s);
  } else {
    cm->rst_info[1].restoration_tilesize = cm->rst_info[0].restoration_tilesize;
  }
  cm->rst_info[2].restoration_tilesize = cm->rst_info[1].restoration_tilesize;

  cm->rst_info[0].procunit_width = cm->rst_info[0].procunit_height =
      RESTORATION_PROC_UNIT_SIZE;
  cm->rst_info[1].procunit_width = cm->rst_info[2].procunit_width =
      RESTORATION_PROC_UNIT_SIZE >> cm->subsampling_x;
  cm->rst_info[1].procunit_height = cm->rst_info[2].procunit_height =
      RESTORATION_PROC_UNIT_SIZE >> cm->subsampling_y;
}

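// Reads one Wiener filter. Each tap is coded as a subexponential-coded
// difference from the corresponding tap of the previously decoded filter;
// only half of the (symmetric) filter is coded, the outermost tap is forced
// to zero for the chroma window, and the centre tap is derived from the
// others (it also carries an implicit +WIENER_FILT_STEP).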
static void read_wiener_filter(int wiener_win, WienerInfo *wiener_info,
                               WienerInfo *ref_wiener_info, aom_reader *rb) {
  memset(wiener_info->vfilter, 0, sizeof(wiener_info->vfilter));
  memset(wiener_info->hfilter, 0, sizeof(wiener_info->hfilter));

  if (wiener_win == WIENER_WIN)
    wiener_info->vfilter[0] = wiener_info->vfilter[WIENER_WIN - 1] =
        aom_read_primitive_refsubexpfin(
            rb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
            WIENER_FILT_TAP0_SUBEXP_K,
            ref_wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV, ACCT_STR) +
        WIENER_FILT_TAP0_MINV;
  else
    wiener_info->vfilter[0] = wiener_info->vfilter[WIENER_WIN - 1] = 0;
  wiener_info->vfilter[1] = wiener_info->vfilter[WIENER_WIN - 2] =
      aom_read_primitive_refsubexpfin(
          rb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
          WIENER_FILT_TAP1_SUBEXP_K,
          ref_wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV, ACCT_STR) +
      WIENER_FILT_TAP1_MINV;
  wiener_info->vfilter[2] = wiener_info->vfilter[WIENER_WIN - 3] =
      aom_read_primitive_refsubexpfin(
          rb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
          WIENER_FILT_TAP2_SUBEXP_K,
          ref_wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV, ACCT_STR) +
      WIENER_FILT_TAP2_MINV;
  // The central element has an implicit +WIENER_FILT_STEP
  wiener_info->vfilter[WIENER_HALFWIN] =
      -2 * (wiener_info->vfilter[0] + wiener_info->vfilter[1] +
            wiener_info->vfilter[2]);

  if (wiener_win == WIENER_WIN)
    wiener_info->hfilter[0] = wiener_info->hfilter[WIENER_WIN - 1] =
        aom_read_primitive_refsubexpfin(
            rb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
            WIENER_FILT_TAP0_SUBEXP_K,
            ref_wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV, ACCT_STR) +
        WIENER_FILT_TAP0_MINV;
  else
    wiener_info->hfilter[0] = wiener_info->hfilter[WIENER_WIN - 1] = 0;
  wiener_info->hfilter[1] = wiener_info->hfilter[WIENER_WIN - 2] =
      aom_read_primitive_refsubexpfin(
          rb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
          WIENER_FILT_TAP1_SUBEXP_K,
          ref_wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV, ACCT_STR) +
      WIENER_FILT_TAP1_MINV;
  wiener_info->hfilter[2] = wiener_info->hfilter[WIENER_WIN - 3] =
      aom_read_primitive_refsubexpfin(
          rb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
          WIENER_FILT_TAP2_SUBEXP_K,
          ref_wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV, ACCT_STR) +
      WIENER_FILT_TAP2_MINV;
  // The central element has an implicit +WIENER_FILT_STEP
  wiener_info->hfilter[WIENER_HALFWIN] =
      -2 * (wiener_info->hfilter[0] + wiener_info->hfilter[1] +
            wiener_info->hfilter[2]);
  memcpy(ref_wiener_info, wiener_info, sizeof(*wiener_info));
}

static void read_sgrproj_filter(SgrprojInfo *sgrproj_info,
                                SgrprojInfo *ref_sgrproj_info, aom_reader *rb) {
  sgrproj_info->ep = aom_read_literal(rb, SGRPROJ_PARAMS_BITS, ACCT_STR);
  sgrproj_info->xqd[0] =
      aom_read_primitive_refsubexpfin(
          rb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1, SGRPROJ_PRJ_SUBEXP_K,
          ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0, ACCT_STR) +
      SGRPROJ_PRJ_MIN0;
  sgrproj_info->xqd[1] =
      aom_read_primitive_refsubexpfin(
          rb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1, SGRPROJ_PRJ_SUBEXP_K,
          ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1, ACCT_STR) +
      SGRPROJ_PRJ_MIN1;
  memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
}

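// Reads the restoration type and filter parameters for one restoration unit.
// For RESTORE_SWITCHABLE frames the type is coded per unit; for frames coded
// as RESTORE_WIENER or RESTORE_SGRPROJ only an on/off decision plus the
// filter parameters are coded.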
static void loop_restoration_read_sb_coeffs(const AV1_COMMON *const cm,
                                            MACROBLOCKD *xd,
                                            aom_reader *const r, int plane,
                                            int rtile_idx) {
  const RestorationInfo *rsi = &cm->rst_info[plane];
  RestorationUnitInfo *rui = &rsi->unit_info[rtile_idx];
  if (rsi->frame_restoration_type == RESTORE_NONE) return;

  const int wiener_win = (plane > 0) ? WIENER_WIN_CHROMA : WIENER_WIN;
  WienerInfo *wiener_info = xd->wiener_info + plane;
  SgrprojInfo *sgrproj_info = xd->sgrproj_info + plane;

  if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) {
    rui->restoration_type =
        aom_read_symbol(r, xd->tile_ctx->switchable_restore_cdf,
                        RESTORE_SWITCHABLE_TYPES, ACCT_STR);
    switch (rui->restoration_type) {
      case RESTORE_WIENER:
        read_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, r);
        break;
      case RESTORE_SGRPROJ:
        read_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, r);
        break;
      default: assert(rui->restoration_type == RESTORE_NONE); break;
    }
  } else if (rsi->frame_restoration_type == RESTORE_WIENER) {
#if CONFIG_NEW_MULTISYMBOL
    if (aom_read_symbol(r, xd->tile_ctx->wiener_restore_cdf, 2, ACCT_STR)) {
#else
    if (aom_read(r, RESTORE_NONE_WIENER_PROB, ACCT_STR)) {
#endif  // CONFIG_NEW_MULTISYMBOL
      rui->restoration_type = RESTORE_WIENER;
      read_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, r);
    } else {
      rui->restoration_type = RESTORE_NONE;
    }
  } else if (rsi->frame_restoration_type == RESTORE_SGRPROJ) {
#if CONFIG_NEW_MULTISYMBOL
    if (aom_read_symbol(r, xd->tile_ctx->sgrproj_restore_cdf, 2, ACCT_STR)) {
#else
    if (aom_read(r, RESTORE_NONE_SGRPROJ_PROB, ACCT_STR)) {
#endif  // CONFIG_NEW_MULTISYMBOL
      rui->restoration_type = RESTORE_SGRPROJ;
      read_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, r);
    } else {
      rui->restoration_type = RESTORE_NONE;
    }
  }
}
#endif  // CONFIG_LOOP_RESTORATION

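// Reads the frame-level loop filter syntax: the filter levels (unless they
// are coded per superblock under CONFIG_LPF_SB), the sharpness level, and the
// optional reference/mode deltas.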
static void setup_loopfilter(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  struct loopfilter *lf = &cm->lf;
#if !CONFIG_LPF_SB
#if CONFIG_LOOPFILTER_LEVEL
  lf->filter_level[0] = aom_rb_read_literal(rb, 6);
  lf->filter_level[1] = aom_rb_read_literal(rb, 6);
  if (lf->filter_level[0] || lf->filter_level[1]) {
    lf->filter_level_u = aom_rb_read_literal(rb, 6);
    lf->filter_level_v = aom_rb_read_literal(rb, 6);
  }
#else
  lf->filter_level = aom_rb_read_literal(rb, 6);
#endif
#endif  // CONFIG_LPF_SB
  lf->sharpness_level = aom_rb_read_literal(rb, 3);

  // Read in loop filter deltas applied at the MB level based on mode or ref
  // frame.
  lf->mode_ref_delta_update = 0;

  lf->mode_ref_delta_enabled = aom_rb_read_bit(rb);
  if (lf->mode_ref_delta_enabled) {
    lf->mode_ref_delta_update = aom_rb_read_bit(rb);
    if (lf->mode_ref_delta_update) {
      int i;

      for (i = 0; i < TOTAL_REFS_PER_FRAME; i++)
        if (aom_rb_read_bit(rb))
          lf->ref_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
        if (aom_rb_read_bit(rb))
          lf->mode_deltas[i] = aom_rb_read_inv_signed_literal(rb, 6);
    }
  }
}

#if CONFIG_CDEF
static void setup_cdef(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  int i;
#if CONFIG_CDEF_SINGLEPASS
  cm->cdef_pri_damping = cm->cdef_sec_damping = aom_rb_read_literal(rb, 2) + 3;
#else
  cm->cdef_pri_damping = aom_rb_read_literal(rb, 1) + 5;
  cm->cdef_sec_damping = aom_rb_read_literal(rb, 2) + 3;
#endif
  cm->cdef_bits = aom_rb_read_literal(rb, 2);
  cm->nb_cdef_strengths = 1 << cm->cdef_bits;
  for (i = 0; i < cm->nb_cdef_strengths; i++) {
    cm->cdef_strengths[i] = aom_rb_read_literal(rb, CDEF_STRENGTH_BITS);
    cm->cdef_uv_strengths[i] = cm->subsampling_x == cm->subsampling_y
                                   ? aom_rb_read_literal(rb, CDEF_STRENGTH_BITS)
                                   : 0;
  }
}
#endif  // CONFIG_CDEF

static INLINE int read_delta_q(struct aom_read_bit_buffer *rb) {
  return aom_rb_read_bit(rb) ? aom_rb_read_inv_signed_literal(rb, 6) : 0;
}

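// Reads base_qindex and the DC/AC delta-Q values, plus the quantization
// matrix level range when CONFIG_AOM_QM is enabled.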
static void setup_quantization(AV1_COMMON *const cm,
                               struct aom_read_bit_buffer *rb) {
  cm->base_qindex = aom_rb_read_literal(rb, QINDEX_BITS);
  cm->y_dc_delta_q = read_delta_q(rb);
  cm->uv_dc_delta_q = read_delta_q(rb);
  cm->uv_ac_delta_q = read_delta_q(rb);
  cm->dequant_bit_depth = cm->bit_depth;
#if CONFIG_AOM_QM
  cm->using_qmatrix = aom_rb_read_bit(rb);
  if (cm->using_qmatrix) {
    cm->min_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
    cm->max_qmlevel = aom_rb_read_literal(rb, QM_LEVEL_BITS);
  } else {
    cm->min_qmlevel = 0;
    cm->max_qmlevel = 0;
  }
#endif
}

// Build y/uv dequant values based on segmentation.
static void setup_segmentation_dequant(AV1_COMMON *const cm) {
#if CONFIG_AOM_QM
  const int using_qm = cm->using_qmatrix;
  const int minqm = cm->min_qmlevel;
  const int maxqm = cm->max_qmlevel;
#endif
  // When segmentation is disabled, only the first value is used.  The
  // remaining are don't cares.
  const int max_segments = cm->seg.enabled ? MAX_SEGMENTS : 1;
  for (int i = 0; i < max_segments; ++i) {
    const int qindex = av1_get_qindex(&cm->seg, i, cm->base_qindex);
    cm->y_dequant[i][0] = av1_dc_quant(qindex, cm->y_dc_delta_q, cm->bit_depth);
    cm->y_dequant[i][1] = av1_ac_quant(qindex, 0, cm->bit_depth);
    cm->uv_dequant[i][0] =
        av1_dc_quant(qindex, cm->uv_dc_delta_q, cm->bit_depth);
    cm->uv_dequant[i][1] =
        av1_ac_quant(qindex, cm->uv_ac_delta_q, cm->bit_depth);
#if CONFIG_AOM_QM
    const int lossless = qindex == 0 && cm->y_dc_delta_q == 0 &&
                         cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
    // NB: depends on base index so there is only 1 set per frame
    // No quant weighting when lossless or signalled not using QM
    const int qmlevel = (lossless || using_qm == 0)
                            ? NUM_QM_LEVELS - 1
                            : aom_get_qmlevel(cm->base_qindex, minqm, maxqm);
    for (int j = 0; j < TX_SIZES_ALL; ++j) {
      cm->y_iqmatrix[i][1][j] = aom_iqmatrix(cm, qmlevel, 0, j, 1);
      cm->y_iqmatrix[i][0][j] = aom_iqmatrix(cm, qmlevel, 0, j, 0);
      cm->uv_iqmatrix[i][1][j] = aom_iqmatrix(cm, qmlevel, 1, j, 1);
      cm->uv_iqmatrix[i][0][j] = aom_iqmatrix(cm, qmlevel, 1, j, 0);
    }
#endif  // CONFIG_AOM_QM
#if CONFIG_NEW_QUANT
    for (int dq = 0; dq < QUANT_PROFILES; dq++) {
      for (int b = 0; b < COEF_BANDS; ++b) {
        av1_get_dequant_val_nuq(cm->y_dequant[i][b != 0], b,
                                cm->y_dequant_nuq[i][dq][b], NULL, dq);
        av1_get_dequant_val_nuq(cm->uv_dequant[i][b != 0], b,
                                cm->uv_dequant_nuq[i][dq][b], NULL, dq);
      }
    }
#endif  // CONFIG_NEW_QUANT
  }
}

static InterpFilter read_frame_interp_filter(struct aom_read_bit_buffer *rb) {
  return aom_rb_read_bit(rb) ? SWITCHABLE
                             : aom_rb_read_literal(rb, LOG_SWITCHABLE_FILTERS);
}

static void setup_render_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
#if CONFIG_FRAME_SUPERRES
  cm->render_width = cm->superres_upscaled_width;
  cm->render_height = cm->superres_upscaled_height;
#else
  cm->render_width = cm->width;
  cm->render_height = cm->height;
#endif  // CONFIG_FRAME_SUPERRES
  if (aom_rb_read_bit(rb))
    av1_read_frame_size(rb, &cm->render_width, &cm->render_height);
}

#if CONFIG_FRAME_SUPERRES
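// Reads the optional superres scale denominator and, when present, scales
// the coded frame size down accordingly; the original dimensions are kept in
// cm->superres_upscaled_width/height so the render size still refers to the
// upscaled frame.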
// TODO(afergs): make "struct aom_read_bit_buffer *const rb"?
static void setup_superres(AV1_COMMON *const cm, struct aom_read_bit_buffer *rb,
                           int *width, int *height) {
  cm->superres_upscaled_width = *width;
  cm->superres_upscaled_height = *height;
  if (aom_rb_read_bit(rb)) {
    cm->superres_scale_denominator =
        (uint8_t)aom_rb_read_literal(rb, SUPERRES_SCALE_BITS);
    cm->superres_scale_denominator += SUPERRES_SCALE_DENOMINATOR_MIN;
    // Don't edit cm->width or cm->height directly, or the buffers won't get
    // resized correctly
    av1_calculate_scaled_superres_size(width, height,
                                       cm->superres_scale_denominator);
  } else {
    // 1:1 scaling - ie. no scaling, scale not provided
    cm->superres_scale_denominator = SCALE_NUMERATOR;
  }
}
#endif  // CONFIG_FRAME_SUPERRES

static void resize_context_buffers(AV1_COMMON *cm, int width, int height) {
#if CONFIG_SIZE_LIMIT
  if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Dimensions of %dx%d beyond allowed size of %dx%d.",
                       width, height, DECODE_WIDTH_LIMIT, DECODE_HEIGHT_LIMIT);
#endif
  if (cm->width != width || cm->height != height) {
    const int new_mi_rows =
        ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
    const int new_mi_cols =
        ALIGN_POWER_OF_TWO(width, MI_SIZE_LOG2) >> MI_SIZE_LOG2;

    // Allocations in av1_alloc_context_buffers() depend on individual
    // dimensions as well as the overall size.
    if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
      if (av1_alloc_context_buffers(cm, width, height))
        aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                           "Failed to allocate context buffers");
    } else {
      av1_set_mb_mi(cm, width, height);
    }
    av1_init_context_buffers(cm);
    cm->width = width;
    cm->height = height;
  }

  ensure_mv_buffer(cm->cur_frame, cm);
  cm->cur_frame->width = cm->width;
  cm->cur_frame->height = cm->height;
}

static void setup_frame_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  int width, height;
  BufferPool *const pool = cm->buffer_pool;
  av1_read_frame_size(rb, &width, &height);
#if CONFIG_FRAME_SUPERRES
  setup_superres(cm, rb, &width, &height);
#endif  // CONFIG_FRAME_SUPERRES
  setup_render_size(cm, rb);
  resize_context_buffers(cm, width, height);

  lock_buffer_pool(pool);
  if (aom_realloc_frame_buffer(
          get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
          cm->subsampling_y,
#if CONFIG_HIGHBITDEPTH
          cm->use_highbitdepth,
#endif
          AOM_BORDER_IN_PIXELS, cm->byte_alignment,
          &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
          pool->cb_priv)) {
    unlock_buffer_pool(pool);
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate frame buffer");
  }
  unlock_buffer_pool(pool);

  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
  pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
  pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
#if CONFIG_COLORSPACE_HEADERS
  pool->frame_bufs[cm->new_fb_idx].buf.transfer_function =
      cm->transfer_function;
  pool->frame_bufs[cm->new_fb_idx].buf.chroma_sample_position =
      cm->chroma_sample_position;
#endif
  pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
  pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
  pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
}

static void setup_sb_size(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
  (void)rb;
#if CONFIG_EXT_PARTITION
  set_sb_size(cm, aom_rb_read_bit(rb) ? BLOCK_128X128 : BLOCK_64X64);
#else
  set_sb_size(cm, BLOCK_64X64);
#endif  // CONFIG_EXT_PARTITION
}

static INLINE int valid_ref_frame_img_fmt(aom_bit_depth_t ref_bit_depth,
                                          int ref_xss, int ref_yss,
                                          aom_bit_depth_t this_bit_depth,
                                          int this_xss, int this_yss) {
  return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
         ref_yss == this_yss;
}

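// For frames that can reference other frames, the frame size may be coded as
// "same as a reference": a per-reference bit selects an existing reference
// whose dimensions and render size are reused; otherwise the size is coded
// explicitly. The resulting size is validated against the references before
// the frame buffer is (re)allocated.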
static void setup_frame_size_with_refs(AV1_COMMON *cm,
                                       struct aom_read_bit_buffer *rb) {
  int width, height;
  int found = 0, i;
  int has_valid_ref_frame = 0;
  BufferPool *const pool = cm->buffer_pool;
  for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
    if (aom_rb_read_bit(rb)) {
      YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf;
      width = buf->y_crop_width;
      height = buf->y_crop_height;
      cm->render_width = buf->render_width;
      cm->render_height = buf->render_height;
#if CONFIG_FRAME_SUPERRES
      setup_superres(cm, rb, &width, &height);
#endif  // CONFIG_FRAME_SUPERRES
      found = 1;
      break;
    }
  }

  if (!found) {
    av1_read_frame_size(rb, &width, &height);
#if CONFIG_FRAME_SUPERRES
    setup_superres(cm, rb, &width, &height);
#endif  // CONFIG_FRAME_SUPERRES
    setup_render_size(cm, rb);
  }

  if (width <= 0 || height <= 0)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Invalid frame size");

  // Check to make sure at least one of frames that this frame references
  // has valid dimensions.
  for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
    RefBuffer *const ref_frame = &cm->frame_refs[i];
    has_valid_ref_frame |=
        valid_ref_frame_size(ref_frame->buf->y_crop_width,
                             ref_frame->buf->y_crop_height, width, height);
  }
  if (!has_valid_ref_frame)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Referenced frame has invalid size");
  for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
    RefBuffer *const ref_frame = &cm->frame_refs[i];
    if (!valid_ref_frame_img_fmt(ref_frame->buf->bit_depth,
                                 ref_frame->buf->subsampling_x,
                                 ref_frame->buf->subsampling_y, cm->bit_depth,
                                 cm->subsampling_x, cm->subsampling_y))
      aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                         "Referenced frame has incompatible color format");
  }

  resize_context_buffers(cm, width, height);

  lock_buffer_pool(pool);
  if (aom_realloc_frame_buffer(
          get_frame_new_buffer(cm), cm->width, cm->height, cm->subsampling_x,
          cm->subsampling_y,
#if CONFIG_HIGHBITDEPTH
          cm->use_highbitdepth,
#endif
          AOM_BORDER_IN_PIXELS, cm->byte_alignment,
          &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
          pool->cb_priv)) {
    unlock_buffer_pool(pool);
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate frame buffer");
  }
  unlock_buffer_pool(pool);

  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
  pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
  pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
#if CONFIG_COLORSPACE_HEADERS
  pool->frame_bufs[cm->new_fb_idx].buf.transfer_function =
      cm->transfer_function;
  pool->frame_bufs[cm->new_fb_idx].buf.chroma_sample_position =
      cm->chroma_sample_position;
#endif
  pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range;
  pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width;
  pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height;
}

static void read_tile_group_range(AV1Decoder *pbi,
                                  struct aom_read_bit_buffer *const rb) {
  AV1_COMMON *const cm = &pbi->common;
  const int num_bits = cm->log2_tile_rows + cm->log2_tile_cols;
  const int num_tiles =
      cm->tile_rows * cm->tile_cols;  // Note: May be < (1<<num_bits)
  pbi->tg_start = aom_rb_read_literal(rb, num_bits);
  pbi->tg_size = 1 + aom_rb_read_literal(rb, num_bits);
  if (pbi->tg_start + pbi->tg_size > num_tiles)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Tile group extends past last tile in frame");
}

#if CONFIG_MAX_TILE

// Same as av1_read_uniform, but reading from the uncompressed header's bit
// buffer (rb) instead of the symbol decoder.
static int rb_read_uniform(struct aom_read_bit_buffer *const rb, int n) {
  const int l = get_unsigned_bits(n);
  const int m = (1 << l) - n;
  const int v = aom_rb_read_literal(rb, l - 1);
  assert(l != 0);
  if (v < m)
    return v;
  else
    return (v << 1) - m + aom_rb_read_literal(rb, 1);
}

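// CONFIG_MAX_TILE tile syntax: uniform_tile_spacing_flag selects between
// power-of-two uniform tile spacing (coded as increments to the log2 tile
// count) and an explicit list of tile column/row sizes in superblock units.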
static void read_tile_info_max_tile(AV1_COMMON *const cm,
                                    struct aom_read_bit_buffer *const rb) {
  int width_mi = ALIGN_POWER_OF_TWO(cm->mi_cols, MAX_MIB_SIZE_LOG2);
  int height_mi = ALIGN_POWER_OF_TWO(cm->mi_rows, MAX_MIB_SIZE_LOG2);
  int width_sb = width_mi >> MAX_MIB_SIZE_LOG2;
  int height_sb = height_mi >> MAX_MIB_SIZE_LOG2;
  int start_sb, size_sb, i;

  av1_get_tile_limits(cm);
  cm->uniform_tile_spacing_flag = aom_rb_read_bit(rb);

  // Read tile columns
  if (cm->uniform_tile_spacing_flag) {
    cm->log2_tile_cols = cm->min_log2_tile_cols;
    while (cm->log2_tile_cols < cm->max_log2_tile_cols) {
      if (!aom_rb_read_bit(rb)) {
        break;
      }
      cm->log2_tile_cols++;
    }
  } else {
    for (i = 0, start_sb = 0; width_sb > 0 && i < MAX_TILE_COLS; i++) {
      size_sb = 1 + rb_read_uniform(rb, AOMMIN(width_sb, MAX_TILE_WIDTH_SB));
      cm->tile_col_start_sb[i] = start_sb;
      start_sb += size_sb;
      width_sb -= size_sb;
    }
    cm->tile_cols = i;
    cm->tile_col_start_sb[i] = start_sb + width_sb;
  }
  av1_calculate_tile_cols(cm);

  // Read tile rows
  if (cm->uniform_tile_spacing_flag) {
    cm->log2_tile_rows = cm->min_log2_tile_rows;
    while (cm->log2_tile_rows < cm->max_log2_tile_rows) {
      if (!aom_rb_read_bit(rb)) {
        break;
      }
      cm->log2_tile_rows++;
    }
  } else {
    for (i = 0, start_sb = 0; height_sb > 0 && i < MAX_TILE_ROWS; i++) {
      size_sb =
          1 + rb_read_uniform(rb, AOMMIN(height_sb, cm->max_tile_height_sb));
      cm->tile_row_start_sb[i] = start_sb;
      start_sb += size_sb;
      height_sb -= size_sb;
    }
    cm->tile_rows = i;
    cm->tile_row_start_sb[i] = start_sb + height_sb;
  }
  av1_calculate_tile_rows(cm);
}
#endif

static void read_tile_info(AV1Decoder *const pbi,
                           struct aom_read_bit_buffer *const rb) {
  AV1_COMMON *const cm = &pbi->common;
#if CONFIG_EXT_TILE
  cm->single_tile_decoding = 0;
  if (cm->large_scale_tile) {
    struct loopfilter *lf = &cm->lf;

    // Figure out single_tile_decoding by loopfilter_level.
    cm->single_tile_decoding = (!lf->filter_level) ? 1 : 0;
// Read the tile width/height
#if CONFIG_EXT_PARTITION
    if (cm->sb_size == BLOCK_128X128) {
      cm->tile_width = aom_rb_read_literal(rb, 5) + 1;
      cm->tile_height = aom_rb_read_literal(rb, 5) + 1;
    } else {
#endif  // CONFIG_EXT_PARTITION
      cm->tile_width = aom_rb_read_literal(rb, 6) + 1;
      cm->tile_height = aom_rb_read_literal(rb, 6) + 1;
#if CONFIG_EXT_PARTITION
    }
#endif  // CONFIG_EXT_PARTITION

    cm->tile_width <<= cm->mib_size_log2;
    cm->tile_height <<= cm->mib_size_log2;

    cm->tile_width = AOMMIN(cm->tile_width, cm->mi_cols);
    cm->tile_height = AOMMIN(cm->tile_height, cm->mi_rows);

    // Get the number of tiles
    cm->tile_cols = 1;
    while (cm->tile_cols * cm->tile_width < cm->mi_cols) ++cm->tile_cols;

    cm->tile_rows = 1;
    while (cm->tile_rows * cm->tile_height < cm->mi_rows) ++cm->tile_rows;

#if CONFIG_DEPENDENT_HORZTILES
    cm->dependent_horz_tiles = 0;
#endif
#if CONFIG_LOOPFILTERING_ACROSS_TILES
    if (cm->tile_cols * cm->tile_rows > 1)
      cm->loop_filter_across_tiles_enabled = aom_rb_read_bit(rb);
    else
      cm->loop_filter_across_tiles_enabled = 1;
#endif  // CONFIG_LOOPFILTERING_ACROSS_TILES

    if (cm->tile_cols * cm->tile_rows > 1) {
      // Read the number of bytes used to store tile size
      pbi->tile_col_size_bytes = aom_rb_read_literal(rb, 2) + 1;
      pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
    }
  } else {
#endif  // CONFIG_EXT_TILE

#if CONFIG_MAX_TILE
    read_tile_info_max_tile(cm, rb);
#else
  int min_log2_tile_cols, max_log2_tile_cols, max_ones;
  av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);

  // columns
  max_ones = max_log2_tile_cols - min_log2_tile_cols;
  cm->log2_tile_cols = min_log2_tile_cols;
  while (max_ones-- && aom_rb_read_bit(rb)) cm->log2_tile_cols++;

  if (cm->log2_tile_cols > 6)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Invalid number of tile columns");

  // rows
  cm->log2_tile_rows = aom_rb_read_bit(rb);
  if (cm->log2_tile_rows) cm->log2_tile_rows += aom_rb_read_bit(rb);

  cm->tile_width =
      get_tile_size(cm->mi_cols, cm->log2_tile_cols, &cm->tile_cols);
  cm->tile_height =
      get_tile_size(cm->mi_rows, cm->log2_tile_rows, &cm->tile_rows);

#endif  // CONFIG_MAX_TILE
#if CONFIG_DEPENDENT_HORZTILES
  if (cm->tile_rows > 1)
    cm->dependent_horz_tiles = aom_rb_read_bit(rb);
  else
    cm->dependent_horz_tiles = 0;
#endif
#if CONFIG_LOOPFILTERING_ACROSS_TILES
  if (cm->tile_cols * cm->tile_rows > 1)
    cm->loop_filter_across_tiles_enabled = aom_rb_read_bit(rb);
  else
    cm->loop_filter_across_tiles_enabled = 1;
#endif  // CONFIG_LOOPFILTERING_ACROSS_TILES

  // tile size magnitude
  pbi->tile_size_bytes = aom_rb_read_literal(rb, 2) + 1;
#if CONFIG_EXT_TILE
  }
#endif  // CONFIG_EXT_TILE

// each tile group header is in its own tile group OBU
#if !CONFIG_OBU
  // Store an index to the location of the tile group information
  pbi->tg_size_bit_offset = rb->bit_offset;
  read_tile_group_range(pbi, rb);
#endif
}

static int mem_get_varsize(const uint8_t *src, int sz) {
  switch (sz) {
    case 1: return src[0];
    case 2: return mem_get_le16(src);
    case 3: return mem_get_le24(src);
    case 4: return mem_get_le32(src);
    default: assert(0 && "Invalid size"); return -1;
  }
}

#if CONFIG_EXT_TILE
// Reads the next tile, returning its size and adjusting '*data' accordingly.
// In tile-copy mode, a tile may instead reuse the data of an earlier tile in
// the same column.
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002100static void get_ls_tile_buffer(
2101 const uint8_t *const data_end, struct aom_internal_error_info *error_info,
2102 const uint8_t **data, aom_decrypt_cb decrypt_cb, void *decrypt_state,
2103 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS], int tile_size_bytes,
2104 int col, int row, int tile_copy_mode) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002105 size_t size;
2106
2107 size_t copy_size = 0;
2108 const uint8_t *copy_data = NULL;
2109
2110 if (!read_is_valid(*data, tile_size_bytes, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07002111 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002112 "Truncated packet or corrupt tile length");
2113 if (decrypt_cb) {
2114 uint8_t be_data[4];
2115 decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
2116
    // Only read the number of bytes given by 'tile_size_bytes'.
2118 size = mem_get_varsize(be_data, tile_size_bytes);
2119 } else {
2120 size = mem_get_varsize(*data, tile_size_bytes);
2121 }
2122
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002123 // If tile_copy_mode = 1, then the top bit of the tile header indicates copy
2124 // mode.
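  // Illustrative example: with tile_size_bytes == 4, a header of 0x83000000
  // has the top bit set and encodes a row offset of 3, i.e. re-use the data
  // of the tile three rows above in the same column.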
2125 if (tile_copy_mode && (size >> (tile_size_bytes * 8 - 1)) == 1) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002126 // The remaining bits in the top byte signal the row offset
2127 int offset = (size >> (tile_size_bytes - 1) * 8) & 0x7f;
2128
2129 // Currently, only use tiles in same column as reference tiles.
2130 copy_data = tile_buffers[row - offset][col].data;
2131 copy_size = tile_buffers[row - offset][col].size;
2132 size = 0;
2133 }
2134
2135 *data += tile_size_bytes;
2136
2137 if (size > (size_t)(data_end - *data))
Yaowu Xuf883b422016-08-30 14:01:10 -07002138 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002139 "Truncated packet or corrupt tile size");
2140
2141 if (size > 0) {
2142 tile_buffers[row][col].data = *data;
2143 tile_buffers[row][col].size = size;
2144 } else {
2145 tile_buffers[row][col].data = copy_data;
2146 tile_buffers[row][col].size = copy_size;
2147 }
2148
2149 *data += size;
2150
2151 tile_buffers[row][col].raw_data_end = *data;
2152}
2153
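// Locates the tile buffers for the large-scale tile layout: per-column sizes
// are read first, then the per-tile sizes within each required column. Only
// the tiles selected by pbi->dec_tile_row / pbi->dec_tile_col are located,
// plus the last (bottom-right) tile, whose end marks the end of the
// compressed frame data.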
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002154static void get_ls_tile_buffers(
Yaowu Xuf883b422016-08-30 14:01:10 -07002155 AV1Decoder *pbi, const uint8_t *data, const uint8_t *data_end,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002156 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS]) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002157 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002158 const int tile_cols = cm->tile_cols;
2159 const int tile_rows = cm->tile_rows;
2160 const int have_tiles = tile_cols * tile_rows > 1;
2161
2162 if (!have_tiles) {
Jingning Han99ffce62017-04-25 15:48:41 -07002163 const size_t tile_size = data_end - data;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002164 tile_buffers[0][0].data = data;
2165 tile_buffers[0][0].size = tile_size;
2166 tile_buffers[0][0].raw_data_end = NULL;
2167 } else {
2168 // We locate only the tile buffers that are required, which are the ones
2169 // specified by pbi->dec_tile_col and pbi->dec_tile_row. Also, we always
2170 // need the last (bottom right) tile buffer, as we need to know where the
2171 // end of the compressed frame buffer is for proper superframe decoding.
2172
2173 const uint8_t *tile_col_data_end[MAX_TILE_COLS];
2174 const uint8_t *const data_start = data;
2175
Yaowu Xuf883b422016-08-30 14:01:10 -07002176 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002177 const int single_row = pbi->dec_tile_row >= 0;
2178 const int tile_rows_start = single_row ? dec_tile_row : 0;
2179 const int tile_rows_end = single_row ? tile_rows_start + 1 : tile_rows;
Yaowu Xuf883b422016-08-30 14:01:10 -07002180 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002181 const int single_col = pbi->dec_tile_col >= 0;
2182 const int tile_cols_start = single_col ? dec_tile_col : 0;
2183 const int tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
2184
2185 const int tile_col_size_bytes = pbi->tile_col_size_bytes;
2186 const int tile_size_bytes = pbi->tile_size_bytes;
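    // Tile copy mode (re-using another tile's data) is only permitted when
    // the largest tile dimension is at most 256 pixels.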
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002187 const int tile_copy_mode =
2188 ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256) ? 1
2189 : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002190 size_t tile_col_size;
2191 int r, c;
2192
2193 // Read tile column sizes for all columns (we need the last tile buffer)
2194 for (c = 0; c < tile_cols; ++c) {
2195 const int is_last = c == tile_cols - 1;
2196 if (!is_last) {
2197 tile_col_size = mem_get_varsize(data, tile_col_size_bytes);
2198 data += tile_col_size_bytes;
2199 tile_col_data_end[c] = data + tile_col_size;
2200 } else {
2201 tile_col_size = data_end - data;
2202 tile_col_data_end[c] = data_end;
2203 }
2204 data += tile_col_size;
2205 }
2206
2207 data = data_start;
2208
2209 // Read the required tile sizes.
2210 for (c = tile_cols_start; c < tile_cols_end; ++c) {
2211 const int is_last = c == tile_cols - 1;
2212
2213 if (c > 0) data = tile_col_data_end[c - 1];
2214
2215 if (!is_last) data += tile_col_size_bytes;
2216
2217 // Get the whole of the last column, otherwise stop at the required tile.
2218 for (r = 0; r < (is_last ? tile_rows : tile_rows_end); ++r) {
2219 tile_buffers[r][c].col = c;
2220
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002221 get_ls_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
2222 pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
2223 tile_size_bytes, c, r, tile_copy_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002224 }
2225 }
2226
2227 // If we have not read the last column, then read it to get the last tile.
2228 if (tile_cols_end != tile_cols) {
2229 c = tile_cols - 1;
2230
2231 data = tile_col_data_end[c - 1];
2232
2233 for (r = 0; r < tile_rows; ++r) {
2234 tile_buffers[r][c].col = c;
2235
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002236 get_ls_tile_buffer(tile_col_data_end[c], &pbi->common.error, &data,
2237 pbi->decrypt_cb, pbi->decrypt_state, tile_buffers,
2238 tile_size_bytes, c, r, tile_copy_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002239 }
2240 }
2241 }
2242}
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002243#endif // CONFIG_EXT_TILE
2244
Yaowu Xuc27fc142016-08-22 16:08:15 -07002245// Reads the next tile returning its size and adjusting '*data' accordingly
2246// based on 'is_last'.
2247static void get_tile_buffer(const uint8_t *const data_end,
2248 const int tile_size_bytes, int is_last,
Yaowu Xuf883b422016-08-30 14:01:10 -07002249 struct aom_internal_error_info *error_info,
2250 const uint8_t **data, aom_decrypt_cb decrypt_cb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002251 void *decrypt_state, TileBufferDec *const buf) {
2252 size_t size;
2253
2254 if (!is_last) {
Yaowu Xu0a79a1b2017-02-17 13:04:54 -08002255 if (!read_is_valid(*data, tile_size_bytes, data_end))
Yaowu Xuf883b422016-08-30 14:01:10 -07002256 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002257 "Truncated packet or corrupt tile length");
2258
2259 if (decrypt_cb) {
2260 uint8_t be_data[4];
2261 decrypt_cb(decrypt_state, *data, be_data, tile_size_bytes);
2262 size = mem_get_varsize(be_data, tile_size_bytes);
2263 } else {
2264 size = mem_get_varsize(*data, tile_size_bytes);
2265 }
2266 *data += tile_size_bytes;
2267
2268 if (size > (size_t)(data_end - *data))
Yaowu Xuf883b422016-08-30 14:01:10 -07002269 aom_internal_error(error_info, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002270 "Truncated packet or corrupt tile size");
2271 } else {
2272 size = data_end - *data;
2273 }
2274
2275 buf->data = *data;
2276 buf->size = size;
2277
2278 *data += size;
2279}
2280
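// Locates the tile buffers for the normal tile layout: walks the tiles in
// raster order, parsing a tile group header (when present) at the start of
// each tile group and a size field before every tile except the final one,
// and records the buffers for the tiles in [startTile, endTile]. When simple
// backward adaptation is enabled, the largest tile seen is also remembered.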
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002281static void get_tile_buffers(AV1Decoder *pbi, const uint8_t *data,
2282 const uint8_t *data_end,
2283 TileBufferDec (*const tile_buffers)[MAX_TILE_COLS],
2284 int startTile, int endTile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002285 AV1_COMMON *const cm = &pbi->common;
Thomas Davies80188d12016-10-26 16:08:35 -07002286 int r, c;
2287 const int tile_cols = cm->tile_cols;
2288 const int tile_rows = cm->tile_rows;
2289 int tc = 0;
2290 int first_tile_in_tg = 0;
Thomas Davies80188d12016-10-26 16:08:35 -07002291 struct aom_read_bit_buffer rb_tg_hdr;
2292 uint8_t clear_data[MAX_AV1_HEADER_SIZE];
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002293#if !CONFIG_OBU
James Zern6efba482017-04-20 20:53:49 -07002294 const size_t hdr_size = pbi->uncomp_hdr_size + pbi->first_partition_size;
Thomas Davies80188d12016-10-26 16:08:35 -07002295 const int tg_size_bit_offset = pbi->tg_size_bit_offset;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002296#else
2297 const int tg_size_bit_offset = 0;
2298#endif
2299
Fangwen Fu73126c02017-02-08 22:37:47 -08002300#if CONFIG_DEPENDENT_HORZTILES
2301 int tile_group_start_col = 0;
2302 int tile_group_start_row = 0;
2303#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002304
Thomas Davies4822e142017-10-10 11:30:36 +01002305#if CONFIG_SIMPLE_BWD_ADAPT
2306 size_t max_tile_size = 0;
2307 cm->largest_tile_id = 0;
2308#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002309 for (r = 0; r < tile_rows; ++r) {
2310 for (c = 0; c < tile_cols; ++c, ++tc) {
Thomas Davies80188d12016-10-26 16:08:35 -07002311 TileBufferDec *const buf = &tile_buffers[r][c];
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002312#if CONFIG_OBU
2313 const int is_last = (tc == endTile);
2314 const size_t hdr_offset = 0;
2315#else
Thomas Daviesa0de6d52017-01-20 14:45:25 +00002316 const int is_last = (r == tile_rows - 1) && (c == tile_cols - 1);
James Zern6efba482017-04-20 20:53:49 -07002317 const size_t hdr_offset = (tc && tc == first_tile_in_tg) ? hdr_size : 0;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002318#endif
2319
2320 if (tc < startTile || tc > endTile) continue;
Thomas Davies80188d12016-10-26 16:08:35 -07002321
Rupert Swarbrickcd757392017-09-01 13:57:53 +01002322 if (data + hdr_offset >= data_end)
2323 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2324 "Data ended before all tiles were read.");
Thomas Davies80188d12016-10-26 16:08:35 -07002325 buf->col = c;
2326 if (hdr_offset) {
2327 init_read_bit_buffer(pbi, &rb_tg_hdr, data, data_end, clear_data);
2328 rb_tg_hdr.bit_offset = tg_size_bit_offset;
David Barker1a191122017-09-06 15:24:16 +01002329 read_tile_group_range(pbi, &rb_tg_hdr);
Fangwen Fu73126c02017-02-08 22:37:47 -08002330#if CONFIG_DEPENDENT_HORZTILES
David Barker1a191122017-09-06 15:24:16 +01002331 tile_group_start_row = r;
2332 tile_group_start_col = c;
Fangwen Fu73126c02017-02-08 22:37:47 -08002333#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002334 }
2335 first_tile_in_tg += tc == first_tile_in_tg ? pbi->tg_size : 0;
2336 data += hdr_offset;
Thomas Daviesa0de6d52017-01-20 14:45:25 +00002337 get_tile_buffer(data_end, pbi->tile_size_bytes, is_last,
2338 &pbi->common.error, &data, pbi->decrypt_cb,
2339 pbi->decrypt_state, buf);
Fangwen Fu73126c02017-02-08 22:37:47 -08002340#if CONFIG_DEPENDENT_HORZTILES
2341 cm->tile_group_start_row[r][c] = tile_group_start_row;
2342 cm->tile_group_start_col[r][c] = tile_group_start_col;
2343#endif
Thomas Davies4822e142017-10-10 11:30:36 +01002344#if CONFIG_SIMPLE_BWD_ADAPT
2345 if (buf->size > max_tile_size) {
2346 max_tile_size = buf->size;
2347 cm->largest_tile_id = r * tile_cols + c;
2348 }
2349#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002350 }
2351 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002352}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002353
David Barker5c06a642017-08-18 13:18:16 +01002354#if CONFIG_LOOPFILTERING_ACROSS_TILES
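// Records tile boundary information for the given tile so that, when
// filtering across tile boundaries is disabled, the loop filters can skip
// edges that coincide with tile borders. Degenerate (empty) tiles are
// ignored.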
Yi Luo10e23002017-07-31 11:54:43 -07002355static void dec_setup_across_tile_boundary_info(
2356 const AV1_COMMON *const cm, const TileInfo *const tile_info) {
Frederic Barbier94e38562017-08-16 14:38:48 +02002357 if (tile_info->mi_row_start >= tile_info->mi_row_end ||
2358 tile_info->mi_col_start >= tile_info->mi_col_end)
2359 return;
2360
David Barker5c06a642017-08-18 13:18:16 +01002361 if (!cm->loop_filter_across_tiles_enabled) {
Yi Luo10e23002017-07-31 11:54:43 -07002362 av1_setup_across_tile_boundary_info(cm, tile_info);
2363 }
2364}
David Barker5c06a642017-08-18 13:18:16 +01002365#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07002366
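// Decodes all tiles in [startTile, endTile]: locates the tile buffers, sets
// up per-tile decoding contexts, decodes every superblock of each required
// tile, applies loop filtering (per superblock or per frame depending on
// configuration), and returns a pointer to the end of the parsed tile data.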
Yaowu Xuf883b422016-08-30 14:01:10 -07002367static const uint8_t *decode_tiles(AV1Decoder *pbi, const uint8_t *data,
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002368 const uint8_t *data_end, int startTile,
2369 int endTile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002370 AV1_COMMON *const cm = &pbi->common;
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002371#if !CONFIG_LOOPFILTER_LEVEL
Yaowu Xuf883b422016-08-30 14:01:10 -07002372 const AVxWorkerInterface *const winterface = aom_get_worker_interface();
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002373#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002374 const int tile_cols = cm->tile_cols;
2375 const int tile_rows = cm->tile_rows;
2376 const int n_tiles = tile_cols * tile_rows;
clang-format67948d32016-09-07 22:40:40 -07002377 TileBufferDec(*const tile_buffers)[MAX_TILE_COLS] = pbi->tile_buffers;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002378#if CONFIG_EXT_TILE
Yaowu Xuf883b422016-08-30 14:01:10 -07002379 const int dec_tile_row = AOMMIN(pbi->dec_tile_row, tile_rows);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002380 const int single_row = pbi->dec_tile_row >= 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002381 const int dec_tile_col = AOMMIN(pbi->dec_tile_col, tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002382 const int single_col = pbi->dec_tile_col >= 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002383#endif // CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002384 int tile_rows_start;
2385 int tile_rows_end;
2386 int tile_cols_start;
2387 int tile_cols_end;
2388 int inv_col_order;
2389 int inv_row_order;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002390 int tile_row, tile_col;
2391
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002392#if CONFIG_EXT_TILE
2393 if (cm->large_scale_tile) {
2394 tile_rows_start = single_row ? dec_tile_row : 0;
2395 tile_rows_end = single_row ? dec_tile_row + 1 : tile_rows;
2396 tile_cols_start = single_col ? dec_tile_col : 0;
2397 tile_cols_end = single_col ? tile_cols_start + 1 : tile_cols;
2398 inv_col_order = pbi->inv_tile_order && !single_col;
2399 inv_row_order = pbi->inv_tile_order && !single_row;
2400 } else {
2401#endif // CONFIG_EXT_TILE
2402 tile_rows_start = 0;
2403 tile_rows_end = tile_rows;
2404 tile_cols_start = 0;
2405 tile_cols_end = tile_cols;
2406 inv_col_order = pbi->inv_tile_order;
2407 inv_row_order = pbi->inv_tile_order;
2408#if CONFIG_EXT_TILE
2409 }
2410#endif // CONFIG_EXT_TILE
2411
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002412#if !CONFIG_LOOPFILTER_LEVEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07002413 if (cm->lf.filter_level && !cm->skip_loop_filter &&
2414 pbi->lf_worker.data1 == NULL) {
2415 CHECK_MEM_ERROR(cm, pbi->lf_worker.data1,
Yaowu Xuf883b422016-08-30 14:01:10 -07002416 aom_memalign(32, sizeof(LFWorkerData)));
2417 pbi->lf_worker.hook = (AVxWorkerHook)av1_loop_filter_worker;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002418 if (pbi->max_threads > 1 && !winterface->reset(&pbi->lf_worker)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002419 aom_internal_error(&cm->error, AOM_CODEC_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002420 "Loop filter thread creation failed");
2421 }
2422 }
2423
2424 if (cm->lf.filter_level && !cm->skip_loop_filter) {
2425 LFWorkerData *const lf_data = (LFWorkerData *)pbi->lf_worker.data1;
2426 // Be sure to sync as we might be resuming after a failed frame decode.
2427 winterface->sync(&pbi->lf_worker);
Yaowu Xuf883b422016-08-30 14:01:10 -07002428 av1_loop_filter_data_reset(lf_data, get_frame_new_buffer(cm), cm,
2429 pbi->mb.plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002430 }
Cheng Chen9ac7a0f2017-10-17 20:36:46 -07002431#endif // CONFIG_LOOPFILTER_LEVEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07002432
2433 assert(tile_rows <= MAX_TILE_ROWS);
2434 assert(tile_cols <= MAX_TILE_COLS);
2435
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002436#if CONFIG_EXT_TILE
2437 if (cm->large_scale_tile)
2438 get_ls_tile_buffers(pbi, data, data_end, tile_buffers);
2439 else
2440#endif // CONFIG_EXT_TILE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002441 get_tile_buffers(pbi, data, data_end, tile_buffers, startTile, endTile);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002442
2443 if (pbi->tile_data == NULL || n_tiles != pbi->allocated_tiles) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002444 aom_free(pbi->tile_data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002445 CHECK_MEM_ERROR(cm, pbi->tile_data,
Yaowu Xuf883b422016-08-30 14:01:10 -07002446 aom_memalign(32, n_tiles * (sizeof(*pbi->tile_data))));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002447 pbi->allocated_tiles = n_tiles;
2448 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002449#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002450 if (pbi->acct_enabled) {
2451 aom_accounting_reset(&pbi->accounting);
2452 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002453#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002454 // Load all tile information into tile_data.
2455 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
2456 for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
2457 const TileBufferDec *const buf = &tile_buffers[tile_row][tile_col];
2458 TileData *const td = pbi->tile_data + tile_cols * tile_row + tile_col;
2459
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002460 if (tile_row * cm->tile_cols + tile_col < startTile ||
2461 tile_row * cm->tile_cols + tile_col > endTile)
2462 continue;
2463
Yaowu Xuc27fc142016-08-22 16:08:15 -07002464 td->cm = cm;
2465 td->xd = pbi->mb;
2466 td->xd.corrupted = 0;
2467 td->xd.counts =
2468 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD
2469 ? &cm->counts
2470 : NULL;
Yaowu Xuf883b422016-08-30 14:01:10 -07002471 av1_zero(td->dqcoeff);
2472 av1_tile_init(&td->xd.tile, td->cm, tile_row, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002473 setup_bool_decoder(buf->data, data_end, buf->size, &cm->error,
Alex Converseeb780e72016-12-13 12:46:41 -08002474 &td->bit_reader,
2475#if CONFIG_ANS && ANS_MAX_SYMBOLS
2476 1 << cm->ans_window_size_log2,
2477#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
2478 pbi->decrypt_cb, pbi->decrypt_state);
Michael Bebenita6048d052016-08-25 14:40:54 -07002479#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002480 if (pbi->acct_enabled) {
David Barkerd971f402016-10-25 13:52:07 +01002481 td->bit_reader.accounting = &pbi->accounting;
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002482 } else {
David Barkerd971f402016-10-25 13:52:07 +01002483 td->bit_reader.accounting = NULL;
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002484 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002485#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07002486 av1_init_macroblockd(cm, &td->xd,
Luc Trudeauf8164152017-04-11 16:20:51 -04002487#if CONFIG_CFL
2488 &td->cfl,
2489#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07002490 td->dqcoeff);
Yushin Choc49ef3a2017-03-13 17:27:25 -07002491
Thomas Daviesf77d4ad2017-01-10 18:55:42 +00002492 // Initialise the tile context from the frame context
2493 td->tctx = *cm->fc;
2494 td->xd.tile_ctx = &td->tctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002495 td->xd.plane[0].color_index_map = td->color_index_map[0];
2496 td->xd.plane[1].color_index_map = td->color_index_map[1];
Sarah Parker5c6744b2017-08-25 17:27:45 -07002497#if CONFIG_MRC_TX
2498 td->xd.mrc_mask = td->mrc_mask;
2499#endif // CONFIG_MRC_TX
Yaowu Xuc27fc142016-08-22 16:08:15 -07002500 }
2501 }
2502
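  // Decode the required tiles: the above context is cleared at the start of
  // each tile and the left context at the start of each superblock row, then
  // every superblock in the tile is decoded (and, depending on configuration,
  // loop filtered) in raster order.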
2503 for (tile_row = tile_rows_start; tile_row < tile_rows_end; ++tile_row) {
2504 const int row = inv_row_order ? tile_rows - 1 - tile_row : tile_row;
2505 int mi_row = 0;
2506 TileInfo tile_info;
2507
Yaowu Xuf883b422016-08-30 14:01:10 -07002508 av1_tile_set_row(&tile_info, cm, row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002509
2510 for (tile_col = tile_cols_start; tile_col < tile_cols_end; ++tile_col) {
2511 const int col = inv_col_order ? tile_cols - 1 - tile_col : tile_col;
2512 TileData *const td = pbi->tile_data + tile_cols * row + col;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002513
2514 if (tile_row * cm->tile_cols + tile_col < startTile ||
2515 tile_row * cm->tile_cols + tile_col > endTile)
2516 continue;
2517
Michael Bebenita6048d052016-08-25 14:40:54 -07002518#if CONFIG_ACCOUNTING
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002519 if (pbi->acct_enabled) {
David Barkerd971f402016-10-25 13:52:07 +01002520 td->bit_reader.accounting->last_tell_frac =
2521 aom_reader_tell_frac(&td->bit_reader);
Nathan E. Eggeeb64fc22016-10-05 19:33:48 -04002522 }
Michael Bebenita6048d052016-08-25 14:40:54 -07002523#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002524
Yaowu Xuf883b422016-08-30 14:01:10 -07002525 av1_tile_set_col(&tile_info, cm, col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002526
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002527#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu73126c02017-02-08 22:37:47 -08002528 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
2529 if (!cm->dependent_horz_tiles || tile_row == 0 ||
2530 tile_info.tg_horz_boundary) {
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002531 av1_zero_above_context(cm, tile_info.mi_col_start,
2532 tile_info.mi_col_end);
2533 }
2534#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002535 av1_zero_above_context(cm, tile_info.mi_col_start, tile_info.mi_col_end);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002536#endif
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002537#if CONFIG_LOOP_RESTORATION
2538 for (int p = 0; p < MAX_MB_PLANE; ++p) {
2539 set_default_wiener(td->xd.wiener_info + p);
2540 set_default_sgrproj(td->xd.sgrproj_info + p);
2541 }
2542#endif // CONFIG_LOOP_RESTORATION
Yaowu Xuc27fc142016-08-22 16:08:15 -07002543
David Barker5c06a642017-08-18 13:18:16 +01002544#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luo10e23002017-07-31 11:54:43 -07002545 dec_setup_across_tile_boundary_info(cm, &tile_info);
David Barker5c06a642017-08-18 13:18:16 +01002546#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yi Luof190a162017-07-13 16:16:56 -07002547
Yaowu Xuc27fc142016-08-22 16:08:15 -07002548 for (mi_row = tile_info.mi_row_start; mi_row < tile_info.mi_row_end;
2549 mi_row += cm->mib_size) {
2550 int mi_col;
2551
Yaowu Xuf883b422016-08-30 14:01:10 -07002552 av1_zero_left_context(&td->xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002553
2554 for (mi_col = tile_info.mi_col_start; mi_col < tile_info.mi_col_end;
2555 mi_col += cm->mib_size) {
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07002556#if CONFIG_NCOBMC_ADAPT_WEIGHT
2557 alloc_ncobmc_pred_buffer(&td->xd);
2558 set_sb_mi_boundaries(cm, &td->xd, mi_row, mi_col);
2559#endif
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02002560 decode_partition(pbi, &td->xd, mi_row, mi_col, &td->bit_reader,
2561 cm->sb_size);
Wei-Ting Lin3122b7d2017-08-30 17:26:58 -07002562#if NC_MODE_INFO && CONFIG_MOTION_VAR
Yue Chen9ab6d712017-01-12 15:50:46 -08002563 detoken_and_recon_sb(pbi, &td->xd, mi_row, mi_col, &td->bit_reader,
2564 cm->sb_size);
2565#endif
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07002566#if CONFIG_NCOBMC_ADAPT_WEIGHT
2567 free_ncobmc_pred_buffer(&td->xd);
2568#endif
Cheng Chen5ad5b282017-10-05 16:36:06 -07002569#if CONFIG_LPF_SB
2570 if (USE_LOOP_FILTER_SUPERBLOCK) {
2571 // apply deblocking filtering right after each superblock is decoded
2572 const int guess_filter_lvl = FAKE_FILTER_LEVEL;
2573 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2574 guess_filter_lvl, 0, 1, mi_row, mi_col);
2575 }
2576#endif // CONFIG_LPF_SB
Yaowu Xuc27fc142016-08-22 16:08:15 -07002577 }
Angie Chiangd0916d92017-03-10 17:54:18 -08002578 aom_merge_corrupted_flag(&pbi->mb.corrupted, td->xd.corrupted);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002579 if (pbi->mb.corrupted)
Yaowu Xuf883b422016-08-30 14:01:10 -07002580 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002581 "Failed to decode tile data");
Yaowu Xuc27fc142016-08-22 16:08:15 -07002582 }
2583 }
2584
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002585#if !CONFIG_OBU
Yaowu Xuc27fc142016-08-22 16:08:15 -07002586 assert(mi_row > 0);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002587#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002588
    // After loop filtering, the last 7 rows of pixels in each superblock row
    // may still be changed by the longest loop filter of the next superblock
    // row.
2591 if (cm->frame_parallel_decode)
Yaowu Xuf883b422016-08-30 14:01:10 -07002592 av1_frameworker_broadcast(pbi->cur_buf, mi_row << cm->mib_size_log2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002593 }
2594
Cheng Chen5ad5b282017-10-05 16:36:06 -07002595#if CONFIG_INTRABC
2596// When intraBC is on, do loop filtering per superblock,
2597// instead of do it after the whole frame has been encoded,
2598// as is in the else branch
2599#else
Cheng Chene94df5c2017-07-19 17:25:33 -07002600// Loopfilter the whole frame.
Cheng Chenf572cd32017-08-25 18:34:51 -07002601#if CONFIG_LPF_SB
2602 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2603 cm->lf.filter_level, 0, 0, 0, 0);
2604#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002605#if CONFIG_OBU
2606 if (endTile == cm->tile_rows * cm->tile_cols - 1)
2607#endif
David Barker3dffa272017-10-18 17:07:26 +01002608#if CONFIG_LOOPFILTER_LEVEL
2609 if (cm->lf.filter_level[0] || cm->lf.filter_level[1]) {
2610 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2611 cm->lf.filter_level[0], cm->lf.filter_level[1], 0,
2612 0);
2613 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2614 cm->lf.filter_level_u, cm->lf.filter_level_u, 1, 0);
2615 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2616 cm->lf.filter_level_v, cm->lf.filter_level_v, 2, 0);
2617 }
2618#else
2619 av1_loop_filter_frame(get_frame_new_buffer(cm), cm, &pbi->mb,
2620 cm->lf.filter_level, 0, 0);
Cheng Chen13fc8192017-08-19 11:49:28 -07002621#endif // CONFIG_LOOPFILTER_LEVEL
Cheng Chenf572cd32017-08-25 18:34:51 -07002622#endif // CONFIG_LPF_SB
Cheng Chen5ad5b282017-10-05 16:36:06 -07002623#endif // CONFIG_INTRABC
Yaowu Xuc27fc142016-08-22 16:08:15 -07002624 if (cm->frame_parallel_decode)
Yaowu Xuf883b422016-08-30 14:01:10 -07002625 av1_frameworker_broadcast(pbi->cur_buf, INT_MAX);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002626
2627#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002628 if (cm->large_scale_tile) {
2629 if (n_tiles == 1) {
2630#if CONFIG_ANS
2631 return data_end;
2632#else
2633 // Find the end of the single tile buffer
2634 return aom_reader_find_end(&pbi->tile_data->bit_reader);
2635#endif // CONFIG_ANS
2636 } else {
2637 // Return the end of the last tile buffer
2638 return tile_buffers[tile_rows - 1][tile_cols - 1].raw_data_end;
2639 }
2640 } else {
2641#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002642#if CONFIG_ANS
2643 return data_end;
2644#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002645#if !CONFIG_OBU
Yaowu Xuc27fc142016-08-22 16:08:15 -07002646 {
2647 // Get last tile data.
2648 TileData *const td = pbi->tile_data + tile_cols * tile_rows - 1;
Yaowu Xuf883b422016-08-30 14:01:10 -07002649 return aom_reader_find_end(&td->bit_reader);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002650 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002651#else
2652 TileData *const td = pbi->tile_data + endTile;
2653 return aom_reader_find_end(&td->bit_reader);
2654#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002655#endif // CONFIG_ANS
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002656#if CONFIG_EXT_TILE
2657 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002658#endif // CONFIG_EXT_TILE
2659}
2660
Yaowu Xuc27fc142016-08-22 16:08:15 -07002661static void error_handler(void *data) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002662 AV1_COMMON *const cm = (AV1_COMMON *)data;
2663 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME, "Truncated packet");
Yaowu Xuc27fc142016-08-22 16:08:15 -07002664}
2665
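// Reads the bit depth, color space, color range and chroma subsampling from
// the uncompressed header, enforcing the subsampling combinations allowed by
// each profile.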
Yaowu Xuf883b422016-08-30 14:01:10 -07002666static void read_bitdepth_colorspace_sampling(AV1_COMMON *cm,
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02002667 struct aom_read_bit_buffer *rb,
2668 int allow_lowbitdepth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002669 if (cm->profile >= PROFILE_2) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002670 cm->bit_depth = aom_rb_read_bit(rb) ? AOM_BITS_12 : AOM_BITS_10;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002671 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002672 cm->bit_depth = AOM_BITS_8;
Sebastien Alaiwan98378132017-01-04 11:23:09 +01002673 }
2674
Sebastien Alaiwan71e87842017-04-12 16:03:28 +02002675#if CONFIG_HIGHBITDEPTH
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02002676 cm->use_highbitdepth = cm->bit_depth > AOM_BITS_8 || !allow_lowbitdepth;
James Zern91adea52017-06-15 23:27:26 -07002677#else
2678 (void)allow_lowbitdepth;
Sebastien Alaiwan98378132017-01-04 11:23:09 +01002679#endif
anorkin76fb1262017-03-22 15:12:12 -07002680#if CONFIG_COLORSPACE_HEADERS
2681 cm->color_space = aom_rb_read_literal(rb, 5);
2682 cm->transfer_function = aom_rb_read_literal(rb, 5);
2683#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002684 cm->color_space = aom_rb_read_literal(rb, 3);
anorkin76fb1262017-03-22 15:12:12 -07002685#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07002686 if (cm->color_space != AOM_CS_SRGB) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002687 // [16,235] (including xvycc) vs [0,255] range
Yaowu Xuf883b422016-08-30 14:01:10 -07002688 cm->color_range = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002689 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002690 cm->subsampling_x = aom_rb_read_bit(rb);
2691 cm->subsampling_y = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002692 if (cm->subsampling_x == 1 && cm->subsampling_y == 1)
Yaowu Xuf883b422016-08-30 14:01:10 -07002693 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002694 "4:2:0 color not supported in profile 1 or 3");
Yaowu Xuf883b422016-08-30 14:01:10 -07002695 if (aom_rb_read_bit(rb))
2696 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002697 "Reserved bit set");
2698 } else {
2699 cm->subsampling_y = cm->subsampling_x = 1;
2700 }
anorkin76fb1262017-03-22 15:12:12 -07002701#if CONFIG_COLORSPACE_HEADERS
2702 if (cm->subsampling_x == 1 && cm->subsampling_y == 1) {
2703 cm->chroma_sample_position = aom_rb_read_literal(rb, 2);
2704 }
2705#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002706 } else {
2707 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
2708 // Note if colorspace is SRGB then 4:4:4 chroma sampling is assumed.
2709 // 4:2:2 or 4:4:0 chroma sampling is not allowed.
2710 cm->subsampling_y = cm->subsampling_x = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002711 if (aom_rb_read_bit(rb))
2712 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002713 "Reserved bit set");
2714 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002715 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002716 "4:4:4 color not supported in profile 0 or 2");
2717 }
2718 }
2719}
2720
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002721#if CONFIG_REFERENCE_BUFFER
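// Reads the (currently minimal) sequence header fields: whether frame id
// numbers are present and, if so, the lengths used to code frame ids and
// frame id deltas.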
David Barker5e70a112017-10-03 14:28:17 +01002722void read_sequence_header(SequenceHeader *seq_params,
2723 struct aom_read_bit_buffer *rb) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002724 /* Placeholder for actually reading from the bitstream */
David Barker5e70a112017-10-03 14:28:17 +01002725 seq_params->frame_id_numbers_present_flag = aom_rb_read_bit(rb);
2726 if (seq_params->frame_id_numbers_present_flag) {
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02002727 // We must always have delta_frame_id_length < frame_id_length,
2728 // in order for a frame to be referenced with a unique delta.
2729 // Avoid wasting bits by using a coding that enforces this restriction.
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02002730 seq_params->delta_frame_id_length = aom_rb_read_literal(rb, 4) + 2;
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02002731 seq_params->frame_id_length =
2732 aom_rb_read_literal(rb, 3) + seq_params->delta_frame_id_length + 1;
David Barker5e70a112017-10-03 14:28:17 +01002733 }
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002734}
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002735#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002736
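// Reads the frame-level flags enabling the compound prediction tools
// (inter-intra and masked compound). Each flag is only coded when the frame
// type and reference mode allow the tool, and defaults to 0 otherwise.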
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002737static void read_compound_tools(AV1_COMMON *cm,
2738 struct aom_read_bit_buffer *rb) {
2739 (void)cm;
2740 (void)rb;
2741#if CONFIG_INTERINTRA
2742 if (!frame_is_intra_only(cm) && cm->reference_mode != COMPOUND_REFERENCE) {
2743 cm->allow_interintra_compound = aom_rb_read_bit(rb);
2744 } else {
2745 cm->allow_interintra_compound = 0;
2746 }
2747#endif // CONFIG_INTERINTRA
2748#if CONFIG_WEDGE || CONFIG_COMPOUND_SEGMENT
Zoe Liu85b66462017-04-20 14:28:19 -07002749#if CONFIG_COMPOUND_SINGLEREF
2750 if (!frame_is_intra_only(cm)) {
2751#else // !CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002752 if (!frame_is_intra_only(cm) && cm->reference_mode != SINGLE_REFERENCE) {
Zoe Liu85b66462017-04-20 14:28:19 -07002753#endif // CONFIG_COMPOUND_SINGLEREF
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002754 cm->allow_masked_compound = aom_rb_read_bit(rb);
2755 } else {
2756 cm->allow_masked_compound = 0;
2757 }
2758#endif // CONFIG_WEDGE || CONFIG_COMPOUND_SEGMENT
2759}
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002760
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07002761#if CONFIG_VAR_REFS
2762static void check_valid_ref_frames(AV1_COMMON *cm) {
2763 MV_REFERENCE_FRAME ref_frame;
  // TODO(zoeliu): Handle ALTREF_FRAME the same way as the other reference
  // frames: the current encoder invalidates ALTREF when it is the same as
  // LAST, but invalidates all the other references when they are the same as
  // ALTREF.
2768 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
2769 RefBuffer *const ref_buf = &cm->frame_refs[ref_frame - LAST_FRAME];
2770
2771 if (ref_buf->idx != INVALID_IDX) {
2772 ref_buf->is_valid = 1;
2773
2774 MV_REFERENCE_FRAME ref;
2775 for (ref = LAST_FRAME; ref < ref_frame; ++ref) {
2776 RefBuffer *const buf = &cm->frame_refs[ref - LAST_FRAME];
2777 if (buf->is_valid && buf->idx == ref_buf->idx) {
2778 if (ref_frame != ALTREF_FRAME || ref == LAST_FRAME) {
2779 ref_buf->is_valid = 0;
2780 break;
2781 } else {
2782 buf->is_valid = 0;
2783 }
2784 }
2785 }
2786 } else {
2787 ref_buf->is_valid = 0;
2788 }
2789 }
2790}
2791#endif // CONFIG_VAR_REFS
2792
Sarah Parker3e579a62017-08-23 16:53:20 -07002793#if CONFIG_GLOBAL_MOTION
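// Reads the global motion model for one reference frame. The model type is
// signalled first; the nonzero parameters are then coded as sub-exponential
// residuals relative to 'ref_params' and rescaled to warped-model precision.
// Returns 0 if the resulting model has invalid shear parameters.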
2794static int read_global_motion_params(WarpedMotionParams *params,
David Barkerd7c8bd52017-09-25 14:47:29 +01002795 const WarpedMotionParams *ref_params,
Sarah Parker3e579a62017-08-23 16:53:20 -07002796 struct aom_read_bit_buffer *rb,
2797 int allow_hp) {
2798 TransformationType type = aom_rb_read_bit(rb);
2799 if (type != IDENTITY) {
2800#if GLOBAL_TRANS_TYPES > 4
2801 type += aom_rb_read_literal(rb, GLOBAL_TYPE_BITS);
2802#else
2803 if (aom_rb_read_bit(rb))
2804 type = ROTZOOM;
2805 else
2806 type = aom_rb_read_bit(rb) ? TRANSLATION : AFFINE;
2807#endif // GLOBAL_TRANS_TYPES > 4
2808 }
2809
2810 int trans_bits;
2811 int trans_dec_factor;
2812 int trans_prec_diff;
David Barkerd7c8bd52017-09-25 14:47:29 +01002813 *params = default_warp_params;
Sarah Parker3e579a62017-08-23 16:53:20 -07002814 params->wmtype = type;
2815 switch (type) {
Sarah Parker3e579a62017-08-23 16:53:20 -07002816 case AFFINE:
2817 case ROTZOOM:
2818 params->wmmat[2] = aom_rb_read_signed_primitive_refsubexpfin(
2819 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2820 (ref_params->wmmat[2] >> GM_ALPHA_PREC_DIFF) -
2821 (1 << GM_ALPHA_PREC_BITS)) *
2822 GM_ALPHA_DECODE_FACTOR +
2823 (1 << WARPEDMODEL_PREC_BITS);
Debargha Mukherjee1a2b35f2017-10-21 10:41:46 -07002824 params->wmmat[3] = aom_rb_read_signed_primitive_refsubexpfin(
2825 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2826 (ref_params->wmmat[3] >> GM_ALPHA_PREC_DIFF)) *
2827 GM_ALPHA_DECODE_FACTOR;
Sarah Parker3e579a62017-08-23 16:53:20 -07002828 if (type >= AFFINE) {
Debargha Mukherjee1a2b35f2017-10-21 10:41:46 -07002829 params->wmmat[4] = aom_rb_read_signed_primitive_refsubexpfin(
2830 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2831 (ref_params->wmmat[4] >> GM_ALPHA_PREC_DIFF)) *
2832 GM_ALPHA_DECODE_FACTOR;
Sarah Parker3e579a62017-08-23 16:53:20 -07002833 params->wmmat[5] = aom_rb_read_signed_primitive_refsubexpfin(
2834 rb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
2835 (ref_params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
2836 (1 << GM_ALPHA_PREC_BITS)) *
2837 GM_ALPHA_DECODE_FACTOR +
2838 (1 << WARPEDMODEL_PREC_BITS);
2839 } else {
2840 params->wmmat[4] = -params->wmmat[3];
2841 params->wmmat[5] = params->wmmat[2];
2842 }
2843 // fallthrough intended
2844 case TRANSLATION:
2845 trans_bits = (type == TRANSLATION) ? GM_ABS_TRANS_ONLY_BITS - !allow_hp
2846 : GM_ABS_TRANS_BITS;
2847 trans_dec_factor = (type == TRANSLATION)
2848 ? GM_TRANS_ONLY_DECODE_FACTOR * (1 << !allow_hp)
2849 : GM_TRANS_DECODE_FACTOR;
2850 trans_prec_diff = (type == TRANSLATION)
2851 ? GM_TRANS_ONLY_PREC_DIFF + !allow_hp
2852 : GM_TRANS_PREC_DIFF;
2853 params->wmmat[0] = aom_rb_read_signed_primitive_refsubexpfin(
2854 rb, (1 << trans_bits) + 1, SUBEXPFIN_K,
2855 (ref_params->wmmat[0] >> trans_prec_diff)) *
2856 trans_dec_factor;
2857 params->wmmat[1] = aom_rb_read_signed_primitive_refsubexpfin(
2858 rb, (1 << trans_bits) + 1, SUBEXPFIN_K,
2859 (ref_params->wmmat[1] >> trans_prec_diff)) *
2860 trans_dec_factor;
2861 case IDENTITY: break;
2862 default: assert(0);
2863 }
2864 if (params->wmtype <= AFFINE) {
2865 int good_shear_params = get_shear_params(params);
2866 if (!good_shear_params) return 0;
2867 }
2868
2869 return 1;
2870}
2871
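// Reads the global motion parameters for every inter reference frame, using
// the previous frame's parameters as predictors (or the defaults in error
// resilient mode), and copies the result into the current frame buffer so
// that later frames can in turn predict from it.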
2872static void read_global_motion(AV1_COMMON *cm, struct aom_read_bit_buffer *rb) {
2873 int frame;
2874 for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
David Barkerd7c8bd52017-09-25 14:47:29 +01002875 const WarpedMotionParams *ref_params =
2876 cm->error_resilient_mode ? &default_warp_params
2877 : &cm->prev_frame->global_motion[frame];
Sarah Parker3e579a62017-08-23 16:53:20 -07002878 int good_params = read_global_motion_params(
David Barkerd7c8bd52017-09-25 14:47:29 +01002879 &cm->global_motion[frame], ref_params, rb, cm->allow_high_precision_mv);
Sarah Parker3e579a62017-08-23 16:53:20 -07002880 if (!good_params)
2881 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2882 "Invalid shear parameters for global motion.");
2883
2884 // TODO(sarahparker, debargha): The logic in the commented out code below
2885 // does not work currently and causes mismatches when resize is on. Fix it
2886 // before turning the optimization back on.
2887 /*
2888 YV12_BUFFER_CONFIG *ref_buf = get_ref_frame(cm, frame);
2889 if (cm->width == ref_buf->y_crop_width &&
2890 cm->height == ref_buf->y_crop_height) {
2891 read_global_motion_params(&cm->global_motion[frame],
2892 &cm->prev_frame->global_motion[frame], rb,
2893 cm->allow_high_precision_mv);
2894 } else {
David Barkerd7c8bd52017-09-25 14:47:29 +01002895 cm->global_motion[frame] = default_warp_params;
Sarah Parker3e579a62017-08-23 16:53:20 -07002896 }
2897 */
2898 /*
2899 printf("Dec Ref %d [%d/%d]: %d %d %d %d\n",
2900 frame, cm->current_video_frame, cm->show_frame,
2901 cm->global_motion[frame].wmmat[0],
2902 cm->global_motion[frame].wmmat[1],
2903 cm->global_motion[frame].wmmat[2],
2904 cm->global_motion[frame].wmmat[3]);
2905 */
2906 }
David Barkercba7da72017-09-14 11:24:27 +01002907 memcpy(cm->cur_frame->global_motion, cm->global_motion,
2908 TOTAL_REFS_PER_FRAME * sizeof(WarpedMotionParams));
Sarah Parker3e579a62017-08-23 16:53:20 -07002909}
2910#endif // CONFIG_GLOBAL_MOTION
2911
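// Parses the uncompressed frame header: frame type and show flags, frame id,
// reference frame setup, frame size, and the various per-frame tool flags,
// returning the size of the compressed header data that follows.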
Yaowu Xuf883b422016-08-30 14:01:10 -07002912static size_t read_uncompressed_header(AV1Decoder *pbi,
2913 struct aom_read_bit_buffer *rb) {
2914 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002915 MACROBLOCKD *const xd = &pbi->mb;
2916 BufferPool *const pool = cm->buffer_pool;
2917 RefCntBuffer *const frame_bufs = pool->frame_bufs;
2918 int i, mask, ref_index = 0;
2919 size_t sz;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002920
Yaowu Xuc27fc142016-08-22 16:08:15 -07002921 cm->last_frame_type = cm->frame_type;
2922 cm->last_intra_only = cm->intra_only;
2923
  // NOTE: By default, every coded frame is available to be used as a
  // reference.
2925 cm->is_reference_frame = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002926
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002927#if !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07002928 if (aom_rb_read_literal(rb, 2) != AOM_FRAME_MARKER)
2929 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002930 "Invalid frame marker");
2931
Yaowu Xuf883b422016-08-30 14:01:10 -07002932 cm->profile = av1_read_profile(rb);
Sebastien Alaiwanb9c652a2017-05-03 15:44:28 +02002933
2934 const BITSTREAM_PROFILE MAX_SUPPORTED_PROFILE =
2935 CONFIG_HIGHBITDEPTH ? MAX_PROFILES : PROFILE_2;
2936
2937 if (cm->profile >= MAX_SUPPORTED_PROFILE)
Yaowu Xuf883b422016-08-30 14:01:10 -07002938 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002939 "Unsupported bitstream profile");
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002940#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002941
Yunqing Wangc2502b52017-07-19 17:44:18 -07002942#if CONFIG_EXT_TILE
2943 cm->large_scale_tile = aom_rb_read_literal(rb, 1);
2944#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01002945 if (cm->large_scale_tile) cm->seq_params.frame_id_numbers_present_flag = 0;
Yunqing Wangc2502b52017-07-19 17:44:18 -07002946#endif // CONFIG_REFERENCE_BUFFER
2947#endif // CONFIG_EXT_TILE
2948
Yaowu Xuf883b422016-08-30 14:01:10 -07002949 cm->show_existing_frame = aom_rb_read_bit(rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002950
2951 if (cm->show_existing_frame) {
Yaowu Xu415ba932016-12-27 11:17:32 -08002952 // Show an existing frame directly.
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01002953 const int existing_frame_idx = aom_rb_read_literal(rb, 3);
2954 const int frame_to_show = cm->ref_frame_map[existing_frame_idx];
Yaowu Xu415ba932016-12-27 11:17:32 -08002955#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01002956 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02002957 int frame_id_length = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01002958 int display_frame_id = aom_rb_read_literal(rb, frame_id_length);
2959 /* Compare display_frame_id with ref_frame_id and check valid for
2960 * referencing */
2961 if (display_frame_id != cm->ref_frame_id[existing_frame_idx] ||
2962 cm->valid_for_referencing[existing_frame_idx] == 0)
2963 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2964 "Reference buffer frame ID mismatch");
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002965 }
2966#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002967 lock_buffer_pool(pool);
2968 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
2969 unlock_buffer_pool(pool);
Yaowu Xuf883b422016-08-30 14:01:10 -07002970 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002971 "Buffer %d does not contain a decoded frame",
2972 frame_to_show);
2973 }
2974 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
2975 unlock_buffer_pool(pool);
2976
Cheng Chen13fc8192017-08-19 11:49:28 -07002977#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07002978 cm->lf.filter_level[0] = 0;
2979 cm->lf.filter_level[1] = 0;
2980#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07002981 cm->lf.filter_level = 0;
Cheng Chen179479f2017-08-04 10:56:39 -07002982#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002983 cm->show_frame = 1;
2984 pbi->refresh_frame_flags = 0;
2985
2986 if (cm->frame_parallel_decode) {
2987 for (i = 0; i < REF_FRAMES; ++i)
2988 cm->next_ref_frame_map[i] = cm->ref_frame_map[i];
2989 }
2990
2991 return 0;
2992 }
2993
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002994#if !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07002995 cm->frame_type = (FRAME_TYPE)aom_rb_read_bit(rb);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07002996 cm->show_frame = aom_rb_read_bit(rb);
2997 if (cm->frame_type != KEY_FRAME)
2998 cm->intra_only = cm->show_frame ? 0 : aom_rb_read_bit(rb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002999#else
3000 cm->frame_type = (FRAME_TYPE)aom_rb_read_literal(rb, 2); // 2 bits
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003001 cm->show_frame = aom_rb_read_bit(rb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003002 cm->intra_only = cm->frame_type == INTRA_ONLY_FRAME;
3003#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07003004 cm->error_resilient_mode = aom_rb_read_bit(rb);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003005#if CONFIG_REFERENCE_BUFFER
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003006#if !CONFIG_OBU
David Barker5e70a112017-10-03 14:28:17 +01003007 if (frame_is_intra_only(cm)) read_sequence_header(&cm->seq_params, rb);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003008#endif // !CONFIG_OBU
David Barker5e70a112017-10-03 14:28:17 +01003009 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003010 int frame_id_length = cm->seq_params.frame_id_length;
3011 int diff_len = cm->seq_params.delta_frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003012 int prev_frame_id = 0;
3013 if (cm->frame_type != KEY_FRAME) {
3014 prev_frame_id = cm->current_frame_id;
3015 }
3016 cm->current_frame_id = aom_rb_read_literal(rb, frame_id_length);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003017
David Barker5e70a112017-10-03 14:28:17 +01003018 if (cm->frame_type != KEY_FRAME) {
3019 int diff_frame_id;
3020 if (cm->current_frame_id > prev_frame_id) {
3021 diff_frame_id = cm->current_frame_id - prev_frame_id;
3022 } else {
3023 diff_frame_id =
3024 (1 << frame_id_length) + cm->current_frame_id - prev_frame_id;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003025 }
David Barker5e70a112017-10-03 14:28:17 +01003026 /* Check current_frame_id for conformance */
3027 if (prev_frame_id == cm->current_frame_id ||
3028 diff_frame_id >= (1 << (frame_id_length - 1))) {
3029 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3030 "Invalid value of current_frame_id");
3031 }
3032 }
3033 /* Check if some frames need to be marked as not valid for referencing */
3034 for (i = 0; i < REF_FRAMES; i++) {
3035 if (cm->frame_type == KEY_FRAME) {
3036 cm->valid_for_referencing[i] = 0;
3037 } else if (cm->current_frame_id - (1 << diff_len) > 0) {
3038 if (cm->ref_frame_id[i] > cm->current_frame_id ||
3039 cm->ref_frame_id[i] < cm->current_frame_id - (1 << diff_len))
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003040 cm->valid_for_referencing[i] = 0;
David Barker5e70a112017-10-03 14:28:17 +01003041 } else {
3042 if (cm->ref_frame_id[i] > cm->current_frame_id &&
3043 cm->ref_frame_id[i] <
3044 (1 << frame_id_length) + cm->current_frame_id - (1 << diff_len))
3045 cm->valid_for_referencing[i] = 0;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003046 }
3047 }
3048 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003049#endif // CONFIG_REFERENCE_BUFFER
Yaowu Xuc27fc142016-08-22 16:08:15 -07003050 if (cm->frame_type == KEY_FRAME) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003051#if !CONFIG_OBU
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02003052 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003053#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003054 pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1;
3055
3056 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
3057 cm->frame_refs[i].idx = INVALID_IDX;
3058 cm->frame_refs[i].buf = NULL;
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07003059#if CONFIG_VAR_REFS
3060 cm->frame_refs[i].is_valid = 0;
3061#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003062 }
3063
3064 setup_frame_size(cm, rb);
Debargha Mukherjeed2630fa2017-09-22 10:32:51 -07003065 setup_sb_size(cm, rb);
3066
Yaowu Xuc27fc142016-08-22 16:08:15 -07003067 if (pbi->need_resync) {
3068 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
3069 pbi->need_resync = 0;
3070 }
Alex Converseeb780e72016-12-13 12:46:41 -08003071#if CONFIG_ANS && ANS_MAX_SYMBOLS
3072 cm->ans_window_size_log2 = aom_rb_read_literal(rb, 4) + 8;
3073#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
hui su24f7b072016-10-12 11:36:24 -07003074 cm->allow_screen_content_tools = aom_rb_read_bit(rb);
RogerZhou3b635242017-09-19 10:06:46 -07003075#if CONFIG_AMVR
3076 if (cm->allow_screen_content_tools) {
3077 if (aom_rb_read_bit(rb)) {
3078 cm->seq_mv_precision_level = 2;
3079 } else {
3080 cm->seq_mv_precision_level = aom_rb_read_bit(rb) ? 0 : 1;
3081 }
3082 } else {
3083 cm->seq_mv_precision_level = 0;
3084 }
3085#endif
Fangwen Fu930c51c2017-05-07 20:39:17 -07003086#if CONFIG_TEMPMV_SIGNALING
3087 cm->use_prev_frame_mvs = 0;
3088#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003089 } else {
hui su24f7b072016-10-12 11:36:24 -07003090 if (cm->intra_only) cm->allow_screen_content_tools = aom_rb_read_bit(rb);
Thomas Daedea6a854b2017-06-22 17:49:11 -07003091#if CONFIG_TEMPMV_SIGNALING
3092 if (cm->intra_only || cm->error_resilient_mode) cm->use_prev_frame_mvs = 0;
3093#endif
3094#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
3095// The only way to reset all frame contexts to their default values is with a
3096// keyframe.
3097#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003098 if (cm->error_resilient_mode) {
3099 cm->reset_frame_context = RESET_FRAME_CONTEXT_ALL;
3100 } else {
3101 if (cm->intra_only) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003102 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07003103 ? RESET_FRAME_CONTEXT_ALL
3104 : RESET_FRAME_CONTEXT_CURRENT;
3105 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003106 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07003107 ? RESET_FRAME_CONTEXT_CURRENT
3108 : RESET_FRAME_CONTEXT_NONE;
3109 if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT)
Yaowu Xuf883b422016-08-30 14:01:10 -07003110 cm->reset_frame_context = aom_rb_read_bit(rb)
Yaowu Xuc27fc142016-08-22 16:08:15 -07003111 ? RESET_FRAME_CONTEXT_ALL
3112 : RESET_FRAME_CONTEXT_CURRENT;
3113 }
3114 }
Thomas Daedea6a854b2017-06-22 17:49:11 -07003115#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003116
3117 if (cm->intra_only) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003118#if !CONFIG_OBU
Sebastien Alaiwan8b7a4e12017-06-13 11:25:57 +02003119 read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003120#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003121
Yaowu Xuf883b422016-08-30 14:01:10 -07003122 pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003123 setup_frame_size(cm, rb);
Pavel Frolovea3dd3a2017-09-25 16:06:19 +03003124 setup_sb_size(cm, rb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003125 if (pbi->need_resync) {
3126 memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map));
3127 pbi->need_resync = 0;
3128 }
Alex Converseeb780e72016-12-13 12:46:41 -08003129#if CONFIG_ANS && ANS_MAX_SYMBOLS
3130 cm->ans_window_size_log2 = aom_rb_read_literal(rb, 4) + 8;
3131#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003132 } else if (pbi->need_resync != 1) { /* Skip if need resync */
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003133#if CONFIG_OBU
3134 pbi->refresh_frame_flags = (cm->frame_type == S_FRAME)
3135 ? ~(1 << REF_FRAMES)
3136 : aom_rb_read_literal(rb, REF_FRAMES);
3137#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003138 pbi->refresh_frame_flags = aom_rb_read_literal(rb, REF_FRAMES);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003139#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003140
Yaowu Xuc27fc142016-08-22 16:08:15 -07003141 if (!pbi->refresh_frame_flags) {
3142 // NOTE: "pbi->refresh_frame_flags == 0" indicates that the coded frame
3143 // will not be used as a reference
3144 cm->is_reference_frame = 0;
3145 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003146
3147 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003148 const int ref = aom_rb_read_literal(rb, REF_FRAMES_LOG2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003149 const int idx = cm->ref_frame_map[ref];
Rupert Swarbrick5eb471c2017-10-02 16:06:54 +01003150
3151 // Most of the time, streams start with a keyframe. In that case,
3152 // ref_frame_map will have been filled in at that point and will not
3153 // contain any -1's. However, streams are explicitly allowed to start
3154 // with an intra-only frame, so long as they don't then signal a
3155 // reference to a slot that hasn't been set yet. That's what we are
3156 // checking here.
3157 if (idx == -1)
3158 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3159 "Inter frame requests nonexistent reference");
3160
Yaowu Xuc27fc142016-08-22 16:08:15 -07003161 RefBuffer *const ref_frame = &cm->frame_refs[i];
3162 ref_frame->idx = idx;
3163 ref_frame->buf = &frame_bufs[idx].buf;
Zoe Liu17af2742017-10-06 10:36:42 -07003164#if CONFIG_FRAME_SIGN_BIAS
3165#if CONFIG_OBU
3166 // NOTE: For the scenario of (cm->frame_type != S_FRAME),
3167 // ref_frame_sign_bias will be reset based on frame offsets.
3168 cm->ref_frame_sign_bias[LAST_FRAME + i] = 0;
3169#endif // CONFIG_OBU
3170#else // !CONFIG_FRAME_SIGN_BIAS
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003171#if CONFIG_OBU
3172 cm->ref_frame_sign_bias[LAST_FRAME + i] =
3173 (cm->frame_type == S_FRAME) ? 0 : aom_rb_read_bit(rb);
Zoe Liu17af2742017-10-06 10:36:42 -07003174#else // !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07003175 cm->ref_frame_sign_bias[LAST_FRAME + i] = aom_rb_read_bit(rb);
Zoe Liu17af2742017-10-06 10:36:42 -07003176#endif // CONFIG_OBU
3177#endif // CONFIG_FRAME_SIGN_BIAS
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003178#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003179 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003180 int frame_id_length = cm->seq_params.frame_id_length;
3181 int diff_len = cm->seq_params.delta_frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003182 int delta_frame_id_minus1 = aom_rb_read_literal(rb, diff_len);
3183 int ref_frame_id =
3184 ((cm->current_frame_id - (delta_frame_id_minus1 + 1) +
3185 (1 << frame_id_length)) %
3186 (1 << frame_id_length));
        /* Compare the frame id derived from delta_frame_id_minus1 with the
         * stored reference frame id, and check that the reference buffer is
         * valid for referencing. */
3189 if (ref_frame_id != cm->ref_frame_id[ref] ||
3190 cm->valid_for_referencing[ref] == 0)
3191 aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
3192 "Reference buffer frame ID mismatch");
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003193 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003194#endif // CONFIG_REFERENCE_BUFFER
Yaowu Xuc27fc142016-08-22 16:08:15 -07003195 }

#if CONFIG_VAR_REFS
      check_valid_ref_frames(cm);
#endif  // CONFIG_VAR_REFS

#if CONFIG_FRAME_SIZE
      if (cm->error_resilient_mode == 0) {
        setup_frame_size_with_refs(cm, rb);
      } else {
        setup_frame_size(cm, rb);
      }
#else
      setup_frame_size_with_refs(cm, rb);
#endif

#if CONFIG_AMVR
      if (cm->seq_mv_precision_level == 2) {
        cm->cur_frame_mv_precision_level = aom_rb_read_bit(rb) ? 0 : 1;
      } else {
        cm->cur_frame_mv_precision_level = cm->seq_mv_precision_level;
      }
#endif
      cm->allow_high_precision_mv = aom_rb_read_bit(rb);
      cm->interp_filter = read_frame_interp_filter(rb);
#if CONFIG_TEMPMV_SIGNALING
      if (frame_might_use_prev_frame_mvs(cm))
        cm->use_prev_frame_mvs = aom_rb_read_bit(rb);
      else
        cm->use_prev_frame_mvs = 0;
#endif
      for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
        RefBuffer *const ref_buf = &cm->frame_refs[i];
#if CONFIG_HIGHBITDEPTH
        av1_setup_scale_factors_for_frame(
            &ref_buf->sf, ref_buf->buf->y_crop_width,
            ref_buf->buf->y_crop_height, cm->width, cm->height,
            cm->use_highbitdepth);
#else
        av1_setup_scale_factors_for_frame(
            &ref_buf->sf, ref_buf->buf->y_crop_width,
            ref_buf->buf->y_crop_height, cm->width, cm->height);
#endif
      }
    }
  }

#if CONFIG_FRAME_MARKER
  if (cm->show_frame == 0) {
    cm->frame_offset = cm->current_video_frame + aom_rb_read_literal(rb, 4);
  } else {
    cm->frame_offset = cm->current_video_frame;
  }
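  // Example (hypothetical numbers): a hidden frame signalled while
  // current_video_frame == 100 with a 4-bit offset of 4 in the bitstream gets
  // frame_offset = 104; a shown frame simply reuses 100.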
  av1_setup_frame_buf_refs(cm);

#if CONFIG_FRAME_SIGN_BIAS
#if CONFIG_OBU
  if (cm->frame_type != S_FRAME)
#endif  // CONFIG_OBU
    av1_setup_frame_sign_bias(cm);
#endif  // CONFIG_FRAME_SIGN_BIAS
#endif  // CONFIG_FRAME_MARKER

#if CONFIG_TEMPMV_SIGNALING
  cm->cur_frame->intra_only = cm->frame_type == KEY_FRAME || cm->intra_only;
#endif

#if CONFIG_REFERENCE_BUFFER
  if (cm->seq_params.frame_id_numbers_present_flag) {
    // If the corresponding bit of the refresh bitmask is set, update the
    // reference frame id for that slot and mark it as valid for reference.
    int refresh_frame_flags =
        cm->frame_type == KEY_FRAME ? 0xFF : pbi->refresh_frame_flags;
    for (i = 0; i < REF_FRAMES; i++) {
      if ((refresh_frame_flags >> i) & 1) {
        cm->ref_frame_id[i] = cm->current_frame_id;
        cm->valid_for_referencing[i] = 1;
      }
    }
  }
#endif  // CONFIG_REFERENCE_BUFFER
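
  // Worked example (hypothetical): refresh_frame_flags == 0x21 (0b00100001)
  // stamps slots 0 and 5 with current_frame_id; a KEY_FRAME uses 0xFF, so
  // every slot is refreshed and becomes valid for referencing.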

  get_frame_new_buffer(cm)->bit_depth = cm->bit_depth;
  get_frame_new_buffer(cm)->color_space = cm->color_space;
#if CONFIG_COLORSPACE_HEADERS
  get_frame_new_buffer(cm)->transfer_function = cm->transfer_function;
  get_frame_new_buffer(cm)->chroma_sample_position = cm->chroma_sample_position;
#endif
  get_frame_new_buffer(cm)->color_range = cm->color_range;
  get_frame_new_buffer(cm)->render_width = cm->render_width;
  get_frame_new_buffer(cm)->render_height = cm->render_height;

  if (pbi->need_resync) {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Keyframe / intra-only frame required to reset decoder"
                       " state");
  }

  if (!cm->error_resilient_mode) {
    cm->refresh_frame_context = aom_rb_read_bit(rb)
                                    ? REFRESH_FRAME_CONTEXT_FORWARD
                                    : REFRESH_FRAME_CONTEXT_BACKWARD;
  } else {
    cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_FORWARD;
  }
#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
  // This flag will be overridden by the call to av1_setup_past_independence
  // below, forcing the use of context 0 for those frame types.
  cm->frame_context_idx = aom_rb_read_literal(rb, FRAME_CONTEXTS_LOG2);
#endif

  // Generate next_ref_frame_map.
  lock_buffer_pool(pool);
  for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) {
    if (mask & 1) {
      cm->next_ref_frame_map[ref_index] = cm->new_fb_idx;
      ++frame_bufs[cm->new_fb_idx].ref_count;
    } else {
      cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
    }
    // Current thread holds the reference frame.
    if (cm->ref_frame_map[ref_index] >= 0)
      ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
    ++ref_index;
  }
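
  // Sketch of the map update above (hypothetical refresh_frame_flags = 0x09):
  // slots 0 and 3 of next_ref_frame_map point at the newly decoded buffer
  // (new_fb_idx), whose ref_count is bumped once per refreshed slot; every
  // other slot carries over its previous mapping, and each currently mapped
  // buffer gains one extra reference held by this thread.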

  for (; ref_index < REF_FRAMES; ++ref_index) {
    cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];

    // Current thread holds the reference frame.
    if (cm->ref_frame_map[ref_index] >= 0)
      ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count;
  }
  unlock_buffer_pool(pool);
  pbi->hold_ref_buf = 1;

  if (frame_is_intra_only(cm) || cm->error_resilient_mode)
    av1_setup_past_independence(cm);

  setup_loopfilter(cm, rb);
  setup_quantization(cm, rb);
  xd->bd = (int)cm->bit_depth;

#if CONFIG_Q_ADAPT_PROBS
  av1_default_coef_probs(cm);
  if (cm->frame_type == KEY_FRAME || cm->error_resilient_mode ||
      cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL) {
    for (i = 0; i < FRAME_CONTEXTS; ++i) cm->frame_contexts[i] = *cm->fc;
  } else if (cm->reset_frame_context == RESET_FRAME_CONTEXT_CURRENT) {
#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
    if (cm->frame_refs[0].idx <= 0) {
      cm->frame_contexts[cm->frame_refs[0].idx] = *cm->fc;
    }
#else
    cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
#endif  // CONFIG_NO_FRAME_CONTEXT_SIGNALING
  }
#endif  // CONFIG_Q_ADAPT_PROBS

  setup_segmentation(cm, rb);

  {
    int delta_q_allowed = 1;
#if !CONFIG_EXT_DELTA_Q
    struct segmentation *const seg = &cm->seg;
    int segment_quantizer_active = 0;
    for (i = 0; i < MAX_SEGMENTS; i++) {
      if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
        segment_quantizer_active = 1;
      }
    }
    delta_q_allowed = !segment_quantizer_active;
#endif

    cm->delta_q_res = 1;
#if CONFIG_EXT_DELTA_Q
    cm->delta_lf_res = 1;
    cm->delta_lf_present_flag = 0;
#if CONFIG_LOOPFILTER_LEVEL
    cm->delta_lf_multi = 0;
#endif  // CONFIG_LOOPFILTER_LEVEL
#endif
    if (delta_q_allowed == 1 && cm->base_qindex > 0) {
      cm->delta_q_present_flag = aom_rb_read_bit(rb);
    } else {
      cm->delta_q_present_flag = 0;
    }
    if (cm->delta_q_present_flag) {
      xd->prev_qindex = cm->base_qindex;
      cm->delta_q_res = 1 << aom_rb_read_literal(rb, 2);
#if CONFIG_EXT_DELTA_Q
      cm->delta_lf_present_flag = aom_rb_read_bit(rb);
      if (cm->delta_lf_present_flag) {
        xd->prev_delta_lf_from_base = 0;
        cm->delta_lf_res = 1 << aom_rb_read_literal(rb, 2);
#if CONFIG_LOOPFILTER_LEVEL
        cm->delta_lf_multi = aom_rb_read_bit(rb);
        for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id)
          xd->prev_delta_lf[lf_id] = 0;
#endif  // CONFIG_LOOPFILTER_LEVEL
      }
#endif  // CONFIG_EXT_DELTA_Q
    }
  }
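  // Resolution example for the 2-bit fields above: a coded value v maps to a
  // step of 1 << v, so v = 0, 1, 2, 3 yields delta_q_res (and delta_lf_res)
  // of 1, 2, 4 or 8 respectively.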
#if CONFIG_AMVR
  xd->cur_frame_mv_precision_level = cm->cur_frame_mv_precision_level;
#endif

  for (i = 0; i < MAX_SEGMENTS; ++i) {
    const int qindex = cm->seg.enabled
                           ? av1_get_qindex(&cm->seg, i, cm->base_qindex)
                           : cm->base_qindex;
    xd->lossless[i] = qindex == 0 && cm->y_dc_delta_q == 0 &&
                      cm->uv_dc_delta_q == 0 && cm->uv_ac_delta_q == 0;
    xd->qindex[i] = qindex;
  }
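  // A segment is treated as lossless by the loop above only when its
  // effective qindex is 0 and all DC/AC delta-q values are 0; a single
  // nonzero uv_ac_delta_q, for example, rules out lossless for every segment.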
  cm->all_lossless = all_lossless(cm, xd);
  setup_segmentation_dequant(cm);
#if CONFIG_CDEF
  if (!cm->all_lossless) {
    setup_cdef(cm, rb);
  }
#endif
#if CONFIG_LOOP_RESTORATION
  decode_restoration_mode(cm, rb);
#endif  // CONFIG_LOOP_RESTORATION
  cm->tx_mode = read_tx_mode(cm, rb);
  cm->reference_mode = read_frame_reference_mode(cm, rb);
  if (cm->reference_mode != SINGLE_REFERENCE) setup_compound_reference_mode(cm);
  read_compound_tools(cm, rb);

#if CONFIG_EXT_TX
  cm->reduced_tx_set_used = aom_rb_read_bit(rb);
#endif  // CONFIG_EXT_TX

#if CONFIG_ADAPT_SCAN
  cm->use_adapt_scan = aom_rb_read_bit(rb);
  // TODO(angiebird): call av1_init_scan_order only when use_adapt_scan
  // switches from 1 to 0
  if (cm->use_adapt_scan == 0) av1_init_scan_order(cm);
#endif  // CONFIG_ADAPT_SCAN

  // NOTE(zoeliu): Since cm->prev_frame can be neither a frame with
  // show_existing_frame=1 nor a frame that is not used as a reference, it is
  // possible that, by the time it is referred to here, the frame buffer it
  // originally pointed to has already expired and been reassigned to the
  // current, newly coded frame. Hence, we need to check whether this is the
  // case, and if so, we have two choices:
  // (1) simply disable the use of previous-frame MVs; or
  // (2) have cm->prev_frame point to one of the reference frame buffers,
  //     e.g. LAST_FRAME.
  if (!dec_is_ref_frame_buf(pbi, cm->prev_frame)) {
    // Reassign the LAST_FRAME buffer to cm->prev_frame.
    cm->prev_frame =
        cm->frame_refs[LAST_FRAME - LAST_FRAME].idx != INVALID_IDX
            ? &cm->buffer_pool
                   ->frame_bufs[cm->frame_refs[LAST_FRAME - LAST_FRAME].idx]
            : NULL;
  }

#if CONFIG_TEMPMV_SIGNALING
  if (cm->use_prev_frame_mvs && !frame_can_use_prev_frame_mvs(cm)) {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Frame wrongly requests previous frame MVs");
  }
#else
  cm->use_prev_frame_mvs = !cm->error_resilient_mode && cm->prev_frame &&
#if CONFIG_FRAME_SUPERRES
                           cm->width == cm->last_width &&
                           cm->height == cm->last_height &&
#else
                           cm->width == cm->prev_frame->buf.y_crop_width &&
                           cm->height == cm->prev_frame->buf.y_crop_height &&
#endif  // CONFIG_FRAME_SUPERRES
                           !cm->last_intra_only && cm->last_show_frame &&
                           (cm->last_frame_type != KEY_FRAME);
#endif  // CONFIG_TEMPMV_SIGNALING

#if CONFIG_GLOBAL_MOTION
  if (!frame_is_intra_only(cm)) read_global_motion(cm, rb);
#endif

  read_tile_info(pbi, rb);
  if (use_compressed_header(cm)) {
    sz = aom_rb_read_literal(rb, 16);
    if (sz == 0)
      aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                         "Invalid header size");
  } else {
    sz = 0;
  }
  return sz;
}

static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
                                  size_t partition_size) {
#if CONFIG_NEW_MULTISYMBOL
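  // With CONFIG_NEW_MULTISYMBOL all symbols are coded with multisymbol CDFs,
  // so no separately transmitted compressed header exists and there is
  // nothing to parse here.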
  (void)pbi;
  (void)data;
  (void)partition_size;
  return 0;
#else
  AV1_COMMON *const cm = &pbi->common;
  aom_reader r;

#if ((CONFIG_RECT_TX_EXT) || (!CONFIG_NEW_MULTISYMBOL || CONFIG_LV_MAP) || \
     (CONFIG_COMPOUND_SINGLEREF))
  FRAME_CONTEXT *const fc = cm->fc;
#endif

#if CONFIG_ANS && ANS_MAX_SYMBOLS
  r.window_size = 1 << cm->ans_window_size_log2;
#endif
  if (aom_reader_init(&r, data, partition_size, pbi->decrypt_cb,
                      pbi->decrypt_state))
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate bool decoder 0");

#if CONFIG_RECT_TX_EXT
  if (cm->tx_mode == TX_MODE_SELECT)
    av1_diff_update_prob(&r, &fc->quarter_tx_size_prob, ACCT_STR);
#endif

#if CONFIG_LV_MAP && !LV_MAP_PROB
  av1_read_txb_probs(fc, cm->tx_mode, &r, &cm->counts);
#endif  // CONFIG_LV_MAP && !LV_MAP_PROB

#if !CONFIG_NEW_MULTISYMBOL
  if (cm->tx_mode == TX_MODE_SELECT)
    for (int i = 0; i < TXFM_PARTITION_CONTEXTS; ++i)
      av1_diff_update_prob(&r, &fc->txfm_partition_prob[i], ACCT_STR);
  for (int i = 0; i < SKIP_CONTEXTS; ++i)
    av1_diff_update_prob(&r, &fc->skip_probs[i], ACCT_STR);
#endif

  if (!frame_is_intra_only(cm)) {
#if !CONFIG_NEW_MULTISYMBOL
    read_inter_mode_probs(fc, &r);
#endif

#if CONFIG_INTERINTRA
    if (cm->reference_mode != COMPOUND_REFERENCE &&
        cm->allow_interintra_compound) {
#if !CONFIG_NEW_MULTISYMBOL
      for (int i = 0; i < BLOCK_SIZE_GROUPS; i++) {
        if (is_interintra_allowed_bsize_group(i)) {
          av1_diff_update_prob(&r, &fc->interintra_prob[i], ACCT_STR);
        }
      }
#endif
#if CONFIG_WEDGE && !CONFIG_NEW_MULTISYMBOL
#if CONFIG_EXT_PARTITION_TYPES
      int block_sizes_to_update = BLOCK_SIZES_ALL;
#else
      int block_sizes_to_update = BLOCK_SIZES;
#endif
      for (int i = 0; i < block_sizes_to_update; i++) {
        if (is_interintra_allowed_bsize(i) && is_interintra_wedge_used(i)) {
          av1_diff_update_prob(&r, &fc->wedge_interintra_prob[i], ACCT_STR);
        }
      }
#endif  // CONFIG_WEDGE
    }
#endif  // CONFIG_INTERINTRA

#if !CONFIG_NEW_MULTISYMBOL
    for (int i = 0; i < INTRA_INTER_CONTEXTS; i++)
      av1_diff_update_prob(&r, &fc->intra_inter_prob[i], ACCT_STR);
#endif

#if !CONFIG_NEW_MULTISYMBOL
    read_frame_reference_mode_probs(cm, &r);
#endif

#if CONFIG_COMPOUND_SINGLEREF
    for (int i = 0; i < COMP_INTER_MODE_CONTEXTS; i++)
      av1_diff_update_prob(&r, &fc->comp_inter_mode_prob[i], ACCT_STR);
#endif  // CONFIG_COMPOUND_SINGLEREF

#if !CONFIG_NEW_MULTISYMBOL
#if CONFIG_AMVR
    if (cm->cur_frame_mv_precision_level == 0) {
#endif
      for (int i = 0; i < NMV_CONTEXTS; ++i)
        read_mv_probs(&fc->nmvc[i], cm->allow_high_precision_mv, &r);
#if CONFIG_AMVR
    }
#endif
#endif
  }

  return aom_reader_has_error(&r);
#endif  // CONFIG_NEW_MULTISYMBOL
}

#ifdef NDEBUG
#define debug_check_frame_counts(cm) (void)0
#else  // !NDEBUG
// Counts should only be incremented when frame_parallel_decoding_mode and
// error_resilient_mode are disabled.
static void debug_check_frame_counts(const AV1_COMMON *const cm) {
  FRAME_COUNTS zero_counts;
  av1_zero(zero_counts);
  assert(cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD ||
         cm->error_resilient_mode);
  assert(!memcmp(cm->counts.partition, zero_counts.partition,
                 sizeof(cm->counts.partition)));
  assert(!memcmp(cm->counts.switchable_interp, zero_counts.switchable_interp,
                 sizeof(cm->counts.switchable_interp)));
  assert(!memcmp(cm->counts.inter_compound_mode,
                 zero_counts.inter_compound_mode,
                 sizeof(cm->counts.inter_compound_mode)));
#if CONFIG_INTERINTRA
  assert(!memcmp(cm->counts.interintra, zero_counts.interintra,
                 sizeof(cm->counts.interintra)));
#if CONFIG_WEDGE
  assert(!memcmp(cm->counts.wedge_interintra, zero_counts.wedge_interintra,
                 sizeof(cm->counts.wedge_interintra)));
#endif  // CONFIG_WEDGE
#endif  // CONFIG_INTERINTRA
  assert(!memcmp(cm->counts.compound_interinter,
                 zero_counts.compound_interinter,
                 sizeof(cm->counts.compound_interinter)));
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
  assert(!memcmp(cm->counts.motion_mode, zero_counts.motion_mode,
                 sizeof(cm->counts.motion_mode)));
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
#if CONFIG_NCOBMC_ADAPT_WEIGHT && CONFIG_MOTION_VAR
  assert(!memcmp(cm->counts.ncobmc_mode, zero_counts.ncobmc_mode,
                 sizeof(cm->counts.ncobmc_mode)));
#endif
  assert(!memcmp(cm->counts.intra_inter, zero_counts.intra_inter,
                 sizeof(cm->counts.intra_inter)));
#if CONFIG_COMPOUND_SINGLEREF
  assert(!memcmp(cm->counts.comp_inter_mode, zero_counts.comp_inter_mode,
                 sizeof(cm->counts.comp_inter_mode)));
#endif  // CONFIG_COMPOUND_SINGLEREF
  assert(!memcmp(cm->counts.comp_inter, zero_counts.comp_inter,
                 sizeof(cm->counts.comp_inter)));
#if CONFIG_EXT_COMP_REFS
  assert(!memcmp(cm->counts.comp_ref_type, zero_counts.comp_ref_type,
                 sizeof(cm->counts.comp_ref_type)));
  assert(!memcmp(cm->counts.uni_comp_ref, zero_counts.uni_comp_ref,
                 sizeof(cm->counts.uni_comp_ref)));
#endif  // CONFIG_EXT_COMP_REFS
  assert(!memcmp(cm->counts.single_ref, zero_counts.single_ref,
                 sizeof(cm->counts.single_ref)));
  assert(!memcmp(cm->counts.comp_ref, zero_counts.comp_ref,
                 sizeof(cm->counts.comp_ref)));
  assert(!memcmp(cm->counts.comp_bwdref, zero_counts.comp_bwdref,
                 sizeof(cm->counts.comp_bwdref)));
  assert(!memcmp(&cm->counts.tx_size, &zero_counts.tx_size,
                 sizeof(cm->counts.tx_size)));
  assert(!memcmp(cm->counts.skip, zero_counts.skip, sizeof(cm->counts.skip)));
  assert(
      !memcmp(&cm->counts.mv[0], &zero_counts.mv[0], sizeof(cm->counts.mv[0])));
  assert(
      !memcmp(&cm->counts.mv[1], &zero_counts.mv[1], sizeof(cm->counts.mv[0])));
}
#endif  // NDEBUG

static struct aom_read_bit_buffer *init_read_bit_buffer(
    AV1Decoder *pbi, struct aom_read_bit_buffer *rb, const uint8_t *data,
    const uint8_t *data_end, uint8_t clear_data[MAX_AV1_HEADER_SIZE]) {
  rb->bit_offset = 0;
  rb->error_handler = error_handler;
  rb->error_handler_data = &pbi->common;
  if (pbi->decrypt_cb) {
    const int n = (int)AOMMIN(MAX_AV1_HEADER_SIZE, data_end - data);
    pbi->decrypt_cb(pbi->decrypt_state, data, clear_data, n);
    rb->bit_buffer = clear_data;
    rb->bit_buffer_end = clear_data + n;
  } else {
    rb->bit_buffer = data;
    rb->bit_buffer_end = data_end;
  }
  return rb;
}

//------------------------------------------------------------------------------

void av1_read_frame_size(struct aom_read_bit_buffer *rb, int *width,
                         int *height) {
  *width = aom_rb_read_literal(rb, 16) + 1;
  *height = aom_rb_read_literal(rb, 16) + 1;
}

BITSTREAM_PROFILE av1_read_profile(struct aom_read_bit_buffer *rb) {
  int profile = aom_rb_read_bit(rb);
  profile |= aom_rb_read_bit(rb) << 1;
  if (profile > 2) profile += aom_rb_read_bit(rb);
  return (BITSTREAM_PROFILE)profile;
}

static void make_update_tile_list_dec(AV1Decoder *pbi, int start_tile,
                                      int num_tile, FRAME_CONTEXT *ec_ctxs[]) {
  int i;
  for (i = start_tile; i < start_tile + num_tile; ++i)
    ec_ctxs[i - start_tile] = &pbi->tile_data[i].tctx;
}

#if CONFIG_FRAME_SUPERRES
void superres_post_decode(AV1Decoder *pbi) {
  AV1_COMMON *const cm = &pbi->common;
  BufferPool *const pool = cm->buffer_pool;

  if (av1_superres_unscaled(cm)) return;

  lock_buffer_pool(pool);
  av1_superres_upscale(cm, pool);
  unlock_buffer_pool(pool);
}
#endif  // CONFIG_FRAME_SUPERRES

static void dec_setup_frame_boundary_info(AV1_COMMON *const cm) {
// Note: When LOOPFILTERING_ACROSS_TILES is enabled, we need to clear the
// boundary information every frame, since the tile boundaries may
// change every frame (particularly when dependent-horztiles is also
// enabled); when it is disabled, the only information stored is the frame
// boundaries, which only depend on the frame size.
#if !CONFIG_LOOPFILTERING_ACROSS_TILES
  if (cm->width != cm->last_width || cm->height != cm->last_height)
#endif  // CONFIG_LOOPFILTERING_ACROSS_TILES
  {
    int row, col;
    for (row = 0; row < cm->mi_rows; ++row) {
      MODE_INFO *mi = cm->mi + row * cm->mi_stride;
      for (col = 0; col < cm->mi_cols; ++col) {
        mi->mbmi.boundary_info = 0;
        mi++;
      }
    }
    av1_setup_frame_boundary_info(cm);
  }
}

size_t av1_decode_frame_headers_and_setup(AV1Decoder *pbi, const uint8_t *data,
                                          const uint8_t *data_end,
                                          const uint8_t **p_data_end) {
  AV1_COMMON *const cm = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;
  struct aom_read_bit_buffer rb;
  uint8_t clear_data[MAX_AV1_HEADER_SIZE];
  size_t first_partition_size;
  YV12_BUFFER_CONFIG *new_fb;
  RefBuffer *last_fb_ref_buf = &cm->frame_refs[LAST_FRAME - LAST_FRAME];

#if CONFIG_ADAPT_SCAN
  av1_deliver_eob_threshold(cm, xd);
#endif
#if CONFIG_BITSTREAM_DEBUG
  bitstream_queue_set_frame_read(cm->current_video_frame * 2 + cm->show_frame);
#endif

#if CONFIG_GLOBAL_MOTION
  int i;
  for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
    cm->global_motion[i] = default_warp_params;
    cm->cur_frame->global_motion[i] = default_warp_params;
  }
  xd->global_motion = cm->global_motion;
#endif  // CONFIG_GLOBAL_MOTION

  first_partition_size = read_uncompressed_header(
      pbi, init_read_bit_buffer(pbi, &rb, data, data_end, clear_data));

#if CONFIG_EXT_TILE
  // If cm->single_tile_decoding = 0, the independent decoding of a single tile
  // or a section of a frame is not allowed.
  if (!cm->single_tile_decoding &&
      (pbi->dec_tile_row >= 0 || pbi->dec_tile_col >= 0)) {
    pbi->dec_tile_row = -1;
    pbi->dec_tile_col = -1;
  }
#endif  // CONFIG_EXT_TILE

  pbi->first_partition_size = first_partition_size;
  pbi->uncomp_hdr_size = aom_rb_bytes_read(&rb);
  new_fb = get_frame_new_buffer(cm);
  xd->cur_buf = new_fb;
#if CONFIG_INTRABC
#if CONFIG_HIGHBITDEPTH
  av1_setup_scale_factors_for_frame(
      &xd->sf_identity, xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
      xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
      cm->use_highbitdepth);
#else
  av1_setup_scale_factors_for_frame(
      &xd->sf_identity, xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height,
      xd->cur_buf->y_crop_width, xd->cur_buf->y_crop_height);
#endif  // CONFIG_HIGHBITDEPTH
#endif  // CONFIG_INTRABC

  if (cm->show_existing_frame) {
    // showing a frame directly
    *p_data_end = data + aom_rb_bytes_read(&rb);
    return 0;
  }
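
  // For show_existing_frame, the uncompressed header is all this function
  // consumes: *p_data_end is set just past it and 0 is returned, so no
  // compressed header or tile data is parsed for this frame.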

  data += aom_rb_bytes_read(&rb);
  if (first_partition_size)
    if (!read_is_valid(data, first_partition_size, data_end))
      aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                         "Truncated packet or corrupt header length");

  cm->setup_mi(cm);

  // NOTE(zoeliu): Since cm->prev_frame can be neither a frame with
  // show_existing_frame=1 nor a frame that is not used as a reference, it is
  // possible that, by the time it is referred to here, the frame buffer it
  // originally pointed to has already expired and been reassigned to the
  // current, newly coded frame. Hence, we need to check whether this is the
  // case, and if so, we have two choices:
  // (1) simply disable the use of previous-frame MVs; or
  // (2) have cm->prev_frame point to one of the reference frame buffers,
  //     e.g. LAST_FRAME.
  if (!dec_is_ref_frame_buf(pbi, cm->prev_frame)) {
    // Reassign the LAST_FRAME buffer to cm->prev_frame.
    cm->prev_frame = last_fb_ref_buf->idx != INVALID_IDX
                         ? &cm->buffer_pool->frame_bufs[last_fb_ref_buf->idx]
                         : NULL;
  }

#if CONFIG_TEMPMV_SIGNALING
  if (cm->use_prev_frame_mvs && !frame_can_use_prev_frame_mvs(cm)) {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Frame wrongly requests previous frame MVs");
  }
#else
  cm->use_prev_frame_mvs = !cm->error_resilient_mode && cm->prev_frame &&
#if CONFIG_FRAME_SUPERRES
                           cm->width == cm->last_width &&
                           cm->height == cm->last_height &&
#else
                           cm->width == cm->prev_frame->buf.y_crop_width &&
                           cm->height == cm->prev_frame->buf.y_crop_height &&
#endif  // CONFIG_FRAME_SUPERRES
                           !cm->last_intra_only && cm->last_show_frame &&
                           (cm->last_frame_type != KEY_FRAME);
#endif  // CONFIG_TEMPMV_SIGNALING

#if CONFIG_MFMV
  av1_setup_motion_field(cm);
#endif  // CONFIG_MFMV

  av1_setup_block_planes(xd, cm->subsampling_x, cm->subsampling_y);
#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
  if (cm->error_resilient_mode || frame_is_intra_only(cm)) {
    // use the default frame context values
    *cm->fc = cm->frame_contexts[FRAME_CONTEXT_DEFAULTS];
    cm->pre_fc = &cm->frame_contexts[FRAME_CONTEXT_DEFAULTS];
  } else {
    *cm->fc = cm->frame_contexts[cm->frame_refs[0].idx];
    cm->pre_fc = &cm->frame_contexts[cm->frame_refs[0].idx];
  }
#else
  *cm->fc = cm->frame_contexts[cm->frame_context_idx];
  cm->pre_fc = &cm->frame_contexts[cm->frame_context_idx];
#endif  // CONFIG_NO_FRAME_CONTEXT_SIGNALING
  if (!cm->fc->initialized)
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Uninitialized entropy context.");

  av1_zero(cm->counts);

  xd->corrupted = 0;
  if (first_partition_size) {
    new_fb->corrupted = read_compressed_header(pbi, data, first_partition_size);
    if (new_fb->corrupted)
      aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                         "Decode failed. Frame data header is corrupted.");
  }
  return first_partition_size;
}

void av1_decode_tg_tiles_and_wrapup(AV1Decoder *pbi, const uint8_t *data,
                                    const uint8_t *data_end,
                                    const uint8_t **p_data_end, int startTile,
                                    int endTile, int initialize_flag) {
  AV1_COMMON *const cm = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;
  int context_updated = 0;

#if CONFIG_LOOP_RESTORATION
  if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
    av1_alloc_restoration_buffers(cm);
  }
#endif

#if !CONFIG_LOOPFILTER_LEVEL
  if (cm->lf.filter_level && !cm->skip_loop_filter) {
    av1_loop_filter_frame_init(cm, cm->lf.filter_level, cm->lf.filter_level);
  }
#endif

  // If encoded in frame parallel mode, frame context is ready after decoding
  // the frame header.
  if (cm->frame_parallel_decode && initialize_flag &&
      cm->refresh_frame_context != REFRESH_FRAME_CONTEXT_BACKWARD) {
    AVxWorker *const worker = pbi->frame_worker_owner;
    FrameWorkerData *const frame_worker_data = worker->data1;
    if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD) {
      context_updated = 1;
#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
      cm->frame_contexts[cm->new_fb_idx] = *cm->fc;
#else
      cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
#endif  // CONFIG_NO_FRAME_CONTEXT_SIGNALING
    }
    av1_frameworker_lock_stats(worker);
    pbi->cur_buf->row = -1;
    pbi->cur_buf->col = -1;
    frame_worker_data->frame_context_ready = 1;
    // Signal the main thread that context is ready.
    av1_frameworker_signal_stats(worker);
    av1_frameworker_unlock_stats(worker);
  }

  dec_setup_frame_boundary_info(cm);

#if CONFIG_OBU
  *p_data_end = decode_tiles(pbi, data, data_end, startTile, endTile);
#else
  *p_data_end =
      decode_tiles(pbi, data + pbi->uncomp_hdr_size + pbi->first_partition_size,
                   data_end, startTile, endTile);
#endif

  if (endTile != cm->tile_rows * cm->tile_cols - 1) {
    return;
  }
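
  // The post-decode stages below (loop-restoration boundary saving, CDEF,
  // superres upscaling, loop restoration, backward context adaptation) run
  // only once the final tile group of the frame has arrived; earlier tile
  // groups return above after their tiles are decoded.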

#if CONFIG_STRIPED_LOOP_RESTORATION
  if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
    av1_loop_restoration_save_boundary_lines(&pbi->cur_buf->buf, cm);
  }
#endif

#if CONFIG_CDEF
  if (!cm->skip_loop_filter && !cm->all_lossless) {
    av1_cdef_frame(&pbi->cur_buf->buf, cm, &pbi->mb);
  }
#endif  // CONFIG_CDEF

#if CONFIG_FRAME_SUPERRES
  superres_post_decode(pbi);
#endif  // CONFIG_FRAME_SUPERRES

#if CONFIG_LOOP_RESTORATION
  if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
      cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
    aom_extend_frame_borders((YV12_BUFFER_CONFIG *)xd->cur_buf);
    av1_loop_restoration_filter_frame((YV12_BUFFER_CONFIG *)xd->cur_buf, cm,
                                      cm->rst_info, 7, NULL);
  }
#endif  // CONFIG_LOOP_RESTORATION

  if (!xd->corrupted) {
    if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
#if CONFIG_SIMPLE_BWD_ADAPT
      const int num_bwd_ctxs = 1;
#else
      const int num_bwd_ctxs = cm->tile_rows * cm->tile_cols;
#endif
      FRAME_CONTEXT **tile_ctxs =
          aom_malloc(num_bwd_ctxs * sizeof(&pbi->tile_data[0].tctx));
      aom_cdf_prob **cdf_ptrs = aom_malloc(
          num_bwd_ctxs * sizeof(&pbi->tile_data[0].tctx.partition_cdf[0][0]));
#if CONFIG_SIMPLE_BWD_ADAPT
      make_update_tile_list_dec(pbi, cm->largest_tile_id, num_bwd_ctxs,
                                tile_ctxs);
#else
      make_update_tile_list_dec(pbi, 0, num_bwd_ctxs, tile_ctxs);
#endif
#if CONFIG_LV_MAP
      av1_adapt_coef_probs(cm);
#endif  // CONFIG_LV_MAP
#if CONFIG_SYMBOLRATE
      av1_dump_symbol_rate(cm);
#endif
      av1_adapt_intra_frame_probs(cm);
      av1_average_tile_coef_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
                                 num_bwd_ctxs);
      av1_average_tile_intra_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
                                  num_bwd_ctxs);
      av1_average_tile_loopfilter_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
                                       num_bwd_ctxs);
#if CONFIG_ADAPT_SCAN
      av1_adapt_scan_order(cm);
#endif  // CONFIG_ADAPT_SCAN

      if (!frame_is_intra_only(cm)) {
        av1_adapt_inter_frame_probs(cm);
#if !CONFIG_NEW_MULTISYMBOL
        av1_adapt_mv_probs(cm, cm->allow_high_precision_mv);
#endif
        av1_average_tile_inter_cdfs(&pbi->common, pbi->common.fc, tile_ctxs,
                                    cdf_ptrs, num_bwd_ctxs);
        av1_average_tile_mv_cdfs(pbi->common.fc, tile_ctxs, cdf_ptrs,
                                 num_bwd_ctxs);
      }
      aom_free(tile_ctxs);
      aom_free(cdf_ptrs);
    } else {
      debug_check_frame_counts(cm);
    }
  } else {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "Decode failed. Frame data is corrupted.");
  }

#if CONFIG_INSPECTION
  if (pbi->inspect_cb != NULL) {
    (*pbi->inspect_cb)(pbi, pbi->inspect_ctx);
  }
#endif

// Non-frame-parallel case: update the frame context here.
#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
  if (!context_updated) cm->frame_contexts[cm->new_fb_idx] = *cm->fc;
#else
  if (!cm->error_resilient_mode && !context_updated)
    cm->frame_contexts[cm->frame_context_idx] = *cm->fc;
#endif
}

#if CONFIG_OBU

static OBU_TYPE read_obu_header(struct aom_read_bit_buffer *rb,
                                uint32_t *header_size) {
  OBU_TYPE obu_type;
  int obu_extension_flag;

  *header_size = 1;

  obu_type = (OBU_TYPE)aom_rb_read_literal(rb, 5);
  aom_rb_read_literal(rb, 2);  // reserved
  obu_extension_flag = aom_rb_read_bit(rb);
  if (obu_extension_flag) {
    *header_size += 1;
    aom_rb_read_literal(rb, 3);  // temporal_id
    aom_rb_read_literal(rb, 2);
    aom_rb_read_literal(rb, 2);
    aom_rb_read_literal(rb, 1);  // reserved
  }

  return obu_type;
}

static uint32_t read_temporal_delimiter_obu() { return 0; }

static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
                                         struct aom_read_bit_buffer *rb) {
  AV1_COMMON *const cm = &pbi->common;
  SequenceHeader *const seq_params = &cm->seq_params;
  uint32_t saved_bit_offset = rb->bit_offset;

  cm->profile = av1_read_profile(rb);
  aom_rb_read_literal(rb, 4);  // level

  seq_params->frame_id_numbers_present_flag = aom_rb_read_bit(rb);
  if (seq_params->frame_id_numbers_present_flag) {
    // We must always have delta_frame_id_length < frame_id_length,
    // in order for a frame to be referenced with a unique delta.
    // Avoid wasting bits by using a coding that enforces this restriction.
    seq_params->delta_frame_id_length = aom_rb_read_literal(rb, 4) + 2;
    seq_params->frame_id_length =
        aom_rb_read_literal(rb, 3) + seq_params->delta_frame_id_length + 1;
  }
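  // Worked example: coded values 5 (4 bits) and 7 (3 bits) give
  // delta_frame_id_length = 5 + 2 = 7 and frame_id_length = 7 + 7 + 1 = 15,
  // so delta_frame_id_length < frame_id_length holds by construction.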

  read_bitdepth_colorspace_sampling(cm, rb, pbi->allow_lowbitdepth);

  return ((rb->bit_offset - saved_bit_offset + 7) >> 3);
}

static uint32_t read_frame_header_obu(AV1Decoder *pbi, const uint8_t *data,
                                      const uint8_t *data_end,
                                      const uint8_t **p_data_end) {
  size_t header_size;

  header_size =
      av1_decode_frame_headers_and_setup(pbi, data, data_end, p_data_end);
  return (uint32_t)(pbi->uncomp_hdr_size + header_size);
}

static uint32_t read_tile_group_header(AV1Decoder *pbi,
                                       struct aom_read_bit_buffer *rb,
                                       int *startTile, int *endTile) {
  AV1_COMMON *const cm = &pbi->common;
  uint32_t saved_bit_offset = rb->bit_offset;

  *startTile = aom_rb_read_literal(rb, cm->log2_tile_rows + cm->log2_tile_cols);
  *endTile = aom_rb_read_literal(rb, cm->log2_tile_rows + cm->log2_tile_cols);

  return ((rb->bit_offset - saved_bit_offset + 7) >> 3);
}

static uint32_t read_one_tile_group_obu(AV1Decoder *pbi,
                                        struct aom_read_bit_buffer *rb,
                                        int is_first_tg, const uint8_t *data,
                                        const uint8_t *data_end,
                                        const uint8_t **p_data_end,
                                        int *is_last_tg) {
  AV1_COMMON *const cm = &pbi->common;
  int startTile, endTile;
  uint32_t header_size, tg_payload_size;

  header_size = read_tile_group_header(pbi, rb, &startTile, &endTile);
  data += header_size;
  av1_decode_tg_tiles_and_wrapup(pbi, data, data_end, p_data_end, startTile,
                                 endTile, is_first_tg);
  tg_payload_size = (uint32_t)(*p_data_end - data);

  // TODO(shan): For now, assume all tile groups are received in order.
  *is_last_tg = endTile == cm->tile_rows * cm->tile_cols - 1;

  return header_size + tg_payload_size;
}

void av1_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
                                const uint8_t *data_end,
                                const uint8_t **p_data_end) {
  AV1_COMMON *const cm = &pbi->common;
  int frame_decoding_finished = 0;
  int is_first_tg_obu_received = 1;
  int frame_header_received = 0;
  int frame_header_size = 0;

  // Decode the frame as a series of OBUs.
  while (!frame_decoding_finished && !cm->error.error_code) {
    struct aom_read_bit_buffer rb;
    uint8_t clear_data[80];
    uint32_t obu_size, obu_header_size, obu_payload_size = 0;
    OBU_TYPE obu_type;

    init_read_bit_buffer(pbi, &rb, data + 4, data_end, clear_data);

    // Every OBU is preceded by a 4-byte size field (OBU header + payload
    // size). The OBU size is only needed for tile group OBUs.
    obu_size = mem_get_le32(data);
    obu_type = read_obu_header(&rb, &obu_header_size);
    data += (4 + obu_header_size);
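    // Example (hypothetical bytes): a little-endian size prefix of
    // 0x15 0x00 0x00 0x00 gives obu_size = 21; with a 1-byte OBU header the
    // payload following this point is 20 bytes.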

    switch (obu_type) {
      case OBU_TD: obu_payload_size = read_temporal_delimiter_obu(); break;
      case OBU_SEQUENCE_HEADER:
        obu_payload_size = read_sequence_header_obu(pbi, &rb);
        break;
      case OBU_FRAME_HEADER:
        // Only decode first frame header received
        if (!frame_header_received) {
          frame_header_size = obu_payload_size =
              read_frame_header_obu(pbi, data, data_end, p_data_end);
          frame_header_received = 1;
        } else {
          obu_payload_size = frame_header_size;
        }
        if (cm->show_existing_frame) frame_decoding_finished = 1;
        break;
      case OBU_TILE_GROUP:
        obu_payload_size = read_one_tile_group_obu(
            pbi, &rb, is_first_tg_obu_received, data, data + obu_size - 1,
            p_data_end, &frame_decoding_finished);
        is_first_tg_obu_received = 0;
        break;
      default: break;
    }
    data += obu_payload_size;
  }
}
#endif