/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>

#include "av1/common/common.h"
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/mvref_common.h"
#include "av1/common/pred_common.h"
#include "av1/common/reconinter.h"
#if CONFIG_EXT_INTRA
#include "av1/common/reconintra.h"
#endif  // CONFIG_EXT_INTRA
#include "av1/common/seg_common.h"
#if CONFIG_WARPED_MOTION
#include "av1/common/warped_motion.h"
#endif  // CONFIG_WARPED_MOTION

#include "av1/decoder/decodeframe.h"
#include "av1/decoder/decodemv.h"

#include "aom_dsp/aom_dsp_common.h"

#define ACCT_STR __func__

#define DEC_MISMATCH_DEBUG 0

#if CONFIG_EXT_INTRA || CONFIG_FILTER_INTRA || CONFIG_PALETTE
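// Decodes a symbol in [0, n) with the quasi-uniform code: the first
// (1 << l) - n values use l - 1 raw bits, the remaining values use one
// extra bit, where l is the number of bits needed to represent n.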
static INLINE int read_uniform(aom_reader *r, int n) {
  const int l = get_unsigned_bits(n);
  const int m = (1 << l) - n;
  const int v = aom_read_literal(r, l - 1, ACCT_STR);
  assert(l != 0);
  if (v < m)
    return v;
  else
    return (v << 1) - m + aom_read_literal(r, 1, ACCT_STR);
}
#endif  // CONFIG_EXT_INTRA || CONFIG_FILTER_INTRA || CONFIG_PALETTE

static PREDICTION_MODE read_intra_mode(aom_reader *r, aom_cdf_prob *cdf) {
  return (PREDICTION_MODE)
      av1_intra_mode_inv[aom_read_symbol(r, cdf, INTRA_MODES, ACCT_STR)];
}

#if CONFIG_DELTA_Q
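// Reads the per-superblock quantizer index delta: a small magnitude is coded
// with delta_q_cdf, larger magnitudes escape to a literal, and a sign bit
// follows any nonzero magnitude.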
static int read_delta_qindex(AV1_COMMON *cm, MACROBLOCKD *xd, aom_reader *r,
                             MB_MODE_INFO *const mbmi, int mi_col, int mi_row) {
  FRAME_COUNTS *counts = xd->counts;
  int sign, abs, reduced_delta_qindex = 0;
  BLOCK_SIZE bsize = mbmi->sb_type;
  const int b_col = mi_col & MAX_MIB_MASK;
  const int b_row = mi_row & MAX_MIB_MASK;
  const int read_delta_q_flag = (b_col == 0 && b_row == 0);
  int rem_bits, thr;
  int i, smallval;
#if CONFIG_EC_ADAPT
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
#else
  FRAME_CONTEXT *ec_ctx = cm->fc;
#endif

  if ((bsize != BLOCK_LARGEST || mbmi->skip == 0) && read_delta_q_flag) {
    abs = aom_read_symbol(r, ec_ctx->delta_q_cdf, DELTA_Q_PROBS + 1, ACCT_STR);
    smallval = (abs < DELTA_Q_SMALL);
    if (counts) {
      for (i = 0; i < abs; ++i) counts->delta_q[i][1]++;
      if (smallval) counts->delta_q[abs][0]++;
    }

    if (!smallval) {
      rem_bits = aom_read_literal(r, 3, ACCT_STR);
      thr = (1 << rem_bits) + 1;
      abs = aom_read_literal(r, rem_bits, ACCT_STR) + thr;
    }

    if (abs) {
      sign = aom_read_bit(r, ACCT_STR);
    } else {
      sign = 1;
    }

    reduced_delta_qindex = sign ? -abs : abs;
  }
  return reduced_delta_qindex;
}
#if CONFIG_EXT_DELTA_Q
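// Same coding scheme as read_delta_qindex, but for the per-superblock loop
// filter level delta.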
static int read_delta_lflevel(AV1_COMMON *cm, MACROBLOCKD *xd, aom_reader *r,
                              MB_MODE_INFO *const mbmi, int mi_col,
                              int mi_row) {
  FRAME_COUNTS *counts = xd->counts;
  int sign, abs, reduced_delta_lflevel = 0;
  BLOCK_SIZE bsize = mbmi->sb_type;
  const int b_col = mi_col & MAX_MIB_MASK;
  const int b_row = mi_row & MAX_MIB_MASK;
  const int read_delta_lf_flag = (b_col == 0 && b_row == 0);
  int rem_bits, thr;
  int i, smallval;
#if CONFIG_EC_ADAPT
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
#else
  FRAME_CONTEXT *ec_ctx = cm->fc;
#endif

  if ((bsize != BLOCK_64X64 || mbmi->skip == 0) && read_delta_lf_flag) {
    abs =
        aom_read_symbol(r, ec_ctx->delta_lf_cdf, DELTA_LF_PROBS + 1, ACCT_STR);
    smallval = (abs < DELTA_LF_SMALL);
    if (counts) {
      for (i = 0; i < abs; ++i) counts->delta_lf[i][1]++;
      if (smallval) counts->delta_lf[abs][0]++;
    }
    if (!smallval) {
      rem_bits = aom_read_literal(r, 3, ACCT_STR);
      thr = (1 << rem_bits) + 1;
      abs = aom_read_literal(r, rem_bits, ACCT_STR) + thr;
    }

    if (abs) {
      sign = aom_read_bit(r, ACCT_STR);
    } else {
      sign = 1;
    }

    reduced_delta_lflevel = sign ? -abs : abs;
  }
  return reduced_delta_lflevel;
}
#endif
#endif

static PREDICTION_MODE read_intra_mode_y(FRAME_CONTEXT *ec_ctx, MACROBLOCKD *xd,
                                         aom_reader *r, int size_group) {
  const PREDICTION_MODE y_mode =
      read_intra_mode(r, ec_ctx->y_mode_cdf[size_group]);
#if !CONFIG_EC_ADAPT || CONFIG_ENTROPY_STATS
  FRAME_COUNTS *counts = xd->counts;
  if (counts) ++counts->y_mode[size_group][y_mode];
#else
  /* TODO(negge): Can we remove this parameter? */
  (void)xd;
#endif  // !CONFIG_EC_ADAPT || CONFIG_ENTROPY_STATS
  return y_mode;
}

static PREDICTION_MODE read_intra_mode_uv(FRAME_CONTEXT *ec_ctx,
                                          MACROBLOCKD *xd, aom_reader *r,
                                          PREDICTION_MODE y_mode) {
  const PREDICTION_MODE uv_mode =
      read_intra_mode(r, ec_ctx->uv_mode_cdf[y_mode]);
#if !CONFIG_EC_ADAPT || CONFIG_ENTROPY_STATS
  FRAME_COUNTS *counts = xd->counts;
  if (counts) ++counts->uv_mode[y_mode][uv_mode];
#else
  /* TODO(negge): Can we remove this parameter? */
  (void)xd;
#endif  // !CONFIG_EC_ADAPT || CONFIG_ENTROPY_STATS
  return uv_mode;
}

#if CONFIG_CFL
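// Reads the CFL alpha codebook index, plus one sign bit per plane whose code
// entry is nonzero (sign 0 means negative alpha, sign 1 positive).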
static int read_cfl_alphas(FRAME_CONTEXT *const ec_ctx, aom_reader *r,
                           CFL_SIGN_TYPE signs_out[CFL_PRED_PLANES]) {
  const int ind =
      aom_read_symbol(r, ec_ctx->cfl_alpha_cdf, CFL_ALPHABET_SIZE, "cfl:alpha");
  // Signs are only coded for nonzero values
  // sign == 0 implies negative alpha
  // sign == 1 implies positive alpha
  signs_out[CFL_PRED_U] = cfl_alpha_codes[ind][CFL_PRED_U]
                              ? aom_read_bit(r, "cfl:sign")
                              : CFL_SIGN_POS;
  signs_out[CFL_PRED_V] = cfl_alpha_codes[ind][CFL_PRED_V]
                              ? aom_read_bit(r, "cfl:sign")
                              : CFL_SIGN_POS;

  return ind;
}
#endif

#if CONFIG_EXT_INTER && CONFIG_INTERINTRA
static INTERINTRA_MODE read_interintra_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
                                            aom_reader *r, int size_group) {
  const INTERINTRA_MODE ii_mode = (INTERINTRA_MODE)aom_read_tree(
      r, av1_interintra_mode_tree, cm->fc->interintra_mode_prob[size_group],
      ACCT_STR);
  FRAME_COUNTS *counts = xd->counts;
  if (counts) ++counts->interintra_mode[size_group][ii_mode];
  return ii_mode;
}
#endif  // CONFIG_EXT_INTER && CONFIG_INTERINTRA

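// Decodes the single-reference inter mode as a cascade of binary decisions
// driven by the mode context: NEWMV first, then ZEROMV, then NEARESTMV
// versus NEARMV.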
static PREDICTION_MODE read_inter_mode(FRAME_CONTEXT *ec_ctx, MACROBLOCKD *xd,
                                       aom_reader *r, int16_t ctx) {
  FRAME_COUNTS *counts = xd->counts;
  int16_t mode_ctx = ctx & NEWMV_CTX_MASK;
  int is_newmv, is_zeromv, is_refmv;
#if CONFIG_NEW_MULTISYMBOL
  is_newmv = aom_read_symbol(r, ec_ctx->newmv_cdf[mode_ctx], 2, ACCT_STR) == 0;
#else
  is_newmv = aom_read(r, ec_ctx->newmv_prob[mode_ctx], ACCT_STR) == 0;
#endif

  if (is_newmv) {
    if (counts) ++counts->newmv_mode[mode_ctx][0];
    return NEWMV;
  }
  if (counts) ++counts->newmv_mode[mode_ctx][1];

  if (ctx & (1 << ALL_ZERO_FLAG_OFFSET)) return ZEROMV;

  mode_ctx = (ctx >> ZEROMV_OFFSET) & ZEROMV_CTX_MASK;

#if CONFIG_NEW_MULTISYMBOL
  is_zeromv =
      aom_read_symbol(r, ec_ctx->zeromv_cdf[mode_ctx], 2, ACCT_STR) == 0;
#else
  is_zeromv = aom_read(r, ec_ctx->zeromv_prob[mode_ctx], ACCT_STR) == 0;
#endif
  if (is_zeromv) {
    if (counts) ++counts->zeromv_mode[mode_ctx][0];
    return ZEROMV;
  }
  if (counts) ++counts->zeromv_mode[mode_ctx][1];

  mode_ctx = (ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;

  if (ctx & (1 << SKIP_NEARESTMV_OFFSET)) mode_ctx = 6;
  if (ctx & (1 << SKIP_NEARMV_OFFSET)) mode_ctx = 7;
  if (ctx & (1 << SKIP_NEARESTMV_SUB8X8_OFFSET)) mode_ctx = 8;

#if CONFIG_NEW_MULTISYMBOL
  is_refmv = aom_read_symbol(r, ec_ctx->refmv_cdf[mode_ctx], 2, ACCT_STR) == 0;
#else
  is_refmv = aom_read(r, ec_ctx->refmv_prob[mode_ctx], ACCT_STR) == 0;
#endif

  if (is_refmv) {
    if (counts) ++counts->refmv_mode[mode_ctx][0];

    return NEARESTMV;
  } else {
    if (counts) ++counts->refmv_mode[mode_ctx][1];
    return NEARMV;
  }

  // Invalid prediction mode.
  assert(0);
}

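// Reads the dynamic reference list (DRL) index that selects a candidate from
// the reference MV stack; NEWMV-type and NEARMV-type modes may walk up to two
// additional candidates.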
static void read_drl_idx(FRAME_CONTEXT *ec_ctx, MACROBLOCKD *xd,
                         MB_MODE_INFO *mbmi, aom_reader *r) {
  uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);
  mbmi->ref_mv_idx = 0;

#if CONFIG_EXT_INTER
#if CONFIG_COMPOUND_SINGLEREF
  if (mbmi->mode == NEWMV || mbmi->mode == NEW_NEWMV ||
      mbmi->mode == SR_NEW_NEWMV) {
#else  // !CONFIG_COMPOUND_SINGLEREF
  if (mbmi->mode == NEWMV || mbmi->mode == NEW_NEWMV) {
#endif  // CONFIG_COMPOUND_SINGLEREF
#else  // !CONFIG_EXT_INTER
  if (mbmi->mode == NEWMV) {
#endif  // CONFIG_EXT_INTER
    int idx;
    for (idx = 0; idx < 2; ++idx) {
      if (xd->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx = av1_drl_ctx(xd->ref_mv_stack[ref_frame_type], idx);
#if CONFIG_NEW_MULTISYMBOL
        int drl_idx = aom_read_symbol(r, ec_ctx->drl_cdf[drl_ctx], 2, ACCT_STR);
#else
        int drl_idx = aom_read(r, ec_ctx->drl_prob[drl_ctx], ACCT_STR);
#endif
        mbmi->ref_mv_idx = idx + drl_idx;
        if (xd->counts) ++xd->counts->drl_mode[drl_ctx][drl_idx];
        if (!drl_idx) return;
      }
    }
  }

  if (have_nearmv_in_inter_mode(mbmi->mode)) {
    int idx;
    // Offset the NEARESTMV mode.
    // TODO(jingning): Unify the two syntax decoding loops after the NEARESTMV
    // mode is factored in.
    for (idx = 1; idx < 3; ++idx) {
      if (xd->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx = av1_drl_ctx(xd->ref_mv_stack[ref_frame_type], idx);
#if CONFIG_NEW_MULTISYMBOL
        int drl_idx = aom_read_symbol(r, ec_ctx->drl_cdf[drl_ctx], 2, ACCT_STR);
#else
        int drl_idx = aom_read(r, ec_ctx->drl_prob[drl_ctx], ACCT_STR);
#endif
        mbmi->ref_mv_idx = idx + drl_idx - 1;
        if (xd->counts) ++xd->counts->drl_mode[drl_ctx][drl_idx];
        if (!drl_idx) return;
      }
    }
  }
}

#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
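// Reads the block's motion mode: returns SIMPLE_TRANSLATION directly when no
// other mode is allowed, reads a single OBMC flag when OBMC is the only
// alternative, and otherwise reads a motion-mode tree symbol.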
static MOTION_MODE read_motion_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
                                    MODE_INFO *mi, aom_reader *r) {
  MB_MODE_INFO *mbmi = &mi->mbmi;
  const MOTION_MODE last_motion_mode_allowed = motion_mode_allowed(
#if CONFIG_GLOBAL_MOTION && SEPARATE_GLOBAL_MOTION
      0, xd->global_motion,
#endif  // CONFIG_GLOBAL_MOTION && SEPARATE_GLOBAL_MOTION
      mi);
  int motion_mode;
  FRAME_COUNTS *counts = xd->counts;

  if (last_motion_mode_allowed == SIMPLE_TRANSLATION) return SIMPLE_TRANSLATION;
#if CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
  if (last_motion_mode_allowed == OBMC_CAUSAL) {
    motion_mode = aom_read(r, cm->fc->obmc_prob[mbmi->sb_type], ACCT_STR);
    if (counts) ++counts->obmc[mbmi->sb_type][motion_mode];
    return (MOTION_MODE)(SIMPLE_TRANSLATION + motion_mode);
  } else {
#endif  // CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
    motion_mode =
        aom_read_tree(r, av1_motion_mode_tree,
                      cm->fc->motion_mode_prob[mbmi->sb_type], ACCT_STR);
    if (counts) ++counts->motion_mode[mbmi->sb_type][motion_mode];
    return (MOTION_MODE)(SIMPLE_TRANSLATION + motion_mode);
#if CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
  }
#endif  // CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
}

#if CONFIG_NCOBMC_ADAPT_WEIGHT
static void read_ncobmc_mode(AV1_COMMON *cm, MACROBLOCKD *xd, MODE_INFO *mi,
                             NCOBMC_MODE ncobmc_mode[2], aom_reader *r) {
  MB_MODE_INFO *mbmi = &mi->mbmi;
  FRAME_COUNTS *counts = xd->counts;
  ADAPT_OVERLAP_BLOCK ao_block = adapt_overlap_block_lookup[mbmi->sb_type];

  if (ncobmc_mode_allowed(mbmi->sb_type) == NO_OVERLAP ||
      ao_block == ADAPT_OVERLAP_BLOCK_INVALID)
    return;

  ncobmc_mode[0] = aom_read_tree(r, av1_ncobmc_mode_tree,
                                 cm->fc->ncobmc_mode_prob[ao_block], ACCT_STR);
  if (counts) ++counts->ncobmc_mode[ao_block][ncobmc_mode[0]];

  if (mi_size_wide[mbmi->sb_type] != mi_size_high[mbmi->sb_type]) {
    ncobmc_mode[1] = aom_read_tree(
        r, av1_ncobmc_mode_tree, cm->fc->ncobmc_mode_prob[ao_block], ACCT_STR);
    if (counts) ++counts->ncobmc_mode[ao_block][ncobmc_mode[1]];
  }
}
#endif
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION

#if CONFIG_EXT_INTER
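// Decodes a compound (two-reference) inter prediction mode as an offset from
// NEAREST_NEARESTMV using the inter_compound_mode tree.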
static PREDICTION_MODE read_inter_compound_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
                                                aom_reader *r, int16_t ctx) {
  const int mode =
      aom_read_tree(r, av1_inter_compound_mode_tree,
                    cm->fc->inter_compound_mode_probs[ctx], ACCT_STR);
  FRAME_COUNTS *counts = xd->counts;

  if (counts) ++counts->inter_compound_mode[ctx][mode];

  assert(is_inter_compound_mode(NEAREST_NEARESTMV + mode));
  return NEAREST_NEARESTMV + mode;
}

#if CONFIG_COMPOUND_SINGLEREF
static PREDICTION_MODE read_inter_singleref_comp_mode(AV1_COMMON *cm,
                                                      MACROBLOCKD *xd,
                                                      aom_reader *r,
                                                      int16_t ctx) {
  const int mode =
      aom_read_tree(r, av1_inter_singleref_comp_mode_tree,
                    cm->fc->inter_singleref_comp_mode_probs[ctx], ACCT_STR);
  FRAME_COUNTS *counts = xd->counts;

  if (counts) ++counts->inter_singleref_comp_mode[ctx][mode];

  assert(is_inter_singleref_comp_mode(SR_NEAREST_NEARMV + mode));
  return SR_NEAREST_NEARMV + mode;
}
#endif  // CONFIG_COMPOUND_SINGLEREF
#endif  // CONFIG_EXT_INTER

static int read_segment_id(aom_reader *r, struct segmentation_probs *segp) {
  return aom_read_symbol(r, segp->tree_cdf, MAX_SEGMENTS, ACCT_STR);
}

#if CONFIG_VAR_TX
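// Recursively reads the transform partition tree for an inter block: at each
// node the current tx_size is either kept for the whole region or split into
// four children one depth level down.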
static void read_tx_size_vartx(AV1_COMMON *cm, MACROBLOCKD *xd,
                               MB_MODE_INFO *mbmi, FRAME_COUNTS *counts,
                               TX_SIZE tx_size, int depth, int blk_row,
                               int blk_col, aom_reader *r) {
  int is_split = 0;
  const int tx_row = blk_row >> 1;
  const int tx_col = blk_col >> 1;
  const int max_blocks_high = max_block_high(xd, mbmi->sb_type, 0);
  const int max_blocks_wide = max_block_wide(xd, mbmi->sb_type, 0);
  int ctx = txfm_partition_context(xd->above_txfm_context + blk_col,
                                   xd->left_txfm_context + blk_row,
                                   mbmi->sb_type, tx_size);
  TX_SIZE(*const inter_tx_size)
  [MAX_MIB_SIZE] =
      (TX_SIZE(*)[MAX_MIB_SIZE]) & mbmi->inter_tx_size[tx_row][tx_col];
  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (depth == MAX_VARTX_DEPTH) {
    int idx, idy;
    inter_tx_size[0][0] = tx_size;
    for (idy = 0; idy < tx_size_high_unit[tx_size] / 2; ++idy)
      for (idx = 0; idx < tx_size_wide_unit[tx_size] / 2; ++idx)
        inter_tx_size[idy][idx] = tx_size;
    mbmi->tx_size = tx_size;
    mbmi->min_tx_size = AOMMIN(mbmi->min_tx_size, get_min_tx_size(tx_size));
    if (counts) ++counts->txfm_partition[ctx][0];
    txfm_partition_update(xd->above_txfm_context + blk_col,
                          xd->left_txfm_context + blk_row, tx_size, tx_size);
    return;
  }

  is_split = aom_read(r, cm->fc->txfm_partition_prob[ctx], ACCT_STR);

  if (is_split) {
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    const int bsl = tx_size_wide_unit[sub_txs];
    int i;

    if (counts) ++counts->txfm_partition[ctx][1];

    if (tx_size == TX_8X8) {
      int idx, idy;
      inter_tx_size[0][0] = sub_txs;
      for (idy = 0; idy < tx_size_high_unit[tx_size] / 2; ++idy)
        for (idx = 0; idx < tx_size_wide_unit[tx_size] / 2; ++idx)
          inter_tx_size[idy][idx] = inter_tx_size[0][0];
      mbmi->tx_size = sub_txs;
      mbmi->min_tx_size = get_min_tx_size(mbmi->tx_size);
      txfm_partition_update(xd->above_txfm_context + blk_col,
                            xd->left_txfm_context + blk_row, sub_txs, tx_size);
      return;
    }

    assert(bsl > 0);
    for (i = 0; i < 4; ++i) {
      int offsetr = blk_row + (i >> 1) * bsl;
      int offsetc = blk_col + (i & 0x01) * bsl;
      read_tx_size_vartx(cm, xd, mbmi, counts, sub_txs, depth + 1, offsetr,
                         offsetc, r);
    }
  } else {
    int idx, idy;
    inter_tx_size[0][0] = tx_size;
    for (idy = 0; idy < tx_size_high_unit[tx_size] / 2; ++idy)
      for (idx = 0; idx < tx_size_wide_unit[tx_size] / 2; ++idx)
        inter_tx_size[idy][idx] = tx_size;
    mbmi->tx_size = tx_size;
    mbmi->min_tx_size = AOMMIN(mbmi->min_tx_size, get_min_tx_size(tx_size));
    if (counts) ++counts->txfm_partition[ctx][0];
    txfm_partition_update(xd->above_txfm_context + blk_col,
                          xd->left_txfm_context + blk_row, tx_size, tx_size);
  }
}
#endif

static TX_SIZE read_selected_tx_size(AV1_COMMON *cm, MACROBLOCKD *xd,
                                     int tx_size_cat, aom_reader *r) {
  FRAME_COUNTS *counts = xd->counts;
  const int ctx = get_tx_size_context(xd);
#if CONFIG_EC_ADAPT
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
#else
  FRAME_CONTEXT *ec_ctx = cm->fc;
#endif

  const int depth = aom_read_symbol(r, ec_ctx->tx_size_cdf[tx_size_cat][ctx],
                                    tx_size_cat + 2, ACCT_STR);
  const TX_SIZE tx_size = depth_to_tx_size(depth);
#if CONFIG_RECT_TX
  assert(!is_rect_tx(tx_size));
#endif  // CONFIG_RECT_TX
  if (counts) ++counts->tx_size[tx_size_cat][ctx][depth];
  return tx_size;
}

static TX_SIZE read_tx_size(AV1_COMMON *cm, MACROBLOCKD *xd, int is_inter,
                            int allow_select_inter, aom_reader *r) {
  const TX_MODE tx_mode = cm->tx_mode;
  const BLOCK_SIZE bsize = xd->mi[0]->mbmi.sb_type;
  if (xd->lossless[xd->mi[0]->mbmi.segment_id]) return TX_4X4;
#if CONFIG_CB4X4 && (CONFIG_VAR_TX || CONFIG_EXT_TX) && CONFIG_RECT_TX
  if (bsize > BLOCK_4X4) {
#else
  if (bsize >= BLOCK_8X8) {
#endif  // CONFIG_CB4X4 && CONFIG_VAR_TX
    if ((!is_inter || allow_select_inter) && tx_mode == TX_MODE_SELECT) {
      const int32_t tx_size_cat = is_inter ? inter_tx_size_cat_lookup[bsize]
                                           : intra_tx_size_cat_lookup[bsize];
      const TX_SIZE coded_tx_size =
          read_selected_tx_size(cm, xd, tx_size_cat, r);
#if CONFIG_RECT_TX && (CONFIG_EXT_TX || CONFIG_VAR_TX)
      if (coded_tx_size > max_txsize_lookup[bsize]) {
        assert(coded_tx_size == max_txsize_lookup[bsize] + 1);
#if CONFIG_EXT_TX && CONFIG_RECT_TX_EXT
        if (is_quarter_tx_allowed(xd, &xd->mi[0]->mbmi, is_inter)) {
          int quarter_tx = aom_read(r, cm->fc->quarter_tx_size_prob, ACCT_STR);
          FRAME_COUNTS *counts = xd->counts;

          if (counts) ++counts->quarter_tx_size[quarter_tx];
          return quarter_tx ? quarter_txsize_lookup[bsize]
                            : max_txsize_rect_lookup[bsize];
        }
#endif  // CONFIG_EXT_TX && CONFIG_RECT_TX_EXT

        return max_txsize_rect_lookup[bsize];
      }
#else
      assert(coded_tx_size <= max_txsize_lookup[bsize]);
#endif  // CONFIG_RECT_TX && (CONFIG_EXT_TX || CONFIG_VAR_TX)
      return coded_tx_size;
    } else {
      return tx_size_from_tx_mode(bsize, tx_mode, is_inter);
    }
  } else {
#if CONFIG_EXT_TX && CONFIG_RECT_TX
    assert(IMPLIES(tx_mode == ONLY_4X4, bsize == BLOCK_4X4));
    return max_txsize_rect_lookup[bsize];
#else
    return TX_4X4;
#endif  // CONFIG_EXT_TX && CONFIG_RECT_TX
  }
}

static int dec_get_segment_id(const AV1_COMMON *cm, const uint8_t *segment_ids,
                              int mi_offset, int x_mis, int y_mis) {
  int x, y, segment_id = INT_MAX;

  for (y = 0; y < y_mis; y++)
    for (x = 0; x < x_mis; x++)
      segment_id =
          AOMMIN(segment_id, segment_ids[mi_offset + y * cm->mi_cols + x]);

  assert(segment_id >= 0 && segment_id < MAX_SEGMENTS);
  return segment_id;
}

static void set_segment_id(AV1_COMMON *cm, int mi_offset, int x_mis, int y_mis,
                           int segment_id) {
  int x, y;

  assert(segment_id >= 0 && segment_id < MAX_SEGMENTS);

  for (y = 0; y < y_mis; y++)
    for (x = 0; x < x_mis; x++)
      cm->current_frame_seg_map[mi_offset + y * cm->mi_cols + x] = segment_id;
}

static int read_intra_segment_id(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                 int mi_offset, int x_mis, int y_mis,
                                 aom_reader *r) {
  struct segmentation *const seg = &cm->seg;
  FRAME_COUNTS *counts = xd->counts;
  struct segmentation_probs *const segp = &cm->fc->seg;
  int segment_id;

  if (!seg->enabled) return 0;  // Default for disabled segmentation

  assert(seg->update_map && !seg->temporal_update);

  segment_id = read_segment_id(r, segp);
  if (counts) ++counts->seg.tree_total[segment_id];
  set_segment_id(cm, mi_offset, x_mis, y_mis, segment_id);
  return segment_id;
}

static void copy_segment_id(const AV1_COMMON *cm,
                            const uint8_t *last_segment_ids,
                            uint8_t *current_segment_ids, int mi_offset,
                            int x_mis, int y_mis) {
  int x, y;

  for (y = 0; y < y_mis; y++)
    for (x = 0; x < x_mis; x++)
      current_segment_ids[mi_offset + y * cm->mi_cols + x] =
          last_segment_ids ? last_segment_ids[mi_offset + y * cm->mi_cols + x]
                           : 0;
}

static int read_inter_segment_id(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                 int mi_row, int mi_col, aom_reader *r) {
  struct segmentation *const seg = &cm->seg;
  FRAME_COUNTS *counts = xd->counts;
  struct segmentation_probs *const segp = &cm->fc->seg;
  MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  int predicted_segment_id, segment_id;
  const int mi_offset = mi_row * cm->mi_cols + mi_col;
  const int bw = mi_size_wide[mbmi->sb_type];
  const int bh = mi_size_high[mbmi->sb_type];

  // TODO(slavarnway): move x_mis, y_mis into xd ?????
  const int x_mis = AOMMIN(cm->mi_cols - mi_col, bw);
  const int y_mis = AOMMIN(cm->mi_rows - mi_row, bh);

  if (!seg->enabled) return 0;  // Default for disabled segmentation

  predicted_segment_id = cm->last_frame_seg_map
                             ? dec_get_segment_id(cm, cm->last_frame_seg_map,
                                                  mi_offset, x_mis, y_mis)
                             : 0;

  if (!seg->update_map) {
    copy_segment_id(cm, cm->last_frame_seg_map, cm->current_frame_seg_map,
                    mi_offset, x_mis, y_mis);
    return predicted_segment_id;
  }

  if (seg->temporal_update) {
    const int ctx = av1_get_pred_context_seg_id(xd);
    const aom_prob pred_prob = segp->pred_probs[ctx];
    mbmi->seg_id_predicted = aom_read(r, pred_prob, ACCT_STR);
    if (counts) ++counts->seg.pred[ctx][mbmi->seg_id_predicted];
    if (mbmi->seg_id_predicted) {
      segment_id = predicted_segment_id;
    } else {
      segment_id = read_segment_id(r, segp);
      if (counts) ++counts->seg.tree_mispred[segment_id];
    }
  } else {
    segment_id = read_segment_id(r, segp);
    if (counts) ++counts->seg.tree_total[segment_id];
  }
  set_segment_id(cm, mi_offset, x_mis, y_mis, segment_id);
  return segment_id;
}

static int read_skip(AV1_COMMON *cm, const MACROBLOCKD *xd, int segment_id,
                     aom_reader *r) {
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int ctx = av1_get_skip_context(xd);
#if CONFIG_NEW_MULTISYMBOL
    FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
    const int skip = aom_read_symbol(r, ec_ctx->skip_cdfs[ctx], 2, ACCT_STR);
#else
    const int skip = aom_read(r, cm->fc->skip_probs[ctx], ACCT_STR);
#endif
    FRAME_COUNTS *counts = xd->counts;
    if (counts) ++counts->skip[ctx][skip];
    return skip;
  }
}

#if CONFIG_PALETTE
#if CONFIG_PALETTE_DELTA_ENCODING
static int uint16_compare(const void *a, const void *b) {
  const uint16_t va = *(const uint16_t *)a;
  const uint16_t vb = *(const uint16_t *)b;
  return va - vb;
}

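// Reads the luma palette: colors may be reused from the neighbor-derived
// color cache (one reuse bit each); the remaining entries are coded as a base
// literal followed by ascending deltas, and the result is sorted.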
static void read_palette_colors_y(MACROBLOCKD *const xd, int bit_depth,
                                  PALETTE_MODE_INFO *const pmi, aom_reader *r) {
  uint16_t color_cache[2 * PALETTE_MAX_SIZE];
  const MODE_INFO *const above_mi = xd->above_mi;
  const MODE_INFO *const left_mi = xd->left_mi;
  const int n_cache = av1_get_palette_cache(above_mi, left_mi, 0, color_cache);
  const int n = pmi->palette_size[0];
  int idx = 0;
  for (int i = 0; i < n_cache && idx < n; ++i)
    if (aom_read_bit(r, ACCT_STR)) pmi->palette_colors[idx++] = color_cache[i];
  if (idx < n) {
    pmi->palette_colors[idx++] = aom_read_literal(r, bit_depth, ACCT_STR);
    if (idx < n) {
      const int min_bits = bit_depth - 3;
      int bits = min_bits + aom_read_literal(r, 2, ACCT_STR);
      int range = (1 << bit_depth) - pmi->palette_colors[idx - 1] - 1;
      for (; idx < n; ++idx) {
        const int delta = aom_read_literal(r, bits, ACCT_STR) + 1;
        pmi->palette_colors[idx] = pmi->palette_colors[idx - 1] + delta;
        range -= delta;
        bits = AOMMIN(bits, av1_ceil_log2(range));
      }
    }
  }
  qsort(pmi->palette_colors, n, sizeof(pmi->palette_colors[0]), uint16_compare);
}

static void read_palette_colors_uv(MACROBLOCKD *const xd, int bit_depth,
                                   PALETTE_MODE_INFO *const pmi,
                                   aom_reader *r) {
  const int n = pmi->palette_size[1];
  // U channel colors.
  uint16_t color_cache[2 * PALETTE_MAX_SIZE];
  const MODE_INFO *const above_mi = xd->above_mi;
  const MODE_INFO *const left_mi = xd->left_mi;
  const int n_cache = av1_get_palette_cache(above_mi, left_mi, 1, color_cache);
  int idx = PALETTE_MAX_SIZE;
  for (int i = 0; i < n_cache && idx < PALETTE_MAX_SIZE + n; ++i)
    if (aom_read_bit(r, ACCT_STR)) pmi->palette_colors[idx++] = color_cache[i];
  if (idx < PALETTE_MAX_SIZE + n) {
    pmi->palette_colors[idx++] = aom_read_literal(r, bit_depth, ACCT_STR);
    if (idx < PALETTE_MAX_SIZE + n) {
      const int min_bits = bit_depth - 3;
      int bits = min_bits + aom_read_literal(r, 2, ACCT_STR);
      int range = (1 << bit_depth) - pmi->palette_colors[idx - 1];
      for (; idx < PALETTE_MAX_SIZE + n; ++idx) {
        const int delta = aom_read_literal(r, bits, ACCT_STR);
        pmi->palette_colors[idx] = pmi->palette_colors[idx - 1] + delta;
        range -= delta;
        bits = AOMMIN(bits, av1_ceil_log2(range));
      }
    }
  }
  qsort(pmi->palette_colors + PALETTE_MAX_SIZE, n,
        sizeof(pmi->palette_colors[0]), uint16_compare);

  // V channel colors.
  if (aom_read_bit(r, ACCT_STR)) {  // Delta encoding.
    const int min_bits_v = bit_depth - 4;
    const int max_val = 1 << bit_depth;
    int bits = min_bits_v + aom_read_literal(r, 2, ACCT_STR);
    pmi->palette_colors[2 * PALETTE_MAX_SIZE] =
        aom_read_literal(r, bit_depth, ACCT_STR);
    for (int i = 1; i < n; ++i) {
      int delta = aom_read_literal(r, bits, ACCT_STR);
      if (delta && aom_read_bit(r, ACCT_STR)) delta = -delta;
      int val = (int)pmi->palette_colors[2 * PALETTE_MAX_SIZE + i - 1] + delta;
      if (val < 0) val += max_val;
      if (val >= max_val) val -= max_val;
      pmi->palette_colors[2 * PALETTE_MAX_SIZE + i] = val;
    }
  } else {
    for (int i = 0; i < n; ++i) {
      pmi->palette_colors[2 * PALETTE_MAX_SIZE + i] =
          aom_read_literal(r, bit_depth, ACCT_STR);
    }
  }
}
#endif  // CONFIG_PALETTE_DELTA_ENCODING

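// Reads the palette signalling for DC_PRED blocks: a context-coded "use
// palette" flag, the palette size, the palette colors, and the color index of
// the top-left sample, for luma and chroma separately.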
static void read_palette_mode_info(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                   aom_reader *r) {
  MODE_INFO *const mi = xd->mi[0];
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  const MODE_INFO *const above_mi = xd->above_mi;
  const MODE_INFO *const left_mi = xd->left_mi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  int n;
  PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;

  if (mbmi->mode == DC_PRED) {
    int palette_y_mode_ctx = 0;
    if (above_mi)
      palette_y_mode_ctx +=
          (above_mi->mbmi.palette_mode_info.palette_size[0] > 0);
    if (left_mi)
      palette_y_mode_ctx +=
          (left_mi->mbmi.palette_mode_info.palette_size[0] > 0);
    if (aom_read(r, av1_default_palette_y_mode_prob[bsize - BLOCK_8X8]
                                                   [palette_y_mode_ctx],
                 ACCT_STR)) {
      pmi->palette_size[0] =
          aom_read_tree(r, av1_palette_size_tree,
                        av1_default_palette_y_size_prob[bsize - BLOCK_8X8],
                        ACCT_STR) +
          2;
      n = pmi->palette_size[0];
#if CONFIG_PALETTE_DELTA_ENCODING
      read_palette_colors_y(xd, cm->bit_depth, pmi, r);
#else
      int i;
      for (i = 0; i < n; ++i)
        pmi->palette_colors[i] = aom_read_literal(r, cm->bit_depth, ACCT_STR);
#endif  // CONFIG_PALETTE_DELTA_ENCODING
      xd->plane[0].color_index_map[0] = read_uniform(r, n);
      assert(xd->plane[0].color_index_map[0] < n);
    }
  }

  if (mbmi->uv_mode == DC_PRED) {
    const int palette_uv_mode_ctx = (pmi->palette_size[0] > 0);
    if (aom_read(r, av1_default_palette_uv_mode_prob[palette_uv_mode_ctx],
                 ACCT_STR)) {
      pmi->palette_size[1] =
          aom_read_tree(r, av1_palette_size_tree,
                        av1_default_palette_uv_size_prob[bsize - BLOCK_8X8],
                        ACCT_STR) +
          2;
      n = pmi->palette_size[1];
#if CONFIG_PALETTE_DELTA_ENCODING
      read_palette_colors_uv(xd, cm->bit_depth, pmi, r);
#else
      int i;
      for (i = 0; i < n; ++i) {
        pmi->palette_colors[PALETTE_MAX_SIZE + i] =
            aom_read_literal(r, cm->bit_depth, ACCT_STR);
        pmi->palette_colors[2 * PALETTE_MAX_SIZE + i] =
            aom_read_literal(r, cm->bit_depth, ACCT_STR);
      }
#endif  // CONFIG_PALETTE_DELTA_ENCODING
      xd->plane[1].color_index_map[0] = read_uniform(r, n);
      assert(xd->plane[1].color_index_map[0] < n);
    }
  }
}
#endif  // CONFIG_PALETTE

#if CONFIG_FILTER_INTRA
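// Reads the filter-intra flag and mode for luma and, when a chroma block is
// present, for chroma; only DC_PRED blocks without a palette can use it.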
static void read_filter_intra_mode_info(AV1_COMMON *const cm,
                                        MACROBLOCKD *const xd, int mi_row,
                                        int mi_col, aom_reader *r) {
  MODE_INFO *const mi = xd->mi[0];
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  FRAME_COUNTS *counts = xd->counts;
  FILTER_INTRA_MODE_INFO *filter_intra_mode_info =
      &mbmi->filter_intra_mode_info;

  if (mbmi->mode == DC_PRED
#if CONFIG_PALETTE
      && mbmi->palette_mode_info.palette_size[0] == 0
#endif  // CONFIG_PALETTE
      ) {
    filter_intra_mode_info->use_filter_intra_mode[0] =
        aom_read(r, cm->fc->filter_intra_probs[0], ACCT_STR);
    if (filter_intra_mode_info->use_filter_intra_mode[0]) {
      filter_intra_mode_info->filter_intra_mode[0] =
          read_uniform(r, FILTER_INTRA_MODES);
    }
    if (counts) {
      ++counts
            ->filter_intra[0][filter_intra_mode_info->use_filter_intra_mode[0]];
    }
  }

#if CONFIG_CB4X4
  if (!is_chroma_reference(mi_row, mi_col, mbmi->sb_type,
                           xd->plane[1].subsampling_x,
                           xd->plane[1].subsampling_y))
    return;
#else
  (void)mi_row;
  (void)mi_col;
#endif  // CONFIG_CB4X4

  if (mbmi->uv_mode == DC_PRED
#if CONFIG_PALETTE
      && mbmi->palette_mode_info.palette_size[1] == 0
#endif  // CONFIG_PALETTE
      ) {
    filter_intra_mode_info->use_filter_intra_mode[1] =
        aom_read(r, cm->fc->filter_intra_probs[1], ACCT_STR);
    if (filter_intra_mode_info->use_filter_intra_mode[1]) {
      filter_intra_mode_info->filter_intra_mode[1] =
          read_uniform(r, FILTER_INTRA_MODES);
    }
    if (counts) {
      ++counts
            ->filter_intra[1][filter_intra_mode_info->use_filter_intra_mode[1]];
    }
  }
}
#endif  // CONFIG_FILTER_INTRA

#if CONFIG_EXT_INTRA
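// For directional intra modes, reads the angle delta (and, with
// CONFIG_INTRA_INTERP, the intra interpolation filter) for luma and chroma.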
static void read_intra_angle_info(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                  aom_reader *r) {
  MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
#if CONFIG_INTRA_INTERP
#if CONFIG_EC_ADAPT
  FRAME_CONTEXT *const ec_ctx = xd->tile_ctx;
#else
  FRAME_CONTEXT *const ec_ctx = cm->fc;
#endif  // CONFIG_EC_ADAPT
  const int ctx = av1_get_pred_context_intra_interp(xd);
  int p_angle;
#endif  // CONFIG_INTRA_INTERP

  (void)cm;

  mbmi->angle_delta[0] = 0;
  mbmi->angle_delta[1] = 0;

  if (!av1_use_angle_delta(bsize)) return;

  if (av1_is_directional_mode(mbmi->mode, bsize)) {
    mbmi->angle_delta[0] =
        read_uniform(r, 2 * MAX_ANGLE_DELTA + 1) - MAX_ANGLE_DELTA;
#if CONFIG_INTRA_INTERP
    p_angle = mode_to_angle_map[mbmi->mode] + mbmi->angle_delta[0] * ANGLE_STEP;
    if (av1_is_intra_filter_switchable(p_angle)) {
      FRAME_COUNTS *counts = xd->counts;
      mbmi->intra_filter = aom_read_symbol(r, ec_ctx->intra_filter_cdf[ctx],
                                           INTRA_FILTERS, ACCT_STR);
      if (counts) ++counts->intra_filter[ctx][mbmi->intra_filter];
    } else {
      mbmi->intra_filter = INTRA_FILTER_LINEAR;
    }
#endif  // CONFIG_INTRA_INTERP
  }

  if (av1_is_directional_mode(mbmi->uv_mode, bsize)) {
    mbmi->angle_delta[1] =
        read_uniform(r, 2 * MAX_ANGLE_DELTA + 1) - MAX_ANGLE_DELTA;
  }
}
#endif  // CONFIG_EXT_INTRA

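// Reads the transform type for the block (or, with CONFIG_TXK_SEL, for a
// single luma transform block). With CONFIG_EXT_TX the type is drawn from the
// extended transform set chosen by block size and prediction type; it
// defaults to DCT_DCT when the set is trivial or the block is skipped.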
void av1_read_tx_type(const AV1_COMMON *const cm, MACROBLOCKD *xd,
#if CONFIG_SUPERTX
                      int supertx_enabled,
#endif
#if CONFIG_TXK_SEL
                      int block, int plane, TX_SIZE tx_size,
#endif
                      aom_reader *r) {
  MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
  const int inter_block = is_inter_block(mbmi);
#if !CONFIG_TXK_SEL
#if CONFIG_VAR_TX
  const TX_SIZE tx_size = inter_block ? mbmi->min_tx_size : mbmi->tx_size;
#else
  const TX_SIZE tx_size = mbmi->tx_size;
#endif
#endif  // !CONFIG_TXK_SEL
#if CONFIG_EC_ADAPT
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
#else
  FRAME_CONTEXT *ec_ctx = cm->fc;
#endif

#if !CONFIG_TXK_SEL
  TX_TYPE *tx_type = &mbmi->tx_type;
#else
  // only y plane's tx_type is transmitted
  if (plane > 0) return;
  TX_TYPE *tx_type = &mbmi->txk_type[block];
#endif

  if (!FIXED_TX_TYPE) {
#if CONFIG_EXT_TX
    const TX_SIZE square_tx_size = txsize_sqr_map[tx_size];
    if (get_ext_tx_types(tx_size, mbmi->sb_type, inter_block,
                         cm->reduced_tx_set_used) > 1 &&
        ((!cm->seg.enabled && cm->base_qindex > 0) ||
         (cm->seg.enabled && xd->qindex[mbmi->segment_id] > 0)) &&
        !mbmi->skip &&
#if CONFIG_SUPERTX
        !supertx_enabled &&
#endif  // CONFIG_SUPERTX
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
      const int eset = get_ext_tx_set(tx_size, mbmi->sb_type, inter_block,
                                      cm->reduced_tx_set_used);
      // eset == 0 should correspond to a set with only DCT_DCT and
      // there is no need to read the tx_type
      assert(eset != 0);
      FRAME_COUNTS *counts = xd->counts;

      if (inter_block) {
        *tx_type = av1_ext_tx_inter_inv[eset][aom_read_symbol(
            r, ec_ctx->inter_ext_tx_cdf[eset][square_tx_size],
            ext_tx_cnt_inter[eset], ACCT_STR)];
        if (counts) ++counts->inter_ext_tx[eset][square_tx_size][*tx_type];
      } else if (ALLOW_INTRA_EXT_TX) {
        *tx_type = av1_ext_tx_intra_inv[eset][aom_read_symbol(
            r, ec_ctx->intra_ext_tx_cdf[eset][square_tx_size][mbmi->mode],
            ext_tx_cnt_intra[eset], ACCT_STR)];
        if (counts)
          ++counts->intra_ext_tx[eset][square_tx_size][mbmi->mode][*tx_type];
      }
    } else {
      *tx_type = DCT_DCT;
    }
#else

    if (tx_size < TX_32X32 &&
        ((!cm->seg.enabled && cm->base_qindex > 0) ||
         (cm->seg.enabled && xd->qindex[mbmi->segment_id] > 0)) &&
        !mbmi->skip &&
#if CONFIG_SUPERTX
        !supertx_enabled &&
#endif  // CONFIG_SUPERTX
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
      FRAME_COUNTS *counts = xd->counts;

      if (inter_block) {
        *tx_type = av1_ext_tx_inv[aom_read_symbol(
            r, ec_ctx->inter_ext_tx_cdf[tx_size], TX_TYPES, ACCT_STR)];
        if (counts) ++counts->inter_ext_tx[tx_size][*tx_type];
      } else {
        const TX_TYPE tx_type_nom = intra_mode_to_tx_type_context[mbmi->mode];
        *tx_type = av1_ext_tx_inv[aom_read_symbol(
            r, ec_ctx->intra_ext_tx_cdf[tx_size][tx_type_nom], TX_TYPES,
            ACCT_STR)];
        if (counts) ++counts->intra_ext_tx[tx_size][tx_type_nom][*tx_type];
      }
    } else {
      *tx_type = DCT_DCT;
    }
#endif  // CONFIG_EXT_TX
  }
}

#if CONFIG_INTRABC
static INLINE void read_mv(aom_reader *r, MV *mv, const MV *ref,
                           nmv_context *ctx, nmv_context_counts *counts,
                           MvSubpelPrecision precision);

static INLINE int is_mv_valid(const MV *mv);

static INLINE int assign_dv(AV1_COMMON *cm, MACROBLOCKD *xd, int_mv *mv,
                            const int_mv *ref_mv, int mi_row, int mi_col,
                            BLOCK_SIZE bsize, aom_reader *r) {
#if CONFIG_EC_ADAPT
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
#else
  FRAME_CONTEXT *ec_ctx = cm->fc;
#endif
  FRAME_COUNTS *counts = xd->counts;
  nmv_context_counts *const dv_counts = counts ? &counts->dv : NULL;
  read_mv(r, &mv->as_mv, &ref_mv->as_mv, &ec_ctx->ndvc, dv_counts,
          MV_SUBPEL_NONE);
  int valid = is_mv_valid(&mv->as_mv) &&
              is_dv_valid(mv->as_mv, &xd->tile, mi_row, mi_col, bsize);
  return valid;
}
#endif  // CONFIG_INTRABC

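// Decodes the mode info for a block in an intra-only frame: segment id, skip
// flag, quantizer/loop-filter deltas, an optional intra block copy vector,
// transform size, luma and chroma intra modes, and the CFL alpha when used.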
static void read_intra_frame_mode_info(AV1_COMMON *const cm,
                                       MACROBLOCKD *const xd, int mi_row,
                                       int mi_col, aom_reader *r) {
  MODE_INFO *const mi = xd->mi[0];
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  const MODE_INFO *above_mi = xd->above_mi;
  const MODE_INFO *left_mi = xd->left_mi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  int i;
  const int mi_offset = mi_row * cm->mi_cols + mi_col;
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];

  // TODO(slavarnway): move x_mis, y_mis into xd ?????
  const int x_mis = AOMMIN(cm->mi_cols - mi_col, bw);
  const int y_mis = AOMMIN(cm->mi_rows - mi_row, bh);
#if CONFIG_EC_ADAPT
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
#else
  FRAME_CONTEXT *ec_ctx = cm->fc;
#endif

  mbmi->segment_id = read_intra_segment_id(cm, xd, mi_offset, x_mis, y_mis, r);
  mbmi->skip = read_skip(cm, xd, mbmi->segment_id, r);

#if CONFIG_DELTA_Q
  if (cm->delta_q_present_flag) {
    xd->current_qindex =
        xd->prev_qindex +
        read_delta_qindex(cm, xd, r, mbmi, mi_col, mi_row) * cm->delta_q_res;
    /* Normative: Clamp to [1,MAXQ] to not interfere with lossless mode */
    xd->current_qindex = clamp(xd->current_qindex, 1, MAXQ);
    xd->prev_qindex = xd->current_qindex;
#if CONFIG_EXT_DELTA_Q
    if (cm->delta_lf_present_flag) {
      mbmi->current_delta_lf_from_base = xd->current_delta_lf_from_base =
          xd->prev_delta_lf_from_base +
          read_delta_lflevel(cm, xd, r, mbmi, mi_col, mi_row) *
              cm->delta_lf_res;
      xd->prev_delta_lf_from_base = xd->current_delta_lf_from_base;
    }
#endif
  }
#endif

  mbmi->ref_frame[0] = INTRA_FRAME;
  mbmi->ref_frame[1] = NONE_FRAME;

#if CONFIG_INTRABC
  if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools) {
    mbmi->use_intrabc = aom_read(r, ec_ctx->intrabc_prob, ACCT_STR);
    if (mbmi->use_intrabc) {
      mbmi->tx_size = read_tx_size(cm, xd, 1, !mbmi->skip, r);
      mbmi->mode = mbmi->uv_mode = DC_PRED;
#if CONFIG_DUAL_FILTER
      for (int idx = 0; idx < 4; ++idx) mbmi->interp_filter[idx] = BILINEAR;
#else
      mbmi->interp_filter = BILINEAR;
#endif

      int16_t inter_mode_ctx[MODE_CTX_REF_FRAMES];
      int_mv ref_mvs[MAX_MV_REF_CANDIDATES] = {};

      av1_find_mv_refs(cm, xd, mi, INTRA_FRAME, &xd->ref_mv_count[INTRA_FRAME],
                       xd->ref_mv_stack[INTRA_FRAME],
#if CONFIG_EXT_INTER
                       NULL,
#endif  // CONFIG_EXT_INTER
                       ref_mvs, mi_row, mi_col, NULL, NULL, inter_mode_ctx);

      int_mv nearestmv, nearmv;
      av1_find_best_ref_mvs(0, ref_mvs, &nearestmv, &nearmv);

      int_mv dv_ref = nearestmv.as_int == 0 ? nearmv : nearestmv;
      if (dv_ref.as_int == 0) av1_find_ref_dv(&dv_ref, mi_row, mi_col);

      xd->corrupted |=
          !assign_dv(cm, xd, &mbmi->mv[0], &dv_ref, mi_row, mi_col, bsize, r);
#if CONFIG_VAR_TX
      // TODO(aconverse@google.com): Evaluate allowing VAR TX on intrabc blocks
      const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
      const int height = block_size_high[bsize] >> tx_size_high_log2[0];
      int idx, idy;
      for (idy = 0; idy < height; ++idy)
        for (idx = 0; idx < width; ++idx)
          mbmi->inter_tx_size[idy >> 1][idx >> 1] = mbmi->tx_size;
      mbmi->min_tx_size = get_min_tx_size(mbmi->tx_size);
#endif  // CONFIG_VAR_TX
#if CONFIG_EXT_TX && !CONFIG_TXK_SEL
      av1_read_tx_type(cm, xd,
#if CONFIG_SUPERTX
                       0,
#endif
                       r);
#endif  // CONFIG_EXT_TX && !CONFIG_TXK_SEL
      return;
    }
  }
#endif  // CONFIG_INTRABC

  mbmi->tx_size = read_tx_size(cm, xd, 0, 1, r);

#if CONFIG_CB4X4
  (void)i;
  mbmi->mode =
      read_intra_mode(r, get_y_mode_cdf(ec_ctx, mi, above_mi, left_mi, 0));
#else
  switch (bsize) {
    case BLOCK_4X4:
      for (i = 0; i < 4; ++i)
        mi->bmi[i].as_mode = read_intra_mode(
            r, get_y_mode_cdf(ec_ctx, mi, above_mi, left_mi, i));
      mbmi->mode = mi->bmi[3].as_mode;
      break;
    case BLOCK_4X8:
      mi->bmi[0].as_mode = mi->bmi[2].as_mode =
          read_intra_mode(r, get_y_mode_cdf(ec_ctx, mi, above_mi, left_mi, 0));
      mi->bmi[1].as_mode = mi->bmi[3].as_mode = mbmi->mode =
          read_intra_mode(r, get_y_mode_cdf(ec_ctx, mi, above_mi, left_mi, 1));
      break;
    case BLOCK_8X4:
      mi->bmi[0].as_mode = mi->bmi[1].as_mode =
          read_intra_mode(r, get_y_mode_cdf(ec_ctx, mi, above_mi, left_mi, 0));
      mi->bmi[2].as_mode = mi->bmi[3].as_mode = mbmi->mode =
          read_intra_mode(r, get_y_mode_cdf(ec_ctx, mi, above_mi, left_mi, 2));
      break;
    default:
      mbmi->mode =
          read_intra_mode(r, get_y_mode_cdf(ec_ctx, mi, above_mi, left_mi, 0));
  }
#endif

#if CONFIG_CB4X4
  if (is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
                          xd->plane[1].subsampling_y)) {
    mbmi->uv_mode = read_intra_mode_uv(ec_ctx, xd, r, mbmi->mode);
#else
  mbmi->uv_mode = read_intra_mode_uv(ec_ctx, xd, r, mbmi->mode);
#endif

#if CONFIG_CFL
  // TODO(ltrudeau) support PALETTE
  if (mbmi->uv_mode == DC_PRED) {
    mbmi->cfl_alpha_idx = read_cfl_alphas(ec_ctx, r, mbmi->cfl_alpha_signs);
  }
#endif  // CONFIG_CFL

#if CONFIG_CB4X4
  } else {
1195    // Avoid decoding angle_info when there is no chroma prediction
1196 mbmi->uv_mode = DC_PRED;
Luc Trudeauf5334002017-04-25 12:21:26 -04001197 }
1198#endif
1199
Yaowu Xuc27fc142016-08-22 16:08:15 -07001200#if CONFIG_EXT_INTRA
1201 read_intra_angle_info(cm, xd, r);
1202#endif // CONFIG_EXT_INTRA
Urvang Joshib100db72016-10-12 16:28:56 -07001203#if CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001204 mbmi->palette_mode_info.palette_size[0] = 0;
1205 mbmi->palette_mode_info.palette_size[1] = 0;
1206 if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
1207 read_palette_mode_info(cm, xd, r);
Urvang Joshib100db72016-10-12 16:28:56 -07001208#endif // CONFIG_PALETTE
hui su5db97432016-10-14 16:10:14 -07001209#if CONFIG_FILTER_INTRA
1210 mbmi->filter_intra_mode_info.use_filter_intra_mode[0] = 0;
1211 mbmi->filter_intra_mode_info.use_filter_intra_mode[1] = 0;
Jingning Han48b1cb32017-01-23 10:26:14 -08001212 if (bsize >= BLOCK_8X8 || CONFIG_CB4X4)
Jingning Han62946d12017-05-26 11:29:30 -07001213 read_filter_intra_mode_info(cm, xd, mi_row, mi_col, r);
hui su5db97432016-10-14 16:10:14 -07001214#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001215
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001216#if !CONFIG_TXK_SEL
Angie Chianga9f9a312017-04-13 16:40:43 -07001217 av1_read_tx_type(cm, xd,
Jingning Hanab7163d2016-11-04 09:46:35 -07001218#if CONFIG_SUPERTX
Angie Chianga9f9a312017-04-13 16:40:43 -07001219 0,
Nathan E. Egge72762a22016-09-07 17:12:07 -04001220#endif
Angie Chianga9f9a312017-04-13 16:40:43 -07001221 r);
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001222#endif // !CONFIG_TXK_SEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07001223}
1224
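// Decodes one motion-vector component: a sign bit, a magnitude class, the
// integer bits (a single bit for class 0, otherwise one bit per class bit)
// and, unless subpel is disabled, fractional and high-precision bits. The
// magnitude is reconstructed as mag += ((d << 3) | (fr << 1) | hp) + 1 in
// 1/8-pel units; e.g. class 0 with d = 1, fr = 2, hp = 1 yields 14.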
Alex Converse6b2584c2017-05-02 09:51:21 -07001225static int read_mv_component(aom_reader *r, nmv_component *mvcomp,
1226#if CONFIG_INTRABC
1227 int use_subpel,
1228#endif // CONFIG_INTRABC
1229 int usehp) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001230 int mag, d, fr, hp;
Michael Bebenita6048d052016-08-25 14:40:54 -07001231 const int sign = aom_read(r, mvcomp->sign, ACCT_STR);
1232 const int mv_class =
Nathan E. Egged7b893c2016-09-08 15:08:48 -04001233 aom_read_symbol(r, mvcomp->class_cdf, MV_CLASSES, ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001234 const int class0 = mv_class == MV_CLASS_0;
1235
1236 // Integer part
1237 if (class0) {
Nathan E. Egge45ea9632016-09-08 17:25:49 -04001238 d = aom_read(r, mvcomp->class0[0], ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001239 mag = 0;
1240 } else {
1241 int i;
1242 const int n = mv_class + CLASS0_BITS - 1; // number of bits
1243
1244 d = 0;
Michael Bebenita6048d052016-08-25 14:40:54 -07001245 for (i = 0; i < n; ++i) d |= aom_read(r, mvcomp->bits[i], ACCT_STR) << i;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001246 mag = CLASS0_SIZE << (mv_class + 2);
1247 }
1248
Alex Converse6b2584c2017-05-02 09:51:21 -07001249#if CONFIG_INTRABC
1250 if (use_subpel) {
1251#endif // CONFIG_INTRABC
1252 // Fractional part
1253 fr = aom_read_symbol(r, class0 ? mvcomp->class0_fp_cdf[d] : mvcomp->fp_cdf,
1254 MV_FP_SIZE, ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001255
Alex Converse6b2584c2017-05-02 09:51:21 -07001256    // High precision part (if hp is not used, the hp bit defaults to 1)
1257 hp = usehp ? aom_read(r, class0 ? mvcomp->class0_hp : mvcomp->hp, ACCT_STR)
1258 : 1;
1259#if CONFIG_INTRABC
1260 } else {
1261 fr = 3;
1262 hp = 1;
1263 }
1264#endif // CONFIG_INTRABC
Yaowu Xuc27fc142016-08-22 16:08:15 -07001265
1266 // Result
1267 mag += ((d << 3) | (fr << 1) | hp) + 1;
1268 return sign ? -mag : mag;
1269}
1270
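// Decodes a motion-vector difference: the joint type selects which of the
// row/column components are non-zero, each non-zero component is read with
// read_mv_component(), the counts are updated, and the difference is added
// to the reference MV to form the final vector.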
Yaowu Xuf883b422016-08-30 14:01:10 -07001271static INLINE void read_mv(aom_reader *r, MV *mv, const MV *ref,
Thomas9ac55082016-09-23 18:04:17 +01001272 nmv_context *ctx, nmv_context_counts *counts,
Alex Converse6b2584c2017-05-02 09:51:21 -07001273 MvSubpelPrecision precision) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001274 MV_JOINT_TYPE joint_type;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001275 MV diff = { 0, 0 };
Michael Bebenita6048d052016-08-25 14:40:54 -07001276 joint_type =
Nathan E. Egge5f7fd7a2016-09-08 11:22:03 -04001277 (MV_JOINT_TYPE)aom_read_symbol(r, ctx->joint_cdf, MV_JOINTS, ACCT_STR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001278
1279 if (mv_joint_vertical(joint_type))
Alex Converse6b2584c2017-05-02 09:51:21 -07001280 diff.row = read_mv_component(r, &ctx->comps[0],
1281#if CONFIG_INTRABC
1282 precision > MV_SUBPEL_NONE,
1283#endif // CONFIG_INTRABC
1284 precision > MV_SUBPEL_LOW_PRECISION);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001285
1286 if (mv_joint_horizontal(joint_type))
Alex Converse6b2584c2017-05-02 09:51:21 -07001287 diff.col = read_mv_component(r, &ctx->comps[1],
1288#if CONFIG_INTRABC
1289 precision > MV_SUBPEL_NONE,
1290#endif // CONFIG_INTRABC
1291 precision > MV_SUBPEL_LOW_PRECISION);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001292
Alex Converse6b2584c2017-05-02 09:51:21 -07001293 av1_inc_mv(&diff, counts, precision);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001294
1295 mv->row = ref->row + diff.row;
1296 mv->col = ref->col + diff.col;
1297}
1298
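// Returns SINGLE_REFERENCE or COMPOUND_REFERENCE for the current block.
// Sub-8x8 blocks are forced to single reference when SUB8X8_COMP_REF is off;
// otherwise the mode is read from the bitstream only when the frame-level
// reference mode is REFERENCE_MODE_SELECT.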
Yaowu Xuf883b422016-08-30 14:01:10 -07001299static REFERENCE_MODE read_block_reference_mode(AV1_COMMON *cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001300 const MACROBLOCKD *xd,
Yaowu Xuf883b422016-08-30 14:01:10 -07001301 aom_reader *r) {
Jingning Han6b064722017-05-01 09:48:18 -07001302#if !SUB8X8_COMP_REF
Jingning Han69d21012017-05-14 16:51:27 -07001303 if (xd->mi[0]->mbmi.sb_type == BLOCK_4X4) return SINGLE_REFERENCE;
Jingning Han6b064722017-05-01 09:48:18 -07001304#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001305 if (cm->reference_mode == REFERENCE_MODE_SELECT) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001306 const int ctx = av1_get_reference_mode_context(cm, xd);
Thomas Davies8c9bcdf2017-06-21 10:12:48 +01001307#if CONFIG_NEW_MULTISYMBOL
1308 const REFERENCE_MODE mode = (REFERENCE_MODE)aom_read_symbol(
1309 r, xd->tile_ctx->comp_inter_cdf[ctx], 2, ACCT_STR);
1310#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001311 const REFERENCE_MODE mode =
Michael Bebenita6048d052016-08-25 14:40:54 -07001312 (REFERENCE_MODE)aom_read(r, cm->fc->comp_inter_prob[ctx], ACCT_STR);
Thomas Davies8c9bcdf2017-06-21 10:12:48 +01001313#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001314 FRAME_COUNTS *counts = xd->counts;
1315 if (counts) ++counts->comp_inter[ctx][mode];
1316 return mode; // SINGLE_REFERENCE or COMPOUND_REFERENCE
1317 } else {
1318 return cm->reference_mode;
1319 }
1320}
1321
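// READ_REF_BIT reads one reference-frame selection bit, either as a binary
// symbol against the corresponding CDF (CONFIG_NEW_MULTISYMBOL) or as a
// boolean coded against the corresponding probability model.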
Thomas Davies315f5782017-06-14 15:14:55 +01001322#if CONFIG_NEW_MULTISYMBOL
1323#define READ_REF_BIT(pname) \
Thomas Davies894cc812017-06-22 17:51:33 +01001324 aom_read_symbol(r, av1_get_pred_cdf_##pname(cm, xd), 2, ACCT_STR)
Thomas Davies315f5782017-06-14 15:14:55 +01001325#else
1326#define READ_REF_BIT(pname) \
1327 aom_read(r, av1_get_pred_prob_##pname(cm, xd), ACCT_STR)
1328#endif
1329
Zoe Liuc082bbc2017-05-17 13:31:37 -07001330#if CONFIG_EXT_COMP_REFS
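// Returns UNIDIR_COMP_REFERENCE or BIDIR_COMP_REFERENCE. With
// USE_UNI_COMP_REFS the type is read from the bitstream; otherwise
// uni-directional compound references are disabled and BIDIR is assumed.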
1331static REFERENCE_MODE read_comp_reference_type(AV1_COMMON *cm,
1332 const MACROBLOCKD *xd,
1333 aom_reader *r) {
1334 const int ctx = av1_get_comp_reference_type_context(cm, xd);
1335#if USE_UNI_COMP_REFS
1336 const COMP_REFERENCE_TYPE comp_ref_type = (COMP_REFERENCE_TYPE)aom_read(
1337 r, cm->fc->comp_ref_type_prob[ctx], ACCT_STR);
1338#else // !USE_UNI_COMP_REFS
1339 // TODO(zoeliu): Temporarily turn off uni-directional comp refs
1340 const COMP_REFERENCE_TYPE comp_ref_type = BIDIR_COMP_REFERENCE;
1341#endif // USE_UNI_COMP_REFS
1342 FRAME_COUNTS *counts = xd->counts;
1343 if (counts) ++counts->comp_ref_type[ctx][comp_ref_type];
1344 return comp_ref_type; // UNIDIR_COMP_REFERENCE or BIDIR_COMP_REFERENCE
1345}
1346#endif // CONFIG_EXT_COMP_REFS
1347
Yaowu Xuc27fc142016-08-22 16:08:15 -07001348// Read the reference frames for the current block.
Yaowu Xuf883b422016-08-30 14:01:10 -07001349static void read_ref_frames(AV1_COMMON *const cm, MACROBLOCKD *const xd,
1350 aom_reader *r, int segment_id,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001351 MV_REFERENCE_FRAME ref_frame[2]) {
Thomas Davies894cc812017-06-22 17:51:33 +01001352#if CONFIG_EXT_COMP_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07001353 FRAME_CONTEXT *const fc = cm->fc;
Thomas Davies894cc812017-06-22 17:51:33 +01001354#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001355 FRAME_COUNTS *counts = xd->counts;
1356
1357 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
1358 ref_frame[0] = (MV_REFERENCE_FRAME)get_segdata(&cm->seg, segment_id,
1359 SEG_LVL_REF_FRAME);
Emil Keyder01770b32017-01-20 18:03:11 -05001360 ref_frame[1] = NONE_FRAME;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001361 } else {
1362 const REFERENCE_MODE mode = read_block_reference_mode(cm, xd, r);
1363 // FIXME(rbultje) I'm pretty sure this breaks segmentation ref frame coding
1364 if (mode == COMPOUND_REFERENCE) {
Zoe Liuc082bbc2017-05-17 13:31:37 -07001365#if CONFIG_EXT_COMP_REFS
1366 const COMP_REFERENCE_TYPE comp_ref_type =
1367 read_comp_reference_type(cm, xd, r);
1368
1369#if !USE_UNI_COMP_REFS
1370 // TODO(zoeliu): Temporarily turn off uni-directional comp refs
1371 assert(comp_ref_type == BIDIR_COMP_REFERENCE);
1372#endif // !USE_UNI_COMP_REFS
1373
1374 if (comp_ref_type == UNIDIR_COMP_REFERENCE) {
1375 const int ctx = av1_get_pred_context_uni_comp_ref_p(cm, xd);
1376 const int bit = aom_read(r, fc->uni_comp_ref_prob[ctx][0], ACCT_STR);
1377 if (counts) ++counts->uni_comp_ref[ctx][0][bit];
1378
1379 if (bit) {
1380 ref_frame[0] = BWDREF_FRAME;
1381 ref_frame[1] = ALTREF_FRAME;
1382 } else {
1383 const int ctx1 = av1_get_pred_context_uni_comp_ref_p1(cm, xd);
1384 const int bit1 =
1385 aom_read(r, fc->uni_comp_ref_prob[ctx1][1], ACCT_STR);
1386 if (counts) ++counts->uni_comp_ref[ctx1][1][bit1];
1387
1388 if (bit1) {
1389 ref_frame[0] = LAST_FRAME;
1390 ref_frame[1] = GOLDEN_FRAME;
1391 } else {
1392 ref_frame[0] = LAST_FRAME;
1393 ref_frame[1] = LAST2_FRAME;
1394 }
1395 }
1396
1397 return;
1398 }
1399#endif // CONFIG_EXT_COMP_REFS
1400
1401// Normative in decoder (for low delay)
1402#if CONFIG_ONE_SIDED_COMPOUND || CONFIG_EXT_COMP_REFS
Arild Fuldseth (arilfuld)38897302017-04-27 20:03:03 +02001403 const int idx = 1;
Zoe Liuc082bbc2017-05-17 13:31:37 -07001404#else // !(CONFIG_ONE_SIDED_COMPOUND || CONFIG_EXT_COMP_REFS)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001405#if CONFIG_EXT_REFS
1406 const int idx = cm->ref_frame_sign_bias[cm->comp_bwd_ref[0]];
Zoe Liuc082bbc2017-05-17 13:31:37 -07001407#else // !CONFIG_EXT_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07001408 const int idx = cm->ref_frame_sign_bias[cm->comp_fixed_ref];
1409#endif // CONFIG_EXT_REFS
Zoe Liuc082bbc2017-05-17 13:31:37 -07001410#endif // CONFIG_ONE_SIDED_COMPOUND || CONFIG_EXT_COMP_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07001411
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001412 const int ctx = av1_get_pred_context_comp_ref_p(cm, xd);
1413#if CONFIG_VAR_REFS
1414 int bit;
1415      // Test whether the (L,L2) vs (L3,G) branch node needs explicit coding
1416 if (L_OR_L2(cm) && L3_OR_G(cm))
Thomas Davies894cc812017-06-22 17:51:33 +01001417 bit = READ_REF_BIT(comp_ref_p);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001418 else
1419 bit = L3_OR_G(cm);
1420#else // !CONFIG_VAR_REFS
Thomas Davies894cc812017-06-22 17:51:33 +01001421 const int bit = READ_REF_BIT(comp_ref_p);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001422#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07001423 if (counts) ++counts->comp_ref[ctx][0][bit];
1424
1425#if CONFIG_EXT_REFS
1426 // Decode forward references.
1427 if (!bit) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001428 const int ctx1 = av1_get_pred_context_comp_ref_p1(cm, xd);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001429#if CONFIG_VAR_REFS
1430 int bit1;
1431        // Test whether the (L) vs (L2) branch node needs explicit coding
1432 if (L_AND_L2(cm))
Thomas Davies894cc812017-06-22 17:51:33 +01001433 bit1 = READ_REF_BIT(comp_ref_p1);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001434 else
1435 bit1 = LAST_IS_VALID(cm);
1436#else // !CONFIG_VAR_REFS
Thomas Davies894cc812017-06-22 17:51:33 +01001437 const int bit1 = READ_REF_BIT(comp_ref_p1);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001438#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07001439 if (counts) ++counts->comp_ref[ctx1][1][bit1];
1440 ref_frame[!idx] = cm->comp_fwd_ref[bit1 ? 0 : 1];
1441 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001442 const int ctx2 = av1_get_pred_context_comp_ref_p2(cm, xd);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001443#if CONFIG_VAR_REFS
1444 int bit2;
1445        // Test whether the (L3) vs (G) branch node needs explicit coding
1446 if (L3_AND_G(cm))
Thomas Davies894cc812017-06-22 17:51:33 +01001447 bit2 = READ_REF_BIT(comp_ref_p2);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001448 else
1449 bit2 = GOLDEN_IS_VALID(cm);
1450#else // !CONFIG_VAR_REFS
Thomas Davies894cc812017-06-22 17:51:33 +01001451 const int bit2 = READ_REF_BIT(comp_ref_p2);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001452#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07001453 if (counts) ++counts->comp_ref[ctx2][2][bit2];
1454 ref_frame[!idx] = cm->comp_fwd_ref[bit2 ? 3 : 2];
1455 }
1456
1457 // Decode backward references.
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001458 const int ctx_bwd = av1_get_pred_context_comp_bwdref_p(cm, xd);
1459#if CONFIG_VAR_REFS
1460 int bit_bwd;
1461      // Test whether the (BWD) vs (ALT) branch node needs explicit coding
1462 if (BWD_AND_ALT(cm))
Thomas Davies894cc812017-06-22 17:51:33 +01001463 bit_bwd = READ_REF_BIT(comp_bwdref_p);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001464 else
1465 bit_bwd = ALTREF_IS_VALID(cm);
Thomas Davies894cc812017-06-22 17:51:33 +01001466#else // !CONFIG_VAR_REFS
1467 const int bit_bwd = READ_REF_BIT(comp_bwdref_p);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001468#endif // CONFIG_VAR_REFS
1469 if (counts) ++counts->comp_bwdref[ctx_bwd][0][bit_bwd];
1470 ref_frame[idx] = cm->comp_bwd_ref[bit_bwd];
1471#else // !CONFIG_EXT_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07001472 ref_frame[!idx] = cm->comp_var_ref[bit];
1473 ref_frame[idx] = cm->comp_fixed_ref;
1474#endif // CONFIG_EXT_REFS
1475 } else if (mode == SINGLE_REFERENCE) {
1476#if CONFIG_EXT_REFS
Yaowu Xuf883b422016-08-30 14:01:10 -07001477 const int ctx0 = av1_get_pred_context_single_ref_p1(xd);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001478#if CONFIG_VAR_REFS
1479 int bit0;
1480      // Test whether the (L,L2,L3,G) vs (BWD,ALT) branch node needs explicit
1481      // coding
1482 if ((L_OR_L2(cm) || L3_OR_G(cm)) && BWD_OR_ALT(cm))
Thomas Davies315f5782017-06-14 15:14:55 +01001483 bit0 = READ_REF_BIT(single_ref_p1);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001484 else
1485 bit0 = BWD_OR_ALT(cm);
1486#else // !CONFIG_VAR_REFS
Thomas Davies315f5782017-06-14 15:14:55 +01001487 const int bit0 = READ_REF_BIT(single_ref_p1);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001488#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07001489 if (counts) ++counts->single_ref[ctx0][0][bit0];
1490
1491 if (bit0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001492 const int ctx1 = av1_get_pred_context_single_ref_p2(xd);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001493#if CONFIG_VAR_REFS
1494 int bit1;
1495        // Test whether the (BWD) vs (ALT) branch node needs explicit coding
1496 if (BWD_AND_ALT(cm))
Thomas Davies315f5782017-06-14 15:14:55 +01001497 bit1 = READ_REF_BIT(single_ref_p2);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001498 else
1499 bit1 = ALTREF_IS_VALID(cm);
Thomas Davies315f5782017-06-14 15:14:55 +01001500#else // !CONFIG_VAR_REFS
1501 const int bit1 = READ_REF_BIT(single_ref_p2);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001502#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07001503 if (counts) ++counts->single_ref[ctx1][1][bit1];
1504 ref_frame[0] = bit1 ? ALTREF_FRAME : BWDREF_FRAME;
1505 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001506 const int ctx2 = av1_get_pred_context_single_ref_p3(xd);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001507#if CONFIG_VAR_REFS
1508 int bit2;
1509        // Test whether the (L,L2) vs (L3,G) branch node needs explicit coding
1510 if (L_OR_L2(cm) && L3_OR_G(cm))
Thomas Davies315f5782017-06-14 15:14:55 +01001511 bit2 = READ_REF_BIT(single_ref_p3);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001512 else
1513 bit2 = L3_OR_G(cm);
Thomas Davies315f5782017-06-14 15:14:55 +01001514#else // !CONFIG_VAR_REFS
1515 const int bit2 = READ_REF_BIT(single_ref_p3);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001516#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07001517 if (counts) ++counts->single_ref[ctx2][2][bit2];
1518 if (bit2) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001519 const int ctx4 = av1_get_pred_context_single_ref_p5(xd);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001520#if CONFIG_VAR_REFS
1521 int bit4;
1522          // Test whether the (L3) vs (G) branch node needs explicit coding
1523 if (L3_AND_G(cm))
Thomas Davies315f5782017-06-14 15:14:55 +01001524 bit4 = READ_REF_BIT(single_ref_p5);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001525 else
1526 bit4 = GOLDEN_IS_VALID(cm);
Thomas Davies315f5782017-06-14 15:14:55 +01001527#else // !CONFIG_VAR_REFS
1528 const int bit4 = READ_REF_BIT(single_ref_p5);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001529#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07001530 if (counts) ++counts->single_ref[ctx4][4][bit4];
1531 ref_frame[0] = bit4 ? GOLDEN_FRAME : LAST3_FRAME;
1532 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001533 const int ctx3 = av1_get_pred_context_single_ref_p4(xd);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001534#if CONFIG_VAR_REFS
1535 int bit3;
1536          // Test whether the (L) vs (L2) branch node needs explicit coding
1537 if (L_AND_L2(cm))
Thomas Davies315f5782017-06-14 15:14:55 +01001538 bit3 = READ_REF_BIT(single_ref_p4);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001539 else
1540 bit3 = LAST2_IS_VALID(cm);
Thomas Davies315f5782017-06-14 15:14:55 +01001541#else // !CONFIG_VAR_REFS
1542 const int bit3 = READ_REF_BIT(single_ref_p4);
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001543#endif // CONFIG_VAR_REFS
Yaowu Xuc27fc142016-08-22 16:08:15 -07001544 if (counts) ++counts->single_ref[ctx3][3][bit3];
1545 ref_frame[0] = bit3 ? LAST2_FRAME : LAST_FRAME;
1546 }
1547 }
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07001548#else // !CONFIG_EXT_REFS
Yaowu Xuf883b422016-08-30 14:01:10 -07001549 const int ctx0 = av1_get_pred_context_single_ref_p1(xd);
Thomas Davies315f5782017-06-14 15:14:55 +01001550 const int bit0 = READ_REF_BIT(single_ref_p1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001551 if (counts) ++counts->single_ref[ctx0][0][bit0];
1552
1553 if (bit0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001554 const int ctx1 = av1_get_pred_context_single_ref_p2(xd);
Thomas Davies315f5782017-06-14 15:14:55 +01001555 const int bit1 = READ_REF_BIT(single_ref_p2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001556 if (counts) ++counts->single_ref[ctx1][1][bit1];
1557 ref_frame[0] = bit1 ? ALTREF_FRAME : GOLDEN_FRAME;
1558 } else {
1559 ref_frame[0] = LAST_FRAME;
1560 }
1561#endif // CONFIG_EXT_REFS
1562
Emil Keyder01770b32017-01-20 18:03:11 -05001563 ref_frame[1] = NONE_FRAME;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001564 } else {
1565 assert(0 && "Invalid prediction mode.");
1566 }
1567 }
1568}
1569
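// Reads the interpolation filter(s) for the block. When interpolation is not
// needed, frame defaults are used. With a SWITCHABLE frame filter and
// CONFIG_DUAL_FILTER, a vertical and a horizontal filter are read per
// direction and mirrored onto the second reference; otherwise a single
// switchable filter is read for the block.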
Angie Chiang9c4f8952016-11-21 11:13:19 -08001570static INLINE void read_mb_interp_filter(AV1_COMMON *const cm,
1571 MACROBLOCKD *const xd,
1572 MB_MODE_INFO *const mbmi,
1573 aom_reader *r) {
1574 FRAME_COUNTS *counts = xd->counts;
Thomas Davies77c7c402017-01-11 17:58:54 +00001575#if CONFIG_EC_ADAPT
1576 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1577#else
1578 FRAME_CONTEXT *ec_ctx = cm->fc;
1579#endif
1580
Debargha Mukherjee0df711f2017-05-02 16:00:20 -07001581 if (!av1_is_interp_needed(xd)) {
1582 set_default_interp_filters(mbmi, cm->interp_filter);
Yue Chen19e7aa82016-11-30 14:05:39 -08001583 return;
1584 }
Yue Chen19e7aa82016-11-30 14:05:39 -08001585
1586#if CONFIG_DUAL_FILTER
Yaowu Xuc27fc142016-08-22 16:08:15 -07001587 if (cm->interp_filter != SWITCHABLE) {
Yue Chen19e7aa82016-11-30 14:05:39 -08001588 int dir;
1589
Angie Chiang9c4f8952016-11-21 11:13:19 -08001590 for (dir = 0; dir < 4; ++dir) mbmi->interp_filter[dir] = cm->interp_filter;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001591 } else {
Yue Chen19e7aa82016-11-30 14:05:39 -08001592 int dir;
1593
Angie Chiang9c4f8952016-11-21 11:13:19 -08001594 for (dir = 0; dir < 2; ++dir) {
1595 const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
1596 mbmi->interp_filter[dir] = EIGHTTAP_REGULAR;
1597
1598 if (has_subpel_mv_component(xd->mi[0], xd, dir) ||
1599 (mbmi->ref_frame[1] > INTRA_FRAME &&
1600 has_subpel_mv_component(xd->mi[0], xd, dir + 2))) {
Angie Chiang9c4f8952016-11-21 11:13:19 -08001601 mbmi->interp_filter[dir] =
1602 (InterpFilter)av1_switchable_interp_inv[aom_read_symbol(
Thomas Davies77c7c402017-01-11 17:58:54 +00001603 r, ec_ctx->switchable_interp_cdf[ctx], SWITCHABLE_FILTERS,
Angie Chiang9c4f8952016-11-21 11:13:19 -08001604 ACCT_STR)];
Angie Chiang9c4f8952016-11-21 11:13:19 -08001605 if (counts) ++counts->switchable_interp[ctx][mbmi->interp_filter[dir]];
1606 }
1607 }
1608 // The index system works as:
1609 // (0, 1) -> (vertical, horizontal) filter types for the first ref frame.
1610 // (2, 3) -> (vertical, horizontal) filter types for the second ref frame.
1611 mbmi->interp_filter[2] = mbmi->interp_filter[0];
1612 mbmi->interp_filter[3] = mbmi->interp_filter[1];
1613 }
Nathan E. Egge476c63c2017-05-18 18:35:16 -04001614#else // CONFIG_DUAL_FILTER
Angie Chiang9c4f8952016-11-21 11:13:19 -08001615 if (cm->interp_filter != SWITCHABLE) {
1616 mbmi->interp_filter = cm->interp_filter;
1617 } else {
1618 const int ctx = av1_get_pred_context_switchable_interp(xd);
Angie Chiang9c4f8952016-11-21 11:13:19 -08001619 mbmi->interp_filter =
Michael Bebenita6048d052016-08-25 14:40:54 -07001620 (InterpFilter)av1_switchable_interp_inv[aom_read_symbol(
Thomas Davies77c7c402017-01-11 17:58:54 +00001621 r, ec_ctx->switchable_interp_cdf[ctx], SWITCHABLE_FILTERS,
Michael Bebenita6048d052016-08-25 14:40:54 -07001622 ACCT_STR)];
Angie Chiang9c4f8952016-11-21 11:13:19 -08001623 if (counts) ++counts->switchable_interp[ctx][mbmi->interp_filter];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001624 }
Angie Chiang9c4f8952016-11-21 11:13:19 -08001625#endif // CONFIG_DUAL_FILTER
Yaowu Xuc27fc142016-08-22 16:08:15 -07001626}
1627
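// Reads intra mode info for an intra block in an inter frame: the luma mode
// (per sub-block below 8x8 when CB4X4 is off), the chroma mode when a chroma
// reference is present, and, where enabled, CfL alphas, angle, palette and
// filter-intra information.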
Jingning Han36fe3202017-02-20 22:31:49 -08001628static void read_intra_block_mode_info(AV1_COMMON *const cm, const int mi_row,
1629 const int mi_col, MACROBLOCKD *const xd,
1630 MODE_INFO *mi, aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001631 MB_MODE_INFO *const mbmi = &mi->mbmi;
1632 const BLOCK_SIZE bsize = mi->mbmi.sb_type;
1633 int i;
1634
1635 mbmi->ref_frame[0] = INTRA_FRAME;
Emil Keyder01770b32017-01-20 18:03:11 -05001636 mbmi->ref_frame[1] = NONE_FRAME;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001637
Nathan E. Eggea1f80e32017-05-23 11:52:32 -04001638#if CONFIG_EC_ADAPT
1639 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1640#else
1641 FRAME_CONTEXT *ec_ctx = cm->fc;
1642#endif
1643
Jingning Han52261842016-12-14 12:17:49 -08001644#if CONFIG_CB4X4
1645 (void)i;
Nathan E. Eggea1f80e32017-05-23 11:52:32 -04001646 mbmi->mode = read_intra_mode_y(ec_ctx, xd, r, size_group_lookup[bsize]);
Jingning Han52261842016-12-14 12:17:49 -08001647#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001648 switch (bsize) {
1649 case BLOCK_4X4:
1650 for (i = 0; i < 4; ++i)
Nathan E. Eggea1f80e32017-05-23 11:52:32 -04001651 mi->bmi[i].as_mode = read_intra_mode_y(ec_ctx, xd, r, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001652 mbmi->mode = mi->bmi[3].as_mode;
1653 break;
1654 case BLOCK_4X8:
Nathan E. Eggea1f80e32017-05-23 11:52:32 -04001655 mi->bmi[0].as_mode = mi->bmi[2].as_mode =
1656 read_intra_mode_y(ec_ctx, xd, r, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001657 mi->bmi[1].as_mode = mi->bmi[3].as_mode = mbmi->mode =
Nathan E. Eggea1f80e32017-05-23 11:52:32 -04001658 read_intra_mode_y(ec_ctx, xd, r, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001659 break;
1660 case BLOCK_8X4:
Nathan E. Eggea1f80e32017-05-23 11:52:32 -04001661 mi->bmi[0].as_mode = mi->bmi[1].as_mode =
1662 read_intra_mode_y(ec_ctx, xd, r, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001663 mi->bmi[2].as_mode = mi->bmi[3].as_mode = mbmi->mode =
Nathan E. Eggea1f80e32017-05-23 11:52:32 -04001664 read_intra_mode_y(ec_ctx, xd, r, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001665 break;
1666 default:
Nathan E. Eggea1f80e32017-05-23 11:52:32 -04001667 mbmi->mode = read_intra_mode_y(ec_ctx, xd, r, size_group_lookup[bsize]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001668 }
Jingning Han52261842016-12-14 12:17:49 -08001669#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001670
Jingning Han36fe3202017-02-20 22:31:49 -08001671#if CONFIG_CB4X4
Jingning Hand3a64432017-04-06 17:04:17 -07001672 if (is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001673 xd->plane[1].subsampling_y)) {
Nathan E. Eggea1f80e32017-05-23 11:52:32 -04001674 mbmi->uv_mode = read_intra_mode_uv(ec_ctx, xd, r, mbmi->mode);
Jingning Han36fe3202017-02-20 22:31:49 -08001675#else
Nathan E. Eggea1f80e32017-05-23 11:52:32 -04001676 mbmi->uv_mode = read_intra_mode_uv(ec_ctx, xd, r, mbmi->mode);
Jingning Han36fe3202017-02-20 22:31:49 -08001677 (void)mi_row;
1678 (void)mi_col;
1679#endif
1680
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001681#if CONFIG_CFL
1682 // TODO(ltrudeau) support PALETTE
1683 if (mbmi->uv_mode == DC_PRED) {
1684 mbmi->cfl_alpha_idx = read_cfl_alphas(
1685#if CONFIG_EC_ADAPT
1686 xd->tile_ctx,
1687#else
1688 cm->fc,
1689#endif // CONFIG_EC_ADAPT
David Michael Barr23198662017-06-19 23:19:48 +09001690 r, mbmi->cfl_alpha_signs);
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001691 }
1692#endif // CONFIG_CFL
1693
1694#if CONFIG_CB4X4
1695 }
1696#endif
1697
Yaowu Xuc27fc142016-08-22 16:08:15 -07001698#if CONFIG_EXT_INTRA
1699 read_intra_angle_info(cm, xd, r);
1700#endif // CONFIG_EXT_INTRA
Urvang Joshib100db72016-10-12 16:28:56 -07001701#if CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001702 mbmi->palette_mode_info.palette_size[0] = 0;
1703 mbmi->palette_mode_info.palette_size[1] = 0;
1704 if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
1705 read_palette_mode_info(cm, xd, r);
Urvang Joshib100db72016-10-12 16:28:56 -07001706#endif // CONFIG_PALETTE
hui su5db97432016-10-14 16:10:14 -07001707#if CONFIG_FILTER_INTRA
1708 mbmi->filter_intra_mode_info.use_filter_intra_mode[0] = 0;
1709 mbmi->filter_intra_mode_info.use_filter_intra_mode[1] = 0;
Jingning Han48b1cb32017-01-23 10:26:14 -08001710 if (bsize >= BLOCK_8X8 || CONFIG_CB4X4)
Jingning Han62946d12017-05-26 11:29:30 -07001711 read_filter_intra_mode_info(cm, xd, mi_row, mi_col, r);
hui su5db97432016-10-14 16:10:14 -07001712#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001713}
1714
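// A motion vector is valid when both components lie strictly inside
// (MV_LOW, MV_UPP).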
1715static INLINE int is_mv_valid(const MV *mv) {
1716 return mv->row > MV_LOW && mv->row < MV_UPP && mv->col > MV_LOW &&
1717 mv->col < MV_UPP;
1718}
1719
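// Derives the final motion vector(s) for the decoded prediction mode:
// NEW* modes read an MV difference against the reference MV, NEAREST*/NEAR*
// modes copy the corresponding predictor, and ZERO* modes use the global
// motion vector (or zero without CONFIG_GLOBAL_MOTION). Returns 0 for an
// unsupported mode, otherwise whether all assigned MVs are valid.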
Yaowu Xuf883b422016-08-30 14:01:10 -07001720static INLINE int assign_mv(AV1_COMMON *cm, MACROBLOCKD *xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001721 PREDICTION_MODE mode,
Jingning Han5c60cdf2016-09-30 09:37:46 -07001722 MV_REFERENCE_FRAME ref_frame[2], int block,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001723 int_mv mv[2], int_mv ref_mv[2],
David Barker45390c12017-02-20 14:44:40 +00001724 int_mv nearest_mv[2], int_mv near_mv[2], int mi_row,
1725 int mi_col, int is_compound, int allow_hp,
1726 aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001727 int i;
1728 int ret = 1;
Thomas Davies24523292017-01-11 16:56:47 +00001729#if CONFIG_EC_ADAPT
1730 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1731#else
1732 FRAME_CONTEXT *ec_ctx = cm->fc;
1733#endif
Debargha Mukherjeef6dd3c62017-02-23 13:21:23 -08001734 BLOCK_SIZE bsize = xd->mi[0]->mbmi.sb_type;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001735 MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
Jingning Han5cfa6712016-12-14 09:53:38 -08001736#if CONFIG_CB4X4
1737 int_mv *pred_mv = mbmi->pred_mv;
1738 (void)block;
1739#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001740 int_mv *pred_mv =
Yaowu Xuf5bbbfa2016-09-26 09:13:38 -07001741 (bsize >= BLOCK_8X8) ? mbmi->pred_mv : xd->mi[0]->bmi[block].pred_mv;
Jingning Han5cfa6712016-12-14 09:53:38 -08001742#endif // CONFIG_CB4X4
Sarah Parkere5299862016-08-16 14:57:37 -07001743 (void)ref_frame;
Thomas Davies24523292017-01-11 16:56:47 +00001744 (void)cm;
David Barker45390c12017-02-20 14:44:40 +00001745 (void)mi_row;
1746 (void)mi_col;
Debargha Mukherjeef6dd3c62017-02-23 13:21:23 -08001747 (void)bsize;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001748
1749 switch (mode) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001750 case NEWMV: {
1751 FRAME_COUNTS *counts = xd->counts;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001752 for (i = 0; i < 1 + is_compound; ++i) {
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001753 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1754 int nmv_ctx =
1755 av1_nmv_ctx(xd->ref_mv_count[rf_type], xd->ref_mv_stack[rf_type], i,
1756 mbmi->ref_mv_idx);
Alex Converse3d0bdc12017-05-01 15:19:58 -07001757 nmv_context *const nmvc = &ec_ctx->nmvc[nmv_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001758 nmv_context_counts *const mv_counts =
1759 counts ? &counts->mv[nmv_ctx] : NULL;
Alex Converse3d0bdc12017-05-01 15:19:58 -07001760 read_mv(r, &mv[i].as_mv, &ref_mv[i].as_mv, nmvc, mv_counts, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001761 ret = ret && is_mv_valid(&mv[i].as_mv);
1762
Yaowu Xuc27fc142016-08-22 16:08:15 -07001763 pred_mv[i].as_int = ref_mv[i].as_int;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001764 }
1765 break;
1766 }
1767 case NEARESTMV: {
1768 mv[0].as_int = nearest_mv[0].as_int;
1769 if (is_compound) mv[1].as_int = nearest_mv[1].as_int;
1770
Yaowu Xuc27fc142016-08-22 16:08:15 -07001771 pred_mv[0].as_int = nearest_mv[0].as_int;
1772 if (is_compound) pred_mv[1].as_int = nearest_mv[1].as_int;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001773 break;
1774 }
1775 case NEARMV: {
1776 mv[0].as_int = near_mv[0].as_int;
1777 if (is_compound) mv[1].as_int = near_mv[1].as_int;
1778
Yaowu Xuc27fc142016-08-22 16:08:15 -07001779 pred_mv[0].as_int = near_mv[0].as_int;
1780 if (is_compound) pred_mv[1].as_int = near_mv[1].as_int;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001781 break;
1782 }
1783 case ZEROMV: {
Sarah Parkere5299862016-08-16 14:57:37 -07001784#if CONFIG_GLOBAL_MOTION
David Barkercdcac6d2016-12-01 17:04:16 +00001785 mv[0].as_int = gm_get_motion_vector(&cm->global_motion[ref_frame[0]],
Sarah Parkerae7c4582017-02-28 16:30:30 -08001786 cm->allow_high_precision_mv, bsize,
Debargha Mukherjeefebb59c2017-03-02 12:23:45 -08001787 mi_col, mi_row, block)
David Barkercdcac6d2016-12-01 17:04:16 +00001788 .as_int;
Sarah Parkere5299862016-08-16 14:57:37 -07001789 if (is_compound)
David Barkercdcac6d2016-12-01 17:04:16 +00001790 mv[1].as_int = gm_get_motion_vector(&cm->global_motion[ref_frame[1]],
Sarah Parkerae7c4582017-02-28 16:30:30 -08001791 cm->allow_high_precision_mv, bsize,
Debargha Mukherjeefebb59c2017-03-02 12:23:45 -08001792 mi_col, mi_row, block)
David Barkercdcac6d2016-12-01 17:04:16 +00001793 .as_int;
Sarah Parkere5299862016-08-16 14:57:37 -07001794#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001795 mv[0].as_int = 0;
1796 if (is_compound) mv[1].as_int = 0;
Sarah Parkere5299862016-08-16 14:57:37 -07001797#endif // CONFIG_GLOBAL_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07001798
Debargha Mukherjeefebb59c2017-03-02 12:23:45 -08001799 pred_mv[0].as_int = mv[0].as_int;
1800 if (is_compound) pred_mv[1].as_int = mv[1].as_int;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001801 break;
1802 }
1803#if CONFIG_EXT_INTER
Zoe Liu85b66462017-04-20 14:28:19 -07001804#if CONFIG_COMPOUND_SINGLEREF
1805 case SR_NEAREST_NEARMV: {
1806 assert(!is_compound);
1807 mv[0].as_int = nearest_mv[0].as_int;
1808 mv[1].as_int = near_mv[0].as_int;
1809 break;
1810 }
1811 /*
1812 case SR_NEAREST_NEWMV: {
1813 assert(!is_compound);
1814 mv[0].as_int = nearest_mv[0].as_int;
1815
1816 FRAME_COUNTS *counts = xd->counts;
1817 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1818 int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[rf_type],
1819 xd->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx);
1820 nmv_context *const nmvc = &ec_ctx->nmvc[nmv_ctx];
1821 nmv_context_counts *const mv_counts =
1822 counts ? &counts->mv[nmv_ctx] : NULL;
1823 read_mv(r, &mv[1].as_mv, &ref_mv[0].as_mv, nmvc, mv_counts, allow_hp);
1824 ret = ret && is_mv_valid(&mv[1].as_mv);
1825 break;
1826 }*/
1827 case SR_NEAR_NEWMV: {
1828 assert(!is_compound);
1829 mv[0].as_int = near_mv[0].as_int;
1830
1831 FRAME_COUNTS *counts = xd->counts;
1832 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1833 int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[rf_type],
1834 xd->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx);
1835 nmv_context *const nmvc = &ec_ctx->nmvc[nmv_ctx];
1836 nmv_context_counts *const mv_counts =
1837 counts ? &counts->mv[nmv_ctx] : NULL;
1838 read_mv(r, &mv[1].as_mv, &ref_mv[0].as_mv, nmvc, mv_counts, allow_hp);
1839 ret = ret && is_mv_valid(&mv[1].as_mv);
1840 break;
1841 }
1842 case SR_ZERO_NEWMV: {
1843 assert(!is_compound);
1844#if CONFIG_GLOBAL_MOTION
1845 mv[0].as_int = gm_get_motion_vector(&cm->global_motion[ref_frame[0]],
1846 cm->allow_high_precision_mv, bsize,
1847 mi_col, mi_row, block)
1848 .as_int;
1849#else
1850 mv[0].as_int = 0;
1851#endif // CONFIG_GLOBAL_MOTION
1852
1853 FRAME_COUNTS *counts = xd->counts;
1854 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1855 int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[rf_type],
1856 xd->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx);
1857 nmv_context *const nmvc = &ec_ctx->nmvc[nmv_ctx];
1858 nmv_context_counts *const mv_counts =
1859 counts ? &counts->mv[nmv_ctx] : NULL;
1860 read_mv(r, &mv[1].as_mv, &ref_mv[0].as_mv, nmvc, mv_counts, allow_hp);
1861 ret = ret && is_mv_valid(&mv[1].as_mv);
1862 break;
1863 }
1864 case SR_NEW_NEWMV: {
1865 assert(!is_compound);
1866
1867 FRAME_COUNTS *counts = xd->counts;
1868 for (i = 0; i < 2; ++i) {
1869 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1870 int nmv_ctx =
1871 av1_nmv_ctx(xd->ref_mv_count[rf_type], xd->ref_mv_stack[rf_type], 0,
1872 mbmi->ref_mv_idx);
1873 nmv_context *const nmvc = &ec_ctx->nmvc[nmv_ctx];
1874 nmv_context_counts *const mv_counts =
1875 counts ? &counts->mv[nmv_ctx] : NULL;
1876 read_mv(r, &mv[i].as_mv, &ref_mv[0].as_mv, nmvc, mv_counts, allow_hp);
1877 ret = ret && is_mv_valid(&mv[i].as_mv);
1878 }
1879 break;
1880 }
1881#endif // CONFIG_COMPOUND_SINGLEREF
Yaowu Xuc27fc142016-08-22 16:08:15 -07001882 case NEW_NEWMV: {
1883 FRAME_COUNTS *counts = xd->counts;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001884 assert(is_compound);
1885 for (i = 0; i < 2; ++i) {
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001886 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1887 int nmv_ctx =
1888 av1_nmv_ctx(xd->ref_mv_count[rf_type], xd->ref_mv_stack[rf_type], i,
1889 mbmi->ref_mv_idx);
Alex Converse3d0bdc12017-05-01 15:19:58 -07001890 nmv_context *const nmvc = &ec_ctx->nmvc[nmv_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001891 nmv_context_counts *const mv_counts =
1892 counts ? &counts->mv[nmv_ctx] : NULL;
Alex Converse3d0bdc12017-05-01 15:19:58 -07001893 read_mv(r, &mv[i].as_mv, &ref_mv[i].as_mv, nmvc, mv_counts, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001894 ret = ret && is_mv_valid(&mv[i].as_mv);
1895 }
1896 break;
1897 }
1898 case NEAREST_NEARESTMV: {
1899 assert(is_compound);
1900 mv[0].as_int = nearest_mv[0].as_int;
1901 mv[1].as_int = nearest_mv[1].as_int;
1902 break;
1903 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001904 case NEAR_NEARMV: {
1905 assert(is_compound);
1906 mv[0].as_int = near_mv[0].as_int;
1907 mv[1].as_int = near_mv[1].as_int;
1908 break;
1909 }
1910 case NEW_NEARESTMV: {
1911 FRAME_COUNTS *counts = xd->counts;
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001912 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1913 int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[rf_type],
1914 xd->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx);
Alex Converse3d0bdc12017-05-01 15:19:58 -07001915 nmv_context *const nmvc = &ec_ctx->nmvc[nmv_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001916 nmv_context_counts *const mv_counts =
1917 counts ? &counts->mv[nmv_ctx] : NULL;
Alex Converse3d0bdc12017-05-01 15:19:58 -07001918 read_mv(r, &mv[0].as_mv, &ref_mv[0].as_mv, nmvc, mv_counts, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001919 assert(is_compound);
1920 ret = ret && is_mv_valid(&mv[0].as_mv);
1921 mv[1].as_int = nearest_mv[1].as_int;
1922 break;
1923 }
1924 case NEAREST_NEWMV: {
1925 FRAME_COUNTS *counts = xd->counts;
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001926 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1927 int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[rf_type],
1928 xd->ref_mv_stack[rf_type], 1, mbmi->ref_mv_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001929 nmv_context_counts *const mv_counts =
1930 counts ? &counts->mv[nmv_ctx] : NULL;
Alex Converse3d0bdc12017-05-01 15:19:58 -07001931 nmv_context *const nmvc = &ec_ctx->nmvc[nmv_ctx];
Alex Converse3d0bdc12017-05-01 15:19:58 -07001932 mv[0].as_int = nearest_mv[0].as_int;
1933 read_mv(r, &mv[1].as_mv, &ref_mv[1].as_mv, nmvc, mv_counts, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001934 assert(is_compound);
1935 ret = ret && is_mv_valid(&mv[1].as_mv);
1936 break;
1937 }
1938 case NEAR_NEWMV: {
1939 FRAME_COUNTS *counts = xd->counts;
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001940 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1941 int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[rf_type],
1942 xd->ref_mv_stack[rf_type], 1, mbmi->ref_mv_idx);
Alex Converse3d0bdc12017-05-01 15:19:58 -07001943 nmv_context *const nmvc = &ec_ctx->nmvc[nmv_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001944 nmv_context_counts *const mv_counts =
1945 counts ? &counts->mv[nmv_ctx] : NULL;
Alex Converse3d0bdc12017-05-01 15:19:58 -07001946 mv[0].as_int = near_mv[0].as_int;
1947 read_mv(r, &mv[1].as_mv, &ref_mv[1].as_mv, nmvc, mv_counts, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001948 assert(is_compound);
1949
1950 ret = ret && is_mv_valid(&mv[1].as_mv);
1951 break;
1952 }
1953 case NEW_NEARMV: {
1954 FRAME_COUNTS *counts = xd->counts;
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001955 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1956 int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[rf_type],
1957 xd->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx);
Alex Converse3d0bdc12017-05-01 15:19:58 -07001958 nmv_context *const nmvc = &ec_ctx->nmvc[nmv_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001959 nmv_context_counts *const mv_counts =
1960 counts ? &counts->mv[nmv_ctx] : NULL;
Alex Converse3d0bdc12017-05-01 15:19:58 -07001961 read_mv(r, &mv[0].as_mv, &ref_mv[0].as_mv, nmvc, mv_counts, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001962 assert(is_compound);
1963 ret = ret && is_mv_valid(&mv[0].as_mv);
1964 mv[1].as_int = near_mv[1].as_int;
1965 break;
1966 }
1967 case ZERO_ZEROMV: {
1968 assert(is_compound);
Sarah Parkerc2d38712017-01-24 15:15:41 -08001969#if CONFIG_GLOBAL_MOTION
1970 mv[0].as_int = gm_get_motion_vector(&cm->global_motion[ref_frame[0]],
Sarah Parkerae7c4582017-02-28 16:30:30 -08001971 cm->allow_high_precision_mv, bsize,
Debargha Mukherjeefebb59c2017-03-02 12:23:45 -08001972 mi_col, mi_row, block)
Sarah Parkerc2d38712017-01-24 15:15:41 -08001973 .as_int;
1974 mv[1].as_int = gm_get_motion_vector(&cm->global_motion[ref_frame[1]],
Sarah Parkerae7c4582017-02-28 16:30:30 -08001975 cm->allow_high_precision_mv, bsize,
Debargha Mukherjeefebb59c2017-03-02 12:23:45 -08001976 mi_col, mi_row, block)
Sarah Parkerc2d38712017-01-24 15:15:41 -08001977 .as_int;
1978#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001979 mv[0].as_int = 0;
1980 mv[1].as_int = 0;
Sarah Parkerc2d38712017-01-24 15:15:41 -08001981#endif // CONFIG_GLOBAL_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07001982 break;
1983 }
1984#endif // CONFIG_EXT_INTER
1985 default: { return 0; }
1986 }
1987 return ret;
1988}
1989
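// Returns whether the current block is inter coded: taken from the segment
// feature when SEG_LVL_REF_FRAME is active, otherwise read from the
// intra/inter context model.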
Yaowu Xuf883b422016-08-30 14:01:10 -07001990static int read_is_inter_block(AV1_COMMON *const cm, MACROBLOCKD *const xd,
1991 int segment_id, aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001992 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
1993 return get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME) != INTRA_FRAME;
1994 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001995 const int ctx = av1_get_intra_inter_context(xd);
Thomas Daviesf6ad9352017-04-19 11:38:06 +01001996#if CONFIG_NEW_MULTISYMBOL
1997#if CONFIG_EC_ADAPT
1998 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1999#else
2000 FRAME_CONTEXT *ec_ctx = cm->fc;
2001#endif
2002 const int is_inter =
2003 aom_read_symbol(r, ec_ctx->intra_inter_cdf[ctx], 2, ACCT_STR);
2004#else
Michael Bebenita6048d052016-08-25 14:40:54 -07002005 const int is_inter = aom_read(r, cm->fc->intra_inter_prob[ctx], ACCT_STR);
Thomas Daviesf6ad9352017-04-19 11:38:06 +01002006#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002007 FRAME_COUNTS *counts = xd->counts;
2008 if (counts) ++counts->intra_inter[ctx][is_inter];
2009 return is_inter;
2010 }
2011}
2012
Zoe Liu85b66462017-04-20 14:28:19 -07002013#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
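// Returns whether a single-reference block uses a compound (two-MV)
// prediction mode; always 0 when the segment fixes the reference frame.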
2014static int read_is_inter_singleref_comp_mode(AV1_COMMON *const cm,
2015 MACROBLOCKD *const xd,
2016 int segment_id, aom_reader *r) {
2017 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) return 0;
2018
2019 const int ctx = av1_get_inter_mode_context(xd);
2020 const int is_singleref_comp_mode =
2021 aom_read(r, cm->fc->comp_inter_mode_prob[ctx], ACCT_STR);
2022 FRAME_COUNTS *counts = xd->counts;
2023
2024 if (counts) ++counts->comp_inter_mode[ctx][is_singleref_comp_mode];
2025 return is_singleref_comp_mode;
2026}
2027#endif // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
2028
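// Frame-parallel decoding sync callback: waits until the previous frame has
// been decoded up to the requested row before its MVs are used as references.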
Yaowu Xuc27fc142016-08-22 16:08:15 -07002029static void fpm_sync(void *const data, int mi_row) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002030 AV1Decoder *const pbi = (AV1Decoder *)data;
2031 av1_frameworker_wait(pbi->frame_worker_owner, pbi->common.prev_frame,
2032 mi_row << pbi->common.mib_size_log2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002033}
2034
Di Chen56586622017-06-09 13:49:44 -07002035#if DEC_MISMATCH_DEBUG
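// Debug-only helper (DEC_MISMATCH_DEBUG): prints the decoded mode, MVs,
// reference frames and context values for FRAME_TO_CHECK so they can be
// compared against the encoder side.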
2036static void dec_dump_logs(AV1_COMMON *cm, MODE_INFO *const mi,
Zoe Liuf9333f52017-07-03 10:52:01 -07002037 MACROBLOCKD *const xd, int mi_row, int mi_col,
2038 int16_t inter_mode_ctx[MODE_CTX_REF_FRAMES],
2039 int16_t mode_ctx) {
Di Chen56586622017-06-09 13:49:44 -07002040 int_mv mv[2] = { { 0 } };
2041 int ref;
2042 MB_MODE_INFO *const mbmi = &mi->mbmi;
Di Chen56586622017-06-09 13:49:44 -07002043 for (ref = 0; ref < 1 + has_second_ref(mbmi); ++ref)
2044 mv[ref].as_mv = mbmi->mv[ref].as_mv;
2045
2046 int interp_ctx[2] = { -1 };
2047 int interp_filter[2] = { cm->interp_filter };
2048 if (cm->interp_filter == SWITCHABLE) {
2049 int dir;
2050 for (dir = 0; dir < 2; ++dir) {
2051 if (has_subpel_mv_component(xd->mi[0], xd, dir) ||
2052 (mbmi->ref_frame[1] > INTRA_FRAME &&
2053 has_subpel_mv_component(xd->mi[0], xd, dir + 2))) {
2054 interp_ctx[dir] = av1_get_pred_context_switchable_interp(xd, dir);
2055 interp_filter[dir] = mbmi->interp_filter[dir];
2056 } else {
2057 interp_filter[dir] = EIGHTTAP_REGULAR;
2058 }
2059 }
2060 }
2061
2062 const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
2063 int16_t zeromv_ctx = -1;
2064 int16_t refmv_ctx = -1;
2065 if (mbmi->mode != NEWMV) {
2066 if (mode_ctx & (1 << ALL_ZERO_FLAG_OFFSET)) assert(mbmi->mode == ZEROMV);
2067 zeromv_ctx = (mode_ctx >> ZEROMV_OFFSET) & ZEROMV_CTX_MASK;
2068 if (mbmi->mode != ZEROMV) {
2069 refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
2070 if (mode_ctx & (1 << SKIP_NEARESTMV_OFFSET)) refmv_ctx = 6;
2071 if (mode_ctx & (1 << SKIP_NEARMV_OFFSET)) refmv_ctx = 7;
2072 if (mode_ctx & (1 << SKIP_NEARESTMV_SUB8X8_OFFSET)) refmv_ctx = 8;
2073 }
2074 }
2075
2076 int8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);
Zoe Liuf9333f52017-07-03 10:52:01 -07002077#define FRAME_TO_CHECK 1
2078 if (cm->current_video_frame == FRAME_TO_CHECK
2079 // && cm->show_frame == 0
2080 ) {
2081 printf(
2082 "=== DECODER ===: "
2083 "Frame=%d, (mi_row,mi_col)=(%d,%d), mode=%d, bsize=%d, "
2084 "show_frame=%d, mv[0]=(%d,%d), mv[1]=(%d,%d), ref[0]=%d, "
2085 "ref[1]=%d, motion_mode=%d, inter_mode_ctx=%d, mode_ctx=%d, "
2086 "interp_ctx=(%d,%d), interp_filter=(%d,%d), newmv_ctx=%d, "
2087 "zeromv_ctx=%d, refmv_ctx=%d\n",
2088 cm->current_video_frame, mi_row, mi_col, mbmi->mode, mbmi->sb_type,
2089 cm->show_frame, mv[0].as_mv.row, mv[0].as_mv.col, mv[1].as_mv.row,
2090 mv[1].as_mv.col, mbmi->ref_frame[0], mbmi->ref_frame[1],
2091 mbmi->motion_mode, inter_mode_ctx[ref_frame_type], mode_ctx,
2092 interp_ctx[0], interp_ctx[1], interp_filter[0], interp_filter[1],
2093 newmv_ctx, zeromv_ctx, refmv_ctx);
2094 }
Di Chen56586622017-06-09 13:49:44 -07002095}
2096#endif // DEC_MISMATCH_DEBUG
2097
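// Reads the mode info for an inter-coded block: reference frames, the MV
// reference candidates, the inter prediction mode (and DRL index), and the
// motion vector(s) with their related side information.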
Yaowu Xuf883b422016-08-30 14:01:10 -07002098static void read_inter_block_mode_info(AV1Decoder *const pbi,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002099 MACROBLOCKD *const xd,
2100 MODE_INFO *const mi,
David Barker491983d2016-11-10 13:22:17 +00002101#if (CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION || CONFIG_EXT_INTER) && \
2102 CONFIG_SUPERTX
Yaowu Xuf883b422016-08-30 14:01:10 -07002103 int mi_row, int mi_col, aom_reader *r,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002104 int supertx_enabled) {
2105#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002106 int mi_row, int mi_col, aom_reader *r) {
Yue Chencb60b182016-10-13 15:18:22 -07002107#endif // CONFIG_MOTION_VAR && CONFIG_SUPERTX
Yaowu Xuf883b422016-08-30 14:01:10 -07002108 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002109 MB_MODE_INFO *const mbmi = &mi->mbmi;
2110 const BLOCK_SIZE bsize = mbmi->sb_type;
2111 const int allow_hp = cm->allow_high_precision_mv;
Jingning Han5cfa6712016-12-14 09:53:38 -08002112 const int unify_bsize = CONFIG_CB4X4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002113 int_mv nearestmv[2], nearmv[2];
2114 int_mv ref_mvs[MODE_CTX_REF_FRAMES][MAX_MV_REF_CANDIDATES];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002115 int ref, is_compound;
Zoe Liu85b66462017-04-20 14:28:19 -07002116#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
2117 int is_singleref_comp_mode = 0;
2118#endif // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
Yaowu Xuc27fc142016-08-22 16:08:15 -07002119 int16_t inter_mode_ctx[MODE_CTX_REF_FRAMES];
Sebastien Alaiwane140c502017-04-27 09:52:34 +02002120#if CONFIG_EXT_INTER
Yaowu Xuc27fc142016-08-22 16:08:15 -07002121 int16_t compound_inter_mode_ctx[MODE_CTX_REF_FRAMES];
Sebastien Alaiwane140c502017-04-27 09:52:34 +02002122#endif // CONFIG_EXT_INTER
Yaowu Xuc27fc142016-08-22 16:08:15 -07002123 int16_t mode_ctx = 0;
Yue Chen69f18e12016-09-08 14:48:15 -07002124#if CONFIG_WARPED_MOTION
Debargha Mukherjeee6eb3b52017-02-26 08:50:56 -08002125 int pts[SAMPLES_ARRAY_SIZE], pts_inref[SAMPLES_ARRAY_SIZE];
Yue Chen69f18e12016-09-08 14:48:15 -07002126#endif // CONFIG_WARPED_MOTION
Thomas Davies1de6c882017-01-11 17:47:49 +00002127#if CONFIG_EC_ADAPT
2128 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
2129#else
2130 FRAME_CONTEXT *ec_ctx = cm->fc;
2131#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002132
Urvang Joshi5a9ea002017-05-22 15:25:18 -07002133 assert(NELEMENTS(mode_2_counter) == MB_MODE_COUNT);
2134
Urvang Joshib100db72016-10-12 16:28:56 -07002135#if CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002136 mbmi->palette_mode_info.palette_size[0] = 0;
2137 mbmi->palette_mode_info.palette_size[1] = 0;
Urvang Joshib100db72016-10-12 16:28:56 -07002138#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002139
Frederic Barbier7a84fd82017-03-02 18:08:15 +01002140 memset(ref_mvs, 0, sizeof(ref_mvs));
2141
Yaowu Xuc27fc142016-08-22 16:08:15 -07002142 read_ref_frames(cm, xd, r, mbmi->segment_id, mbmi->ref_frame);
2143 is_compound = has_second_ref(mbmi);
2144
Zoe Liuc082bbc2017-05-17 13:31:37 -07002145#if CONFIG_EXT_COMP_REFS
2146#if !USE_UNI_COMP_REFS
2147 // NOTE: uni-directional comp refs disabled
2148 if (is_compound)
2149 assert(mbmi->ref_frame[0] < BWDREF_FRAME &&
2150 mbmi->ref_frame[1] >= BWDREF_FRAME);
2151#endif // !USE_UNI_COMP_REFS
2152#endif // CONFIG_EXT_COMP_REFS
2153
Zoe Liu85b66462017-04-20 14:28:19 -07002154#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
2155 if (!is_compound)
2156 is_singleref_comp_mode =
2157 read_is_inter_singleref_comp_mode(cm, xd, mbmi->segment_id, r);
2158#endif // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
2159
Yaowu Xuc27fc142016-08-22 16:08:15 -07002160 for (ref = 0; ref < 1 + is_compound; ++ref) {
2161 MV_REFERENCE_FRAME frame = mbmi->ref_frame[ref];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002162
Sebastien Alaiwane140c502017-04-27 09:52:34 +02002163 av1_find_mv_refs(
2164 cm, xd, mi, frame, &xd->ref_mv_count[frame], xd->ref_mv_stack[frame],
Yaowu Xuc27fc142016-08-22 16:08:15 -07002165#if CONFIG_EXT_INTER
Sebastien Alaiwane140c502017-04-27 09:52:34 +02002166 compound_inter_mode_ctx,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002167#endif // CONFIG_EXT_INTER
Sebastien Alaiwane140c502017-04-27 09:52:34 +02002168 ref_mvs[frame], mi_row, mi_col, fpm_sync, (void *)pbi, inter_mode_ctx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002169 }
2170
Jingning Hanacddc032016-11-17 15:26:20 -08002171 if (is_compound) {
2172 MV_REFERENCE_FRAME ref_frame = av1_ref_frame_type(mbmi->ref_frame);
Yaowu Xu4306b6e2016-09-27 12:55:32 -07002173 av1_find_mv_refs(cm, xd, mi, ref_frame, &xd->ref_mv_count[ref_frame],
2174 xd->ref_mv_stack[ref_frame],
2175#if CONFIG_EXT_INTER
2176 compound_inter_mode_ctx,
2177#endif // CONFIG_EXT_INTER
2178 ref_mvs[ref_frame], mi_row, mi_col, fpm_sync, (void *)pbi,
2179 inter_mode_ctx);
2180
2181 if (xd->ref_mv_count[ref_frame] < 2) {
2182 MV_REFERENCE_FRAME rf[2];
David Barkercdcac6d2016-12-01 17:04:16 +00002183 int_mv zeromv[2];
Yaowu Xu4306b6e2016-09-27 12:55:32 -07002184 av1_set_ref_frame(rf, ref_frame);
David Barkercdcac6d2016-12-01 17:04:16 +00002185#if CONFIG_GLOBAL_MOTION
2186 zeromv[0].as_int = gm_get_motion_vector(&cm->global_motion[rf[0]],
David Barker45390c12017-02-20 14:44:40 +00002187 cm->allow_high_precision_mv,
Debargha Mukherjeefebb59c2017-03-02 12:23:45 -08002188 bsize, mi_col, mi_row, 0)
David Barkercdcac6d2016-12-01 17:04:16 +00002189 .as_int;
Sarah Parkerae7c4582017-02-28 16:30:30 -08002190 zeromv[1].as_int = (rf[1] != NONE_FRAME)
2191 ? gm_get_motion_vector(&cm->global_motion[rf[1]],
2192 cm->allow_high_precision_mv,
Debargha Mukherjeefebb59c2017-03-02 12:23:45 -08002193 bsize, mi_col, mi_row, 0)
Sarah Parkerae7c4582017-02-28 16:30:30 -08002194 .as_int
2195 : 0;
David Barkercdcac6d2016-12-01 17:04:16 +00002196#else
2197 zeromv[0].as_int = zeromv[1].as_int = 0;
2198#endif
Sarah Parker9923d1b2017-04-10 11:56:40 -07002199 for (ref = 0; ref < 2; ++ref) {
2200 if (rf[ref] == NONE_FRAME) continue;
Yaowu Xu4306b6e2016-09-27 12:55:32 -07002201 lower_mv_precision(&ref_mvs[rf[ref]][0].as_mv, allow_hp);
2202 lower_mv_precision(&ref_mvs[rf[ref]][1].as_mv, allow_hp);
Sarah Parker9923d1b2017-04-10 11:56:40 -07002203 if (ref_mvs[rf[ref]][0].as_int != zeromv[ref].as_int ||
2204 ref_mvs[rf[ref]][1].as_int != zeromv[ref].as_int)
2205 inter_mode_ctx[ref_frame] &= ~(1 << ALL_ZERO_FLAG_OFFSET);
Frederic Barbier72e2e982017-03-03 10:01:04 +01002206 }
Yaowu Xu4306b6e2016-09-27 12:55:32 -07002207 }
2208 }
2209
Yaowu Xuc27fc142016-08-22 16:08:15 -07002210#if CONFIG_EXT_INTER
Zoe Liu85b66462017-04-20 14:28:19 -07002211#if CONFIG_COMPOUND_SINGLEREF
2212 if (is_compound || is_singleref_comp_mode)
2213#else // !CONFIG_COMPOUND_SINGLEREF
Yaowu Xuc27fc142016-08-22 16:08:15 -07002214 if (is_compound)
Zoe Liu85b66462017-04-20 14:28:19 -07002215#endif // CONFIG_COMPOUND_SINGLEREF
Yaowu Xuc27fc142016-08-22 16:08:15 -07002216 mode_ctx = compound_inter_mode_ctx[mbmi->ref_frame[0]];
2217 else
2218#endif // CONFIG_EXT_INTER
2219 mode_ctx =
Yaowu Xuf883b422016-08-30 14:01:10 -07002220 av1_mode_context_analyzer(inter_mode_ctx, mbmi->ref_frame, bsize, -1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002221 mbmi->ref_mv_idx = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002222
  if (segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
    mbmi->mode = ZEROMV;
    if (bsize < BLOCK_8X8 && !unify_bsize) {
      aom_internal_error(xd->error_info, AOM_CODEC_UNSUP_BITSTREAM,
                         "Invalid usage of segment feature on small blocks");
      return;
    }
  } else {
    if (bsize >= BLOCK_8X8 || unify_bsize) {
#if CONFIG_EXT_INTER
      if (is_compound)
        mbmi->mode = read_inter_compound_mode(cm, xd, r, mode_ctx);
#if CONFIG_COMPOUND_SINGLEREF
      else if (is_singleref_comp_mode)
        mbmi->mode = read_inter_singleref_comp_mode(cm, xd, r, mode_ctx);
#endif  // CONFIG_COMPOUND_SINGLEREF
      else
#endif  // CONFIG_EXT_INTER
        mbmi->mode = read_inter_mode(ec_ctx, xd, r, mode_ctx);
#if CONFIG_EXT_INTER
      if (mbmi->mode == NEWMV || mbmi->mode == NEW_NEWMV ||
#if CONFIG_COMPOUND_SINGLEREF
          mbmi->mode == SR_NEW_NEWMV ||
#endif  // CONFIG_COMPOUND_SINGLEREF
          have_nearmv_in_inter_mode(mbmi->mode))
#else   // !CONFIG_EXT_INTER
      if (mbmi->mode == NEARMV || mbmi->mode == NEWMV)
#endif  // CONFIG_EXT_INTER
        read_drl_idx(ec_ctx, xd, mbmi, r);
    }
  }

#if CONFIG_EXT_INTER
  if ((bsize < BLOCK_8X8 && unify_bsize) ||
      (mbmi->mode != ZEROMV && mbmi->mode != ZERO_ZEROMV)) {
#else
  if ((bsize < BLOCK_8X8 && !unify_bsize) || mbmi->mode != ZEROMV) {
#endif  // CONFIG_EXT_INTER
    for (ref = 0; ref < 1 + is_compound; ++ref) {
      av1_find_best_ref_mvs(allow_hp, ref_mvs[mbmi->ref_frame[ref]],
                            &nearestmv[ref], &nearmv[ref]);
    }
  }

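  // A non-zero DRL index replaces the NEARMV candidate with a lower-ranked
  // entry from the reference MV stack.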
  if (mbmi->ref_mv_idx > 0) {
    int_mv cur_mv =
        xd->ref_mv_stack[mbmi->ref_frame[0]][1 + mbmi->ref_mv_idx].this_mv;
    nearmv[0] = cur_mv;
  }

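  // For compound prediction, the NEARESTMV/NEARMV candidates are re-derived
  // directly from the joint reference MV stack of the frame pair when it
  // holds enough entries.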
#if CONFIG_EXT_INTER
#if CONFIG_COMPOUND_SINGLEREF
  if ((is_compound || is_singleref_comp_mode) &&
      (bsize >= BLOCK_8X8 || unify_bsize) && mbmi->mode != ZERO_ZEROMV) {
#else   // !CONFIG_COMPOUND_SINGLEREF
  if (is_compound && (bsize >= BLOCK_8X8 || unify_bsize) &&
      mbmi->mode != ZERO_ZEROMV) {
#endif  // CONFIG_COMPOUND_SINGLEREF
#else   // !CONFIG_EXT_INTER
  if (is_compound && (bsize >= BLOCK_8X8 || unify_bsize) &&
      mbmi->mode != NEWMV && mbmi->mode != ZEROMV) {
#endif  // CONFIG_EXT_INTER
    uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);

#if CONFIG_EXT_INTER
    if (xd->ref_mv_count[ref_frame_type] > 0) {
#else
    if (xd->ref_mv_count[ref_frame_type] == 1 && mbmi->mode == NEARESTMV) {
#endif  // CONFIG_EXT_INTER
#if CONFIG_EXT_INTER
      if (mbmi->mode == NEAREST_NEARESTMV) {
#endif  // CONFIG_EXT_INTER
        nearestmv[0] = xd->ref_mv_stack[ref_frame_type][0].this_mv;
        nearestmv[1] = xd->ref_mv_stack[ref_frame_type][0].comp_mv;
        lower_mv_precision(&nearestmv[0].as_mv, allow_hp);
        lower_mv_precision(&nearestmv[1].as_mv, allow_hp);
#if CONFIG_EXT_INTER
      } else if (mbmi->mode == NEAREST_NEWMV
#if CONFIG_COMPOUND_SINGLEREF
                 || mbmi->mode == SR_NEAREST_NEARMV
                 // || mbmi->mode == SR_NEAREST_NEWMV
#endif  // CONFIG_COMPOUND_SINGLEREF
                 ) {
        nearestmv[0] = xd->ref_mv_stack[ref_frame_type][0].this_mv;
        lower_mv_precision(&nearestmv[0].as_mv, allow_hp);
      } else if (mbmi->mode == NEW_NEARESTMV) {
        nearestmv[1] = xd->ref_mv_stack[ref_frame_type][0].comp_mv;
        lower_mv_precision(&nearestmv[1].as_mv, allow_hp);
      }
#endif  // CONFIG_EXT_INTER
    }

#if CONFIG_EXT_INTER
    if (xd->ref_mv_count[ref_frame_type] > 1) {
      int ref_mv_idx = 1 + mbmi->ref_mv_idx;
#if CONFIG_COMPOUND_SINGLEREF
      if (is_compound) {
#endif  // CONFIG_COMPOUND_SINGLEREF
        if (compound_ref0_mode(mbmi->mode) == NEARMV) {
          nearmv[0] = xd->ref_mv_stack[ref_frame_type][ref_mv_idx].this_mv;
          lower_mv_precision(&nearmv[0].as_mv, allow_hp);
        }

        if (compound_ref1_mode(mbmi->mode) == NEARMV) {
          nearmv[1] = xd->ref_mv_stack[ref_frame_type][ref_mv_idx].comp_mv;
          lower_mv_precision(&nearmv[1].as_mv, allow_hp);
        }
#if CONFIG_COMPOUND_SINGLEREF
      } else {
        assert(is_singleref_comp_mode);
        if (compound_ref0_mode(mbmi->mode) == NEARMV ||
            compound_ref1_mode(mbmi->mode) == NEARMV) {
          nearmv[0] = xd->ref_mv_stack[ref_frame_type][ref_mv_idx].this_mv;
          lower_mv_precision(&nearmv[0].as_mv, allow_hp);
        }
      }
#endif  // CONFIG_COMPOUND_SINGLEREF
    }
#else   // !CONFIG_EXT_INTER
    if (xd->ref_mv_count[ref_frame_type] > 1) {
      int ref_mv_idx = 1 + mbmi->ref_mv_idx;
      nearestmv[0] = xd->ref_mv_stack[ref_frame_type][0].this_mv;
      nearestmv[1] = xd->ref_mv_stack[ref_frame_type][0].comp_mv;
      nearmv[0] = xd->ref_mv_stack[ref_frame_type][ref_mv_idx].this_mv;
      nearmv[1] = xd->ref_mv_stack[ref_frame_type][ref_mv_idx].comp_mv;
    }
#endif  // CONFIG_EXT_INTER
  }

#if !CONFIG_DUAL_FILTER && !CONFIG_WARPED_MOTION && !CONFIG_GLOBAL_MOTION
  read_mb_interp_filter(cm, xd, mbmi, r);
#endif  // !CONFIG_DUAL_FILTER && !CONFIG_WARPED_MOTION && !CONFIG_GLOBAL_MOTION

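  // Sub-8x8 blocks (when 4x4 coding is not unified) decode a prediction mode
  // and motion vector(s) per 4x4 sub-block; larger blocks decode a single set
  // of motion vectors in the else branch below.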
  if (bsize < BLOCK_8X8 && !unify_bsize) {
    const int num_4x4_w = 1 << xd->bmode_blocks_wl;
    const int num_4x4_h = 1 << xd->bmode_blocks_hl;
    int idx, idy;
    PREDICTION_MODE b_mode;
    int_mv nearest_sub8x8[2], near_sub8x8[2];
#if CONFIG_EXT_INTER
    int_mv ref_mv[2][2];
#endif  // CONFIG_EXT_INTER
    for (idy = 0; idy < 2; idy += num_4x4_h) {
      for (idx = 0; idx < 2; idx += num_4x4_w) {
        int_mv block[2];
        const int j = idy * 2 + idx;
        int_mv ref_mv_s8[2];
#if CONFIG_EXT_INTER
        if (!is_compound)
#endif  // CONFIG_EXT_INTER
          mode_ctx = av1_mode_context_analyzer(inter_mode_ctx, mbmi->ref_frame,
                                               bsize, j);
#if CONFIG_EXT_INTER
        if (is_compound)
          b_mode = read_inter_compound_mode(cm, xd, r, mode_ctx);
        else
#endif  // CONFIG_EXT_INTER
          b_mode = read_inter_mode(ec_ctx, xd, r, mode_ctx);

#if CONFIG_EXT_INTER
        if (b_mode != ZEROMV && b_mode != ZERO_ZEROMV) {
#else
        if (b_mode != ZEROMV) {
#endif  // CONFIG_EXT_INTER
          CANDIDATE_MV ref_mv_stack[2][MAX_REF_MV_STACK_SIZE];
          uint8_t ref_mv_count[2];
          for (ref = 0; ref < 1 + is_compound; ++ref)
#if CONFIG_EXT_INTER
          {
            int_mv mv_ref_list[MAX_MV_REF_CANDIDATES];
            av1_update_mv_context(cm, xd, mi, mbmi->ref_frame[ref], mv_ref_list,
                                  j, mi_row, mi_col, NULL);
#endif  // CONFIG_EXT_INTER
            av1_append_sub8x8_mvs_for_idx(cm, xd, j, ref, mi_row, mi_col,
                                          ref_mv_stack[ref], &ref_mv_count[ref],
#if CONFIG_EXT_INTER
                                          mv_ref_list,
#endif  // CONFIG_EXT_INTER
                                          &nearest_sub8x8[ref],
                                          &near_sub8x8[ref]);
#if CONFIG_EXT_INTER
            if (have_newmv_in_inter_mode(b_mode)) {
              mv_ref_list[0].as_int = nearest_sub8x8[ref].as_int;
              mv_ref_list[1].as_int = near_sub8x8[ref].as_int;
              av1_find_best_ref_mvs(allow_hp, mv_ref_list, &ref_mv[0][ref],
                                    &ref_mv[1][ref]);
            }
          }
#endif  // CONFIG_EXT_INTER
        }

        for (ref = 0; ref < 1 + is_compound && b_mode != ZEROMV; ++ref) {
          ref_mv_s8[ref] = nearest_sub8x8[ref];
          lower_mv_precision(&ref_mv_s8[ref].as_mv, allow_hp);
        }
#if CONFIG_EXT_INTER
        (void)ref_mv_s8;
#endif

        if (!assign_mv(cm, xd, b_mode, mbmi->ref_frame, j, block,
#if CONFIG_EXT_INTER
                       ref_mv[0],
#else   // !CONFIG_EXT_INTER
                       ref_mv_s8,
#endif  // CONFIG_EXT_INTER
                       nearest_sub8x8, near_sub8x8, mi_row, mi_col, is_compound,
                       allow_hp, r)) {
          aom_merge_corrupted_flag(&xd->corrupted, 1);
          break;
        };

        mi->bmi[j].as_mv[0].as_int = block[0].as_int;
        mi->bmi[j].as_mode = b_mode;
        if (is_compound) mi->bmi[j].as_mv[1].as_int = block[1].as_int;

        if (num_4x4_h == 2) mi->bmi[j + 2] = mi->bmi[j];
        if (num_4x4_w == 2) mi->bmi[j + 1] = mi->bmi[j];
      }
    }

    mbmi->pred_mv[0].as_int = mi->bmi[3].pred_mv[0].as_int;
    mbmi->pred_mv[1].as_int = mi->bmi[3].pred_mv[1].as_int;
    mi->mbmi.mode = b_mode;

    mbmi->mv[0].as_int = mi->bmi[3].as_mv[0].as_int;
    mbmi->mv[1].as_int = mi->bmi[3].as_mv[1].as_int;
  } else {
    int_mv ref_mv[2];
    ref_mv[0] = nearestmv[0];
    ref_mv[1] = nearestmv[1];

#if CONFIG_EXT_INTER
    if (is_compound) {
      int ref_mv_idx = mbmi->ref_mv_idx;
      // Special case: NEAR_NEWMV and NEW_NEARMV modes use
      // 1 + mbmi->ref_mv_idx (like NEARMV) instead of
      // mbmi->ref_mv_idx (like NEWMV)
      if (mbmi->mode == NEAR_NEWMV || mbmi->mode == NEW_NEARMV)
        ref_mv_idx = 1 + mbmi->ref_mv_idx;

      if (compound_ref0_mode(mbmi->mode) == NEWMV) {
        uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);
        if (xd->ref_mv_count[ref_frame_type] > 1) {
          ref_mv[0] = xd->ref_mv_stack[ref_frame_type][ref_mv_idx].this_mv;
          clamp_mv_ref(&ref_mv[0].as_mv, xd->n8_w << MI_SIZE_LOG2,
                       xd->n8_h << MI_SIZE_LOG2, xd);
        }
        nearestmv[0] = ref_mv[0];
      }
      if (compound_ref1_mode(mbmi->mode) == NEWMV) {
        uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);
        if (xd->ref_mv_count[ref_frame_type] > 1) {
          ref_mv[1] = xd->ref_mv_stack[ref_frame_type][ref_mv_idx].comp_mv;
          clamp_mv_ref(&ref_mv[1].as_mv, xd->n8_w << MI_SIZE_LOG2,
                       xd->n8_h << MI_SIZE_LOG2, xd);
        }
        nearestmv[1] = ref_mv[1];
      }
#if CONFIG_COMPOUND_SINGLEREF
    } else if (is_singleref_comp_mode) {
      int ref_mv_idx = mbmi->ref_mv_idx;
      // Special case: SR_NEAR_NEWMV uses 1 + mbmi->ref_mv_idx (like NEARMV)
      // instead of mbmi->ref_mv_idx (like NEWMV).
      if (mbmi->mode == SR_NEAR_NEWMV) ref_mv_idx = 1 + mbmi->ref_mv_idx;

      if (compound_ref0_mode(mbmi->mode) == NEWMV ||
          compound_ref1_mode(mbmi->mode) == NEWMV) {
        uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);
        if (xd->ref_mv_count[ref_frame_type] > 1) {
          ref_mv[0] = xd->ref_mv_stack[ref_frame_type][ref_mv_idx].this_mv;
          clamp_mv_ref(&ref_mv[0].as_mv, xd->n8_w << MI_SIZE_LOG2,
                       xd->n8_h << MI_SIZE_LOG2, xd);
        }
        // TODO(zoeliu): Investigate further why this does not cause a
        // mismatch for the SR_NEAREST_NEWMV mode.
        nearestmv[0] = ref_mv[0];
      }
#endif  // CONFIG_COMPOUND_SINGLEREF
    } else {
#endif  // CONFIG_EXT_INTER
      if (mbmi->mode == NEWMV) {
        for (ref = 0; ref < 1 + is_compound; ++ref) {
          uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);
          if (xd->ref_mv_count[ref_frame_type] > 1) {
            ref_mv[ref] =
                (ref == 0)
                    ? xd->ref_mv_stack[ref_frame_type][mbmi->ref_mv_idx].this_mv
                    : xd->ref_mv_stack[ref_frame_type][mbmi->ref_mv_idx]
                          .comp_mv;
            clamp_mv_ref(&ref_mv[ref].as_mv, xd->n8_w << MI_SIZE_LOG2,
                         xd->n8_h << MI_SIZE_LOG2, xd);
          }
          nearestmv[ref] = ref_mv[ref];
        }
      }
#if CONFIG_EXT_INTER
    }
#endif  // CONFIG_EXT_INTER

    int mv_corrupted_flag =
        !assign_mv(cm, xd, mbmi->mode, mbmi->ref_frame, 0, mbmi->mv, ref_mv,
                   nearestmv, nearmv, mi_row, mi_col, is_compound, allow_hp, r);
    aom_merge_corrupted_flag(&xd->corrupted, mv_corrupted_flag);
  }

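  // Inter-intra prediction: the inter predictor may be blended with an intra
  // predictor, optionally masked by a wedge.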
#if CONFIG_EXT_INTER && CONFIG_INTERINTRA
  mbmi->use_wedge_interintra = 0;
  if (cm->reference_mode != COMPOUND_REFERENCE &&
#if CONFIG_SUPERTX
      !supertx_enabled &&
#endif
      cm->allow_interintra_compound && is_interintra_allowed(mbmi)) {
    const int bsize_group = size_group_lookup[bsize];
    const int interintra =
        aom_read(r, cm->fc->interintra_prob[bsize_group], ACCT_STR);
    if (xd->counts) xd->counts->interintra[bsize_group][interintra]++;
    assert(mbmi->ref_frame[1] == NONE_FRAME);
    if (interintra) {
      const INTERINTRA_MODE interintra_mode =
          read_interintra_mode(cm, xd, r, bsize_group);
      mbmi->ref_frame[1] = INTRA_FRAME;
      mbmi->interintra_mode = interintra_mode;
#if CONFIG_EXT_INTRA
      mbmi->angle_delta[0] = 0;
      mbmi->angle_delta[1] = 0;
#if CONFIG_INTRA_INTERP
      mbmi->intra_filter = INTRA_FILTER_LINEAR;
#endif  // CONFIG_INTRA_INTERP
#endif  // CONFIG_EXT_INTRA
#if CONFIG_FILTER_INTRA
      mbmi->filter_intra_mode_info.use_filter_intra_mode[0] = 0;
      mbmi->filter_intra_mode_info.use_filter_intra_mode[1] = 0;
#endif  // CONFIG_FILTER_INTRA
      if (is_interintra_wedge_used(bsize)) {
        mbmi->use_wedge_interintra =
            aom_read(r, cm->fc->wedge_interintra_prob[bsize], ACCT_STR);
        if (xd->counts)
          xd->counts->wedge_interintra[bsize][mbmi->use_wedge_interintra]++;
        if (mbmi->use_wedge_interintra) {
          mbmi->interintra_wedge_index =
              aom_read_literal(r, get_wedge_bits_lookup(bsize), ACCT_STR);
          mbmi->interintra_wedge_sign = 0;
        }
      }
    }
  }
#endif  // CONFIG_EXT_INTER && CONFIG_INTERINTRA

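  // Motion mode: unless the block uses inter-intra prediction, read whether it
  // is coded with simple translation, OBMC, or warped motion; a warped-motion
  // block then derives its warp parameters from the neighbouring MV samples
  // gathered below by findSamples().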
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
  mbmi->motion_mode = SIMPLE_TRANSLATION;
#if CONFIG_WARPED_MOTION
  if (mbmi->sb_type >= BLOCK_8X8 && !has_second_ref(mbmi))
    mbmi->num_proj_ref[0] = findSamples(cm, xd, mi_row, mi_col, pts, pts_inref);
#endif  // CONFIG_WARPED_MOTION
#if CONFIG_MOTION_VAR
  av1_count_overlappable_neighbors(cm, xd, mi_row, mi_col);
#endif

#if CONFIG_SUPERTX
  if (!supertx_enabled) {
#endif  // CONFIG_SUPERTX
#if CONFIG_EXT_INTER
    if (mbmi->ref_frame[1] != INTRA_FRAME)
#endif  // CONFIG_EXT_INTER
      mbmi->motion_mode = read_motion_mode(cm, xd, mi, r);

#if CONFIG_NCOBMC_ADAPT_WEIGHT
    read_ncobmc_mode(cm, xd, mi, mbmi->ncobmc_mode, r);
#endif

#if CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
    if (is_singleref_comp_mode) assert(mbmi->motion_mode == SIMPLE_TRANSLATION);
#endif  // CONFIG_EXT_INTER && CONFIG_COMPOUND_SINGLEREF
#if CONFIG_WARPED_MOTION
    if (mbmi->motion_mode == WARPED_CAUSAL) {
      mbmi->wm_params[0].wmtype = DEFAULT_WMTYPE;
      if (find_projection(mbmi->num_proj_ref[0], pts, pts_inref, bsize,
                          mbmi->mv[0].as_mv.row, mbmi->mv[0].as_mv.col,
                          &mbmi->wm_params[0], mi_row, mi_col)) {
        aom_internal_error(&cm->error, AOM_CODEC_ERROR, "Invalid Warped Model");
      }
    }
#endif  // CONFIG_WARPED_MOTION
#if CONFIG_SUPERTX
  }
#endif  // CONFIG_SUPERTX
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION

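  // Inter-inter compound type: for compound predictions coded with simple
  // translation, read whether the two predictions are averaged or blended
  // with a wedge or segmentation mask.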
#if CONFIG_EXT_INTER
  mbmi->interinter_compound_type = COMPOUND_AVERAGE;
  if (
#if CONFIG_COMPOUND_SINGLEREF
      is_inter_anyref_comp_mode(mbmi->mode)
#else   // !CONFIG_COMPOUND_SINGLEREF
      cm->reference_mode != SINGLE_REFERENCE &&
      is_inter_compound_mode(mbmi->mode)
#endif  // CONFIG_COMPOUND_SINGLEREF
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
      && mbmi->motion_mode == SIMPLE_TRANSLATION
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
      ) {
    if (is_any_masked_compound_used(bsize)) {
#if CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
      if (cm->allow_masked_compound) {
        mbmi->interinter_compound_type =
            aom_read_tree(r, av1_compound_type_tree,
                          cm->fc->compound_type_prob[bsize], ACCT_STR);
#if CONFIG_WEDGE
        if (mbmi->interinter_compound_type == COMPOUND_WEDGE) {
          mbmi->wedge_index =
              aom_read_literal(r, get_wedge_bits_lookup(bsize), ACCT_STR);
          mbmi->wedge_sign = aom_read_bit(r, ACCT_STR);
        }
#endif  // CONFIG_WEDGE
#if CONFIG_COMPOUND_SEGMENT
        if (mbmi->interinter_compound_type == COMPOUND_SEG) {
          mbmi->mask_type = aom_read_literal(r, MAX_SEG_MASK_BITS, ACCT_STR);
        }
#endif  // CONFIG_COMPOUND_SEGMENT
      }
#endif  // CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
    } else {
      mbmi->interinter_compound_type = COMPOUND_AVERAGE;
    }
    if (xd->counts)
      xd->counts->compound_interinter[bsize][mbmi->interinter_compound_type]++;
  }
#endif  // CONFIG_EXT_INTER

#if CONFIG_DUAL_FILTER || CONFIG_WARPED_MOTION || CONFIG_GLOBAL_MOTION
  read_mb_interp_filter(cm, xd, mbmi, r);
#endif  // CONFIG_DUAL_FILTER || CONFIG_WARPED_MOTION || CONFIG_GLOBAL_MOTION

#if DEC_MISMATCH_DEBUG
  // NOTE(zoeliu): For debug
  dec_dump_logs(cm, mi, xd, mi_row, mi_col, inter_mode_ctx, mode_ctx);
#endif  // DEC_MISMATCH_DEBUG
}

static void read_inter_frame_mode_info(AV1Decoder *const pbi,
                                       MACROBLOCKD *const xd,
#if CONFIG_SUPERTX
                                       int supertx_enabled,
#endif  // CONFIG_SUPERTX
                                       int mi_row, int mi_col, aom_reader *r) {
  AV1_COMMON *const cm = &pbi->common;
  MODE_INFO *const mi = xd->mi[0];
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  int inter_block = 1;
#if CONFIG_VAR_TX
  BLOCK_SIZE bsize = mbmi->sb_type;
#endif  // CONFIG_VAR_TX

  mbmi->mv[0].as_int = 0;
  mbmi->mv[1].as_int = 0;
  mbmi->segment_id = read_inter_segment_id(cm, xd, mi_row, mi_col, r);
#if CONFIG_SUPERTX
  if (!supertx_enabled)
#endif  // CONFIG_SUPERTX
    mbmi->skip = read_skip(cm, xd, mbmi->segment_id, r);

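  // When delta-Q signalling is enabled, accumulate the decoded delta into the
  // running qindex, clamped to [1, MAXQ]; with CONFIG_EXT_DELTA_Q the
  // loop-filter delta is tracked the same way.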
#if CONFIG_DELTA_Q
  if (cm->delta_q_present_flag) {
    xd->current_qindex =
        xd->prev_qindex +
        read_delta_qindex(cm, xd, r, mbmi, mi_col, mi_row) * cm->delta_q_res;
    /* Normative: Clamp to [1,MAXQ] to not interfere with lossless mode */
    xd->current_qindex = clamp(xd->current_qindex, 1, MAXQ);
    xd->prev_qindex = xd->current_qindex;
#if CONFIG_EXT_DELTA_Q
    if (cm->delta_lf_present_flag) {
      mbmi->current_delta_lf_from_base = xd->current_delta_lf_from_base =
          xd->prev_delta_lf_from_base +
          read_delta_lflevel(cm, xd, r, mbmi, mi_col, mi_row) *
              cm->delta_lf_res;
      xd->prev_delta_lf_from_base = xd->current_delta_lf_from_base;
    }
#endif
  }
#endif

#if CONFIG_SUPERTX
  if (!supertx_enabled) {
#endif  // CONFIG_SUPERTX
    inter_block = read_is_inter_block(cm, xd, mbmi->segment_id, r);

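    // Transform size: with CONFIG_VAR_TX and TX_MODE_SELECT, non-skip inter
    // blocks read a recursive per-unit transform partitioning; otherwise a
    // single transform size is read for the whole block.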
#if CONFIG_VAR_TX
    xd->above_txfm_context =
        cm->above_txfm_context + (mi_col << TX_UNIT_WIDE_LOG2);
    xd->left_txfm_context = xd->left_txfm_context_buffer +
                            ((mi_row & MAX_MIB_MASK) << TX_UNIT_HIGH_LOG2);

    if (cm->tx_mode == TX_MODE_SELECT &&
#if CONFIG_CB4X4
        bsize > BLOCK_4X4 &&
#else
        bsize >= BLOCK_8X8 &&
#endif
        !mbmi->skip && inter_block) {
      const TX_SIZE max_tx_size = max_txsize_rect_lookup[bsize];
      const int bh = tx_size_high_unit[max_tx_size];
      const int bw = tx_size_wide_unit[max_tx_size];
      const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
      const int height = block_size_high[bsize] >> tx_size_wide_log2[0];
      int idx, idy;

      mbmi->min_tx_size = TX_SIZES_ALL;
      for (idy = 0; idy < height; idy += bh)
        for (idx = 0; idx < width; idx += bw)
          read_tx_size_vartx(cm, xd, mbmi, xd->counts, max_tx_size,
                             height != width, idy, idx, r);
    } else {
      mbmi->tx_size = read_tx_size(cm, xd, inter_block, !mbmi->skip, r);

      if (inter_block) {
        const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
        const int height = block_size_high[bsize] >> tx_size_high_log2[0];
        int idx, idy;
        for (idy = 0; idy < height; ++idy)
          for (idx = 0; idx < width; ++idx)
            mbmi->inter_tx_size[idy >> 1][idx >> 1] = mbmi->tx_size;
      }
      mbmi->min_tx_size = get_min_tx_size(mbmi->tx_size);
      set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, mbmi->skip, xd);
    }
#else
    mbmi->tx_size = read_tx_size(cm, xd, inter_block, !mbmi->skip, r);
#endif  // CONFIG_VAR_TX
#if CONFIG_SUPERTX
  }
#if CONFIG_VAR_TX
  else if (inter_block) {
    const int width = num_4x4_blocks_wide_lookup[bsize];
    const int height = num_4x4_blocks_high_lookup[bsize];
    int idx, idy;
    xd->mi[0]->mbmi.tx_size = xd->supertx_size;
    for (idy = 0; idy < height; ++idy)
      for (idx = 0; idx < width; ++idx)
        xd->mi[0]->mbmi.inter_tx_size[idy >> 1][idx >> 1] = xd->supertx_size;
  }
#endif  // CONFIG_VAR_TX
#endif  // CONFIG_SUPERTX

  if (inter_block)
    read_inter_block_mode_info(pbi, xd,
#if (CONFIG_MOTION_VAR || CONFIG_EXT_INTER || CONFIG_WARPED_MOTION) && \
    CONFIG_SUPERTX

                               mi, mi_row, mi_col, r, supertx_enabled);
#else
                               mi, mi_row, mi_col, r);
#endif  // (CONFIG_MOTION_VAR || CONFIG_EXT_INTER || CONFIG_WARPED_MOTION) && CONFIG_SUPERTX
  else
    read_intra_block_mode_info(cm, mi_row, mi_col, xd, mi, r);

#if !CONFIG_TXK_SEL
  av1_read_tx_type(cm, xd,
#if CONFIG_SUPERTX
                   supertx_enabled,
#endif
                   r);
#endif  // !CONFIG_TXK_SEL
}

void av1_read_mode_info(AV1Decoder *const pbi, MACROBLOCKD *xd,
#if CONFIG_SUPERTX
                        int supertx_enabled,
#endif  // CONFIG_SUPERTX
                        int mi_row, int mi_col, aom_reader *r, int x_mis,
                        int y_mis) {
  AV1_COMMON *const cm = &pbi->common;
  MODE_INFO *const mi = xd->mi[0];
  MV_REF *frame_mvs = cm->cur_frame->mvs + mi_row * cm->mi_cols + mi_col;
  int w, h;

#if CONFIG_INTRABC
  mi->mbmi.use_intrabc = 0;
#endif  // CONFIG_INTRABC

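  // After decoding the block's mode info, propagate its reference frames and
  // motion vectors into the frame-level MV buffer (cm->cur_frame->mvs) for the
  // x_mis by y_mis mi units covered by the block.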
  if (frame_is_intra_only(cm)) {
    read_intra_frame_mode_info(cm, xd, mi_row, mi_col, r);
    for (h = 0; h < y_mis; ++h) {
      MV_REF *const frame_mv = frame_mvs + h * cm->mi_cols;
      for (w = 0; w < x_mis; ++w) {
        MV_REF *const mv = frame_mv + w;
        mv->ref_frame[0] = NONE_FRAME;
        mv->ref_frame[1] = NONE_FRAME;
      }
    }
  } else {
    read_inter_frame_mode_info(pbi, xd,
#if CONFIG_SUPERTX
                               supertx_enabled,
#endif  // CONFIG_SUPERTX
                               mi_row, mi_col, r);
    for (h = 0; h < y_mis; ++h) {
      MV_REF *const frame_mv = frame_mvs + h * cm->mi_cols;
      for (w = 0; w < x_mis; ++w) {
        MV_REF *const mv = frame_mv + w;
        mv->ref_frame[0] = mi->mbmi.ref_frame[0];
        mv->ref_frame[1] = mi->mbmi.ref_frame[1];
        mv->mv[0].as_int = mi->mbmi.mv[0].as_int;
        mv->mv[1].as_int = mi->mbmi.mv[1].as_int;
        mv->pred_mv[0].as_int = mi->mbmi.pred_mv[0].as_int;
        mv->pred_mv[1].as_int = mi->mbmi.pred_mv[1].as_int;
      }
    }
  }
}